Merge pull request #424 from ucb-bar/coreplex-cake

Coreplex cake
This commit is contained in:
Wesley W. Terpstra 2016-10-31 16:49:27 -07:00 committed by GitHub
commit 1b016051e8
33 changed files with 1101 additions and 1140 deletions

View File

@ -23,7 +23,7 @@ case object BankIdLSB extends Field[Int]
/** Function for building some kind of coherence manager agent */
case object BuildL2CoherenceManager extends Field[(Int, Parameters) => CoherenceAgent]
/** Function for building some kind of tile connected to a reset signal */
case object BuildTiles extends Field[Seq[(Bool, Parameters) => Tile]]
case object BuildTiles extends Field[Seq[Parameters => LazyTile]]
/** The file to read the BootROM contents from */
case object BootROMFile extends Field[String]
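// A minimal sketch (not part of this commit) of what the BuildTiles change above
// means for a config fragment: each entry is now a Parameters => LazyTile factory
// instead of a (Bool, Parameters) => Tile function that eagerly built a Module.
// The extra alterPartial keys used by the real configs are elided here.
//   case BuildTiles =>
//     List.tabulate(site(NTiles)) { i => (p: Parameters) =>
//       LazyModule(new RocketTile()(p.alterPartial({ case TileId => i })))
//     }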
@ -35,50 +35,137 @@ trait HasCoreplexParameters {
lazy val outerMemParams = p.alterPartial({ case TLId => "L2toMC" })
lazy val outerMMIOParams = p.alterPartial({ case TLId => "L2toMMIO" })
lazy val globalAddrMap = p(rocketchip.GlobalAddrMap)
lazy val nTiles = p(uncore.devices.NTiles)
lazy val nSlaves = p(rocketchip.NCoreplexExtClients)
lazy val nMemChannels = p(NMemoryChannels)
lazy val hasSupervisor = p(rocket.UseVM)
}
case class CoreplexConfig(
nTiles: Int,
nExtInterrupts: Int,
nSlaves: Int,
nMemChannels: Int,
hasSupervisor: Boolean)
{
val nInterruptPriorities = if (nExtInterrupts <= 1) 0 else (nExtInterrupts min 7)
val plicKey = PLICConfig(nTiles, hasSupervisor, nExtInterrupts, nInterruptPriorities)
case class CoreplexParameters(implicit val p: Parameters) extends HasCoreplexParameters
abstract class BareCoreplex(implicit val p: Parameters) extends LazyModule
abstract class BareCoreplexBundle[+L <: BareCoreplex](_outer: L) extends Bundle {
val outer = _outer
}
abstract class BareCoreplexModule[+L <: BareCoreplex, +B <: BareCoreplexBundle[L]](_outer: L, _io: () => B) extends LazyModuleImp(_outer) {
val outer = _outer
val io = _io ()
}
abstract class BaseCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends LazyModule
trait CoreplexNetwork extends HasCoreplexParameters {
this: BareCoreplex =>
abstract class BaseCoreplexBundle(val c: CoreplexConfig)(implicit val p: Parameters) extends Bundle with HasCoreplexParameters {
val master = new Bundle {
val mem = Vec(c.nMemChannels, new ClientUncachedTileLinkIO()(outerMemParams))
val mmio = new ClientUncachedTileLinkIO()(outerMMIOParams)
}
val slave = Vec(c.nSlaves, new ClientUncachedTileLinkIO()(innerParams)).flip
val interrupts = Vec(c.nExtInterrupts, Bool()).asInput
val debug = new DebugBusIO()(p).flip
val clint = Vec(c.nTiles, new CoreplexLocalInterrupts).asInput
val resetVector = UInt(INPUT, p(XLen))
val success = Bool(OUTPUT) // used for testing
val l1tol2 = LazyModule(new TLXbar)
val l1tol2_beatBytes = p(TLKey("L2toMMIO")).dataBitsPerBeat/8
val l1tol2_lineBytes = p(CacheBlockBytes)
override def cloneType = this.getClass.getConstructors.head.newInstance(c, p).asInstanceOf[this.type]
val cbus = LazyModule(new TLXbar)
val cbus_beatBytes = p(XLen)/8
val cbus_lineBytes = l1tol2_lineBytes
val mmio = TLOutputNode()
val mmioInt = IntInputNode()
cbus.node :=
TLAtomicAutomata(arithmetic = true)( // disable once TLB uses TL2 metadata
TLWidthWidget(l1tol2_beatBytes)(
TLBuffer()(
l1tol2.node)))
mmio :=
TLBuffer()(
TLWidthWidget(l1tol2_beatBytes)(
l1tol2.node))
}
abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
c: CoreplexConfig, l: L, b: => B)(implicit val p: Parameters) extends LazyModuleImp(l) with HasCoreplexParameters {
val outer: L = l
val io: B = b
trait CoreplexNetworkBundle extends HasCoreplexParameters {
this: {
val outer: CoreplexNetwork
} =>
implicit val p = outer.p
val mmio = outer.mmio.bundleOut
val interrupts = outer.mmioInt.bundleIn
}
trait CoreplexNetworkModule extends HasCoreplexParameters {
this: BareCoreplexModule[BareCoreplex, BareCoreplexBundle[BareCoreplex]] =>
implicit val p = outer.p
}
trait CoreplexRISCVPlatform {
this: CoreplexNetwork =>
// Build a set of Tiles
val tiles = p(BuildTiles) map { _(reset, p) }
val lazyTiles = p(BuildTiles) map { _(p) }
val legacy = LazyModule(new TLLegacy()(outerMMIOParams))
val tileIntNodes = lazyTiles.map { _ => IntInternalOutputNode() } // this should be moved into the Tile...
val debug = LazyModule(new TLDebugModule())
val plic = LazyModule(new TLPLIC(hasSupervisor, maxPriorities = 7))
val clint = LazyModule(new CoreplexLocalInterrupter)
// Kill this once we move TL2 into rocket
l1tol2.node :=
TLHintHandler()(
legacy.node)
debug.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
plic.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
clint.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
plic.intnode := mmioInt
tileIntNodes.foreach { _ := plic.intnode }
}
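// A new memory-mapped control device would attach the same way as debug, plic,
// and clint above (hypothetical node name, not part of this commit):
//   someDevice.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)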
trait CoreplexRISCVPlatformBundle {
this: CoreplexNetworkBundle {
val outer: CoreplexRISCVPlatform
} =>
val mem = Vec(nMemChannels, new ClientUncachedTileLinkIO()(outerMemParams))
val slave = Vec(nSlaves, new ClientUncachedTileLinkIO()(innerParams)).flip
val debug = new DebugBusIO().flip
val rtcTick = Bool(INPUT)
val resetVector = UInt(INPUT, p(XLen))
val success = Bool(OUTPUT) // used for testing
}
trait CoreplexRISCVPlatformModule {
this: CoreplexNetworkModule {
val outer: CoreplexNetwork with CoreplexRISCVPlatform
val io: CoreplexRISCVPlatformBundle
} =>
val tiles = outer.lazyTiles.map(_.module)
val uncoreTileIOs = (tiles zipWithIndex) map { case (tile, i) => Wire(tile.io) }
println("\nGenerated Address Map")
for (entry <- p(rocketchip.GlobalAddrMap).flatten) {
val name = entry.name
val start = entry.region.start
val end = entry.region.start + entry.region.size - 1
val prot = entry.region.attr.prot
val protStr = (if ((prot & AddrMapProt.R) > 0) "R" else "") +
(if ((prot & AddrMapProt.W) > 0) "W" else "") +
(if ((prot & AddrMapProt.X) > 0) "X" else "")
val cacheable = if (entry.region.attr.cacheable) " [C]" else ""
println(f"\t$name%s $start%x - $end%x, $protStr$cacheable")
}
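// For example (hypothetical entry), a cacheable RWX region named "mem" starting
// at 0x80000000 with size 0x10000000 would print as:
//   mem 80000000 - 8fffffff, RWX [C]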
// Create and export the ConfigString
val managers = outer.l1tol2.node.edgesIn(0).manager.managers
val configString = rocketchip.GenerateConfigString(p, outer.clint, outer.plic, managers)
// Allow something else to have overridden the config string
if (!ConfigStringOutput.contents.isDefined) {
ConfigStringOutput.contents = Some(configString)
}
println(s"\nGenerated Configuration String\n${ConfigStringOutput.contents.get}")
val nCachedPorts = tiles.map(tile => tile.io.cached.size).reduce(_ + _)
val nUncachedPorts = tiles.map(tile => tile.io.uncached.size).reduce(_ + _)
val nBanks = c.nMemChannels * nBanksPerMemChannel
val nBanks = nMemChannels * nBanksPerMemChannel
// Build an uncore backing the Tiles
buildUncore(p.alterPartial({
case HastiId => "TL"
case TLId => "L1toL2"
@ -86,7 +173,7 @@ abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
case NUncachedTileLinkPorts => nUncachedPorts
}))
def buildUncore(implicit p: Parameters) = {
def buildUncore(implicit p: Parameters) {
// Create a simple L1toL2 NoC between the tiles and the banks of outer memory
// Cached ports are first in client list, making sharerToClientId just an identity function
// addrToBank is used to hash physical addresses (of cache blocks) to banks (and thereby memory channels)
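// A minimal hypothetical sketch of such a bank hash (the actual helper is not
// shown in this hunk); it assumes nBanks is a power of two and that BankIdLSB
// gives the low bit of the bank field within the block address:
//   def addrToBank(blockAddr: UInt): UInt =
//     if (nBanks > 1) blockAddr(p(BankIdLSB) + log2Up(nBanks) - 1, p(BankIdLSB))
//     else UInt(0)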
@ -112,8 +199,9 @@ abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
l1tol2net.io.clients_cached <> uncoreTileIOs.map(_.cached).flatten
l1tol2net.io.clients_uncached <> uncoreTileIOs.map(_.uncached).flatten ++ io.slave
l1tol2net.io.managers <> managerEndpoints.map(_.innerTL) :+ mmioManager.io.inner
outer.legacy.module.io.legacy <> mmioManager.io.outer
val mem_ic = Module(new TileLinkMemoryInterconnect(nBanksPerMemChannel, c.nMemChannels)(outerMemParams))
val mem_ic = Module(new TileLinkMemoryInterconnect(nBanksPerMemChannel, nMemChannels)(outerMemParams))
val backendBuffering = TileLinkDepths(0,0,0,0,0)
for ((bank, icPort) <- managerEndpoints zip mem_ic.io.in) {
@ -121,46 +209,36 @@ abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
icPort <> TileLinkIOUnwrapper(enqueued)
}
io.master.mem <> mem_ic.io.out
buildMMIONetwork(TileLinkEnqueuer(mmioManager.io.outer, 1))(outerMMIOParams)
io.mem <> mem_ic.io.out
}
def buildMMIONetwork(mmio: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
val ioAddrMap = globalAddrMap.subMap("io")
val cBus = Module(new TileLinkRecursiveInterconnect(1, ioAddrMap))
cBus.io.in.head <> mmio
val plic = Module(new PLIC(c.plicKey))
plic.io.tl <> cBus.port("cbus:plic")
for (i <- 0 until io.interrupts.size) {
val gateway = Module(new LevelGateway)
gateway.io.interrupt := io.interrupts(i)
plic.io.devices(i) <> gateway.io.plic
}
val debugModule = Module(new DebugModule)
debugModule.io.tl <> cBus.port("cbus:debug")
debugModule.io.db <> io.debug
// connect coreplex-internal interrupts to tiles
for ((tile, i) <- (uncoreTileIOs zipWithIndex)) {
tile.interrupts <> io.clint(i)
tile.interrupts.meip := plic.io.harts(plic.cfg.context(i, 'M'))
tile.interrupts.seip.foreach(_ := plic.io.harts(plic.cfg.context(i, 'S')))
tile.interrupts.debug := debugModule.io.debugInterrupts(i)
tile.hartid := UInt(i)
tile.resetVector := io.resetVector
}
val tileSlavePorts = (0 until c.nTiles) map (i => s"cbus:dmem$i") filter (ioAddrMap contains _)
for ((t, m) <- (uncoreTileIOs.map(_.slave).flatten) zip (tileSlavePorts map (cBus port _)))
t <> m
io.master.mmio <> cBus.port("pbus")
// connect coreplex-internal interrupts to tiles
for ((tile, i) <- (uncoreTileIOs zipWithIndex)) {
tile.hartid := UInt(i)
tile.resetVector := io.resetVector
tile.interrupts := outer.clint.module.io.tiles(i)
tile.interrupts.debug := outer.debug.module.io.debugInterrupts(i)
tile.interrupts.meip := outer.tileIntNodes(i).bundleOut(0)(0)
tile.interrupts.seip.foreach(_ := outer.tileIntNodes(i).bundleOut(0)(1))
}
outer.debug.module.io.db <> io.debug
outer.clint.module.io.rtcTick := io.rtcTick
// Coreplex doesn't know when to stop running
io.success := Bool(false)
}
class BaseCoreplex(implicit p: Parameters) extends BareCoreplex
with CoreplexNetwork
with CoreplexRISCVPlatform {
override lazy val module = new BaseCoreplexModule(this, () => new BaseCoreplexBundle(this))
}
class BaseCoreplexBundle[+L <: BaseCoreplex](_outer: L) extends BareCoreplexBundle(_outer)
with CoreplexNetworkBundle
with CoreplexRISCVPlatformBundle
class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle[L]](_outer: L, _io: () => B) extends BareCoreplexModule(_outer, _io)
with CoreplexNetworkModule
with CoreplexRISCVPlatformModule

View File

@ -4,6 +4,7 @@ package coreplex
import Chisel._
import junctions._
import diplomacy._
import uncore.tilelink._
import uncore.coherence._
import uncore.agents._
@ -69,8 +70,8 @@ class BaseCoreplexConfig extends Config (
case NUncachedTileLinkPorts => 1
//Tile Constants
case BuildTiles => {
List.tabulate(site(NTiles)){ i => (r: Bool, p: Parameters) =>
Module(new RocketTile(resetSignal = r)(p.alterPartial({
List.tabulate(site(NTiles)){ i => (p: Parameters) =>
LazyModule(new RocketTile()(p.alterPartial({
case TileId => i
case TLId => "L1toL2"
case NUncachedTileLinkPorts => 1 + site(RoccNMemChannels)

View File

@ -3,14 +3,20 @@ package coreplex
import Chisel._
import cde.{Parameters, Field}
import junctions._
import diplomacy._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.util._
import util._
import rocket._
trait DirectConnection {
val tiles: Seq[Tile]
val uncoreTileIOs: Seq[TileIO]
this: CoreplexNetwork with CoreplexRISCVPlatform =>
lazyTiles.map(_.slave).flatten.foreach { scratch => scratch := cbus.node }
}
trait DirectConnectionModule {
this: CoreplexNetworkModule with CoreplexRISCVPlatformModule =>
val tlBuffering = TileLinkDepths(1,1,2,2,0)
val ultBuffering = UncachedTileLinkDepths(1,2)
@ -18,7 +24,6 @@ trait DirectConnection {
(tiles zip uncoreTileIOs) foreach { case (tile, uncore) =>
(uncore.cached zip tile.io.cached) foreach { case (u, t) => u <> TileLinkEnqueuer(t, tlBuffering) }
(uncore.uncached zip tile.io.uncached) foreach { case (u, t) => u <> TileLinkEnqueuer(t, ultBuffering) }
tile.io.slave.foreach { _ <> TileLinkEnqueuer(uncore.slave.get, 1) }
tile.io.interrupts <> uncore.interrupts
@ -27,30 +32,54 @@ trait DirectConnection {
}
}
class DefaultCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplex(c)(p) {
override lazy val module = Module(new DefaultCoreplexModule(c, this, new DefaultCoreplexBundle(c)(p))(p))
class DefaultCoreplex(implicit p: Parameters) extends BaseCoreplex
with DirectConnection {
override lazy val module = new DefaultCoreplexModule(this, () => new DefaultCoreplexBundle(this))
}
class DefaultCoreplexBundle(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplexBundle(c)(p)
class DefaultCoreplexBundle[+L <: DefaultCoreplex](_outer: L) extends BaseCoreplexBundle(_outer)
class DefaultCoreplexModule[+L <: DefaultCoreplex, +B <: DefaultCoreplexBundle](
c: CoreplexConfig, l: L, b: => B)(implicit p: Parameters) extends BaseCoreplexModule(c, l, b)(p)
with DirectConnection
class DefaultCoreplexModule[+L <: DefaultCoreplex, +B <: DefaultCoreplexBundle[L]](_outer: L, _io: () => B) extends BaseCoreplexModule(_outer, _io)
with DirectConnectionModule
/////
trait TileClockResetBundle {
val c: CoreplexConfig
val tcrs = Vec(c.nTiles, new Bundle {
trait AsyncConnection {
this: CoreplexNetwork with CoreplexRISCVPlatform =>
val crossings = lazyTiles.map(_.slave).map(_.map { scratch =>
val crossing = LazyModule(new TLAsyncCrossing)
crossing.node := cbus.node
val monitor = (scratch := crossing.node)
(crossing, monitor)
})
}
trait AsyncConnectionBundle {
this: CoreplexNetworkBundle with CoreplexRISCVPlatformBundle =>
val tcrs = Vec(nTiles, new Bundle {
val clock = Clock(INPUT)
val reset = Bool(INPUT)
})
}
trait AsyncConnection {
val io: TileClockResetBundle
val tiles: Seq[Tile]
val uncoreTileIOs: Seq[TileIO]
trait AsyncConnectionModule {
this: Module with CoreplexNetworkModule with CoreplexRISCVPlatformModule {
val outer: AsyncConnection
val io: AsyncConnectionBundle
} =>
(outer.crossings zip io.tcrs) foreach { case (slaves, tcr) =>
slaves.foreach { case (crossing, monitor) =>
crossing.module.io.in_clock := clock
crossing.module.io.in_reset := reset
crossing.module.io.out_clock := tcr.clock
crossing.module.io.out_reset := tcr.reset
monitor.foreach { m =>
m.module.clock := tcr.clock
m.module.reset := tcr.reset
}
}
}
(tiles, uncoreTileIOs, io.tcrs).zipped foreach { case (tile, uncore, tcr) =>
tile.clock := tcr.clock
@ -58,7 +87,6 @@ trait AsyncConnection {
(uncore.cached zip tile.io.cached) foreach { case (u, t) => u <> AsyncTileLinkFrom(tcr.clock, tcr.reset, t) }
(uncore.uncached zip tile.io.uncached) foreach { case (u, t) => u <> AsyncUTileLinkFrom(tcr.clock, tcr.reset, t) }
tile.io.slave.foreach { _ <> AsyncUTileLinkTo(tcr.clock, tcr.reset, uncore.slave.get)}
val ti = tile.io.interrupts
val ui = uncore.interrupts
@ -73,13 +101,13 @@ trait AsyncConnection {
}
}
class MultiClockCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplex(c)(p) {
override lazy val module = Module(new MultiClockCoreplexModule(c, this, new MultiClockCoreplexBundle(c)(p))(p))
class MultiClockCoreplex(implicit p: Parameters) extends BaseCoreplex
with AsyncConnection {
override lazy val module = new MultiClockCoreplexModule(this, () => new MultiClockCoreplexBundle(this))
}
class MultiClockCoreplexBundle(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplexBundle(c)(p)
with TileClockResetBundle
class MultiClockCoreplexBundle[+L <: MultiClockCoreplex](_outer: L) extends BaseCoreplexBundle(_outer)
with AsyncConnectionBundle
class MultiClockCoreplexModule[+L <: MultiClockCoreplex, +B <: MultiClockCoreplexBundle](
c: CoreplexConfig, l: L, b: => B)(implicit p: Parameters) extends BaseCoreplexModule(c, l, b)(p)
with AsyncConnection
class MultiClockCoreplexModule[+L <: MultiClockCoreplex, +B <: MultiClockCoreplexBundle[L]](_outer: L, _io: () => B) extends BaseCoreplexModule(_outer, _io)
with AsyncConnectionModule

View File

@ -62,7 +62,7 @@ abstract class LazyModule
private def nodesGraphML(buf: StringBuilder, pad: String) {
buf ++= s"""${pad}<node id=\"${index}\">\n"""
buf ++= s"""${pad} <data key=\"n\"><y:ShapeNode><y:NodeLabel modelName=\"sides\" modelPosition=\"w\" fontSize=\"10\" borderDistance=\"1.0\" rotationAngle=\"270.0\">${module.name}</y:NodeLabel></y:ShapeNode></data>\n"""
buf ++= s"""${pad} <data key=\"n\"><y:ShapeNode><y:NodeLabel modelName=\"sides\" modelPosition=\"w\" rotationAngle=\"270.0\">${module.instanceName}</y:NodeLabel></y:ShapeNode></data>\n"""
buf ++= s"""${pad} <graph id=\"${index}::\" edgedefault=\"directed\">\n"""
nodes.filter(!_.omitGraphML).foreach { n =>
buf ++= s"""${pad} <node id=\"${index}::${n.index}\"/>\n"""
@ -72,11 +72,15 @@ abstract class LazyModule
buf ++= s"""${pad}</node>\n"""
}
private def edgesGraphML(buf: StringBuilder, pad: String) {
nodes.filter(!_.omitGraphML) foreach { n => n.outputs.filter(!_.omitGraphML).foreach { o =>
nodes.filter(!_.omitGraphML) foreach { n => n.outputs.filter(!_._1.omitGraphML).foreach { case (o, l) =>
buf ++= pad
buf ++= "<edge"
buf ++= s""" source=\"${index}::${n.index}\""""
buf ++= s""" target=\"${o.lazyModule.index}::${o.index}\"><data key=\"e\"><y:PolyLineEdge><y:Arrows source=\"none\" target=\"standard\"/><y:LineStyle color=\"${o.colour}\" type=\"line\" width=\"1.0\"/></y:PolyLineEdge></data></edge>\n"""
buf ++= s""" target=\"${o.lazyModule.index}::${o.index}\"><data key=\"e\"><y:PolyLineEdge>"""
buf ++= s"""<y:Arrows source=\"none\" target=\"standard\"/>"""
buf ++= s"""<y:LineStyle color=\"${o.colour}\" type=\"line\" width=\"1.0\"/>"""
buf ++= s"""<y:EdgeLabel modelName=\"centered\" rotationAngle=\"270.0\">${l}</y:EdgeLabel>"""
buf ++= s"""</y:PolyLineEdge></data></edge>\n"""
} }
children.filter(!_.omitGraphML).foreach { c => c.edgesGraphML(buf, pad) }
}

View File

@ -20,6 +20,7 @@ trait InwardNodeImp[DI, UI, EI, BI <: Data]
// optional methods to track node graph
def mixI(pu: UI, node: InwardNode[DI, UI, BI]): UI = pu // insert node into parameters
def getO(pu: UI): Option[BaseNode] = None // most-outward common node
def labelI(ei: EI) = ""
}
// DO = Downwards flowing Parameters generated by the outer side of the node
@ -34,6 +35,7 @@ trait OutwardNodeImp[DO, UO, EO, BO <: Data]
// optional methods to track node graph
def mixO(pd: DO, node: OutwardNode[DO, UO, BO]): DO = pd // insert node into parameters
def getI(pd: DO): Option[BaseNode] = None // most-inward common node
def labelO(eo: EO) = ""
}
abstract class NodeImp[D, U, EO, EI, B <: Data]
@ -54,8 +56,8 @@ abstract class BaseNode
protected[diplomacy] def gci: Option[BaseNode] // greatest common inner
protected[diplomacy] def gco: Option[BaseNode] // greatest common outer
protected[diplomacy] def outputs: Seq[BaseNode]
protected[diplomacy] def inputs: Seq[BaseNode]
protected[diplomacy] def outputs: Seq[(BaseNode, String)]
protected[diplomacy] def inputs: Seq[(BaseNode, String)]
protected[diplomacy] def colour: String
}
@ -94,7 +96,7 @@ trait InwardNode[DI, UI, BI <: Data] extends BaseNode with InwardNodeHandle[DI,
protected[diplomacy] lazy val iPorts = { iRealized = true; reqI(); accPI.result() }
protected[diplomacy] val iParams: Seq[UI]
protected[diplomacy] def iConnect: Vec[BI]
val bundleIn: Vec[BI]
}
trait OutwardNodeHandle[DO, UO, BO <: Data]
@ -126,7 +128,7 @@ trait OutwardNode[DO, UO, BO <: Data] extends BaseNode with OutwardNodeHandle[DO
protected[diplomacy] lazy val oPorts = { oRealized = true; reqO(); accPO.result() }
protected[diplomacy] val oParams: Seq[DO]
protected[diplomacy] def oConnect: Vec[BO]
val bundleOut: Vec[BO]
}
class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
@ -140,8 +142,8 @@ class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
{
// meta-data for printing the node graph
protected[diplomacy] def colour = inner.colour
protected[diplomacy] def outputs = oPorts.map(_._2)
protected[diplomacy] def inputs = iPorts.map(_._2)
protected[diplomacy] def outputs = oPorts.map(_._2) zip edgesOut.map(e => outer.labelO(e))
protected[diplomacy] def inputs = iPorts.map(_._2) zip edgesIn .map(e => inner.labelI(e))
private def reqE(o: Int, i: Int) = require(i == o, s"${name} has ${i} inputs and ${o} outputs; they must match${lazyModule.line}")
protected[diplomacy] lazy val oParams: Seq[DO] = {
@ -161,11 +163,15 @@ class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
lazy val edgesOut = (oPorts zip oParams).map { case ((i, n), o) => outer.edgeO(o, n.iParams(i)) }
lazy val edgesIn = (iPorts zip iParams).map { case ((o, n), i) => inner.edgeI(n.oParams(o), i) }
lazy val bundleOut = outer.bundleO(edgesOut)
lazy val bundleIn = inner.bundleI(edgesIn)
val flip = false // needed for blind nodes
private def flipO(b: Vec[BO]) = if (flip) b.flip else b
private def flipI(b: Vec[BI]) = if (flip) b else b.flip
val wire = false // needed if you want to grab access to the bundle from inside a module
private def wireO(b: Vec[BO]) = if (wire) Wire(b) else b
private def wireI(b: Vec[BI]) = if (wire) Wire(b) else b
def oConnect = bundleOut
def iConnect = bundleIn
lazy val bundleOut = wireO(flipO(outer.bundleO(edgesOut)))
lazy val bundleIn = wireI(flipI(inner.bundleI(edgesIn)))
// connects the outward part of a node with the inward part of this node
override def := (h: OutwardNodeHandle[DI, UI, BI])(implicit sourceInfo: SourceInfo): Option[LazyModule] = {
@ -177,7 +183,7 @@ class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
val o = y.oPushed
y.oPush(i, x)
x.iPush(o, y)
val (out, binding) = inner.connect(y.oConnect(o), x.iConnect(i), x.edgesIn(i))
val (out, binding) = inner.connect(y.bundleOut(o), x.bundleIn(i), x.edgesIn(i))
LazyModule.stack.head.bindings = binding :: LazyModule.stack.head.bindings
out
}
@ -195,26 +201,54 @@ class IdentityNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])
class OutputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B]) extends IdentityNode(imp)
{
override def oConnect = bundleOut
override def iConnect = bundleOut
override lazy val bundleIn = bundleOut
}
class InputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B]) extends IdentityNode(imp)
{
override def oConnect = bundleIn
override def iConnect = bundleIn
override lazy val bundleOut = bundleIn
}
class SourceNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(po: PO, num: Range.Inclusive = 1 to 1)
extends SimpleNode(imp)({case (n, Seq()) => Seq.fill(n)(po)}, {case (0, _) => Seq()}, num, 0 to 0)
{
require (num.end >= 1, s"${name} is a source which does not accept outputs${lazyModule.line}")
override lazy val bundleIn = { require(false, s"${name} has no bundleIn; try bundleOut?"); bundleOut }
}
class SinkNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(pi: PI, num: Range.Inclusive = 1 to 1)
extends SimpleNode(imp)({case (0, _) => Seq()}, {case (n, Seq()) => Seq.fill(n)(pi)}, 0 to 0, num)
{
require (num.end >= 1, s"${name} is a sink which does not accept inputs${lazyModule.line}")
override lazy val bundleOut = { require(false, s"${name} has no bundleOut; try bundleIn?"); bundleIn }
}
class BlindOutputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(pi: PI)
extends SimpleNode(imp)({case (0, _) => Seq()}, {case (n, Seq()) => Seq.fill(n)(pi)}, 0 to 0, 1 to 1)
{
override val flip = true
override lazy val bundleOut = bundleIn
}
class BlindInputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(po: PO)
extends SimpleNode(imp)({case (n, Seq()) => Seq.fill(n)(po)}, {case (0, _) => Seq()}, 1 to 1, 0 to 0)
{
override val flip = true
override lazy val bundleIn = bundleOut
}
class InternalOutputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(pi: PI)
extends SimpleNode(imp)({case (0, _) => Seq()}, {case (n, Seq()) => Seq.fill(n)(pi)}, 0 to 0, 1 to 1)
{
override val wire = true
override lazy val bundleOut = bundleIn
}
class InternalInputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(po: PO)
extends SimpleNode(imp)({case (n, Seq()) => Seq.fill(n)(po)}, {case (0, _) => Seq()}, 1 to 1, 0 to 0)
{
override val wire = true
override lazy val bundleIn = bundleOut
}
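// Rough intent of the new node flavours, inferred from their use elsewhere in
// this commit: Blind nodes flip their bundle so it can be punched out as
// top-level IO (e.g. val extInterrupts = IntBlindInputNode(p(NExtTopInterrupts))),
// while Internal nodes wrap their bundle in a Wire so it can be driven or read
// from inside the module itself (e.g. IntInternalOutputNode() for per-tile interrupts).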
class InteriorNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])

View File

@ -72,8 +72,6 @@ class Edge32BitMemtestConfig extends Config(
/* Composable Configs to set individual parameters */
class WithGroundTest extends Config(
(pname, site, here) => pname match {
case BuildCoreplex =>
(c: CoreplexConfig, p: Parameters) => LazyModule(new GroundTestCoreplex(c)(p)).module
case TLKey("L1toL2") => {
val useMEI = site(NTiles) <= 1 && site(NCachedTileLinkPorts) <= 1
val dataBeats = (8 * site(CacheBlockBytes)) / site(XLen)
@ -95,8 +93,8 @@ class WithGroundTest extends Config(
case BuildTiles => {
(0 until site(NTiles)).map { i =>
val tileSettings = site(GroundTestKey)(i)
(r: Bool, p: Parameters) => {
Module(new GroundTestTile(resetSignal = r)(p.alterPartial({
(p: Parameters) => {
LazyModule(new GroundTestTile()(p.alterPartial({
case TLId => "L1toL2"
case TileId => i
case NCachedTileLinkPorts => if(tileSettings.cached > 0) 1 else 0
@ -106,7 +104,7 @@ class WithGroundTest extends Config(
}
}
case BuildExampleTop =>
(p: Parameters) => LazyModule(new ExampleTopWithTestRAM(p))
(p: Parameters) => LazyModule(new ExampleTopWithTestRAM(new GroundTestCoreplex()(_))(p))
case FPUKey => None
case UseAtomics => false
case UseCompressed => false
@ -116,12 +114,12 @@ class WithGroundTest extends Config(
class WithComparator extends Config(
(pname, site, here) => pname match {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(uncached = site(ComparatorKey).targets.size)
GroundTestTileSettings(uncached = 2)
}
case BuildGroundTest =>
(p: Parameters) => Module(new ComparatorCore()(p))
case ComparatorKey => ComparatorParameters(
targets = Seq("mem", "io:pbus:TL2:testram").map(name =>
targets = Seq("mem", "TL2:testram").map(name =>
site(GlobalAddrMap)(name).start.longValue),
width = 8,
operations = 1000,

View File

@ -4,13 +4,14 @@ import Chisel._
import cde.{Parameters}
import coreplex._
class GroundTestCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplex(c)(p) {
override lazy val module = Module(new GroundTestCoreplexModule(c, this, new GroundTestCoreplexBundle(c)(p))(p))
class GroundTestCoreplex(implicit p: Parameters) extends BaseCoreplex
with DirectConnection {
override lazy val module = new GroundTestCoreplexModule(this, () => new GroundTestCoreplexBundle(this))
}
class GroundTestCoreplexBundle(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplexBundle(c)(p)
class GroundTestCoreplexBundle[+L <: GroundTestCoreplex](_outer: L) extends BaseCoreplexBundle(_outer)
class GroundTestCoreplexModule[+L <: GroundTestCoreplex, +B <: GroundTestCoreplexBundle](
c: CoreplexConfig, l: L, b: => B)(implicit p: Parameters) extends BaseCoreplexModule(c, l, b)(p) with DirectConnection {
class GroundTestCoreplexModule[+L <: GroundTestCoreplex, +B <: GroundTestCoreplexBundle[L]](_outer: L, _io: () => B) extends BaseCoreplexModule(_outer, _io)
with DirectConnectionModule {
io.success := tiles.flatMap(_.io.elements get "success").map(_.asInstanceOf[Bool]).reduce(_&&_)
}

View File

@ -5,6 +5,7 @@ package groundtest
object Generator extends util.GeneratorApp {
val longName = names.topModuleProject + "." + names.configs
generateFirrtl
generateGraphML
generateTestSuiteMakefrags // TODO: Needed only for legacy make targets
generateParameterDump // TODO: Needed only for legacy make targets
}

View File

@ -71,7 +71,7 @@ class IOGetAfterPutBlockRegression(implicit p: Parameters) extends Regression()(
io.mem.grant.ready := Bool(true)
io.cache.req.valid := !get_sent && started
io.cache.req.bits.addr := UInt(addrMap("io:pbus:TL2:bootrom").start)
io.cache.req.bits.addr := UInt(addrMap("TL2:bootrom").start)
io.cache.req.bits.typ := MT_WU
io.cache.req.bits.cmd := M_XRD
io.cache.req.bits.tag := UInt(0)

View File

@ -96,48 +96,47 @@ abstract class GroundTest(implicit val p: Parameters) extends Module
val io = new GroundTestIO
}
class GroundTestTile(resetSignal: Bool)
(implicit val p: Parameters)
extends Tile(resetSignal = resetSignal)(p)
with HasGroundTestParameters {
override val io = new TileIO(bc) {
val success = Bool(OUTPUT)
}
val test = p(BuildGroundTest)(dcacheParams)
val ptwPorts = ListBuffer.empty ++= test.io.ptw
val memPorts = ListBuffer.empty ++= test.io.mem
if (nCached > 0) {
val dcache_io = HellaCache(p(DCacheKey))(dcacheParams)
val dcacheArb = Module(new HellaCacheArbiter(nCached)(dcacheParams))
dcacheArb.io.requestor.zip(test.io.cache).foreach {
case (requestor, cache) =>
val dcacheIF = Module(new SimpleHellaCacheIF()(dcacheParams))
dcacheIF.io.requestor <> cache
requestor <> dcacheIF.io.cache
class GroundTestTile(implicit val p: Parameters) extends LazyTile {
val slave = None
lazy val module = new TileImp(this) with HasGroundTestParameters {
val io = new TileIO(bc) {
val success = Bool(OUTPUT)
}
dcache_io.cpu <> dcacheArb.io.mem
io.cached.head <> dcache_io.mem
// SimpleHellaCacheIF leaves invalidate_lr dangling, so we wire it to false
dcache_io.cpu.invalidate_lr := Bool(false)
val test = p(BuildGroundTest)(dcacheParams)
ptwPorts += dcache_io.ptw
val ptwPorts = ListBuffer.empty ++= test.io.ptw
val memPorts = ListBuffer.empty ++= test.io.mem
if (nCached > 0) {
val dcache_io = HellaCache(p(DCacheKey))(dcacheParams)
val dcacheArb = Module(new HellaCacheArbiter(nCached)(dcacheParams))
dcacheArb.io.requestor.zip(test.io.cache).foreach {
case (requestor, cache) =>
val dcacheIF = Module(new SimpleHellaCacheIF()(dcacheParams))
dcacheIF.io.requestor <> cache
requestor <> dcacheIF.io.cache
}
dcache_io.cpu <> dcacheArb.io.mem
io.cached.head <> dcache_io.mem
// SimpleHellaCacheIF leaves invalidate_lr dangling, so we wire it to false
dcache_io.cpu.invalidate_lr := Bool(false)
ptwPorts += dcache_io.ptw
}
if (ptwPorts.size > 0) {
val ptw = Module(new DummyPTW(ptwPorts.size))
ptw.io.requestors <> ptwPorts
}
require(memPorts.size == io.uncached.size)
if (memPorts.size > 0) {
io.uncached <> memPorts
}
io.success := test.io.status.finished
}
if (ptwPorts.size > 0) {
val ptw = Module(new DummyPTW(ptwPorts.size))
ptw.io.requestors <> ptwPorts
}
require(memPorts.size == io.uncached.size)
if (memPorts.size > 0) {
io.uncached <> memPorts
}
io.success := test.io.status.finished
}

View File

@ -76,12 +76,7 @@ class AddrMap(
var cacheable = true
for (AddrMapEntry(name, r) <- entriesIn) {
require (!mapping.contains(name))
if (r.start != 0) {
base = r.start
} else {
base = (base + r.size - 1) / r.size * r.size
}
base = r.start
r match {
case r: AddrMap =>

View File

@ -4,7 +4,9 @@ package rocket
import Chisel._
import junctions._
import diplomacy._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.agents._
import uncore.coherence._
import uncore.constants._
@ -125,7 +127,7 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
require(nWays == 1)
metaWriteArb.io.out.ready := true
metaReadArb.io.out.ready := !metaWriteArb.io.out.valid
val inScratchpad = addrMap(s"io:cbus:dmem${tileId}").containsAddress(s1_paddr)
val inScratchpad = addrMap(s"TL2:dmem${tileId}").containsAddress(s1_paddr)
val hitState = Mux(inScratchpad, ClientMetadata.onReset.onHit(M_XWR), ClientMetadata.onReset)
(inScratchpad, hitState, L1Metadata(UInt(0), ClientMetadata.onReset))
} else {
@ -495,60 +497,97 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
}
}
class ScratchpadSlavePort(implicit p: Parameters) extends CoreModule()(p) {
val io = new Bundle {
val tl = new ClientUncachedTileLinkIO().flip
val dmem = new HellaCacheIO
class ScratchpadSlavePort(implicit val p: Parameters) extends LazyModule with HasCoreParameters {
val node = TLManagerNode(TLManagerPortParameters(
Seq(TLManagerParameters(
address = List(AddressSet(0x80000000L, BigInt(p(DataScratchpadSize)-1))),
regionType = RegionType.UNCACHED,
executable = true,
supportsArithmetic = if (p(UseAtomics)) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsLogical = if (p(UseAtomics)) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsPutPartial = TransferSizes(1, coreDataBytes),
supportsPutFull = TransferSizes(1, coreDataBytes),
supportsGet = TransferSizes(1, coreDataBytes),
fifoId = Some(0))), // requests handled in FIFO order
beatBytes = coreDataBytes,
minLatency = 1))
// Make sure this ends up with the same name as before
override def name = "dmem0"
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val tl_in = node.bundleIn
val dmem = new HellaCacheIO
}
val tl_in = io.tl_in(0)
val edge = node.edgesIn(0)
require(usingDataScratchpad)
val s_ready :: s_wait :: s_replay :: s_grant :: Nil = Enum(UInt(), 4)
val state = Reg(init = s_ready)
when (io.dmem.resp.valid) { state := s_grant }
when (tl_in.d.fire()) { state := s_ready }
when (io.dmem.s2_nack) { state := s_replay }
when (io.dmem.req.fire()) { state := s_wait }
val acq = Reg(tl_in.a.bits)
when (io.dmem.resp.valid) { acq.data := io.dmem.resp.bits.data }
when (tl_in.a.fire()) { acq := tl_in.a.bits }
val isWrite = acq.opcode === TLMessages.PutFullData || acq.opcode === TLMessages.PutPartialData
val isRead = !edge.hasData(acq)
def formCacheReq(acq: TLBundleA) = {
val req = Wire(new HellaCacheReq)
req.cmd := MuxLookup(acq.opcode, Wire(M_XRD), Array(
TLMessages.PutFullData -> M_XWR,
TLMessages.PutPartialData -> M_XWR,
TLMessages.ArithmeticData -> MuxLookup(acq.param, Wire(M_XRD), Array(
TLAtomics.MIN -> M_XA_MIN,
TLAtomics.MAX -> M_XA_MAX,
TLAtomics.MINU -> M_XA_MINU,
TLAtomics.MAXU -> M_XA_MAXU,
TLAtomics.ADD -> M_XA_ADD)),
TLMessages.LogicalData -> MuxLookup(acq.param, Wire(M_XRD), Array(
TLAtomics.XOR -> M_XA_XOR,
TLAtomics.OR -> M_XA_OR,
TLAtomics.AND -> M_XA_AND,
TLAtomics.SWAP -> M_XA_SWAP)),
TLMessages.Get -> M_XRD))
// treat all loads as full words, so bytes appear in correct lane
req.typ := Mux(isRead, log2Ceil(coreDataBytes), acq.size)
req.addr := Mux(isRead, ~(~acq.address | (coreDataBytes-1)), acq.address)
req.tag := UInt(0)
req
}
val ready = state === s_ready || tl_in.d.fire()
io.dmem.req.valid := (tl_in.a.valid && ready) || state === s_replay
tl_in.a.ready := io.dmem.req.ready && ready
io.dmem.req.bits := formCacheReq(Mux(state === s_replay, acq, tl_in.a.bits))
// the TL data is already in the correct byte lane, but the D$
// expects right-justified store data, so that it can steer the bytes.
io.dmem.s1_data := new LoadGen(acq.size, Bool(false), acq.address(log2Ceil(coreDataBytes)-1,0), acq.data, Bool(false), coreDataBytes).data
io.dmem.s1_kill := false
io.dmem.invalidate_lr := false
// place AMO data in correct word lane
val minAMOBytes = 4
val grantData = Mux(io.dmem.resp.valid, io.dmem.resp.bits.data, acq.data)
val alignedGrantData = Mux(acq.size <= log2Ceil(minAMOBytes), Fill(coreDataBytes/minAMOBytes, grantData(8*minAMOBytes-1, 0)), grantData)
tl_in.d.valid := io.dmem.resp.valid || state === s_grant
tl_in.d.bits := Mux(isWrite,
edge.AccessAck(acq, UInt(0)),
edge.AccessAck(acq, UInt(0), UInt(0)))
tl_in.d.bits.data := alignedGrantData
// Tie off unused channels
tl_in.b.valid := Bool(false)
tl_in.c.ready := Bool(true)
tl_in.e.ready := Bool(true)
}
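// Worked example (illustrative) of the read-address alignment above: with
// coreDataBytes = 8, a 1-byte Get to address 0x1003 is issued to the D$ as a
// full-word read of ~(~0x1003 | 0x7) = 0x1000, so the requested byte comes back
// in lane 3 and the TL response data needs no re-justification.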
val s_ready :: s_wait :: s_replay :: s_grant :: Nil = Enum(UInt(), 4)
val state = Reg(init = s_ready)
when (io.dmem.resp.valid) { state := s_grant }
when (io.tl.grant.fire()) { state := s_ready }
when (io.dmem.s2_nack) { state := s_replay }
when (io.dmem.req.fire()) { state := s_wait }
val acq = Reg(io.tl.acquire.bits)
when (io.dmem.resp.valid) { acq.data := io.dmem.resp.bits.data }
when (io.tl.acquire.fire()) { acq := io.tl.acquire.bits }
val isRead = acq.isBuiltInType(Acquire.getType)
val isWrite = acq.isBuiltInType(Acquire.putType)
assert(state === s_ready || isRead || isWrite)
require(coreDataBits == acq.tlDataBits)
require(usingDataScratchpad)
def formCacheReq(acq: Acquire) = {
val req = Wire(new HellaCacheReq)
// treat all loads as full words, so bytes appear in correct lane
req.typ := Mux(isRead, log2Ceil(acq.tlDataBytes), acq.op_size())
req.cmd := acq.op_code()
req.addr := Mux(isRead, ~(~acq.full_addr() | (acq.tlDataBytes-1)), acq.full_addr())
req.tag := UInt(0)
req
}
val ready = state === s_ready || io.tl.grant.fire()
io.dmem.req.valid := (io.tl.acquire.valid && ready) || state === s_replay
io.tl.acquire.ready := io.dmem.req.ready && ready
io.dmem.req.bits := formCacheReq(Mux(state === s_replay, acq, io.tl.acquire.bits))
// this blows. the TL data is already in the correct byte lane, but the D$
// expects right-justified store data, so that it can steer the bytes.
io.dmem.s1_data := new LoadGen(acq.op_size(), Bool(false), acq.addr_byte(), acq.data, Bool(false), acq.tlDataBytes).data
io.dmem.s1_kill := false
io.dmem.invalidate_lr := false
// place AMO data in correct word lane
val minAMOBytes = 4
val grantData = Mux(io.dmem.resp.valid, io.dmem.resp.bits.data, acq.data)
val alignedGrantData = Mux(acq.op_size() <= log2Ceil(minAMOBytes), Fill(coreDataBytes/minAMOBytes, grantData(8*minAMOBytes-1, 0)), grantData)
io.tl.grant.valid := io.dmem.resp.valid || state === s_grant
io.tl.grant.bits := Grant(
is_builtin_type = Bool(true),
g_type = acq.getBuiltInGrantType(),
client_xact_id = acq.client_xact_id,
manager_xact_id = UInt(0),
addr_beat = acq.addr_beat,
data = alignedGrantData)
}

View File

@ -3,7 +3,9 @@
package rocket
import Chisel._
import diplomacy._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.agents._
import uncore.converters._
import uncore.devices._
@ -25,136 +27,141 @@ case class RoccParameters(
case class TileBundleConfig(
nCachedTileLinkPorts: Int,
nUncachedTileLinkPorts: Int,
xLen: Int,
hasSlavePort: Boolean)
xLen: Int)
class TileIO(c: TileBundleConfig)(implicit p: Parameters) extends Bundle {
class TileIO(c: TileBundleConfig, node: Option[TLInwardNode] = None)(implicit p: Parameters) extends Bundle {
val cached = Vec(c.nCachedTileLinkPorts, new ClientTileLinkIO)
val uncached = Vec(c.nUncachedTileLinkPorts, new ClientUncachedTileLinkIO)
val hartid = UInt(INPUT, c.xLen)
val interrupts = new TileInterrupts().asInput
val slave = c.hasSlavePort.option(new ClientUncachedTileLinkIO().flip)
val slave = node.map(_.inward.bundleIn)
val resetVector = UInt(INPUT, c.xLen)
override def cloneType = new TileIO(c).asInstanceOf[this.type]
}
abstract class Tile(clockSignal: Clock = null, resetSignal: Bool = null)
(implicit p: Parameters) extends Module(Option(clockSignal), Option(resetSignal)) {
abstract class TileImp(l: LazyTile)(implicit val p: Parameters) extends LazyModuleImp(l) {
val io: TileIO
}
abstract class LazyTile(implicit p: Parameters) extends LazyModule {
val nCachedTileLinkPorts = p(NCachedTileLinkPorts)
val nUncachedTileLinkPorts = p(NUncachedTileLinkPorts)
val dcacheParams = p.alterPartial({ case CacheName => "L1D" })
val bc = TileBundleConfig(
nCachedTileLinkPorts = nCachedTileLinkPorts,
nUncachedTileLinkPorts = nUncachedTileLinkPorts,
xLen = p(XLen),
hasSlavePort = p(DataScratchpadSize) > 0)
xLen = p(XLen))
val io = new TileIO(bc)
val module: TileImp
val slave: Option[TLInputNode]
}
class RocketTile(clockSignal: Clock = null, resetSignal: Bool = null)
(implicit p: Parameters) extends Tile(clockSignal, resetSignal)(p) {
val buildRocc = p(BuildRoCC)
val usingRocc = !buildRocc.isEmpty
val nRocc = buildRocc.size
val nFPUPorts = buildRocc.filter(_.useFPU).size
class RocketTile(implicit p: Parameters) extends LazyTile {
val slave = if (p(DataScratchpadSize) == 0) None else Some(TLInputNode())
val scratch = if (p(DataScratchpadSize) == 0) None else Some(LazyModule(new ScratchpadSlavePort()(dcacheParams)))
val core = Module(new Rocket)
val icache = Module(new Frontend()(p.alterPartial({ case CacheName => "L1I" })))
val dcache = HellaCache(p(DCacheKey))(dcacheParams)
(slave zip scratch) foreach { case (node, lm) => lm.node := TLFragmenter(p(XLen)/8, p(CacheBlockBytes))(node) }
val ptwPorts = collection.mutable.ArrayBuffer(icache.io.ptw, dcache.ptw)
val dcPorts = collection.mutable.ArrayBuffer(core.io.dmem)
val uncachedArbPorts = collection.mutable.ArrayBuffer(icache.io.mem)
val uncachedPorts = collection.mutable.ArrayBuffer[ClientUncachedTileLinkIO]()
val cachedPorts = collection.mutable.ArrayBuffer(dcache.mem)
core.io.interrupts := io.interrupts
core.io.hartid := io.hartid
icache.io.cpu <> core.io.imem
icache.io.resetVector := io.resetVector
lazy val module = new TileImp(this) {
val io = new TileIO(bc, slave)
val buildRocc = p(BuildRoCC)
val usingRocc = !buildRocc.isEmpty
val nRocc = buildRocc.size
val nFPUPorts = buildRocc.filter(_.useFPU).size
val fpuOpt = p(FPUKey).map(cfg => Module(new FPU(cfg)))
fpuOpt.foreach(fpu => core.io.fpu <> fpu.io)
val core = Module(new Rocket)
val icache = Module(new Frontend()(p.alterPartial({ case CacheName => "L1I" })))
val dcache = HellaCache(p(DCacheKey))(dcacheParams)
if (usingRocc) {
val respArb = Module(new RRArbiter(new RoCCResponse, nRocc))
core.io.rocc.resp <> respArb.io.out
val ptwPorts = collection.mutable.ArrayBuffer(icache.io.ptw, dcache.ptw)
val dcPorts = collection.mutable.ArrayBuffer(core.io.dmem)
val uncachedArbPorts = collection.mutable.ArrayBuffer(icache.io.mem)
val uncachedPorts = collection.mutable.ArrayBuffer[ClientUncachedTileLinkIO]()
val cachedPorts = collection.mutable.ArrayBuffer(dcache.mem)
core.io.interrupts := io.interrupts
core.io.hartid := io.hartid
icache.io.cpu <> core.io.imem
icache.io.resetVector := io.resetVector
val roccOpcodes = buildRocc.map(_.opcodes)
val cmdRouter = Module(new RoccCommandRouter(roccOpcodes))
cmdRouter.io.in <> core.io.rocc.cmd
val fpuOpt = p(FPUKey).map(cfg => Module(new FPU(cfg)))
fpuOpt.foreach(fpu => core.io.fpu <> fpu.io)
val roccs = buildRocc.zipWithIndex.map { case (accelParams, i) =>
val rocc = accelParams.generator(p.alterPartial({
case RoccNMemChannels => accelParams.nMemChannels
case RoccNPTWPorts => accelParams.nPTWPorts
}))
val dcIF = Module(new SimpleHellaCacheIF()(dcacheParams))
rocc.io.cmd <> cmdRouter.io.out(i)
rocc.io.exception := core.io.rocc.exception
dcIF.io.requestor <> rocc.io.mem
dcPorts += dcIF.io.cache
uncachedArbPorts += rocc.io.autl
rocc
if (usingRocc) {
val respArb = Module(new RRArbiter(new RoCCResponse, nRocc))
core.io.rocc.resp <> respArb.io.out
val roccOpcodes = buildRocc.map(_.opcodes)
val cmdRouter = Module(new RoccCommandRouter(roccOpcodes))
cmdRouter.io.in <> core.io.rocc.cmd
val roccs = buildRocc.zipWithIndex.map { case (accelParams, i) =>
val rocc = accelParams.generator(p.alterPartial({
case RoccNMemChannels => accelParams.nMemChannels
case RoccNPTWPorts => accelParams.nPTWPorts
}))
val dcIF = Module(new SimpleHellaCacheIF()(dcacheParams))
rocc.io.cmd <> cmdRouter.io.out(i)
rocc.io.exception := core.io.rocc.exception
dcIF.io.requestor <> rocc.io.mem
dcPorts += dcIF.io.cache
uncachedArbPorts += rocc.io.autl
rocc
}
if (nFPUPorts > 0) {
fpuOpt.foreach { fpu =>
val fpArb = Module(new InOrderArbiter(new FPInput, new FPResult, nFPUPorts))
val fp_roccs = roccs.zip(buildRocc)
.filter { case (_, params) => params.useFPU }
.map { case (rocc, _) => rocc.io }
fpArb.io.in_req <> fp_roccs.map(_.fpu_req)
fp_roccs.zip(fpArb.io.in_resp).foreach {
case (rocc, fpu_resp) => rocc.fpu_resp <> fpu_resp
}
fpu.io.cp_req <> fpArb.io.out_req
fpArb.io.out_resp <> fpu.io.cp_resp
}
}
core.io.rocc.busy := cmdRouter.io.busy || roccs.map(_.io.busy).reduce(_ || _)
core.io.rocc.interrupt := roccs.map(_.io.interrupt).reduce(_ || _)
respArb.io.in <> roccs.map(rocc => Queue(rocc.io.resp))
ptwPorts ++= roccs.flatMap(_.io.ptw)
uncachedPorts ++= roccs.flatMap(_.io.utl)
}
if (nFPUPorts > 0) {
val uncachedArb = Module(new ClientUncachedTileLinkIOArbiter(uncachedArbPorts.size))
uncachedArb.io.in <> uncachedArbPorts
uncachedArb.io.out +=: uncachedPorts
// Connect the caches and RoCC to the outer memory system
io.uncached <> uncachedPorts
io.cached <> cachedPorts
// TODO remove nCached/nUncachedTileLinkPorts parameters and these assertions
require(uncachedPorts.size == nUncachedTileLinkPorts)
require(cachedPorts.size == nCachedTileLinkPorts)
if (p(UseVM)) {
val ptw = Module(new PTW(ptwPorts.size)(dcacheParams))
ptw.io.requestor <> ptwPorts
ptw.io.mem +=: dcPorts
core.io.ptw <> ptw.io.dpath
}
scratch.foreach { lm => lm.module.io.dmem +=: dcPorts }
require(dcPorts.size == core.dcacheArbPorts)
val dcArb = Module(new HellaCacheArbiter(dcPorts.size)(dcacheParams))
dcArb.io.requestor <> dcPorts
dcache.cpu <> dcArb.io.mem
if (nFPUPorts == 0) {
fpuOpt.foreach { fpu =>
val fpArb = Module(new InOrderArbiter(new FPInput, new FPResult, nFPUPorts))
val fp_roccs = roccs.zip(buildRocc)
.filter { case (_, params) => params.useFPU }
.map { case (rocc, _) => rocc.io }
fpArb.io.in_req <> fp_roccs.map(_.fpu_req)
fp_roccs.zip(fpArb.io.in_resp).foreach {
case (rocc, fpu_resp) => rocc.fpu_resp <> fpu_resp
}
fpu.io.cp_req <> fpArb.io.out_req
fpArb.io.out_resp <> fpu.io.cp_resp
fpu.io.cp_req.valid := Bool(false)
fpu.io.cp_resp.ready := Bool(false)
}
}
core.io.rocc.busy := cmdRouter.io.busy || roccs.map(_.io.busy).reduce(_ || _)
core.io.rocc.interrupt := roccs.map(_.io.interrupt).reduce(_ || _)
respArb.io.in <> roccs.map(rocc => Queue(rocc.io.resp))
ptwPorts ++= roccs.flatMap(_.io.ptw)
uncachedPorts ++= roccs.flatMap(_.io.utl)
}
val uncachedArb = Module(new ClientUncachedTileLinkIOArbiter(uncachedArbPorts.size))
uncachedArb.io.in <> uncachedArbPorts
uncachedArb.io.out +=: uncachedPorts
// Connect the caches and RoCC to the outer memory system
io.uncached <> uncachedPorts
io.cached <> cachedPorts
// TODO remove nCached/nUncachedTileLinkPorts parameters and these assertions
require(uncachedPorts.size == nUncachedTileLinkPorts)
require(cachedPorts.size == nCachedTileLinkPorts)
if (p(UseVM)) {
val ptw = Module(new PTW(ptwPorts.size)(dcacheParams))
ptw.io.requestor <> ptwPorts
ptw.io.mem +=: dcPorts
core.io.ptw <> ptw.io.dpath
}
io.slave foreach { case slavePort =>
val adapter = Module(new ScratchpadSlavePort()(dcacheParams))
adapter.io.tl <> TileLinkFragmenter(slavePort)
adapter.io.dmem +=: dcPorts
}
require(dcPorts.size == core.dcacheArbPorts)
val dcArb = Module(new HellaCacheArbiter(dcPorts.size)(dcacheParams))
dcArb.io.requestor <> dcPorts
dcache.cpu <> dcArb.io.mem
if (nFPUPorts == 0) {
fpuOpt.foreach { fpu =>
fpu.io.cp_req.valid := Bool(false)
fpu.io.cp_resp.ready := Bool(false)
}
}
}

View File

@ -15,103 +15,97 @@ import coreplex._
// the following parameters will be refactored properly with TL2
case object GlobalAddrMap extends Field[AddrMap]
case object ConfigString extends Field[String]
case object NCoreplexExtClients extends Field[Int]
/** Enable or disable monitoring of Diplomatic buses */
case object TLEmitMonitors extends Field[Bool]
/** Function for building Coreplex */
case object BuildCoreplex extends Field[(CoreplexConfig, Parameters) => BaseCoreplexModule[BaseCoreplex, BaseCoreplexBundle]]
/** Base Top with no Periphery */
abstract class BaseTop(q: Parameters) extends LazyModule {
// the following variables will be refactored properly with TL2
val pInterrupts = new RangeManager
abstract class BareTop[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit val q: Parameters) extends LazyModule {
// Fill in the TL1 legacy parameters; remove these once rocket/groundtest/unittest are TL2
val pBusMasters = new RangeManager
val pDevices = new ResourceManager[AddrMapEntry]
TLImp.emitMonitors = q(TLEmitMonitors)
// Add a SoC and peripheral bus
val socBus = LazyModule(new TLXbar)
val peripheryBus = LazyModule(new TLXbar)
lazy val peripheryManagers = socBus.node.edgesIn(0).manager.managers
lazy val c = CoreplexConfig(
nTiles = q(NTiles),
nExtInterrupts = pInterrupts.sum,
nSlaves = pBusMasters.sum,
nMemChannels = q(NMemoryChannels),
hasSupervisor = q(UseVM)
)
lazy val genGlobalAddrMap = GenerateGlobalAddrMap(q, pDevices.get, peripheryManagers)
private val qWithMap = q.alterPartial({case GlobalAddrMap => genGlobalAddrMap})
lazy val genConfigString = GenerateConfigString(qWithMap, c, pDevices.get, peripheryManagers)
implicit val p = qWithMap.alterPartial({
case ConfigString => genConfigString
case NCoreplexExtClients => pBusMasters.sum})
val legacy = LazyModule(new TLLegacy()(p.alterPartial({ case TLId => "L2toMMIO" })))
peripheryBus.node :=
TLWidthWidget(p(SOCBusKey).beatBytes)(
TLBuffer()(
TLAtomicAutomata(arithmetic = p(PeripheryBusKey).arithAMO)(
socBus.node)))
socBus.node :=
TLWidthWidget(legacy.tlDataBytes)(
TLHintHandler()(
legacy.node))
lazy val legacyAddrMap = GenerateGlobalAddrMap(q, coreplex.l1tol2.node.edgesIn(0).manager.managers)
val coreplex : C = LazyModule(_coreplex(q.alterPartial {
case NCoreplexExtClients => pBusMasters.sum
case GlobalAddrMap => legacyAddrMap
}))
TopModule.contents = Some(this)
}
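// Tops are now parameterized over the coreplex they instantiate; a config picks
// the pairing, as the BasePlatformConfig change later in this commit does:
//   case BuildExampleTop =>
//     (p: Parameters) => LazyModule(new ExampleTop(new DefaultCoreplex()(_))(p))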
abstract class BaseTopBundle(val p: Parameters) extends Bundle {
abstract class BareTopBundle[+L <: BareTop[BaseCoreplex]](_outer: L) extends Bundle {
val outer = _outer
}
abstract class BareTopModule[+L <: BareTop[BaseCoreplex], +B <: BareTopBundle[L]](_outer: L, _io: () => B) extends LazyModuleImp(_outer) {
val outer = _outer
val io = _io ()
}
/** Base Top with no Periphery */
trait TopNetwork extends HasPeripheryParameters {
this: BareTop[BaseCoreplex] =>
implicit val p = q
TLImp.emitMonitors = p(TLEmitMonitors)
// Add a SoC and peripheral bus
val socBus = LazyModule(new TLXbar)
val peripheryBus = LazyModule(new TLXbar)
val intBus = LazyModule(new IntXbar)
peripheryBus.node :=
TLWidthWidget(p(SOCBusKey).beatBytes)(
TLAtomicAutomata(arithmetic = p(PeripheryBusKey).arithAMO)(
socBus.node))
}
trait TopNetworkBundle extends HasPeripheryParameters {
this: BareTopBundle[BareTop[BaseCoreplex]] =>
implicit val p = outer.q
val success = Bool(OUTPUT)
}
abstract class BaseTopModule[+L <: BaseTop, +B <: BaseTopBundle](
val p: Parameters, l: L, b: => B) extends LazyModuleImp(l) {
val outer: L = l
val io: B = b
trait TopNetworkModule extends HasPeripheryParameters {
this: {
val outer: BareTop[BaseCoreplex] with TopNetwork
val io: TopNetworkBundle
} =>
implicit val p = outer.p
val coreplex = p(BuildCoreplex)(outer.c, p)
val coreplexIO = Wire(coreplex.io)
val coreplexMem : Vec[ClientUncachedTileLinkIO] = Wire(outer.coreplex.module.io.mem)
val coreplexSlave: Vec[ClientUncachedTileLinkIO] = Wire(outer.coreplex.module.io.slave)
val coreplexDebug: DebugBusIO = Wire(outer.coreplex.module.io.debug)
val coreplexRtc : Bool = Wire(outer.coreplex.module.io.rtcTick)
val pBus =
Module(new TileLinkRecursiveInterconnect(1, p(GlobalAddrMap).subMap("io:pbus"))(
p.alterPartial({ case TLId => "L2toMMIO" })))
pBus.io.in.head <> coreplexIO.master.mmio
outer.legacy.module.io.legacy <> pBus.port("TL2")
io.success := outer.coreplex.module.io.success
println("Generated Address Map")
for (entry <- p(GlobalAddrMap).flatten) {
val name = entry.name
val start = entry.region.start
val end = entry.region.start + entry.region.size - 1
val prot = entry.region.attr.prot
val protStr = (if ((prot & AddrMapProt.R) > 0) "R" else "") +
(if ((prot & AddrMapProt.W) > 0) "W" else "") +
(if ((prot & AddrMapProt.X) > 0) "X" else "")
val cacheable = if (entry.region.attr.cacheable) " [C]" else ""
println(f"\t$name%s $start%x - $end%x, $protStr$cacheable")
}
println("\nGenerated Interrupt Vector")
outer.pInterrupts.print
println("\nGenerated Configuration String")
println(p(ConfigString))
ConfigStringOutput.contents = Some(p(ConfigString))
io.success := coreplexIO.success
outer.coreplex.module.io.rtcTick := coreplexRtc
coreplexRtc := Counter(p(rocketchip.RTCPeriod)).inc()
}
/** Base Top with no Periphery */
class BaseTop[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit p: Parameters) extends BareTop(_coreplex)
with TopNetwork {
override lazy val module = new BaseTopModule(this, () => new BaseTopBundle(this))
}
class BaseTopBundle[+L <: BaseTop[BaseCoreplex]](_outer: L) extends BareTopBundle(_outer)
with TopNetworkBundle
class BaseTopModule[+L <: BaseTop[BaseCoreplex], +B <: BaseTopBundle[L]](_outer: L, _io: () => B) extends BareTopModule(_outer, _io)
with TopNetworkModule
trait DirectConnection {
val coreplexIO: BaseCoreplexBundle
val coreplex: BaseCoreplexModule[BaseCoreplex, BaseCoreplexBundle]
this: BareTop[BaseCoreplex] with TopNetwork =>
coreplexIO <> coreplex.io
socBus.node := coreplex.mmio
coreplex.mmioInt := intBus.intnode
}
trait DirectConnectionModule {
this: TopNetworkModule {
val outer: BaseTop[BaseCoreplex]
} =>
coreplexMem <> outer.coreplex.module.io.mem
outer.coreplex.module.io.slave <> coreplexSlave
outer.coreplex.module.io.debug <> coreplexDebug
}

View File

@ -40,19 +40,12 @@ class BasePlatformConfig extends Config(
site(TLKey("L2toMC")).copy(dataBeats = edgeDataBeats)
case TLKey("MMIOtoEdge") =>
site(TLKey("L2toMMIO")).copy(dataBeats = edgeDataBeats)
case BuildCoreplex =>
(c: CoreplexConfig, p: Parameters) => LazyModule(new DefaultCoreplex(c)(p)).module
case NExtTopInterrupts => 2
case SOCBusKey => SOCBusConfig(beatBytes = site(TLKey("L2toMMIO")).dataBitsPerBeat/8)
case PeripheryBusKey => PeripheryBusConfig(arithAMO = true, beatBytes = 4)
// Note that PLIC asserts that this is > 0.
case AsyncDebugBus => false
case IncludeJtagDTM => false
case AsyncMMIOChannels => false
case ExtMMIOPorts => Nil
case NExtMMIOAXIChannels => 0
case NExtMMIOAHBChannels => 0
case NExtMMIOTLChannels => 0
case AsyncBusChannels => false
case NExtBusAXIChannels => 0
case HastiId => "Ext"
@ -70,7 +63,7 @@ class BasePlatformConfig extends Config(
case ExtMemSize => Dump("MEM_SIZE", 0x10000000L)
case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
case BuildExampleTop =>
(p: Parameters) => LazyModule(new ExampleTop(p))
(p: Parameters) => LazyModule(new ExampleTop(new DefaultCoreplex()(_))(p))
case SimMemLatency => 0
case _ => throw new CDEMatchError
}
@ -110,17 +103,6 @@ class WithExtMemSize(n: Long) extends Config(
case _ => throw new CDEMatchError
}
)
class WithAHB extends Config(
(pname, site, here) => pname match {
case TMemoryChannels => BusType.AHB
case NExtMMIOAHBChannels => 1
})
class WithTL extends Config(
(pname, site, here) => pname match {
case TMemoryChannels => BusType.TL
case NExtMMIOTLChannels => 1
})
class WithScratchpads extends Config(new WithNMemoryChannels(0) ++ new WithDataScratchpad(16384))

View File

@ -9,45 +9,43 @@ import coreplex._
import rocketchip._
/** Example Top with Periphery */
class ExampleTop(q: Parameters) extends BaseTop(q)
class ExampleTop[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit p: Parameters) extends BaseTop(_coreplex)
with PeripheryBootROM
with PeripheryDebug
with PeripheryExtInterrupts
with PeripheryCoreplexLocalInterrupter
with PeripheryMasterMem
with PeripheryMasterMMIO
with PeripherySlave {
override lazy val module = Module(new ExampleTopModule(p, this, new ExampleTopBundle(p)))
with PeripheryMasterAXI4MMIO
with PeripherySlave
with DirectConnection {
override lazy val module = new ExampleTopModule(this, () => new ExampleTopBundle(this))
}
class ExampleTopBundle(p: Parameters) extends BaseTopBundle(p)
class ExampleTopBundle[+L <: ExampleTop[BaseCoreplex]](_outer: L) extends BaseTopBundle(_outer)
with PeripheryBootROMBundle
with PeripheryDebugBundle
with PeripheryExtInterruptsBundle
with PeripheryCoreplexLocalInterrupterBundle
with PeripheryMasterMemBundle
with PeripheryMasterMMIOBundle
with PeripheryMasterAXI4MMIOBundle
with PeripherySlaveBundle
class ExampleTopModule[+L <: ExampleTop, +B <: ExampleTopBundle](p: Parameters, l: L, b: => B) extends BaseTopModule(p, l, b)
class ExampleTopModule[+L <: ExampleTop[BaseCoreplex], +B <: ExampleTopBundle[L]](_outer: L, _io: () => B) extends BaseTopModule(_outer, _io)
with PeripheryBootROMModule
with PeripheryDebugModule
with PeripheryExtInterruptsModule
with PeripheryCoreplexLocalInterrupterModule
with PeripheryMasterMemModule
with PeripheryMasterMMIOModule
with PeripheryMasterAXI4MMIOModule
with PeripherySlaveModule
with HardwiredResetVector
with DirectConnection
with DirectConnectionModule
/** Example Top with TestRAM */
class ExampleTopWithTestRAM(q: Parameters) extends ExampleTop(q)
class ExampleTopWithTestRAM[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit p: Parameters) extends ExampleTop(_coreplex)
with PeripheryTestRAM {
override lazy val module = Module(new ExampleTopWithTestRAMModule(p, this, new ExampleTopWithTestRAMBundle(p)))
override lazy val module = new ExampleTopWithTestRAMModule(this, () => new ExampleTopWithTestRAMBundle(this))
}
class ExampleTopWithTestRAMBundle(p: Parameters) extends ExampleTopBundle(p)
class ExampleTopWithTestRAMBundle[+L <: ExampleTopWithTestRAM[BaseCoreplex]](_outer: L) extends ExampleTopBundle(_outer)
with PeripheryTestRAMBundle
class ExampleTopWithTestRAMModule[+L <: ExampleTopWithTestRAM, +B <: ExampleTopWithTestRAMBundle](p: Parameters, l: L, b: => B) extends ExampleTopModule(p, l, b)
class ExampleTopWithTestRAMModule[+L <: ExampleTopWithTestRAM[BaseCoreplex], +B <: ExampleTopWithTestRAMBundle[L]](_outer: L, _io: () => B) extends ExampleTopModule(_outer, _io)
with PeripheryTestRAMModule
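With the coreplex now passed as a constructor argument, a harness or generator builds these tops by handing in a coreplex factory; a hedged sketch mirroring the BuildExampleTop entry and the TestHarness change elsewhere in this commit:
// Hypothetical instantiation; DefaultCoreplex is the factory used by the default config.
val top = LazyModule(new ExampleTopWithTestRAM(new DefaultCoreplex()(_))(p))
val dut = Module(top.module)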

View File

@ -9,6 +9,7 @@ import junctions.NastiConstants._
import diplomacy._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.axi4._
import uncore.converters._
import uncore.devices._
import uncore.agents._
@ -29,19 +30,12 @@ object BusType {
/** Memory channel controls */
case object TMemoryChannels extends Field[BusType.EnumVal]
/** External MMIO controls */
case object NExtMMIOAXIChannels extends Field[Int]
case object NExtMMIOAHBChannels extends Field[Int]
case object NExtMMIOTLChannels extends Field[Int]
/** External Bus controls */
case object NExtBusAXIChannels extends Field[Int]
/** Async configurations */
case object AsyncBusChannels extends Field[Boolean]
case object AsyncDebugBus extends Field[Boolean]
case object AsyncMemChannels extends Field[Boolean]
case object AsyncMMIOChannels extends Field[Boolean]
/** External address map settings */
case object ExtMMIOPorts extends Field[Seq[AddrMapEntry]]
/** Specifies the size of external memory */
case object ExtMemSize extends Field[Long]
/** Specifies the number of external interrupts */
@ -89,10 +83,8 @@ trait HasPeripheryParameters {
lazy val nMemAXIChannels = if (tMemChannels == BusType.AXI) nMemChannels else 0
lazy val nMemAHBChannels = if (tMemChannels == BusType.AHB) nMemChannels else 0
lazy val nMemTLChannels = if (tMemChannels == BusType.TL) nMemChannels else 0
lazy val outerMMIOParams = p.alterPartial({ case TLId => "L2toMMIO" })
lazy val edgeSlaveParams = p.alterPartial({ case TLId => "EdgetoSlave" })
lazy val edgeMemParams = p.alterPartial({ case TLId => "MCtoEdge" })
lazy val edgeMMIOParams = p.alterPartial({ case TLId => "MMIOtoEdge" })
lazy val peripheryBusConfig = p(PeripheryBusKey)
lazy val socBusConfig = p(SOCBusKey)
lazy val cacheBlockBytes = p(CacheBlockBytes)
@ -100,12 +92,14 @@ trait HasPeripheryParameters {
/////
trait PeripheryDebug extends LazyModule {
implicit val p: Parameters
trait PeripheryDebug {
this: TopNetwork =>
}
trait PeripheryDebugBundle {
implicit val p: Parameters
this: TopNetworkBundle {
val outer: PeripheryDebug
} =>
val debug_clk = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Clock(INPUT))
val debug_rst = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Bool(INPUT))
val debug = (!p(IncludeJtagDTM)).option(new DebugBusIO()(p).flip)
@ -113,19 +107,19 @@ trait PeripheryDebugBundle {
}
trait PeripheryDebugModule {
implicit val p: Parameters
val outer: PeripheryDebug
val io: PeripheryDebugBundle
val coreplexIO: BaseCoreplexBundle
this: TopNetworkModule {
val outer: PeripheryDebug
val io: PeripheryDebugBundle
} =>
if (p(IncludeJtagDTM)) {
// JtagDTMWithSync is a wrapper which
// handles the synchronization as well.
val dtm = Module (new JtagDTMWithSync()(p))
dtm.io.jtag <> io.jtag.get
coreplexIO.debug <> dtm.io.debug
coreplexDebug <> dtm.io.debug
} else {
coreplexIO.debug <>
coreplexDebug <>
(if (p(AsyncDebugBus)) AsyncDebugBusFrom(io.debug_clk.get, io.debug_rst.get, io.debug.get)
else io.debug.get)
}
@ -133,40 +127,40 @@ trait PeripheryDebugModule {
/////
trait PeripheryExtInterrupts extends LazyModule {
implicit val p: Parameters
val pInterrupts: RangeManager
trait PeripheryExtInterrupts {
this: TopNetwork =>
pInterrupts.add("ext", p(NExtTopInterrupts))
val extInterrupts = IntBlindInputNode(p(NExtTopInterrupts))
val extInterruptXing = LazyModule(new IntXing)
intBus.intnode := extInterruptXing.intnode
extInterruptXing.intnode := extInterrupts
}
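External interrupts now enter through a diplomatic blind input node and an IntXing synchronizer before reaching the interrupt crossbar. A sketch of adding one more interrupt source the same way (the names are illustrative):
// Hypothetical extra interrupt source routed through its own crossing.
val myIrq     = IntBlindInputNode(1)
val myIrqXing = LazyModule(new IntXing)
intBus.intnode    := myIrqXing.intnode
myIrqXing.intnode := myIrq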
trait PeripheryExtInterruptsBundle {
implicit val p: Parameters
val interrupts = Vec(p(NExtTopInterrupts), Bool()).asInput
this: TopNetworkBundle {
val outer: PeripheryExtInterrupts
} =>
val interrupts = outer.extInterrupts.bundleIn
}
trait PeripheryExtInterruptsModule {
implicit val p: Parameters
val outer: PeripheryExtInterrupts
val io: PeripheryExtInterruptsBundle
val coreplexIO: BaseCoreplexBundle
{
val r = outer.pInterrupts.range("ext")
((r._1 until r._2) zipWithIndex) foreach { case (c, i) =>
coreplexIO.interrupts(c) := io.interrupts(i)
}
}
this: TopNetworkModule {
val outer: PeripheryExtInterrupts
val io: PeripheryExtInterruptsBundle
} =>
}
/////
trait PeripheryMasterMem extends LazyModule {
implicit val p: Parameters
trait PeripheryMasterMem {
this: TopNetwork =>
}
trait PeripheryMasterMemBundle extends HasPeripheryParameters {
implicit val p: Parameters
trait PeripheryMasterMemBundle {
this: TopNetworkBundle {
val outer: PeripheryMasterMem
} =>
val mem_clk = p(AsyncMemChannels).option(Vec(nMemChannels, Clock(INPUT)))
val mem_rst = p(AsyncMemChannels).option(Vec(nMemChannels, Bool (INPUT)))
val mem_axi = Vec(nMemAXIChannels, new NastiIO)
@ -174,13 +168,13 @@ trait PeripheryMasterMemBundle extends HasPeripheryParameters {
val mem_tl = Vec(nMemTLChannels, new ClientUncachedTileLinkIO()(edgeMemParams))
}
trait PeripheryMasterMemModule extends HasPeripheryParameters {
implicit val p: Parameters
val outer: PeripheryMasterMem
val io: PeripheryMasterMemBundle
val coreplexIO: BaseCoreplexBundle
trait PeripheryMasterMemModule {
this: TopNetworkModule {
val outer: PeripheryMasterMem
val io: PeripheryMasterMemBundle
} =>
val edgeMem = coreplexIO.master.mem.map(TileLinkWidthAdapter(_, edgeMemParams))
val edgeMem = coreplexMem.map(TileLinkWidthAdapter(_, edgeMemParams))
// Abuse the fact that zip takes the shorter of the two lists
((io.mem_axi zip edgeMem) zipWithIndex) foreach { case ((axi, mem), idx) =>
@ -204,78 +198,65 @@ trait PeripheryMasterMemModule extends HasPeripheryParameters {
/////
trait PeripheryMasterMMIO extends LazyModule {
implicit val p: Parameters
// PeripheryMasterAXI4MMIO is an example, make your own cake pattern like this one.
trait PeripheryMasterAXI4MMIO {
this: TopNetwork =>
val mmio_axi4 = AXI4BlindOutputNode(AXI4SlavePortParameters(
slaves = Seq(AXI4SlaveParameters(
address = List(AddressSet(0x60000000L, 0x1fffffffL)),
executable = true, // Can we run programs on this memory?
supportsWrite = TransferSizes(1, 256), // The slave supports 1-256 byte transfers
supportsRead = TransferSizes(1, 256),
interleavedId = Some(0))), // slave does not interleave read responses
beatBytes = 8)) // 64-bit AXI interface
mmio_axi4 :=
// AXI4Fragmenter(lite=false, maxInFlight = 20)( // beef device up to support awlen = 0xff
TLToAXI4(idBits = 4)( // use idBits = 0 for AXI4-Lite
TLWidthWidget(socBusConfig.beatBytes)( // convert width before attaching to socBus
socBus.node))
}
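As the comment above suggests, a new peripheral follows the same three-layer cake: a LazyModule-side trait that instantiates the device and attaches its node, a bundle trait for any top-level ports, and a module trait for wiring. A minimal sketch modeled on PeripheryTestRAM below (the MyDevice name and address are hypothetical):
trait PeripheryMyDevice {
  this: TopNetwork =>
  val mydev = LazyModule(new TLRAM(AddressSet(0x53000000, 0xfff), true, peripheryBusConfig.beatBytes))
  mydev.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
}
trait PeripheryMyDeviceBundle {
  this: TopNetworkBundle { val outer: PeripheryMyDevice } =>
}
trait PeripheryMyDeviceModule {
  this: TopNetworkModule { val outer: PeripheryMyDevice; val io: PeripheryMyDeviceBundle } =>
}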
trait PeripheryMasterMMIOBundle extends HasPeripheryParameters {
implicit val p: Parameters
val mmio_clk = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Clock(INPUT)))
val mmio_rst = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Bool (INPUT)))
val mmio_axi = Vec(p(NExtMMIOAXIChannels), new NastiIO)
val mmio_ahb = Vec(p(NExtMMIOAHBChannels), new HastiMasterIO)
val mmio_tl = Vec(p(NExtMMIOTLChannels), new ClientUncachedTileLinkIO()(edgeMMIOParams))
trait PeripheryMasterAXI4MMIOBundle {
this: TopNetworkBundle {
val outer: PeripheryMasterAXI4MMIO
} =>
val mmio_axi = outer.mmio_axi4.bundleOut
}
trait PeripheryMasterMMIOModule extends HasPeripheryParameters {
implicit val p: Parameters
val outer: PeripheryMasterMMIO
val io: PeripheryMasterMMIOBundle
val pBus: TileLinkRecursiveInterconnect
val mmio_ports = p(ExtMMIOPorts) map { port =>
TileLinkWidthAdapter(pBus.port(port.name), edgeMMIOParams)
}
val mmio_axi_start = 0
val mmio_axi_end = mmio_axi_start + p(NExtMMIOAXIChannels)
val mmio_ahb_start = mmio_axi_end
val mmio_ahb_end = mmio_ahb_start + p(NExtMMIOAHBChannels)
val mmio_tl_start = mmio_ahb_end
val mmio_tl_end = mmio_tl_start + p(NExtMMIOTLChannels)
require (mmio_tl_end == mmio_ports.size)
for (i <- 0 until mmio_ports.size) {
if (mmio_axi_start <= i && i < mmio_axi_end) {
val idx = i-mmio_axi_start
val axi_sync = PeripheryUtils.convertTLtoAXI(mmio_ports(i))
io.mmio_axi(idx) <> (
if (!p(AsyncMMIOChannels)) axi_sync
else AsyncNastiTo(io.mmio_clk.get(idx), io.mmio_rst.get(idx), axi_sync)
)
} else if (mmio_ahb_start <= i && i < mmio_ahb_end) {
val idx = i-mmio_ahb_start
io.mmio_ahb(idx) <> PeripheryUtils.convertTLtoAHB(mmio_ports(i), atomics = true)
} else if (mmio_tl_start <= i && i < mmio_tl_end) {
val idx = i-mmio_tl_start
io.mmio_tl(idx) <> TileLinkEnqueuer(mmio_ports(i), 2)
} else {
require(false, "Unconnected external MMIO port")
}
}
trait PeripheryMasterAXI4MMIOModule {
this: TopNetworkModule {
val outer: PeripheryMasterAXI4MMIO
val io: PeripheryMasterAXI4MMIOBundle
} =>
// nothing to do
}
/////
trait PeripherySlave extends LazyModule {
implicit val p: Parameters
val pBusMasters: RangeManager
trait PeripherySlave {
this: TopNetwork {
val pBusMasters: RangeManager
} =>
if (p(NExtBusAXIChannels) > 0) pBusMasters.add("ext", 1) // NExtBusAXIChannels are arbitrated into one TL port
}
trait PeripherySlaveBundle extends HasPeripheryParameters {
implicit val p: Parameters
trait PeripherySlaveBundle {
this: TopNetworkBundle {
val outer: PeripherySlave
} =>
val bus_clk = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Clock(INPUT)))
val bus_rst = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Bool (INPUT)))
val bus_axi = Vec(p(NExtBusAXIChannels), new NastiIO).flip
}
trait PeripherySlaveModule extends HasPeripheryParameters {
implicit val p: Parameters
val outer: PeripherySlave
val io: PeripherySlaveBundle
val coreplexIO: BaseCoreplexBundle
trait PeripherySlaveModule {
this: TopNetworkModule {
val outer: PeripherySlave { val pBusMasters: RangeManager }
val io: PeripherySlaveBundle
} =>
if (p(NExtBusAXIChannels) > 0) {
val arb = Module(new NastiArbiter(p(NExtBusAXIChannels)))
@ -290,105 +271,82 @@ trait PeripherySlaveModule extends HasPeripheryParameters {
val (r_start, r_end) = outer.pBusMasters.range("ext")
require(r_end - r_start == 1, "RangeManager should return 1 slot")
TileLinkWidthAdapter(coreplexIO.slave(r_start), conv.io.tl)
TileLinkWidthAdapter(coreplexSlave(r_start), conv.io.tl)
}
}
/////
trait PeripheryCoreplexLocalInterrupter extends LazyModule with HasPeripheryParameters {
implicit val p: Parameters
val peripheryBus: TLXbar
trait PeripheryBootROM {
this: TopNetwork =>
// CoreplexLocalInterrupter must be at least 64b if XLen >= 64
val beatBytes = max((outerMMIOParams(XLen) min 64) / 8, peripheryBusConfig.beatBytes)
val clintConfig = CoreplexLocalInterrupterConfig(beatBytes)
val clint = LazyModule(new CoreplexLocalInterrupter(clintConfig)(outerMMIOParams))
// The periphery bus is 32-bit, so we may need to adapt its width to XLen
clint.node := TLFragmenter(beatBytes, cacheBlockBytes)(TLWidthWidget(peripheryBusConfig.beatBytes)(peripheryBus.node))
}
trait PeripheryCoreplexLocalInterrupterBundle {
implicit val p: Parameters
}
trait PeripheryCoreplexLocalInterrupterModule extends HasPeripheryParameters {
implicit val p: Parameters
val outer: PeripheryCoreplexLocalInterrupter
val io: PeripheryCoreplexLocalInterrupterBundle
val coreplexIO: BaseCoreplexBundle
outer.clint.module.io.rtcTick := Counter(p(RTCPeriod)).inc()
coreplexIO.clint <> outer.clint.module.io.tiles
}
/////
trait PeripheryBootROM extends LazyModule with HasPeripheryParameters {
implicit val p: Parameters
val peripheryBus: TLXbar
val address = 0x1000
val size = 0x1000
val bootrom = LazyModule(new TLROM(address, size, GenerateBootROM(p, address), true, peripheryBusConfig.beatBytes))
val bootrom_address = 0x1000
val bootrom_size = 0x1000
val bootrom = LazyModule(new TLROM(bootrom_address, bootrom_size, GenerateBootROM(p, bootrom_address), true, peripheryBusConfig.beatBytes))
bootrom.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
}
trait PeripheryBootROMBundle {
implicit val p: Parameters
this: TopNetworkBundle {
val outer: PeripheryBootROM
} =>
}
trait PeripheryBootROMModule extends HasPeripheryParameters {
implicit val p: Parameters
val outer: PeripheryBootROM
val io: PeripheryBootROMBundle
trait PeripheryBootROMModule {
this: TopNetworkModule {
val outer: PeripheryBootROM
val io: PeripheryBootROMBundle
} =>
}
/////
trait PeripheryTestRAM extends LazyModule with HasPeripheryParameters {
implicit val p: Parameters
val peripheryBus: TLXbar
trait PeripheryTestRAM {
this: TopNetwork =>
val ramBase = 0x52000000
val ramSize = 0x1000
val sram = LazyModule(new TLRAM(AddressSet(ramBase, ramSize-1), true, peripheryBusConfig.beatBytes)
{ override def name = "testram" })
sram.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
val testram = LazyModule(new TLRAM(AddressSet(0x52000000, 0xfff), true, peripheryBusConfig.beatBytes))
testram.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
}
trait PeripheryTestRAMBundle {
implicit val p: Parameters
this: TopNetworkBundle {
val outer: PeripheryTestRAM
} =>
}
trait PeripheryTestRAMModule extends HasPeripheryParameters {
implicit val p: Parameters
val outer: PeripheryTestRAM
trait PeripheryTestRAMModule {
this: TopNetworkModule {
val outer: PeripheryTestRAM
val io: PeripheryTestRAMBundle
} =>
}
/////
trait PeripheryTestBusMaster extends LazyModule {
implicit val p: Parameters
val peripheryBus: TLXbar
trait PeripheryTestBusMaster {
this: TopNetwork =>
val fuzzer = LazyModule(new TLFuzzer(5000))
peripheryBus.node := fuzzer.node
}
trait PeripheryTestBusMasterBundle {
implicit val p: Parameters
this: TopNetworkBundle {
val outer: PeripheryTestBusMaster
} =>
}
trait PeripheryTestBusMasterModule {
implicit val p: Parameters
val outer: PeripheryTestBusMaster
this: TopNetworkModule {
val outer: PeripheryTestBusMaster
val io: PeripheryTestBusMasterBundle
} =>
}
/////
trait HardwiredResetVector {
val coreplexIO: BaseCoreplexBundle
coreplexIO.resetVector := UInt(0x1000) // boot ROM
this: TopNetworkModule {
val outer: BaseTop[BaseCoreplex]
} =>
outer.coreplex.module.io.resetVector := UInt(0x1000) // boot ROM
}

View File

@ -8,14 +8,14 @@ import junctions._
import junctions.NastiConstants._
import util.LatencyPipe
case object BuildExampleTop extends Field[Parameters => ExampleTop]
case object BuildExampleTop extends Field[Parameters => ExampleTop[coreplex.BaseCoreplex]]
case object SimMemLatency extends Field[Int]
class TestHarness(q: Parameters) extends Module {
val io = new Bundle {
val success = Bool(OUTPUT)
}
val dut = q(BuildExampleTop)(q).module
val dut = Module(q(BuildExampleTop)(q).module)
implicit val p = dut.p
// This test harness isn't especially flexible yet
@ -25,16 +25,12 @@ class TestHarness(q: Parameters) extends Module {
require(dut.io.mem_tl.isEmpty)
require(dut.io.bus_clk.isEmpty)
require(dut.io.bus_rst.isEmpty)
require(dut.io.mmio_clk.isEmpty)
require(dut.io.mmio_rst.isEmpty)
require(dut.io.mmio_ahb.isEmpty)
require(dut.io.mmio_tl.isEmpty)
for (int <- dut.io.interrupts)
for (int <- dut.io.interrupts(0))
int := Bool(false)
if (dut.io.mem_axi.nonEmpty) {
val memSize = p(GlobalAddrMap)("mem").size
val memSize = p(ExtMemSize)
require(memSize % dut.io.mem_axi.size == 0)
for (axi <- dut.io.mem_axi) {
val mem = Module(new SimAXIMem(memSize / dut.io.mem_axi.size))

View File

@ -53,20 +53,8 @@ class GlobalVariable[T] {
}
object GenerateGlobalAddrMap {
def apply(p: Parameters, pDevicesEntries: Seq[AddrMapEntry], peripheryManagers: Seq[TLManagerParameters]) = {
lazy val cBusIOAddrMap: AddrMap = {
val entries = collection.mutable.ArrayBuffer[AddrMapEntry]()
entries += AddrMapEntry("debug", MemSize(4096, MemAttr(AddrMapProt.RWX)))
entries += AddrMapEntry("plic", MemRange(0x0C000000, 0x4000000, MemAttr(AddrMapProt.RW)))
if (p(DataScratchpadSize) > 0) { // TODO heterogeneous tiles
require(p(NTiles) == 1) // TODO relax this
require(p(NMemoryChannels) == 0) // TODO allow both scratchpad & DRAM
entries += AddrMapEntry("dmem0", MemRange(0x80000000L, BigInt(p(DataScratchpadSize)), MemAttr(AddrMapProt.RWX)))
}
new AddrMap(entries)
}
lazy val tl2Devices = peripheryManagers.map { manager =>
def apply(p: Parameters, peripheryManagers: Seq[TLManagerParameters]) = {
val tl2Devices = peripheryManagers.map { manager =>
val cacheable = manager.regionType match {
case RegionType.CACHED => true
case RegionType.TRACKED => true
@ -84,41 +72,28 @@ object GenerateGlobalAddrMap {
}
}.flatten
lazy val uniquelyNamedTL2Devices =
val uniquelyNamedTL2Devices =
tl2Devices.groupBy(_.name).values.map(_.zipWithIndex.map {
case (e, i) => if (i == 0) e else e.copy(name = e.name + "_" + i)
}).flatten.toList
lazy val tl2AddrMap = new AddrMap(uniquelyNamedTL2Devices, collapse = true)
lazy val pBusIOAddrMap = new AddrMap(AddrMapEntry("TL2", tl2AddrMap) +: (p(ExtMMIOPorts) ++ pDevicesEntries), collapse = true)
val memBase = 0x80000000L
val memSize = p(ExtMemSize)
Dump("MEM_BASE", memBase)
val cBus = AddrMapEntry("cbus", cBusIOAddrMap)
val pBus = AddrMapEntry("pbus", pBusIOAddrMap)
val io = AddrMapEntry("io", AddrMap((cBus +: (!pBusIOAddrMap.isEmpty).option(pBus).toSeq):_*))
val tl2 = AddrMapEntry("TL2", new AddrMap(uniquelyNamedTL2Devices, collapse = true))
val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true)))
AddrMap((io +: (p(NMemoryChannels) > 0).option(mem).toSeq):_*)
AddrMap((tl2 +: (p(NMemoryChannels) > 0).option(mem).toSeq):_*)
}
}
object GenerateConfigString {
def apply(p: Parameters, c: CoreplexConfig, pDevicesEntries: Seq[AddrMapEntry], peripheryManagers: Seq[TLManagerParameters]) = {
def apply(p: Parameters, clint: CoreplexLocalInterrupter, plic: TLPLIC, peripheryManagers: Seq[TLManagerParameters]) = {
val c = CoreplexParameters()(p)
val addrMap = p(GlobalAddrMap)
val plicAddr = addrMap("io:cbus:plic").start
val clint = CoreplexLocalInterrupterConfig(0, addrMap("io:pbus:TL2:clint").start)
val xLen = p(XLen)
val res = new StringBuilder
res append "plic {\n"
res append s" priority 0x${plicAddr.toString(16)};\n"
res append s" pending 0x${(plicAddr + c.plicKey.pendingBase).toString(16)};\n"
res append s" ndevs ${c.plicKey.nDevices};\n"
res append "};\n"
res append "rtc {\n"
res append s" addr 0x${clint.timeAddress.toString(16)};\n"
res append "};\n"
res append plic.module.globalConfigString
res append clint.module.globalConfigString
if (addrMap contains "mem") {
res append "ram {\n"
res append " 0 {\n"
@ -140,33 +115,12 @@ object GenerateConfigString {
res append s" $i {\n"
res append " 0 {\n"
res append s" isa $isa;\n"
res append s" timecmp 0x${clint.timecmpAddress(i).toString(16)};\n"
res append s" ipi 0x${clint.msipAddress(i).toString(16)};\n"
res append s" plic {\n"
res append s" m {\n"
res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'M')).toString(16)};\n"
res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'M')).toString(16)};\n"
res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'M')).toString(16)};\n"
res append s" };\n"
if (c.hasSupervisor) {
res append s" s {\n"
res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'S')).toString(16)};\n"
res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'S')).toString(16)};\n"
res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'S')).toString(16)};\n"
res append s" };\n"
}
res append " };\n"
res append clint.module.hartConfigStrings(i)
res append plic.module.hartConfigStrings(i)
res append " };\n"
res append " };\n"
}
res append "};\n"
pDevicesEntries foreach { entry =>
val region = addrMap("io:pbus:" + entry.name)
res append s"${entry.name} {\n"
res append s" addr 0x${region.start.toString(16)};\n"
res append s" size 0x${region.size.toString(16)}; \n"
res append "}\n"
}
peripheryManagers.foreach { manager => res append manager.dts }
res append '\u0000'
res.toString
@ -185,6 +139,6 @@ object GenerateBootROM {
require(rom.getInt(12) == 0,
"Config string address position should not be occupied by code")
rom.putInt(12, configStringAddr)
rom.array() ++ (p(ConfigString).getBytes.toSeq)
rom.array() ++ (ConfigStringOutput.contents.get.getBytes.toSeq)
}
}

View File

@ -10,7 +10,7 @@ import scala.math.{min,max}
import uncore.tilelink2.{leftOR, rightOR, UIntToOH1, OH1ToOH}
// lite: masters all use only one ID => reads will not be interleaved
class AXI4Fragmenter(lite: Boolean = false, maxInFlight: Int = 32, combinational: Boolean = true) extends LazyModule
class AXI4Fragmenter(lite: Boolean = false, maxInFlight: => Int = 32, combinational: Boolean = true) extends LazyModule
{
val maxBeats = 1 << AXI4Parameters.lenBits
def expandTransfer(x: TransferSizes, beatBytes: Int, alignment: BigInt) =
@ -287,7 +287,7 @@ class AXI4FragmenterSideband(maxInFlight: Int, flow: Boolean = false) extends Mo
object AXI4Fragmenter
{
// applied to the AXI4 source node; y.node := AXI4Fragmenter()(x.node)
def apply(lite: Boolean = false, maxInFlight: Int = 32, combinational: Boolean = true)(x: AXI4OutwardNode)(implicit sourceInfo: SourceInfo): AXI4OutwardNode = {
def apply(lite: Boolean = false, maxInFlight: => Int = 32, combinational: Boolean = true)(x: AXI4OutwardNode)(implicit sourceInfo: SourceInfo): AXI4OutwardNode = {
val fragmenter = LazyModule(new AXI4Fragmenter(lite, maxInFlight, combinational))
fragmenter.node := x
fragmenter.node
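Making maxInFlight a by-name parameter defers its evaluation to elaboration time. Usage follows the comment above; a hedged sketch of slotting the fragmenter back into the AXI4 MMIO path that this commit leaves commented out:
// Hypothetical: re-enable the fragmenter in front of the blind AXI4 MMIO port.
mmio_axi4 :=
  AXI4Fragmenter(lite = false, maxInFlight = 20)(
    TLToAXI4(idBits = 4)(
      TLWidthWidget(socBusConfig.beatBytes)(
        socBus.node)))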

View File

@ -16,10 +16,13 @@ object AXI4Imp extends NodeImp[AXI4MasterPortParameters, AXI4SlavePortParameters
}
def bundleI(ei: Seq[AXI4EdgeParameters]): Vec[AXI4Bundle] = {
require (!ei.isEmpty)
Vec(ei.size, AXI4Bundle(ei.map(_.bundle).reduce(_.union(_)))).flip
Vec(ei.size, AXI4Bundle(ei.map(_.bundle).reduce(_.union(_))))
}
def colour = "#00ccff" // bluish
override def labelI(ei: AXI4EdgeParameters) = (ei.slave.beatBytes * 8).toString
override def labelO(eo: AXI4EdgeParameters) = (eo.slave.beatBytes * 8).toString
def connect(bo: => AXI4Bundle, bi: => AXI4Bundle, ei: => AXI4EdgeParameters)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
(None, () => { bi <> bo })
}
@ -30,18 +33,23 @@ object AXI4Imp extends NodeImp[AXI4MasterPortParameters, AXI4SlavePortParameters
pu.copy(slaves = pu.slaves.map { m => m.copy (nodePath = node +: m.nodePath) })
}
// Nodes implemented inside modules
case class AXI4IdentityNode() extends IdentityNode(AXI4Imp)
case class AXI4OutputNode() extends OutputNode(AXI4Imp)
case class AXI4InputNode() extends InputNode(AXI4Imp)
case class AXI4MasterNode(portParams: AXI4MasterPortParameters, numPorts: Range.Inclusive = 1 to 1)
extends SourceNode(AXI4Imp)(portParams, numPorts)
case class AXI4SlaveNode(portParams: AXI4SlavePortParameters, numPorts: Range.Inclusive = 1 to 1)
extends SinkNode(AXI4Imp)(portParams, numPorts)
case class AXI4AdapterNode(
masterFn: Seq[AXI4MasterPortParameters] => AXI4MasterPortParameters,
slaveFn: Seq[AXI4SlavePortParameters] => AXI4SlavePortParameters,
numMasterPorts: Range.Inclusive = 1 to 1,
numSlavePorts: Range.Inclusive = 1 to 1)
extends InteriorNode(AXI4Imp)(masterFn, slaveFn, numMasterPorts, numSlavePorts)
// Nodes passed from an inner module
case class AXI4OutputNode() extends OutputNode(AXI4Imp)
case class AXI4InputNode() extends InputNode(AXI4Imp)
// Nodes used for external ports
case class AXI4BlindOutputNode(portParams: AXI4SlavePortParameters) extends BlindOutputNode(AXI4Imp)(portParams)
case class AXI4BlindInputNode(portParams: AXI4MasterPortParameters) extends BlindInputNode(AXI4Imp)(portParams)
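The blind nodes are what surface AXI4 ports at the edge of the design; a sketch of declaring and exposing one, following the mmio_axi4 declaration earlier in this commit (the address range is illustrative):
// Hypothetical blind output port; bundleOut is exposed by the bundle layer.
val my_axi4 = AXI4BlindOutputNode(AXI4SlavePortParameters(
  slaves = Seq(AXI4SlaveParameters(
    address       = List(AddressSet(0x70000000L, 0x0fffffffL)),
    supportsWrite = TransferSizes(1, 256),
    supportsRead  = TransferSizes(1, 256),
    interleavedId = Some(0))),
  beatBytes = 8))
val my_axi = my_axi4.bundleOut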

View File

@ -7,10 +7,11 @@ import diplomacy._
import regmapper._
import scala.math.{min,max}
class AXI4RegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true)
class AXI4RegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
extends AXI4SlaveNode(AXI4SlavePortParameters(
Seq(AXI4SlaveParameters(
address = Seq(address),
executable = executable,
supportsWrite = TransferSizes(1, beatBytes),
supportsRead = TransferSizes(1, beatBytes),
interleavedId = Some(0))),
@ -69,16 +70,16 @@ class AXI4RegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int
object AXI4RegisterNode
{
def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true) =
new AXI4RegisterNode(address, concurrency, beatBytes, undefZero)
def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false) =
new AXI4RegisterNode(address, concurrency, beatBytes, undefZero, executable)
}
// These convenience methods below combine to make it possible to create an AXI4
// register mapped device from a totally abstract register mapped device.
abstract class AXI4RegisterRouterBase(address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean) extends LazyModule
abstract class AXI4RegisterRouterBase(address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean, executable: Boolean) extends LazyModule
{
val node = AXI4RegisterNode(address, concurrency, beatBytes, undefZero)
val node = AXI4RegisterNode(address, concurrency, beatBytes, undefZero, executable)
val intnode = uncore.tilelink2.IntSourceNode(interrupts)
}
@ -101,10 +102,10 @@ class AXI4RegModule[P, B <: AXI4RegBundleBase](val params: P, bundleBuilder: =>
}
class AXI4RegisterRouter[B <: AXI4RegBundleBase, M <: LazyModuleImp]
(val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true)
(val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
(bundleBuilder: AXI4RegBundleArg => B)
(moduleBuilder: (=> B, AXI4RegisterRouterBase) => M)
extends AXI4RegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero)
extends AXI4RegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero, executable)
{
require (isPow2(size))
// require (size >= 4096) ... not absolutely required, but highly recommended

View File

@ -4,8 +4,9 @@ package uncore.devices
import Chisel._
import junctions._
import uncore.tilelink._
import util._
import regmapper._
import uncore.tilelink2._
import cde.{Parameters, Config, Field}
// *****************************************
@ -15,6 +16,7 @@ import cde.{Parameters, Config, Field}
object DbRegAddrs{
def DMRAMBASE = UInt(0x0)
def DMCONTROL = UInt(0x10)
def DMINFO = UInt(0x11)
@ -69,7 +71,7 @@ object DsbBusConsts {
// See $RISCV/riscv-tools/riscv-isa-sim/debug_rom/debug_rom.h/S
// The code assumes 64 bytes of Debug RAM.
def defaultRomContents : Array[Byte] = Array(
def xlenAnyRomContents : Array[Byte] = Array(
0x6f, 0x00, 0xc0, 0x04, 0x6f, 0x00, 0xc0, 0x00, 0x13, 0x04, 0xf0, 0xff,
0x6f, 0x00, 0x80, 0x00, 0x13, 0x04, 0x00, 0x00, 0x0f, 0x00, 0xf0, 0x0f,
0xf3, 0x24, 0x00, 0xf1, 0x63, 0xc6, 0x04, 0x00, 0x83, 0x24, 0xc0, 0x43,
@ -121,10 +123,10 @@ object DsbBusConsts {
object DsbRegAddrs{
def CLEARDEBINT = UInt(0x100)
def SETHALTNOT = UInt(0x10C)
def SERINFO = UInt(0x110)
def SERBASE = UInt(0x114)
def CLEARDEBINT = 0x100
def SETHALTNOT = 0x10C
def SERINFO = 0x110
def SERBASE = 0x114
// For each serial, there are
// 3 registers starting here:
// SERSEND0
@ -132,9 +134,11 @@ object DsbRegAddrs{
// SERSTATUS0
// ...
// SERSTATUS7
def SERTX_OFFSET = UInt(0)
def SERRX_OFFSET = UInt(4)
def SERSTAT_OFFSET = UInt(8)
def SERTX_OFFSET = 0
def SERRX_OFFSET = 4
def SERSTAT_OFFSET = 8
def RAMBASE = 0x400
def ROMBASE = 0x800
}
@ -302,6 +306,24 @@ class DebugBusIO(implicit val p: cde.Parameters) extends ParameterizedBundle()(p
val resp = new DecoupledIO(new DebugBusResp).flip()
}
trait HasDebugModuleParameters {
val params : Parameters
implicit val p = params
val cfg = p(DMKey)
}
/** Debug Module I/O, with the exclusion of the RegisterRouter
* Access interface.
*/
trait DebugModuleBundle extends Bundle with HasDebugModuleParameters {
val db = new DebugBusIO()(p).flip()
val debugInterrupts = Vec(cfg.nComponents, Bool()).asOutput
val ndreset = Bool(OUTPUT)
val fullreset = Bool(OUTPUT)
}
// *****************************************
// The Module
//
@ -313,7 +335,7 @@ class DebugBusIO(implicit val p: cde.Parameters) extends ParameterizedBundle()(p
* DebugModule is a slave to two masters:
* The Debug Bus -- implemented as a generic Decoupled IO with request
* and response channels
* The System Bus -- implemented as Uncached Tile Link.
* The System Bus -- implemented as generic RegisterRouter
*
* DebugModule is responsible for holding registers, RAM, and ROM
* to support debug interactions, as well as driving interrupts
@ -321,10 +343,9 @@ class DebugBusIO(implicit val p: cde.Parameters) extends ParameterizedBundle()(p
* It is also responsible for some reset lines.
*/
class DebugModule ()(implicit val p:cde.Parameters)
extends Module
with HasTileLinkParameters {
val cfg = p(DMKey)
trait DebugModule extends Module with HasDebugModuleParameters with HasRegMap {
val io: DebugModuleBundle
//--------------------------------------------------------------
// Import constants for shorter variable names
@ -344,6 +365,7 @@ class DebugModule ()(implicit val p:cde.Parameters)
require (cfg.hasBusMaster == false)
require (cfg.nDebugRamBytes <= 64)
require (cfg.authType == DebugModuleAuthType.None)
require((DbBusConsts.dbRamWordBits % 8) == 0)
//--------------------------------------------------------------
// Private Classes (Register Fields)
@ -403,17 +425,6 @@ class DebugModule ()(implicit val p:cde.Parameters)
}
//--------------------------------------------------------------
// Module I/O
//--------------------------------------------------------------
val io = new Bundle {
val db = new DebugBusIO()(p).flip()
val debugInterrupts = Vec(cfg.nComponents, Bool()).asOutput
val tl = new ClientUncachedTileLinkIO().flip
val ndreset = Bool(OUTPUT)
val fullreset = Bool(OUTPUT)
}
//--------------------------------------------------------------
// Register & Wire Declarations
@ -455,46 +466,23 @@ class DebugModule ()(implicit val p:cde.Parameters)
// --- Debug RAM
// Since the access size from Debug Bus and System Bus may not be consistent,
// use the maximum to build the RAM, and then select as needed for the smaller
// size.
val ramDataWidth = DbBusConsts.dbRamWordBits
val ramDataBytes = ramDataWidth / 8;
val ramAddrWidth = log2Up(cfg.nDebugRamBytes / ramDataBytes)
val dbRamDataWidth = DbBusConsts.dbRamWordBits
val sbRamDataWidth = tlDataBits
val dbRamAddrWidth = log2Up((cfg.nDebugRamBytes * 8) / dbRamDataWidth)
val sbRamAddrWidth = log2Up((cfg.nDebugRamBytes * 8) / sbRamDataWidth)
val sbRamAddrOffset = log2Up(tlDataBits/8)
val ramMem = Reg(init = Vec.fill(cfg.nDebugRamBytes){UInt(0, width = 8)})
val ramDataWidth = dbRamDataWidth max sbRamDataWidth
val ramAddrWidth = dbRamAddrWidth min sbRamAddrWidth
val ramMem = Mem(1 << ramAddrWidth , UInt(width=ramDataWidth))
val ramAddr = Wire(UInt(width=ramAddrWidth))
val ramRdData = Wire(UInt(width=ramDataWidth))
val ramWrData = Wire(UInt(width=ramDataWidth))
val ramWrMask = Wire(UInt(width=ramDataWidth))
val ramWrEn = Wire(Bool())
val dbRamAddr = Wire(UInt(width=dbRamAddrWidth))
val dbRamAddr = Wire(UInt(width=ramAddrWidth))
val dbRamAddrValid = Wire(Bool())
val dbRamRdData = Wire (UInt(width=dbRamDataWidth))
val dbRamWrData = Wire(UInt(width=dbRamDataWidth))
val dbRamRdData = Wire (UInt(width=ramDataWidth))
val dbRamWrData = Wire(UInt(width=ramDataWidth))
val dbRamWrEn = Wire(Bool())
val dbRamRdEn = Wire(Bool())
val dbRamWrEnFinal = Wire(Bool())
val dbRamRdEnFinal = Wire(Bool())
require((cfg.nDebugRamBytes % ramDataBytes) == 0)
val dbRamDataOffset = log2Up(ramDataBytes)
val sbRamAddr = Wire(UInt(width=sbRamAddrWidth))
val sbRamAddrValid = Wire(Bool())
val sbRamRdData = Wire (UInt(width=sbRamDataWidth))
val sbRamWrData = Wire(UInt(width=sbRamDataWidth))
val sbRamWrEn = Wire(Bool())
val sbRamRdEn = Wire(Bool())
val sbRamWrEnFinal = Wire(Bool())
val sbRamRdEnFinal = Wire(Bool())
val sbRomRdData = Wire(UInt(width=tlDataBits))
val sbRomAddrOffset = log2Up(tlDataBits/8)
// --- Debug Bus Accesses
@ -513,16 +501,6 @@ class DebugModule ()(implicit val p:cde.Parameters)
val rdCondWrFailure = Wire(Bool())
val dbWrNeeded = Wire(Bool())
// --- System Bus Access
val sbAddr = Wire(UInt(width=sbAddrWidth))
val sbRdData = Wire(UInt(width=tlDataBits))
val sbWrData = Wire(UInt(width=tlDataBits))
val sbWrMask = Wire(UInt(width=tlDataBits))
val sbWrEn = Wire(Bool())
val sbRdEn = Wire(Bool())
val stallFromDb = Wire(Bool())
val stallFromSb = Wire(Bool())
//--------------------------------------------------------------
// Interrupt Registers
//--------------------------------------------------------------
@ -622,63 +600,29 @@ class DebugModule ()(implicit val p:cde.Parameters)
HALTSUMRdData.acks := haltnotSummary
//--------------------------------------------------------------
// Debug RAM Access (Debug Bus & System Bus)
// Debug RAM Access (Debug Bus ... System Bus can override)
//--------------------------------------------------------------
dbReq := io.db.req.bits
// Debug Bus RAM Access
// From Specification: Debug RAM is 0x00 - 0x0F
// 0x40 - 0x6F Not Implemented
dbRamAddr := dbReq.addr( dbRamAddrWidth-1 , 0)
dbRamAddr := dbReq.addr( ramAddrWidth-1 , 0)
dbRamWrData := dbReq.data
dbRamAddrValid := Bool(true)
if (dbRamAddrWidth < 4){
dbRamAddrValid := (dbReq.addr(3, dbRamAddrWidth) === UInt(0))
dbRamAddrValid := (dbReq.addr(3,0) <= UInt((cfg.nDebugRamBytes/ramDataBytes)))
val dbRamRdDataFields = List.tabulate(cfg.nDebugRamBytes / ramDataBytes) { ii =>
val slice = ramMem.slice(ii * ramDataBytes, (ii+1)*ramDataBytes)
slice.reduce[UInt]{ case (x: UInt, y: UInt) => Cat(y, x)}
}
sbRamAddr := sbAddr(sbRamAddrWidth + sbRamAddrOffset - 1, sbRamAddrOffset)
sbRamWrData := sbWrData
sbRamAddrValid := Bool(true)
// From Specification: Debug RAM is 0x400 - 0x4FF
if ((sbRamAddrWidth + sbRamAddrOffset) < 8){
sbRamAddrValid := (sbAddr(7, sbRamAddrWidth + sbRamAddrOffset) === UInt(0))
dbRamRdData := dbRamRdDataFields(dbRamAddr)
when (dbRamWrEnFinal) {
for (ii <- 0 until ramDataBytes) {
ramMem((dbRamAddr << UInt(dbRamDataOffset)) + UInt(ii)) := dbRamWrData((8*(ii+1)-1), (8*ii))
}
}
require (dbRamAddrWidth >= ramAddrWidth) // SB accesses less than 32 bits Not Implemented.
val dbRamWrMask = Wire(init=Vec.fill(1 << (dbRamAddrWidth - ramAddrWidth)){Fill(dbRamDataWidth, UInt(1, width=1))})
if (dbRamDataWidth < ramDataWidth){
val dbRamSel = dbRamAddr(dbRamAddrWidth - ramAddrWidth - 1 , 0)
val rdDataWords = Vec.tabulate(1 << (dbRamAddrWidth - ramAddrWidth)){ ii =>
ramRdData((ii+1)*dbRamDataWidth - 1 , ii*dbRamDataWidth)}
dbRamWrMask := Vec.fill(1 << (dbRamAddrWidth - ramAddrWidth)){UInt(0, width = dbRamDataWidth)}
dbRamWrMask(dbRamSel) := Fill(dbRamDataWidth, UInt(1, width=1))
dbRamRdData := rdDataWords(dbRamSel)
} else {
dbRamRdData := ramRdData
}
sbRamRdData := ramRdData
ramWrMask := Mux(sbRamWrEn, sbWrMask, dbRamWrMask.asUInt)
assert (!((dbRamWrEn | dbRamRdEn) & (sbRamRdEn | sbRamWrEn)), "Stall logic should have prevented concurrent SB/DB RAM Access")
// Make copies of DB RAM data before writing.
val dbRamWrDataVec = Fill(1 << (dbRamAddrWidth - ramAddrWidth), dbRamWrData)
ramWrData := Mux(sbRamWrEn,
(ramWrMask & sbRamWrData ) | (~ramWrMask & ramRdData),
(ramWrMask & dbRamWrDataVec) | (~ramWrMask & ramRdData))
ramAddr := Mux(sbRamWrEn | sbRamRdEn, sbRamAddr,
dbRamAddr >> (dbRamAddrWidth - ramAddrWidth))
ramRdData := ramMem(ramAddr)
when (ramWrEn) { ramMem(ramAddr) := ramWrData }
ramWrEn := sbRamWrEnFinal | dbRamWrEnFinal
//--------------------------------------------------------------
// Debug Bus Access
@ -813,8 +757,8 @@ class DebugModule ()(implicit val p:cde.Parameters)
// -----------------------------------------
// DB Access State Machine Decode (Combo)
io.db.req.ready := !stallFromSb && ((dbStateReg === s_DB_READY) ||
(dbStateReg === s_DB_RESP && io.db.resp.fire()))
io.db.req.ready := (dbStateReg === s_DB_READY) ||
(dbStateReg === s_DB_RESP && io.db.resp.fire())
io.db.resp.valid := (dbStateReg === s_DB_RESP)
io.db.resp.bits := dbRespReg
@ -844,172 +788,28 @@ class DebugModule ()(implicit val p:cde.Parameters)
// Debug ROM
//--------------------------------------------------------------
sbRomRdData := UInt(0)
if (cfg.hasDebugRom) {
// Inspired by ROMSlave
val romContents = cfg.debugRomContents.get
val romByteWidth = tlDataBits / 8
val romRows = (romContents.size + romByteWidth - 1)/romByteWidth
val romMem = Vec.tabulate(romRows) { ii =>
val slice = romContents.slice(ii*romByteWidth, (ii+1)*romByteWidth)
UInt(slice.foldRight(BigInt(0)) { case (x,y) => ((y << 8) + (x.toInt & 0xFF))}, width = romByteWidth*8)
}
val sbRomRdAddr = Wire(UInt())
if (romRows == 1) {
sbRomRdAddr := UInt(0)
} else {
sbRomRdAddr := sbAddr(log2Up(romRows) + sbRomAddrOffset - 1, sbRomAddrOffset)
}
sbRomRdData := romMem (sbRomRdAddr)
val romRegFields = if (cfg.hasDebugRom) {
cfg.debugRomContents.get.map( x => RegField.r(8, UInt(x.toInt & 0xFF)))
} else {
Seq(RegField(8))
}
//--------------------------------------------------------------
// System Bus Access
//--------------------------------------------------------------
// -----------------------------------------
// SB Access Write Decoder
sbRamWrEn := Bool(false)
sbRamWrEnFinal := Bool(false)
SETHALTNOTWrEn := Bool(false)
CLEARDEBINTWrEn := Bool(false)
if (tlDataBits == 32) {
SETHALTNOTWrData := sbWrData
CLEARDEBINTWrData := sbWrData
when (sbAddr(11, 8) === UInt(4)){ // 0x400-0x4ff is Debug RAM
sbRamWrEn := sbWrEn
sbRamRdEn := sbRdEn
when (sbRamAddrValid) {
sbRamWrEnFinal := sbWrEn
sbRamRdEnFinal := sbRdEn
}
}.elsewhen (sbAddr === SETHALTNOT){
SETHALTNOTWrEn := sbWrEn
}.elsewhen (sbAddr === CLEARDEBINT){
CLEARDEBINTWrEn := sbWrEn
}.otherwise {
//Other registers/RAM are Not Implemented.
}
} else {
// Pick out the correct word based on the address.
val sbWrDataWords = Vec.tabulate (tlDataBits / 32) {ii => sbWrData((ii+1)*32 - 1, ii*32)}
val sbWrMaskWords = Vec.tabulate (tlDataBits / 32) {ii => sbWrMask ((ii+1)*32 -1, ii*32)}
val sbWrSelTop = log2Up(tlDataBits/8) - 1
val sbWrSelBottom = 2
SETHALTNOTWrData := sbWrDataWords(SETHALTNOT(sbWrSelTop, sbWrSelBottom))
CLEARDEBINTWrData := sbWrDataWords(CLEARDEBINT(sbWrSelTop, sbWrSelBottom))
when (sbAddr(11,8) === UInt(4)){ //0x400-0x4ff is Debug RAM
sbRamWrEn := sbWrEn
sbRamRdEn := sbRdEn
when (sbRamAddrValid){
sbRamWrEnFinal := sbWrEn
sbRamRdEnFinal := sbRdEn
}
}
SETHALTNOTWrEn := sbAddr(sbAddrWidth - 1, sbWrSelTop + 1) === SETHALTNOT(sbAddrWidth-1, sbWrSelTop + 1) &&
(sbWrMaskWords(SETHALTNOT(sbWrSelTop, sbWrSelBottom))).orR &&
sbWrEn
CLEARDEBINTWrEn := sbAddr(sbAddrWidth - 1, sbWrSelTop + 1) === CLEARDEBINT(sbAddrWidth-1, sbWrSelTop + 1) &&
(sbWrMaskWords(CLEARDEBINT(sbWrSelTop, sbWrSelBottom))).orR &&
sbWrEn
// Local reg mapper function : Notify when written, but give the value.
def wValue (n: Int, value: UInt, set: Bool) : RegField = {
RegField(n, value, RegWriteFn((valid, data) => {set := valid ; value := data; Bool(true)}))
}
// -----------------------------------------
// SB Access Read Mux
sbRdData := UInt(0)
sbRamRdEn := Bool(false)
sbRamRdEnFinal := Bool(false)
when (sbAddr(11, 8) === UInt(4)) { //0x400-0x4FF Debug RAM
sbRamRdEn := sbRdEn
when (sbRamAddrValid) {
sbRdData := sbRamRdData
sbRamRdEnFinal := sbRdEn
}
}.elsewhen (sbAddr(11,8).isOneOf(UInt(8), UInt(9))){ //0x800-0x9FF Debug ROM
if (cfg.hasDebugRom) {
sbRdData := sbRomRdData
} else {
sbRdData := UInt(0)
}
}. otherwise {
// All readable registers are Not Implemented.
sbRdData := UInt(0)
}
// -----------------------------------------
// SB Access State Machine -- based on BRAM Slave
val sbAcqReg = Reg(io.tl.acquire.bits)
val sbAcqValidReg = Reg(init = Bool(false))
val (sbReg_get :: sbReg_getblk :: sbReg_put :: sbReg_putblk :: Nil) = Seq(
Acquire.getType, Acquire.getBlockType, Acquire.putType, Acquire.putBlockType
).map(sbAcqReg.isBuiltInType _)
val sbMultibeat = sbReg_getblk & sbAcqValidReg;
val sbBeatInc1 = sbAcqReg.addr_beat + UInt(1)
val sbLast = (sbAcqReg.addr_beat === UInt(tlDataBeats - 1))
sbAddr := sbAcqReg.full_addr()
sbRdEn := (sbAcqValidReg && (sbReg_get || sbReg_getblk))
sbWrEn := (sbAcqValidReg && (sbReg_put || sbReg_putblk))
sbWrData := sbAcqReg.data
sbWrMask := sbAcqReg.full_wmask()
// -----------------------------------------
// SB Access State Machine Update (Seq)
when (io.tl.acquire.fire()){
sbAcqReg := io.tl.acquire.bits
sbAcqValidReg := Bool(true)
} .elsewhen (io.tl.grant.fire()) {
when (sbMultibeat){
sbAcqReg.addr_beat := sbBeatInc1
when (sbLast) {
sbAcqValidReg := Bool(false)
}
} . otherwise {
sbAcqValidReg := Bool(false)
}
}
io.tl.grant.valid := sbAcqValidReg
io.tl.grant.bits := Grant(
is_builtin_type = Bool(true),
g_type = sbAcqReg.getBuiltInGrantType(),
client_xact_id = sbAcqReg.client_xact_id,
manager_xact_id = UInt(0),
addr_beat = sbAcqReg.addr_beat,
data = sbRdData
regmap(
CLEARDEBINT -> Seq(wValue(sbIdWidth, CLEARDEBINTWrData, CLEARDEBINTWrEn)),
SETHALTNOT -> Seq(wValue(sbIdWidth, SETHALTNOTWrData, SETHALTNOTWrEn)),
RAMBASE -> ramMem.map(x => RegField(8, x)),
ROMBASE -> romRegFields
)
stallFromDb := Bool(false) // SB always wins, and DB latches its read data so it is not necessary for SB to wait
stallFromSb := sbRamRdEn || sbRamWrEn // pessimistically assume that DB/SB are going to conflict on the RAM,
// and SB doesn't latch its read data, so it is necessary for DB to hold
// off while SB is accessing the RAM and waiting to send its result.
val sbStall = (sbMultibeat & !sbLast) || (io.tl.grant.valid && !io.tl.grant.ready) || stallFromDb
io.tl.acquire.ready := !sbStall
//--------------------------------------------------------------
// Misc. Outputs
//--------------------------------------------------------------
@ -1019,6 +819,21 @@ class DebugModule ()(implicit val p:cde.Parameters)
}
/** Create a concrete TL2 Slave for the DebugModule RegMapper interface.
*
*/
class TLDebugModule(address: BigInt = 0)(implicit p: Parameters)
extends TLRegisterRouter(address, beatBytes=p(rocket.XLen)/8, executable=true)(
new TLRegBundle(p, _ ) with DebugModuleBundle)(
new TLRegModule(p, _, _) with DebugModule)
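The router gives the debug module an ordinary TL2 register node, so it can hang off a bus like any other slave; a hedged sketch of attaching it, mirroring the boot ROM and test RAM attachments in this commit:
// Hypothetical attachment of the TL2 debug slave to the periphery bus.
val debugModule = LazyModule(new TLDebugModule())
debugModule.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)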
/** Synchronizers for DebugBus
*
*/
object AsyncDebugBusCrossing {
// takes from_source from the 'from' clock domain to the 'to' clock domain
def apply(from_clock: Clock, from_reset: Bool, from_source: DebugBusIO, to_clock: Clock, to_reset: Bool, depth: Int = 1, sync: Int = 3) = {
@ -1029,6 +844,7 @@ object AsyncDebugBusCrossing {
}
}
object AsyncDebugBusFrom { // OutsideClockDomain
// takes from_source from the 'from' clock domain and puts it into your clock domain
def apply(from_clock: Clock, from_reset: Bool, from_source: DebugBusIO, depth: Int = 1, sync: Int = 3): DebugBusIO = {

View File

@ -6,8 +6,11 @@ import Chisel._
import Chisel.ImplicitConversions._
import junctions._
import uncore.tilelink._
import diplomacy._
import regmapper._
import uncore.tilelink2._
import cde.Parameters
import scala.math.min
class GatewayPLICIO extends Bundle {
val valid = Bool(OUTPUT)
@ -27,161 +30,183 @@ class LevelGateway extends Module {
io.plic.valid := io.interrupt && !inFlight
}
case class PLICConfig(nHartsIn: Int, supervisor: Boolean, nDevices: Int, nPriorities: Int) {
def contextsPerHart = if (supervisor) 2 else 1
def nHarts = contextsPerHart * nHartsIn
def context(i: Int, mode: Char) = mode match {
case 'M' => i * contextsPerHart
case 'S' => require(supervisor); i * contextsPerHart + 1
}
def claimAddr(i: Int, mode: Char) = hartBase + hartOffset(context(i, mode)) + claimOffset
def threshAddr(i: Int, mode: Char) = hartBase + hartOffset(context(i, mode))
def enableAddr(i: Int, mode: Char) = enableBase + enableOffset(context(i, mode))
def size = hartBase + hartOffset(maxHarts)
object PLICConsts
{
def maxDevices = 1023
def maxHarts = 15872
def priorityBase = 0x0
def pendingBase = 0x1000
def enableBase = 0x2000
def hartBase = 0x200000
require(hartBase >= enableBase + enableOffset(maxHarts))
def enableOffset(i: Int) = i * ((maxDevices+7)/8)
def hartOffset(i: Int) = i * 0x1000
def claimOffset = 4
def priorityBytes = 4
require(nDevices <= maxDevices)
require(nHarts > 0 && nHarts <= maxHarts)
require(nPriorities >= 0 && nPriorities <= nDevices)
def enableOffset(i: Int) = i * ((maxDevices+7)/8)
def hartOffset(i: Int) = i * 0x1000
def enableBase(i: Int):Int = enableOffset(i) + enableBase
def hartBase(i: Int):Int = hartOffset(i) + hartBase
def size = hartBase(maxHarts)
require(hartBase >= enableBase(maxHarts))
}
/** Platform-Level Interrupt Controller */
class PLIC(val cfg: PLICConfig)(implicit val p: Parameters) extends Module
with HasTileLinkParameters
with HasAddrMapParameters {
val io = new Bundle {
val devices = Vec(cfg.nDevices, new GatewayPLICIO).flip
val harts = Vec(cfg.nHarts, Bool()).asOutput
val tl = new ClientUncachedTileLinkIO().flip
}
class TLPLIC(supervisor: Boolean, maxPriorities: Int, address: BigInt = 0xC000000)(implicit val p: Parameters) extends LazyModule
{
val contextsPerHart = if (supervisor) 2 else 1
require (maxPriorities >= 0)
val priority =
if (cfg.nPriorities > 0) Reg(Vec(cfg.nDevices+1, UInt(width=log2Up(cfg.nPriorities+1))))
else Wire(init=Vec.fill(cfg.nDevices+1)(UInt(1)))
val threshold =
if (cfg.nPriorities > 0) Reg(Vec(cfg.nHarts, UInt(width = log2Up(cfg.nPriorities+1))))
else Wire(init=Vec.fill(cfg.nHarts)(UInt(0)))
val pending = Reg(init=Vec.fill(cfg.nDevices+1){Bool(false)})
val enables = Reg(Vec(cfg.nHarts, Vec(cfg.nDevices+1, Bool())))
val node = TLRegisterNode(
address = AddressSet(address, PLICConsts.size-1),
beatBytes = p(rocket.XLen)/8,
undefZero = false)
for ((p, g) <- pending.tail zip io.devices) {
g.ready := !p
g.complete := false
when (g.valid) { p := true }
}
val intnode = IntAdapterNode(
numSourcePorts = 0 to 1024,
numSinkPorts = 0 to 1024,
sourceFn = { _ => IntSourcePortParameters(Seq(IntSourceParameters(contextsPerHart))) },
sinkFn = { _ => IntSinkPortParameters(Seq(IntSinkParameters())) })
def findMax(x: Seq[UInt]): (UInt, UInt) = {
if (x.length > 1) {
val half = 1 << (log2Ceil(x.length) - 1)
val lMax = findMax(x take half)
val rMax = findMax(x drop half)
val useLeft = lMax._1 >= rMax._1
(Mux(useLeft, lMax._1, rMax._1), Mux(useLeft, lMax._2, UInt(half) + rMax._2))
} else (x.head, UInt(0))
}
val maxDevs = Wire(Vec(cfg.nHarts, UInt(width = log2Up(pending.size))))
for (hart <- 0 until cfg.nHarts) {
val effectivePriority =
for (((p, en), pri) <- (pending zip enables(hart) zip priority).tail)
yield Cat(p && en, pri)
val (maxPri, maxDev) = findMax((UInt(1) << priority(0).getWidth) +: effectivePriority)
maxDevs(hart) := Reg(next = maxDev)
io.harts(hart) := Reg(next = maxPri) > Cat(UInt(1), threshold(hart))
}
val acq = Queue(io.tl.acquire, 1)
val read = acq.fire() && acq.bits.isBuiltInType(Acquire.getType)
val write = acq.fire() && acq.bits.isBuiltInType(Acquire.putType)
assert(!acq.fire() || read || write, "unsupported PLIC operation")
val addr = acq.bits.full_addr()(log2Up(cfg.size)-1,0)
val claimant =
if (cfg.nHarts == 1) UInt(0)
else (addr - cfg.hartBase)(log2Up(cfg.hartOffset(cfg.nHarts))-1,log2Up(cfg.hartOffset(1)))
val hart = Wire(init = claimant)
val myMaxDev = maxDevs(claimant)
val myEnables = enables(hart)
val rdata = Wire(init = UInt(0, tlDataBits))
val masked_wdata = (acq.bits.data & acq.bits.full_wmask()) | (rdata & ~acq.bits.full_wmask())
if (cfg.nDevices > 0) when (addr >= cfg.hartBase) {
val word =
if (tlDataBytes > cfg.claimOffset) UInt(0)
else addr(log2Up(cfg.claimOffset),log2Up(tlDataBytes))
rdata := Cat(myMaxDev, UInt(0, 8*cfg.priorityBytes-threshold(0).getWidth), threshold(claimant)) >> (word * tlDataBits)
when (read && addr(log2Ceil(cfg.claimOffset))) {
pending(myMaxDev) := false
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val tl_in = node.bundleIn
val devices = intnode.bundleIn
val harts = intnode.bundleOut
}
when (write) {
when (if (tlDataBytes > cfg.claimOffset) acq.bits.wmask()(cfg.claimOffset) else addr(log2Ceil(cfg.claimOffset))) {
val dev = (acq.bits.data >> ((8 * cfg.claimOffset) % tlDataBits))(log2Up(pending.size)-1,0)
when (myEnables(dev)) { io.devices(dev-1).complete := true }
}.otherwise {
if (cfg.nPriorities > 0) threshold(claimant) := acq.bits.data
}
// Assign all the devices unique ranges
val sources = intnode.edgesIn.map(_.source)
val flatSources = (sources zip sources.map(_.num).scanLeft(0)(_+_).init).map {
case (s, o) => s.sources.map(z => z.copy(range = z.range.offset(o)))
}.flatten
// Compact the interrupt vector the same way
val interrupts = (intnode.edgesIn zip io.devices).map { case (e, i) => i.take(e.source.num) }.flatten
// This flattens the harts into an MSMSMSMSMS... or MMMMM.... sequence
val harts = io.harts.flatten
println("\nInterrupt map:")
flatSources.foreach { s =>
// +1 because 0 is reserved, +1-1 because the range is half-open
println(s" [${s.range.start+1}, ${s.range.end}] => ${s.name}")
}
}.elsewhen (addr >= cfg.enableBase) {
val enableHart =
if (cfg.nHarts > 1) (addr - cfg.enableBase)(log2Up(cfg.enableOffset(cfg.nHarts))-1,log2Up(cfg.enableOffset(1)))
else UInt(0)
hart := enableHart
val word =
if (tlDataBits >= myEnables.size) UInt(0)
else addr(log2Ceil((myEnables.size-1)/tlDataBits+1) + tlByteAddrBits - 1, tlByteAddrBits)
for (i <- 0 until myEnables.size by tlDataBits) {
when (word === i/tlDataBits) {
rdata := Cat(myEnables.slice(i, i + tlDataBits).reverse)
for (j <- 0 until (tlDataBits min (myEnables.size - i))) {
when (write) { enables(enableHart)(i+j) := masked_wdata(j) }
}
}
val nDevices = interrupts.size
val nPriorities = min(maxPriorities, nDevices)
val nHarts = harts.size
require(nDevices <= PLICConsts.maxDevices)
require(nHarts > 0 && nHarts <= PLICConsts.maxHarts)
def context(i: Int, mode: Char) = mode match {
case 'M' => i * contextsPerHart
case 'S' => require(supervisor); i * contextsPerHart + 1
}
}.elsewhen (addr >= cfg.pendingBase) {
val word =
if (tlDataBytes >= pending.size) UInt(0)
else addr(log2Up(pending.size)-1,log2Up(tlDataBytes))
rdata := pending.asUInt >> (word * tlDataBits)
}.otherwise {
val regsPerBeat = tlDataBytes >> log2Up(cfg.priorityBytes)
val word =
if (regsPerBeat >= priority.size) UInt(0)
else addr(log2Up(priority.size*cfg.priorityBytes)-1,log2Up(tlDataBytes))
for (i <- 0 until priority.size by regsPerBeat) {
when (word === i/regsPerBeat) {
rdata := Cat(priority.slice(i, i + regsPerBeat).map(p => Cat(UInt(0, 8*cfg.priorityBytes-p.getWidth), p)).reverse)
for (j <- 0 until (regsPerBeat min (priority.size - i))) {
if (cfg.nPriorities > 0) when (write) { priority(i+j) := masked_wdata >> (j * 8 * cfg.priorityBytes) }
}
}
def claimAddr(i: Int, mode: Char) = address + PLICConsts.hartBase(context(i, mode)) + PLICConsts.claimOffset
def threshAddr(i: Int, mode: Char) = address + PLICConsts.hartBase(context(i, mode))
def enableAddr(i: Int, mode: Char) = address + PLICConsts.enableBase(context(i, mode))
// Create the global PLIC config string
val globalConfigString = Seq(
s"plic {\n",
s" priority 0x${address.toString(16)};\n",
s" pending 0x${(address + PLICConsts.pendingBase).toString(16)};\n",
s" ndevs ${nDevices};\n",
s"};\n").mkString
// Create the per-Hart config string
val hartConfigStrings = io.harts.zipWithIndex.map { case (_, i) => (Seq(
s" plic {\n",
s" m {\n",
s" ie 0x${enableAddr(i, 'M').toString(16)};\n",
s" thresh 0x${threshAddr(i, 'M').toString(16)};\n",
s" claim 0x${claimAddr(i, 'M').toString(16)};\n",
s" };\n") ++ (if (!supervisor) Seq() else Seq(
s" s {\n",
s" ie 0x${enableAddr(i, 'S').toString(16)};\n",
s" thresh 0x${threshAddr(i, 'S').toString(16)};\n",
s" claim 0x${claimAddr(i, 'S').toString(16)};\n",
s" };\n")) ++ Seq(
s" };\n")).mkString
}
// For now, use LevelGateways for all TL2 interrupts
val gateways = Vec(interrupts.map { case i =>
val gateway = Module(new LevelGateway)
gateway.io.interrupt := i
gateway.io.plic
})
val priority =
if (nPriorities > 0) Reg(Vec(nDevices+1, UInt(width=log2Up(nPriorities+1))))
else Wire(init=Vec.fill(nDevices+1)(UInt(1)))
val threshold =
if (nPriorities > 0) Reg(Vec(nHarts, UInt(width = log2Up(nPriorities+1))))
else Wire(init=Vec.fill(nHarts)(UInt(0)))
val pending = Reg(init=Vec.fill(nDevices+1){Bool(false)})
val enables = Reg(Vec(nHarts, Vec(nDevices+1, Bool())))
for ((p, g) <- pending.tail zip gateways) {
g.ready := !p
g.complete := false
when (g.valid) { p := true }
}
def findMax(x: Seq[UInt]): (UInt, UInt) = {
if (x.length > 1) {
val half = 1 << (log2Ceil(x.length) - 1)
val lMax = findMax(x take half)
val rMax = findMax(x drop half)
val useLeft = lMax._1 >= rMax._1
(Mux(useLeft, lMax._1, rMax._1), Mux(useLeft, lMax._2, UInt(half) | rMax._2))
} else (x.head, UInt(0))
}
val maxDevs = Reg(Vec(nHarts, UInt(width = log2Up(pending.size))))
for (hart <- 0 until nHarts) {
val effectivePriority =
for (((p, en), pri) <- (pending zip enables(hart) zip priority).tail)
yield Cat(p && en, pri)
val (maxPri, maxDev) = findMax((UInt(1) << priority(0).getWidth) +: effectivePriority)
maxDevs(hart) := maxDev
harts(hart) := Reg(next = maxPri) > Cat(UInt(1), threshold(hart))
}
def priorityRegField(x: UInt) = if (nPriorities > 0) RegField(32, x) else RegField.r(32, x)
val priorityRegFields = Seq(PLICConsts.priorityBase -> priority.map(p => priorityRegField(p)))
val pendingRegFields = Seq(PLICConsts.pendingBase -> pending .map(b => RegField.r(1, b)))
val enableRegFields = enables.zipWithIndex.map { case (e, i) =>
PLICConsts.enableBase(i) -> e.map(b => RegField(1, b))
}
val hartRegFields = Seq.tabulate(nHarts) { i =>
PLICConsts.hartBase(i) -> Seq(
priorityRegField(threshold(i)),
RegField(32,
RegReadFn { valid =>
when (valid) {
pending(maxDevs(i)) := Bool(false)
maxDevs(i) := UInt(0) // flush pipeline
}
(Bool(true), maxDevs(i))
},
RegWriteFn { (valid, data) =>
when (valid && enables(i)(data)) {
gateways(data - UInt(1)).complete := Bool(true)
}
Bool(true)
}
)
)
}
node.regmap((priorityRegFields ++ pendingRegFields ++ enableRegFields ++ hartRegFields):_*)
priority(0) := 0
pending(0) := false
for (e <- enables)
e(0) := false
}
priority(0) := 0
pending(0) := false
for (e <- enables)
e(0) := false
io.tl.grant.valid := acq.valid
acq.ready := io.tl.grant.ready
io.tl.grant.bits := Grant(
is_builtin_type = Bool(true),
g_type = acq.bits.getBuiltInGrantType(),
client_xact_id = acq.bits.client_xact_id,
manager_xact_id = UInt(0),
addr_beat = UInt(0),
data = rdata)
}
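Since the PLIC is now a LazyModule with a register node and an interrupt adapter node, it is instantiated and wired diplomatically; a hedged sketch (the bus attachment mirrors others in this commit, the priority count is illustrative, and intBus is assumed to be the interrupt crossbar seen earlier):
// Hypothetical instantiation: TL2 register slave plus interrupt fan-in.
val plic = LazyModule(new TLPLIC(supervisor = true, maxPriorities = 7))
plic.node    := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
plic.intnode := intBus.intnode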

View File

@ -6,6 +6,7 @@ import Chisel._
import junctions._
import junctions.NastiConstants._
import regmapper._
import diplomacy._
import uncore.tilelink2._
import uncore.util._
import util._
@ -20,22 +21,19 @@ class CoreplexLocalInterrupts extends Bundle {
val msip = Bool()
}
case class CoreplexLocalInterrupterConfig(beatBytes: Int, address: BigInt = 0x02000000) {
object ClintConsts
{
def msipOffset(hart: Int) = hart * msipBytes
def msipAddress(hart: Int) = address + msipOffset(hart)
def timecmpOffset(hart: Int) = 0x4000 + hart * timecmpBytes
def timecmpAddress(hart: Int) = address + timecmpOffset(hart)
def timeOffset = 0xbff8
def timeAddress = address + timeOffset
def msipBytes = 4
def timecmpBytes = 8
def size = 0x10000
}
trait MixCoreplexLocalInterrupterParameters {
val params: (CoreplexLocalInterrupterConfig, Parameters)
val c = params._1
implicit val p = params._2
val params: Parameters
implicit val p = params
}
trait CoreplexLocalInterrupterBundle extends Bundle with MixCoreplexLocalInterrupterParameters {
@ -45,11 +43,10 @@ trait CoreplexLocalInterrupterBundle extends Bundle with MixCoreplexLocalInterru
trait CoreplexLocalInterrupterModule extends Module with HasRegMap with MixCoreplexLocalInterrupterParameters {
val io: CoreplexLocalInterrupterBundle
val address: AddressSet
val timeWidth = 64
val regWidth = 32
// demand atomic accesses for RV64
require(c.beatBytes >= (p(rocket.XLen) min timeWidth)/8)
val time = Seq.fill(timeWidth/regWidth)(Reg(init=UInt(0, width = regWidth)))
when (io.rtcTick) {
@ -66,6 +63,15 @@ trait CoreplexLocalInterrupterModule extends Module with HasRegMap with MixCorep
tile.mtip := time.asUInt >= timecmp(i).asUInt
}
val globalConfigString = Seq(
s"rtc {\n",
s" addr 0x${(address.base + ClintConsts.timeOffset).toString(16)};\n",
s"};\n").mkString
val hartConfigStrings = (0 until p(NTiles)).map { i => Seq(
s" timecmp 0x${(address.base + ClintConsts.timecmpOffset(i)).toString(16)};\n",
s" ipi 0x${(address.base + ClintConsts.msipOffset(i)).toString(16)};\n").mkString
}
/* 0000 msip hart 0
* 0004 msip hart 1
* 4000 mtimecmp hart 0 lo
@ -77,16 +83,16 @@ trait CoreplexLocalInterrupterModule extends Module with HasRegMap with MixCorep
*/
regmap(
0 -> makeRegFields(ipi),
c.timecmpOffset(0) -> makeRegFields(timecmp.flatten),
c.timeOffset -> makeRegFields(time))
0 -> makeRegFields(ipi),
ClintConsts.timecmpOffset(0) -> makeRegFields(timecmp.flatten),
ClintConsts.timeOffset -> makeRegFields(time))
def makeRegFields(s: Seq[UInt]) = s.map(r => RegField(regWidth, r))
}
/** Power, Reset, Clock, Interrupt */
// Magic TL2 Incantation to create a TL2 Slave
class CoreplexLocalInterrupter(c: CoreplexLocalInterrupterConfig)(implicit val p: Parameters)
extends TLRegisterRouter(c.address, 0, c.size, 0, c.beatBytes, false)(
new TLRegBundle((c, p), _) with CoreplexLocalInterrupterBundle)(
new TLRegModule((c, p), _, _) with CoreplexLocalInterrupterModule)
class CoreplexLocalInterrupter(address: BigInt = 0x02000000)(implicit val p: Parameters)
extends TLRegisterRouter(address, size = ClintConsts.size, beatBytes = p(rocket.XLen)/8, undefZero = false)(
new TLRegBundle(p, _) with CoreplexLocalInterrupterBundle)(
new TLRegModule(p, _, _) with CoreplexLocalInterrupterModule)
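
For reference, a worked example of the address arithmetic implied by the ClintConsts offsets above. This is illustrative only and not part of this patch; it simply evaluates the msip/timecmp/time offsets against the default 0x02000000 base.

object ClintAddressExample {
  val base = BigInt(0x02000000)
  def msipAddress(hart: Int)    = base + hart * 4          // 0x02000000, 0x02000004, ...
  def timecmpAddress(hart: Int) = base + 0x4000 + hart * 8 // 0x02004000, 0x02004008, ...
  def timeAddress               = base + 0xbff8            // 0x0200bff8

  def main(args: Array[String]): Unit = {
    println("msip hart 1:     0x" + msipAddress(1).toString(16))    // 2000004
    println("mtimecmp hart 1: 0x" + timecmpAddress(1).toString(16)) // 2004008
    println("mtime:           0x" + timeAddress.toString(16))       // 200bff8
  }
}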

View File

@ -94,7 +94,7 @@ class TLAtomicAutomata(logical: Boolean = true, arithmetic: Boolean = true, conc
val cam_free = cam_s.map(_.state === FREE)
val cam_amo = cam_s.map(_.state === AMO)
val cam_abusy = cam_s.map(e => e.state === GET || e.state === AMO) // A is blocked
val cam_dmatch = cam_s.map(e => e.state === GET || e.state === ACK) // D should inspect these entries
val cam_dmatch = cam_s.map(e => e.state =/= FREE) // D should inspect these entries
// Can the manager already handle this message?
val a_size = edgeIn.size(in.a.bits)
@ -211,7 +211,7 @@ class TLAtomicAutomata(logical: Boolean = true, arithmetic: Boolean = true, conc
val d_cam_sel_match = (d_cam_sel_raw zip cam_dmatch) map { case (a,b) => a&&b }
val d_cam_data = Mux1H(d_cam_sel_match, cam_d.map(_.data))
val d_cam_sel_bypass = if (edgeOut.manager.minLatency > 0) Bool(false) else
out.d.bits.source === in.a.bits.source && in.a.valid && out.d.valid && !a_isSupported
out.d.bits.source === in.a.bits.source && in.a.valid && !a_isSupported
val d_cam_sel = (a_cam_sel_free zip d_cam_sel_match) map { case (a,d) => Mux(d_cam_sel_bypass, a, d) }
val d_cam_sel_any = d_cam_sel_bypass || d_cam_sel_match.reduce(_ || _)
val d_ackd = out.d.bits.opcode === TLMessages.AccessAckData

View File

@ -2,6 +2,7 @@
package uncore.tilelink2
import scala.math.min
import Chisel._
import chisel3.internal.sourceinfo.SourceInfo
import diplomacy._
@ -9,10 +10,9 @@ import diplomacy._
// Acks Hints for managers that don't support them or Acks all Hints if !passthrough
class TLHintHandler(supportManagers: Boolean = true, supportClients: Boolean = false, passthrough: Boolean = true) extends LazyModule
{
// HintAcks can come back combinationally => minLatency=0
val node = TLAdapterNode(
clientFn = { case Seq(c) => if (!supportClients) c else c.copy(minLatency = 0, clients = c.clients .map(_.copy(supportsHint = TransferSizes(1, c.maxTransfer)))) },
managerFn = { case Seq(m) => if (!supportManagers) m else m.copy(minLatency = 0, managers = m.managers.map(_.copy(supportsHint = TransferSizes(1, m.maxTransfer)))) })
clientFn = { case Seq(c) => if (!supportClients) c else c.copy(minLatency = min(1, c.minLatency), clients = c.clients .map(_.copy(supportsHint = TransferSizes(1, c.maxTransfer)))) },
managerFn = { case Seq(m) => if (!supportManagers) m else m.copy(minLatency = min(1, m.minLatency), managers = m.managers.map(_.copy(supportsHint = TransferSizes(1, m.maxTransfer)))) })
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
@ -46,7 +46,7 @@ class TLHintHandler(supportManagers: Boolean = true, supportClients: Boolean = f
hint.bits := edgeIn.HintAck(in.a.bits, edgeOut.manager.findIdStartFast(address))
out.a.bits := in.a.bits
TLArbiter(TLArbiter.lowestIndexFirst)(in.d, (edgeOut.numBeats1(out.d.bits), out.d), (UInt(0), hint))
TLArbiter(TLArbiter.lowestIndexFirst)(in.d, (edgeOut.numBeats1(out.d.bits), out.d), (UInt(0), Queue(hint, 1)))
} else {
out.a.valid := in.a.valid
in.a.ready := out.a.ready
@ -69,7 +69,7 @@ class TLHintHandler(supportManagers: Boolean = true, supportClients: Boolean = f
hint.bits := edgeOut.HintAck(out.b.bits)
in.b.bits := out.b.bits
TLArbiter(TLArbiter.lowestIndexFirst)(out.c, (edgeIn.numBeats1(in.c.bits), in.c), (UInt(0), hint))
TLArbiter(TLArbiter.lowestIndexFirst)(out.c, (edgeIn.numBeats1(in.c.bits), in.c), (UInt(0), Queue(hint, 1)))
} else if (bce) {
in.b.valid := out.b.valid
out.b.ready := in.b.ready

View File

@ -58,10 +58,13 @@ object IntImp extends NodeImp[IntSourcePortParameters, IntSinkPortParameters, In
}
def bundleI(ei: Seq[IntEdge]): Vec[Vec[Bool]] = {
require (!ei.isEmpty)
Vec(ei.size, Vec(ei.map(_.source.num).max, Bool())).flip
Vec(ei.size, Vec(ei.map(_.source.num).max, Bool()))
}
def colour = "#0000ff" // blue
override def labelI(ei: IntEdge) = ei.source.sources.map(_.range.size).sum.toString
override def labelO(eo: IntEdge) = eo.source.sources.map(_.range.size).sum.toString
def connect(bo: => Vec[Bool], bi: => Vec[Bool], ei: => IntEdge)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
(None, () => {
// Cannot use bulk connect, because the widths could differ
@ -76,9 +79,6 @@ object IntImp extends NodeImp[IntSourcePortParameters, IntSinkPortParameters, In
}
case class IntIdentityNode() extends IdentityNode(IntImp)
case class IntOutputNode() extends OutputNode(IntImp)
case class IntInputNode() extends InputNode(IntImp)
case class IntSourceNode(num: Int) extends SourceNode(IntImp)(
IntSourcePortParameters(Seq(IntSourceParameters(num))), (if (num == 0) 0 else 1) to 1)
case class IntSinkNode() extends SinkNode(IntImp)(
@ -91,11 +91,20 @@ case class IntAdapterNode(
numSinkPorts: Range.Inclusive = 1 to 1)
extends InteriorNode(IntImp)(sourceFn, sinkFn, numSourcePorts, numSinkPorts)
case class IntOutputNode() extends OutputNode(IntImp)
case class IntInputNode() extends InputNode(IntImp)
case class IntBlindOutputNode() extends BlindOutputNode(IntImp)(IntSinkPortParameters(Seq(IntSinkParameters())))
case class IntBlindInputNode(num: Int) extends BlindInputNode(IntImp)(IntSourcePortParameters(Seq(IntSourceParameters(num))))
case class IntInternalOutputNode() extends InternalOutputNode(IntImp)(IntSinkPortParameters(Seq(IntSinkParameters())))
case class IntInternalInputNode(num: Int) extends InternalInputNode(IntImp)(IntSourcePortParameters(Seq(IntSourceParameters(num))))
class IntXbar extends LazyModule
{
val intnode = IntAdapterNode(
numSourcePorts = 1 to 1, // does it make sense to have more than one interrupt sink?
numSinkPorts = 1 to 128,
numSinkPorts = 0 to 128,
sinkFn = { _ => IntSinkPortParameters(Seq(IntSinkParameters())) },
sourceFn = { seq =>
IntSourcePortParameters((seq zip seq.map(_.num).scanLeft(0)(_+_).init).map {
@ -113,3 +122,17 @@ class IntXbar extends LazyModule
io.out.foreach { _ := cat }
}
}
class IntXing extends LazyModule
{
val intnode = IntIdentityNode()
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val in = intnode.bundleIn
val out = intnode.bundleOut
}
io.out := RegNext(RegNext(RegNext(io.in)))
}
}
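
The triple RegNext in IntXing above is a plain three-stage synchronizer. Below is a standalone sketch of the same behaviour; the wrapper class and port names are illustrative and not part of this patch (in the diplomatic graph one would instead place a LazyModule(new IntXing) between an interrupt source and its sink).

import Chisel._

// Three register stages per interrupt wire, i.e. a conventional synchronizer
// for bringing an interrupt into the receiving clock domain.
class IntSyncExample(n: Int) extends Module {
  val io = new Bundle {
    val in  = Vec(n, Bool()).asInput
    val out = Vec(n, Bool()).asOutput
  }
  io.out := RegNext(RegNext(RegNext(io.in)))
}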

View File

@ -38,11 +38,9 @@ class TLLegacy(implicit val p: Parameters) extends LazyModule with HasTileLinkPa
require (m.supportsPutPartial.contains(TransferSizes(1, tlDataBytes)))
require (m.supportsPutPartial.contains(TransferSizes(tlDataBeats * tlDataBytes)))
}
// Any atomic support => must support 32-bit up to beat size of all types
if (m.supportsArithmetic || m.supportsLogical) {
require (m.supportsArithmetic.contains(TransferSizes(4, tlDataBytes)))
require (m.supportsLogical .contains(TransferSizes(4, tlDataBytes)))
}
// Any atomic support => must support 32-bit size
if (m.supportsArithmetic) { require (m.supportsArithmetic.contains(TransferSizes(4))) }
if (m.supportsLogical) { require (m.supportsLogical .contains(TransferSizes(4))) }
// We straight-up require hints
require (edge.manager.allSupportHint)
}
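
For context on the relaxed requirement above, a hedged sketch of TransferSizes containment. It assumes TransferSizes is importable from uncore.tilelink2 and that contains(x: TransferSizes) means x's whole size range is covered; the object name is illustrative.

import uncore.tilelink2.TransferSizes

object TransferSizesExample {
  val beat = TransferSizes(4, 16) // every power-of-two size from 4 to 16 bytes
  val word = TransferSizes(4)     // exactly 4-byte transfers

  def main(args: Array[String]): Unit = {
    println(beat.contains(word)) // true:  a manager covering 4..16 covers 4
    println(word.contains(beat)) // false: supporting only 4-byte ops does not cover 4..16
  }
}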

View File

@ -17,7 +17,7 @@ object TLImp extends NodeImp[TLClientPortParameters, TLManagerPortParameters, TL
}
def bundleI(ei: Seq[TLEdgeIn]): Vec[TLBundle] = {
require (!ei.isEmpty)
Vec(ei.size, TLBundle(ei.map(_.bundle).reduce(_.union(_)))).flip
Vec(ei.size, TLBundle(ei.map(_.bundle).reduce(_.union(_))))
}
var emitMonitors = true
@ -25,6 +25,9 @@ object TLImp extends NodeImp[TLClientPortParameters, TLManagerPortParameters, TL
var combinationalCheck = false
def colour = "#000000" // black
override def labelI(ei: TLEdgeIn) = (ei.manager.beatBytes * 8).toString
override def labelO(eo: TLEdgeOut) = (eo.manager.beatBytes * 8).toString
def connect(bo: => TLBundle, bi: => TLBundle, ei: => TLEdgeIn)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
val monitor = if (emitMonitors) {
Some(LazyModule(new TLMonitor(() => new TLBundleSnoop(bo.params), () => ei, sourceInfo)))
@ -89,10 +92,8 @@ object TLImp extends NodeImp[TLClientPortParameters, TLManagerPortParameters, TL
}
}
// Nodes implemented inside modules
case class TLIdentityNode() extends IdentityNode(TLImp)
case class TLOutputNode() extends OutputNode(TLImp)
case class TLInputNode() extends InputNode(TLImp)
case class TLClientNode(portParams: TLClientPortParameters, numPorts: Range.Inclusive = 1 to 1)
extends SourceNode(TLImp)(portParams, numPorts)
case class TLManagerNode(portParams: TLManagerPortParameters, numPorts: Range.Inclusive = 1 to 1)
@ -117,6 +118,14 @@ case class TLAdapterNode(
numManagerPorts: Range.Inclusive = 1 to 1)
extends InteriorNode(TLImp)(clientFn, managerFn, numClientPorts, numManagerPorts)
// Nodes passed from an inner module
case class TLOutputNode() extends OutputNode(TLImp)
case class TLInputNode() extends InputNode(TLImp)
// Nodes used for external ports
case class TLBlindOutputNode(portParams: TLManagerPortParameters) extends BlindOutputNode(TLImp)(portParams)
case class TLBlindInputNode(portParams: TLClientPortParameters) extends BlindInputNode(TLImp)(portParams)
/** Synthesizeable unit tests */
import unittest._
@ -149,10 +158,13 @@ object TLAsyncImp extends NodeImp[TLAsyncClientPortParameters, TLAsyncManagerPor
}
def bundleI(ei: Seq[TLAsyncEdgeParameters]): Vec[TLAsyncBundle] = {
require (ei.size == 1)
Vec(ei.size, new TLAsyncBundle(ei(0).bundle)).flip
Vec(ei.size, new TLAsyncBundle(ei(0).bundle))
}
def colour = "#ff0000" // red
override def labelI(ei: TLAsyncEdgeParameters) = ei.manager.depth.toString
override def labelO(eo: TLAsyncEdgeParameters) = eo.manager.depth.toString
def connect(bo: => TLAsyncBundle, bi: => TLAsyncBundle, ei: => TLAsyncEdgeParameters)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
(None, () => { bi <> bo })
}

View File

@ -7,10 +7,11 @@ import diplomacy._
import regmapper._
import scala.math.{min,max}
class TLRegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true)
class TLRegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
extends TLManagerNode(TLManagerPortParameters(
Seq(TLManagerParameters(
address = Seq(address),
executable = executable,
supportsGet = TransferSizes(1, beatBytes),
supportsPutPartial = TransferSizes(1, beatBytes),
supportsPutFull = TransferSizes(1, beatBytes),
@ -70,17 +71,17 @@ class TLRegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int =
object TLRegisterNode
{
def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true) =
new TLRegisterNode(address, concurrency, beatBytes, undefZero)
def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false) =
new TLRegisterNode(address, concurrency, beatBytes, undefZero, executable)
}
// These convenience methods below combine to make it possible to create a TL2
// register mapped device from a totally abstract register mapped device.
// See GPIO.scala in this directory for an example
abstract class TLRegisterRouterBase(address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean) extends LazyModule
abstract class TLRegisterRouterBase(val address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean, executable: Boolean) extends LazyModule
{
val node = TLRegisterNode(address, concurrency, beatBytes, undefZero)
val node = TLRegisterNode(address, concurrency, beatBytes, undefZero, executable)
val intnode = IntSourceNode(interrupts)
}
@ -99,14 +100,15 @@ class TLRegModule[P, B <: TLRegBundleBase](val params: P, bundleBuilder: => B, r
{
val io = bundleBuilder
val interrupts = if (io.interrupts.isEmpty) Vec(0, Bool()) else io.interrupts(0)
val address = router.address
def regmap(mapping: RegField.Map*) = router.node.regmap(mapping:_*)
}
class TLRegisterRouter[B <: TLRegBundleBase, M <: LazyModuleImp]
(val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true)
(val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
(bundleBuilder: TLRegBundleArg => B)
(moduleBuilder: (=> B, TLRegisterRouterBase) => M)
extends TLRegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero)
extends TLRegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero, executable)
{
require (isPow2(size))
// require (size >= 4096) ... not absolutely required, but highly recommended
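
A hedged sketch of the pattern the comment above refers to, with hypothetical device names that are not part of this patch: an abstract register device (a bundle trait plus a module trait with HasRegMap) wrapped into a TL2 slave by TLRegisterRouter, in the same shape as CoreplexLocalInterrupter earlier in this commit.

import Chisel._
import regmapper._
import uncore.tilelink2._

case class ScratchParams(address: BigInt, beatBytes: Int = 4)

trait ScratchBundle extends Bundle {
  val params: ScratchParams
  val busy = Bool(OUTPUT)            // device-specific wires go here
}

trait ScratchModule extends Module with HasRegMap {
  val params: ScratchParams
  val io: ScratchBundle

  val scratch = Reg(init = UInt(0, width = 32))
  io.busy := scratch =/= UInt(0)

  // A single 32-bit read/write register at offset 0 of the device.
  regmap(0 -> Seq(RegField(32, scratch)))
}

class TLScratch(c: ScratchParams)
  extends TLRegisterRouter(c.address, beatBytes = c.beatBytes)(
  new TLRegBundle(c, _) with ScratchBundle)(
  new TLRegModule(c, _, _) with ScratchModule)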

View File

@ -5,9 +5,11 @@ import diplomacy._
package object tilelink2
{
type TLInwardNode = InwardNodeHandle[TLClientPortParameters, TLManagerPortParameters, TLBundle]
type TLOutwardNode = OutwardNodeHandle[TLClientPortParameters, TLManagerPortParameters, TLBundle]
type TLAsyncOutwardNode = OutwardNodeHandle[TLAsyncClientPortParameters, TLAsyncManagerPortParameters, TLAsyncBundle]
type IntOutwardNode = OutwardNodeHandle[IntSourcePortParameters, IntSinkPortParameters, Vec[Bool]]
def OH1ToOH(x: UInt) = (x << 1 | UInt(1)) & ~Cat(UInt(0, width=1), x)
def OH1ToUInt(x: UInt) = OHToUInt(OH1ToOH(x))
def UIntToOH1(x: UInt, width: Int) = ~(SInt(-1, width=width).asUInt << x)(width-1, 0)
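
A minimal sketch of the OH1 ("one-hot minus one", i.e. trailing-ones mask) encoding that these three helpers convert between; the wrapper module and port names below are illustrative only.

import Chisel._
import uncore.tilelink2._   // brings the package-object helpers into scope

// For x = 3 with width 8:
//   UIntToOH1(x, 8)  -> b00000111  (three low bits set)
//   OH1ToOH of that  -> b00001000  (the ordinary one-hot for 3)
//   OH1ToUInt of it  -> 3
class OH1Example extends Module {
  val io = new Bundle {
    val size = UInt(INPUT, 3)
    val mask = UInt(OUTPUT, 8)
    val back = UInt(OUTPUT, 4)
  }
  io.mask := UIntToOH1(io.size, 8)
  io.back := OH1ToUInt(io.mask)   // recovers io.size
}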

View File

@ -5,6 +5,7 @@ package unittest
object Generator extends util.GeneratorApp {
val longName = names.topModuleProject + "." + names.configs
generateFirrtl
generateGraphML
generateTestSuiteMakefrags // TODO: Needed only for legacy make targets
generateParameterDump // TODO: Needed only for legacy make targets
}