Merge pull request #424 from ucb-bar/coreplex-cake

Coreplex cake
Wesley W. Terpstra 2016-10-31 16:49:27 -07:00 committed by GitHub
commit 1b016051e8
33 changed files with 1101 additions and 1140 deletions
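This merge restructures the coreplex and top level around a "cake pattern": each layer is split into a LazyModule half, a Bundle half, and a LazyModuleImp half, and features are added by mixing matching traits into all three. A schematic sketch of the shape only, assuming the Bare* classes introduced below; the Extra* trait names and MyCoreplex are hypothetical placeholders, not part of this commit:

// Schematic sketch of the cake pattern; Extra*/MyCoreplex names are illustrative only.
trait ExtraNetwork { this: BareCoreplex => /* LazyModules and diplomacy nodes go here */ }
trait ExtraBundle  { /* extra IO ports, mixed into the Bundle layer */ }
trait ExtraModule  { /* wiring, mixed into the LazyModuleImp layer */ }

class MyCoreplex(implicit p: Parameters) extends BareCoreplex with ExtraNetwork {
  override lazy val module = new MyCoreplexModule(this, () => new MyCoreplexBundle(this))
}
class MyCoreplexBundle[+L <: MyCoreplex](_outer: L) extends BareCoreplexBundle(_outer) with ExtraBundle
class MyCoreplexModule[+L <: MyCoreplex, +B <: MyCoreplexBundle[L]](_outer: L, _io: () => B)
  extends BareCoreplexModule(_outer, _io) with ExtraModule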

View File

@ -23,7 +23,7 @@ case object BankIdLSB extends Field[Int]
/** Function for building some kind of coherence manager agent */
case object BuildL2CoherenceManager extends Field[(Int, Parameters) => CoherenceAgent]
/** Function for building some kind of tile connected to a reset signal */
case object BuildTiles extends Field[Seq[(Bool, Parameters) => Tile]]
case object BuildTiles extends Field[Seq[Parameters => LazyTile]]
/** The file to read the BootROM contents from */
case object BootROMFile extends Field[String]
@ -35,50 +35,137 @@ trait HasCoreplexParameters {
  lazy val outerMemParams = p.alterPartial({ case TLId => "L2toMC" })
  lazy val outerMMIOParams = p.alterPartial({ case TLId => "L2toMMIO" })
  lazy val globalAddrMap = p(rocketchip.GlobalAddrMap)
  lazy val nTiles = p(uncore.devices.NTiles)
  lazy val nSlaves = p(rocketchip.NCoreplexExtClients)
  lazy val nMemChannels = p(NMemoryChannels)
  lazy val hasSupervisor = p(rocket.UseVM)
}
case class CoreplexConfig(
    nTiles: Int,
    nExtInterrupts: Int,
    nSlaves: Int,
    nMemChannels: Int,
    hasSupervisor: Boolean)
{
  val nInterruptPriorities = if (nExtInterrupts <= 1) 0 else (nExtInterrupts min 7)
  val plicKey = PLICConfig(nTiles, hasSupervisor, nExtInterrupts, nInterruptPriorities)
}

abstract class BaseCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends LazyModule

abstract class BaseCoreplexBundle(val c: CoreplexConfig)(implicit val p: Parameters) extends Bundle with HasCoreplexParameters {
  val master = new Bundle {
    val mem = Vec(c.nMemChannels, new ClientUncachedTileLinkIO()(outerMemParams))
    val mmio = new ClientUncachedTileLinkIO()(outerMMIOParams)
  }
  val slave = Vec(c.nSlaves, new ClientUncachedTileLinkIO()(innerParams)).flip
  val interrupts = Vec(c.nExtInterrupts, Bool()).asInput
  val debug = new DebugBusIO()(p).flip
  val clint = Vec(c.nTiles, new CoreplexLocalInterrupts).asInput
  val resetVector = UInt(INPUT, p(XLen))
  val success = Bool(OUTPUT) // used for testing

  override def cloneType = this.getClass.getConstructors.head.newInstance(c, p).asInstanceOf[this.type]
}

case class CoreplexParameters(implicit val p: Parameters) extends HasCoreplexParameters

abstract class BareCoreplex(implicit val p: Parameters) extends LazyModule

abstract class BareCoreplexBundle[+L <: BareCoreplex](_outer: L) extends Bundle {
  val outer = _outer
}

abstract class BareCoreplexModule[+L <: BareCoreplex, +B <: BareCoreplexBundle[L]](_outer: L, _io: () => B) extends LazyModuleImp(_outer) {
  val outer = _outer
  val io = _io ()
}

trait CoreplexNetwork extends HasCoreplexParameters {
  this: BareCoreplex =>

  val l1tol2 = LazyModule(new TLXbar)
  val l1tol2_beatBytes = p(TLKey("L2toMMIO")).dataBitsPerBeat/8
  val l1tol2_lineBytes = p(CacheBlockBytes)

  val cbus = LazyModule(new TLXbar)
  val cbus_beatBytes = p(XLen)/8
  val cbus_lineBytes = l1tol2_lineBytes

  val mmio = TLOutputNode()
  val mmioInt = IntInputNode()

  cbus.node :=
    TLAtomicAutomata(arithmetic = true)( // disable once TLB uses TL2 metadata
    TLWidthWidget(l1tol2_beatBytes)(
    TLBuffer()(
    l1tol2.node)))

  mmio :=
    TLBuffer()(
    TLWidthWidget(l1tol2_beatBytes)(
    l1tol2.node))
}
abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
    c: CoreplexConfig, l: L, b: => B)(implicit val p: Parameters) extends LazyModuleImp(l) with HasCoreplexParameters {
  val outer: L = l
  val io: B = b

  // Build a set of Tiles
  val tiles = p(BuildTiles) map { _(reset, p) }

trait CoreplexNetworkBundle extends HasCoreplexParameters {
  this: {
    val outer: CoreplexNetwork
  } =>

  implicit val p = outer.p
  val mmio = outer.mmio.bundleOut
  val interrupts = outer.mmioInt.bundleIn
}

trait CoreplexNetworkModule extends HasCoreplexParameters {
  this: BareCoreplexModule[BareCoreplex, BareCoreplexBundle[BareCoreplex]] =>
  implicit val p = outer.p
}

trait CoreplexRISCVPlatform {
  this: CoreplexNetwork =>

  // Build a set of Tiles
  val lazyTiles = p(BuildTiles) map { _(p) }
val legacy = LazyModule(new TLLegacy()(outerMMIOParams))
val tileIntNodes = lazyTiles.map { _ => IntInternalOutputNode() } // this should be moved into the Tile...
val debug = LazyModule(new TLDebugModule())
val plic = LazyModule(new TLPLIC(hasSupervisor, maxPriorities = 7))
val clint = LazyModule(new CoreplexLocalInterrupter)
// Kill this once we move TL2 into rocket
l1tol2.node :=
TLHintHandler()(
legacy.node)
debug.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
plic.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
clint.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
plic.intnode := mmioInt
tileIntNodes.foreach { _ := plic.intnode }
}
trait CoreplexRISCVPlatformBundle {
this: CoreplexNetworkBundle {
val outer: CoreplexRISCVPlatform
} =>
val mem = Vec(nMemChannels, new ClientUncachedTileLinkIO()(outerMemParams))
val slave = Vec(nSlaves, new ClientUncachedTileLinkIO()(innerParams)).flip
val debug = new DebugBusIO().flip
val rtcTick = Bool(INPUT)
val resetVector = UInt(INPUT, p(XLen))
val success = Bool(OUTPUT) // used for testing
}
trait CoreplexRISCVPlatformModule {
this: CoreplexNetworkModule {
val outer: CoreplexNetwork with CoreplexRISCVPlatform
val io: CoreplexRISCVPlatformBundle
} =>
val tiles = outer.lazyTiles.map(_.module)
  val uncoreTileIOs = (tiles zipWithIndex) map { case (tile, i) => Wire(tile.io) }
println("\nGenerated Address Map")
for (entry <- p(rocketchip.GlobalAddrMap).flatten) {
val name = entry.name
val start = entry.region.start
val end = entry.region.start + entry.region.size - 1
val prot = entry.region.attr.prot
val protStr = (if ((prot & AddrMapProt.R) > 0) "R" else "") +
(if ((prot & AddrMapProt.W) > 0) "W" else "") +
(if ((prot & AddrMapProt.X) > 0) "X" else "")
val cacheable = if (entry.region.attr.cacheable) " [C]" else ""
println(f"\t$name%s $start%x - $end%x, $protStr$cacheable")
}
// Create and export the ConfigString
val managers = outer.l1tol2.node.edgesIn(0).manager.managers
val configString = rocketchip.GenerateConfigString(p, outer.clint, outer.plic, managers)
  // Allow something else to have overridden the config string
if (!ConfigStringOutput.contents.isDefined) {
ConfigStringOutput.contents = Some(configString)
}
println(s"\nGenerated Configuration String\n${ConfigStringOutput.contents.get}")
  val nCachedPorts = tiles.map(tile => tile.io.cached.size).reduce(_ + _)
  val nUncachedPorts = tiles.map(tile => tile.io.uncached.size).reduce(_ + _)
  val nBanks = c.nMemChannels * nBanksPerMemChannel
  val nBanks = nMemChannels * nBanksPerMemChannel

  // Build an uncore backing the Tiles
  buildUncore(p.alterPartial({
    case HastiId => "TL"
    case TLId => "L1toL2"
@ -86,7 +173,7 @@ abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
    case NUncachedTileLinkPorts => nUncachedPorts
  }))

  def buildUncore(implicit p: Parameters) = {
  def buildUncore(implicit p: Parameters) {
    // Create a simple L1toL2 NoC between the tiles and the banks of outer memory
    // Cached ports are first in client list, making sharerToClientId just an identity function
    // addrToBank is used to hash physical addresses (of cache blocks) to banks (and thereby memory channels)
@ -112,8 +199,9 @@ abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
    l1tol2net.io.clients_cached <> uncoreTileIOs.map(_.cached).flatten
    l1tol2net.io.clients_uncached <> uncoreTileIOs.map(_.uncached).flatten ++ io.slave
    l1tol2net.io.managers <> managerEndpoints.map(_.innerTL) :+ mmioManager.io.inner
    outer.legacy.module.io.legacy <> mmioManager.io.outer

    val mem_ic = Module(new TileLinkMemoryInterconnect(nBanksPerMemChannel, c.nMemChannels)(outerMemParams))
    val mem_ic = Module(new TileLinkMemoryInterconnect(nBanksPerMemChannel, nMemChannels)(outerMemParams))

    val backendBuffering = TileLinkDepths(0,0,0,0,0)
    for ((bank, icPort) <- managerEndpoints zip mem_ic.io.in) {
@ -121,46 +209,36 @@ abstract class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle](
      icPort <> TileLinkIOUnwrapper(enqueued)
    }

    io.master.mem <> mem_ic.io.out
    buildMMIONetwork(TileLinkEnqueuer(mmioManager.io.outer, 1))(outerMMIOParams)
  }

  def buildMMIONetwork(mmio: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
    val ioAddrMap = globalAddrMap.subMap("io")

    val cBus = Module(new TileLinkRecursiveInterconnect(1, ioAddrMap))
    cBus.io.in.head <> mmio

    val plic = Module(new PLIC(c.plicKey))
    plic.io.tl <> cBus.port("cbus:plic")
    for (i <- 0 until io.interrupts.size) {
      val gateway = Module(new LevelGateway)
      gateway.io.interrupt := io.interrupts(i)
      plic.io.devices(i) <> gateway.io.plic
    }

    val debugModule = Module(new DebugModule)
    debugModule.io.tl <> cBus.port("cbus:debug")
    debugModule.io.db <> io.debug

    // connect coreplex-internal interrupts to tiles
    for ((tile, i) <- (uncoreTileIOs zipWithIndex)) {
      tile.interrupts <> io.clint(i)
      tile.interrupts.meip := plic.io.harts(plic.cfg.context(i, 'M'))
      tile.interrupts.seip.foreach(_ := plic.io.harts(plic.cfg.context(i, 'S')))
      tile.interrupts.debug := debugModule.io.debugInterrupts(i)
      tile.hartid := UInt(i)
      tile.resetVector := io.resetVector
    }

    val tileSlavePorts = (0 until c.nTiles) map (i => s"cbus:dmem$i") filter (ioAddrMap contains _)
    for ((t, m) <- (uncoreTileIOs.map(_.slave).flatten) zip (tileSlavePorts map (cBus port _)))
      t <> m

    io.master.mmio <> cBus.port("pbus")
  }

    io.mem <> mem_ic.io.out
  }

  // connect coreplex-internal interrupts to tiles
  for ((tile, i) <- (uncoreTileIOs zipWithIndex)) {
    tile.hartid := UInt(i)
    tile.resetVector := io.resetVector
    tile.interrupts := outer.clint.module.io.tiles(i)
    tile.interrupts.debug := outer.debug.module.io.debugInterrupts(i)
    tile.interrupts.meip := outer.tileIntNodes(i).bundleOut(0)(0)
    tile.interrupts.seip.foreach(_ := outer.tileIntNodes(i).bundleOut(0)(1))
  }

  outer.debug.module.io.db <> io.debug
  outer.clint.module.io.rtcTick := io.rtcTick

  // Coreplex doesn't know when to stop running
  io.success := Bool(false)
}
class BaseCoreplex(implicit p: Parameters) extends BareCoreplex
with CoreplexNetwork
with CoreplexRISCVPlatform {
override lazy val module = new BaseCoreplexModule(this, () => new BaseCoreplexBundle(this))
}
class BaseCoreplexBundle[+L <: BaseCoreplex](_outer: L) extends BareCoreplexBundle(_outer)
with CoreplexNetworkBundle
with CoreplexRISCVPlatformBundle
class BaseCoreplexModule[+L <: BaseCoreplex, +B <: BaseCoreplexBundle[L]](_outer: L, _io: () => B) extends BareCoreplexModule(_outer, _io)
with CoreplexNetworkModule
with CoreplexRISCVPlatformModule
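With the coreplex assembled this way, an enclosing top level reaches the generated hardware through the LazyModule's .module handle. A condensed, hypothetical sketch of that instantiation (the real wiring lives in BareTop / TopNetworkModule further down in this diff; debugBus is a stand-in name):

// Hypothetical usage sketch only; mirrors what TopNetworkModule does later in this diff.
val coreplex = LazyModule(new DefaultCoreplex()(p))       // builds the diplomacy graph lazily
// ... later, inside an enclosing Module:
val mem = coreplex.module.io.mem                          // Vec of ClientUncachedTileLinkIO to memory
coreplex.module.io.debug <> debugBus                      // debugBus: externally supplied DebugBusIO
coreplex.module.io.rtcTick := Counter(p(rocketchip.RTCPeriod)).inc()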

View File

@ -4,6 +4,7 @@ package coreplex
import Chisel._
import junctions._
import diplomacy._
import uncore.tilelink._
import uncore.coherence._
import uncore.agents._
@ -69,8 +70,8 @@ class BaseCoreplexConfig extends Config (
    case NUncachedTileLinkPorts => 1
    //Tile Constants
    case BuildTiles => {
      List.tabulate(site(NTiles)){ i => (r: Bool, p: Parameters) =>
      List.tabulate(site(NTiles)){ i => (p: Parameters) =>
        Module(new RocketTile(resetSignal = r)(p.alterPartial({
        LazyModule(new RocketTile()(p.alterPartial({
          case TileId => i
          case TLId => "L1toL2"
          case NUncachedTileLinkPorts => 1 + site(RoccNMemChannels)

View File

@ -3,14 +3,20 @@ package coreplex
import Chisel._
import cde.{Parameters, Field}
import junctions._
import diplomacy._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.util._
import util._
import rocket._

trait DirectConnection {
  val tiles: Seq[Tile]
  val uncoreTileIOs: Seq[TileIO]

trait DirectConnection {
  this: CoreplexNetwork with CoreplexRISCVPlatform =>
  lazyTiles.map(_.slave).flatten.foreach { scratch => scratch := cbus.node }
}

trait DirectConnectionModule {
  this: CoreplexNetworkModule with CoreplexRISCVPlatformModule =>

  val tlBuffering = TileLinkDepths(1,1,2,2,0)
  val ultBuffering = UncachedTileLinkDepths(1,2)
@ -18,7 +24,6 @@ trait DirectConnection {
  (tiles zip uncoreTileIOs) foreach { case (tile, uncore) =>
    (uncore.cached zip tile.io.cached) foreach { case (u, t) => u <> TileLinkEnqueuer(t, tlBuffering) }
    (uncore.uncached zip tile.io.uncached) foreach { case (u, t) => u <> TileLinkEnqueuer(t, ultBuffering) }
    tile.io.slave.foreach { _ <> TileLinkEnqueuer(uncore.slave.get, 1) }

    tile.io.interrupts <> uncore.interrupts
@ -27,30 +32,54 @@ trait DirectConnection {
  }
}
class DefaultCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplex(c)(p) {
  override lazy val module = Module(new DefaultCoreplexModule(c, this, new DefaultCoreplexBundle(c)(p))(p))
}
class DefaultCoreplexBundle(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplexBundle(c)(p)
class DefaultCoreplexModule[+L <: DefaultCoreplex, +B <: DefaultCoreplexBundle](
    c: CoreplexConfig, l: L, b: => B)(implicit p: Parameters) extends BaseCoreplexModule(c, l, b)(p)
    with DirectConnection

class DefaultCoreplex(implicit p: Parameters) extends BaseCoreplex
    with DirectConnection {
  override lazy val module = new DefaultCoreplexModule(this, () => new DefaultCoreplexBundle(this))
}
class DefaultCoreplexBundle[+L <: DefaultCoreplex](_outer: L) extends BaseCoreplexBundle(_outer)
class DefaultCoreplexModule[+L <: DefaultCoreplex, +B <: DefaultCoreplexBundle[L]](_outer: L, _io: () => B) extends BaseCoreplexModule(_outer, _io)
    with DirectConnectionModule
/////

trait TileClockResetBundle {
  val c: CoreplexConfig
  val tcrs = Vec(c.nTiles, new Bundle {
    val clock = Clock(INPUT)
    val reset = Bool(INPUT)
  })
}

trait AsyncConnection {
  val io: TileClockResetBundle
  val tiles: Seq[Tile]
  val uncoreTileIOs: Seq[TileIO]

trait AsyncConnection {
  this: CoreplexNetwork with CoreplexRISCVPlatform =>
  val crossings = lazyTiles.map(_.slave).map(_.map { scratch =>
    val crossing = LazyModule(new TLAsyncCrossing)
    crossing.node := cbus.node
    val monitor = (scratch := crossing.node)
    (crossing, monitor)
  })
}

trait AsyncConnectionBundle {
  this: CoreplexNetworkBundle with CoreplexRISCVPlatformBundle =>
  val tcrs = Vec(nTiles, new Bundle {
    val clock = Clock(INPUT)
    val reset = Bool(INPUT)
  })
}

trait AsyncConnectionModule {
  this: Module with CoreplexNetworkModule with CoreplexRISCVPlatformModule {
    val outer: AsyncConnection
    val io: AsyncConnectionBundle
} =>
(outer.crossings zip io.tcrs) foreach { case (slaves, tcr) =>
slaves.foreach { case (crossing, monitor) =>
crossing.module.io.in_clock := clock
crossing.module.io.in_reset := reset
crossing.module.io.out_clock := tcr.clock
crossing.module.io.out_reset := tcr.reset
monitor.foreach { m =>
m.module.clock := tcr.clock
m.module.reset := tcr.reset
}
}
}
  (tiles, uncoreTileIOs, io.tcrs).zipped foreach { case (tile, uncore, tcr) =>
    tile.clock := tcr.clock
@ -58,7 +87,6 @@ trait AsyncConnection {
    (uncore.cached zip tile.io.cached) foreach { case (u, t) => u <> AsyncTileLinkFrom(tcr.clock, tcr.reset, t) }
    (uncore.uncached zip tile.io.uncached) foreach { case (u, t) => u <> AsyncUTileLinkFrom(tcr.clock, tcr.reset, t) }
    tile.io.slave.foreach { _ <> AsyncUTileLinkTo(tcr.clock, tcr.reset, uncore.slave.get)}

    val ti = tile.io.interrupts
    val ui = uncore.interrupts
@ -73,13 +101,13 @@ trait AsyncConnection {
  }
}
class MultiClockCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplex(c)(p) {
  override lazy val module = Module(new MultiClockCoreplexModule(c, this, new MultiClockCoreplexBundle(c)(p))(p))
}
class MultiClockCoreplexBundle(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplexBundle(c)(p)
    with TileClockResetBundle
class MultiClockCoreplexModule[+L <: MultiClockCoreplex, +B <: MultiClockCoreplexBundle](
    c: CoreplexConfig, l: L, b: => B)(implicit p: Parameters) extends BaseCoreplexModule(c, l, b)(p)
    with AsyncConnection

class MultiClockCoreplex(implicit p: Parameters) extends BaseCoreplex
    with AsyncConnection {
  override lazy val module = new MultiClockCoreplexModule(this, () => new MultiClockCoreplexBundle(this))
}
class MultiClockCoreplexBundle[+L <: MultiClockCoreplex](_outer: L) extends BaseCoreplexBundle(_outer)
    with AsyncConnectionBundle
class MultiClockCoreplexModule[+L <: MultiClockCoreplex, +B <: MultiClockCoreplexBundle[L]](_outer: L, _io: () => B) extends BaseCoreplexModule(_outer, _io)
    with AsyncConnectionModule
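The AsyncConnection flavor above inserts a TLAsyncCrossing in front of each tile scratchpad slave and exposes one clock/reset pair per tile through io.tcrs. A minimal, hypothetical harness-side sketch of driving those pairs (the single shared clock here is an assumption for illustration):

// Hypothetical harness sketch; a real design would supply independent clock domains per tile.
val coreplex = LazyModule(new MultiClockCoreplex()(p))
// ... inside the enclosing Module:
coreplex.module.io.tcrs.foreach { tcr =>
  tcr.clock := clock   // run every tile in the harness clock domain for this sketch
  tcr.reset := reset
}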

View File

@ -62,7 +62,7 @@ abstract class LazyModule
  private def nodesGraphML(buf: StringBuilder, pad: String) {
    buf ++= s"""${pad}<node id=\"${index}\">\n"""
    buf ++= s"""${pad}  <data key=\"n\"><y:ShapeNode><y:NodeLabel modelName=\"sides\" modelPosition=\"w\" fontSize=\"10\" borderDistance=\"1.0\" rotationAngle=\"270.0\">${module.name}</y:NodeLabel></y:ShapeNode></data>\n"""
    buf ++= s"""${pad}  <data key=\"n\"><y:ShapeNode><y:NodeLabel modelName=\"sides\" modelPosition=\"w\" rotationAngle=\"270.0\">${module.instanceName}</y:NodeLabel></y:ShapeNode></data>\n"""
    buf ++= s"""${pad}  <graph id=\"${index}::\" edgedefault=\"directed\">\n"""
    nodes.filter(!_.omitGraphML).foreach { n =>
      buf ++= s"""${pad}    <node id=\"${index}::${n.index}\"/>\n"""
@ -72,11 +72,15 @@ abstract class LazyModule
    buf ++= s"""${pad}</node>\n"""
  }
  private def edgesGraphML(buf: StringBuilder, pad: String) {
    nodes.filter(!_.omitGraphML) foreach { n => n.outputs.filter(!_.omitGraphML).foreach { o =>
    nodes.filter(!_.omitGraphML) foreach { n => n.outputs.filter(!_._1.omitGraphML).foreach { case (o, l) =>
      buf ++= pad
      buf ++= "<edge"
      buf ++= s""" source=\"${index}::${n.index}\""""
      buf ++= s""" target=\"${o.lazyModule.index}::${o.index}\"><data key=\"e\"><y:PolyLineEdge><y:Arrows source=\"none\" target=\"standard\"/><y:LineStyle color=\"${o.colour}\" type=\"line\" width=\"1.0\"/></y:PolyLineEdge></data></edge>\n"""
      buf ++= s""" target=\"${o.lazyModule.index}::${o.index}\"><data key=\"e\"><y:PolyLineEdge>"""
      buf ++= s"""<y:Arrows source=\"none\" target=\"standard\"/>"""
      buf ++= s"""<y:LineStyle color=\"${o.colour}\" type=\"line\" width=\"1.0\"/>"""
      buf ++= s"""<y:EdgeLabel modelName=\"centered\" rotationAngle=\"270.0\">${l}</y:EdgeLabel>"""
      buf ++= s"""</y:PolyLineEdge></data></edge>\n"""
    } }
    children.filter(!_.omitGraphML).foreach { c => c.edgesGraphML(buf, pad) }
  }

View File

@ -20,6 +20,7 @@ trait InwardNodeImp[DI, UI, EI, BI <: Data]
  // optional methods to track node graph
  def mixI(pu: UI, node: InwardNode[DI, UI, BI]): UI = pu // insert node into parameters
  def getO(pu: UI): Option[BaseNode] = None // most-outward common node
  def labelI(ei: EI) = ""
}

// DO = Downwards flowing Parameters generated by the outer side of the node
@ -34,6 +35,7 @@ trait OutwardNodeImp[DO, UO, EO, BO <: Data]
  // optional methods to track node graph
  def mixO(pd: DO, node: OutwardNode[DO, UO, BO]): DO = pd // insert node into parameters
  def getI(pd: DO): Option[BaseNode] = None // most-inward common node
  def labelO(eo: EO) = ""
}

abstract class NodeImp[D, U, EO, EI, B <: Data]
@ -54,8 +56,8 @@ abstract class BaseNode
  protected[diplomacy] def gci: Option[BaseNode] // greatest common inner
  protected[diplomacy] def gco: Option[BaseNode] // greatest common outer
  protected[diplomacy] def outputs: Seq[BaseNode]
  protected[diplomacy] def outputs: Seq[(BaseNode, String)]
  protected[diplomacy] def inputs: Seq[BaseNode]
  protected[diplomacy] def inputs: Seq[(BaseNode, String)]
  protected[diplomacy] def colour: String
}
@ -94,7 +96,7 @@ trait InwardNode[DI, UI, BI <: Data] extends BaseNode with InwardNodeHandle[DI,
  protected[diplomacy] lazy val iPorts = { iRealized = true; reqI(); accPI.result() }
  protected[diplomacy] val iParams: Seq[UI]
  protected[diplomacy] def iConnect: Vec[BI]
  val bundleIn: Vec[BI]
}

trait OutwardNodeHandle[DO, UO, BO <: Data]
@ -126,7 +128,7 @@ trait OutwardNode[DO, UO, BO <: Data] extends BaseNode with OutwardNodeHandle[DO
  protected[diplomacy] lazy val oPorts = { oRealized = true; reqO(); accPO.result() }
  protected[diplomacy] val oParams: Seq[DO]
  protected[diplomacy] def oConnect: Vec[BO]
  val bundleOut: Vec[BO]
}

class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
@ -140,8 +142,8 @@ class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
{
  // meta-data for printing the node graph
  protected[diplomacy] def colour = inner.colour
  protected[diplomacy] def outputs = oPorts.map(_._2)
  protected[diplomacy] def outputs = oPorts.map(_._2) zip edgesOut.map(e => outer.labelO(e))
  protected[diplomacy] def inputs = iPorts.map(_._2)
  protected[diplomacy] def inputs = iPorts.map(_._2) zip edgesIn .map(e => inner.labelI(e))

  private def reqE(o: Int, i: Int) = require(i == o, s"${name} has ${i} inputs and ${o} outputs; they must match${lazyModule.line}")
  protected[diplomacy] lazy val oParams: Seq[DO] = {
@ -161,11 +163,15 @@ class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
  lazy val edgesOut = (oPorts zip oParams).map { case ((i, n), o) => outer.edgeO(o, n.iParams(i)) }
  lazy val edgesIn = (iPorts zip iParams).map { case ((o, n), i) => inner.edgeI(n.oParams(o), i) }

  lazy val bundleOut = outer.bundleO(edgesOut)
  lazy val bundleIn = inner.bundleI(edgesIn)
  def oConnect = bundleOut
  def iConnect = bundleIn

  val flip = false // needed for blind nodes
  private def flipO(b: Vec[BO]) = if (flip) b.flip else b
  private def flipI(b: Vec[BI]) = if (flip) b else b.flip
  val wire = false // needed if you want to grab access from inside a module
  private def wireO(b: Vec[BO]) = if (wire) Wire(b) else b
  private def wireI(b: Vec[BI]) = if (wire) Wire(b) else b

  lazy val bundleOut = wireO(flipO(outer.bundleO(edgesOut)))
  lazy val bundleIn = wireI(flipI(inner.bundleI(edgesIn)))

  // connects the outward part of a node with the inward part of this node
  override def := (h: OutwardNodeHandle[DI, UI, BI])(implicit sourceInfo: SourceInfo): Option[LazyModule] = {
@ -177,7 +183,7 @@ class MixedNode[DI, UI, EI, BI <: Data, DO, UO, EO, BO <: Data](
    val o = y.oPushed
    y.oPush(i, x)
    x.iPush(o, y)
    val (out, binding) = inner.connect(y.oConnect(o), x.iConnect(i), x.edgesIn(i))
    val (out, binding) = inner.connect(y.bundleOut(o), x.bundleIn(i), x.edgesIn(i))
    LazyModule.stack.head.bindings = binding :: LazyModule.stack.head.bindings
    out
  }
@ -195,26 +201,54 @@ class IdentityNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])
class OutputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B]) extends IdentityNode(imp)
{
  override def oConnect = bundleOut
  override def iConnect = bundleOut
  override lazy val bundleIn = bundleOut
}

class InputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B]) extends IdentityNode(imp)
{
  override def oConnect = bundleIn
  override def iConnect = bundleIn
  override lazy val bundleOut = bundleIn
}

class SourceNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(po: PO, num: Range.Inclusive = 1 to 1)
  extends SimpleNode(imp)({case (n, Seq()) => Seq.fill(n)(po)}, {case (0, _) => Seq()}, num, 0 to 0)
{
  require (num.end >= 1, s"${name} is a source which does not accept outputs${lazyModule.line}")
  override lazy val bundleIn = { require(false, s"${name} has no bundleIn; try bundleOut?"); bundleOut }
}

class SinkNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(pi: PI, num: Range.Inclusive = 1 to 1)
  extends SimpleNode(imp)({case (0, _) => Seq()}, {case (n, Seq()) => Seq.fill(n)(pi)}, 0 to 0, num)
{
  require (num.end >= 1, s"${name} is a sink which does not accept inputs${lazyModule.line}")
  override lazy val bundleOut = { require(false, s"${name} has no bundleOut; try bundleIn?"); bundleIn }
}
class BlindOutputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(pi: PI)
extends SimpleNode(imp)({case (0, _) => Seq()}, {case (n, Seq()) => Seq.fill(n)(pi)}, 0 to 0, 1 to 1)
{
override val flip = true
override lazy val bundleOut = bundleIn
}
class BlindInputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(po: PO)
extends SimpleNode(imp)({case (n, Seq()) => Seq.fill(n)(po)}, {case (0, _) => Seq()}, 1 to 1, 0 to 0)
{
override val flip = true
override lazy val bundleIn = bundleOut
}
class InternalOutputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(pi: PI)
extends SimpleNode(imp)({case (0, _) => Seq()}, {case (n, Seq()) => Seq.fill(n)(pi)}, 0 to 0, 1 to 1)
{
override val wire = true
override lazy val bundleOut = bundleIn
}
class InternalInputNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])(po: PO)
extends SimpleNode(imp)({case (n, Seq()) => Seq.fill(n)(po)}, {case (0, _) => Seq()}, 1 to 1, 0 to 0)
{
override val wire = true
override lazy val bundleIn = bundleOut
}

class InteriorNode[PO, PI, EO, EI, B <: Data](imp: NodeImp[PO, PI, EO, EI, B])
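The label hooks plus the Input/Output/Blind/Internal node flavors are what let the coreplex expose interrupt endpoints as plain bundle IO, or tap them as wires from inside a module implementation. Condensed from CoreplexNetwork / CoreplexRISCVPlatform earlier in this diff; the arrangement is illustrative, not new code:

// Condensed from the coreplex changes above, for orientation only.
val mmioInt      = IntInputNode()                                  // external interrupts enter the coreplex here
val tileIntNodes = lazyTiles.map { _ => IntInternalOutputNode() }  // one internal endpoint per tile
plic.intnode := mmioInt
tileIntNodes.foreach { _ := plic.intnode }
// ... inside the module implementation:
val interrupts = outer.mmioInt.bundleIn                            // shows up as ordinary Bundle IO
tile.interrupts.meip := outer.tileIntNodes(i).bundleOut(0)(0)      // internal nodes are readable as wires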

View File

@ -72,8 +72,6 @@ class Edge32BitMemtestConfig extends Config(
/* Composable Configs to set individual parameters */
class WithGroundTest extends Config(
  (pname, site, here) => pname match {
    case BuildCoreplex =>
      (c: CoreplexConfig, p: Parameters) => LazyModule(new GroundTestCoreplex(c)(p)).module
    case TLKey("L1toL2") => {
      val useMEI = site(NTiles) <= 1 && site(NCachedTileLinkPorts) <= 1
      val dataBeats = (8 * site(CacheBlockBytes)) / site(XLen)
@ -95,8 +93,8 @@ class WithGroundTest extends Config(
    case BuildTiles => {
      (0 until site(NTiles)).map { i =>
        val tileSettings = site(GroundTestKey)(i)
        (r: Bool, p: Parameters) => {
        (p: Parameters) => {
          Module(new GroundTestTile(resetSignal = r)(p.alterPartial({
          LazyModule(new GroundTestTile()(p.alterPartial({
            case TLId => "L1toL2"
            case TileId => i
            case NCachedTileLinkPorts => if(tileSettings.cached > 0) 1 else 0
@ -106,7 +104,7 @@ class WithGroundTest extends Config(
      }
    }
    case BuildExampleTop =>
      (p: Parameters) => LazyModule(new ExampleTopWithTestRAM(p))
      (p: Parameters) => LazyModule(new ExampleTopWithTestRAM(new GroundTestCoreplex()(_))(p))
    case FPUKey => None
    case UseAtomics => false
    case UseCompressed => false
@ -116,12 +114,12 @@ class WithGroundTest extends Config(
class WithComparator extends Config(
  (pname, site, here) => pname match {
    case GroundTestKey => Seq.fill(site(NTiles)) {
      GroundTestTileSettings(uncached = site(ComparatorKey).targets.size)
      GroundTestTileSettings(uncached = 2)
    }
    case BuildGroundTest =>
      (p: Parameters) => Module(new ComparatorCore()(p))
    case ComparatorKey => ComparatorParameters(
      targets = Seq("mem", "io:pbus:TL2:testram").map(name =>
      targets = Seq("mem", "TL2:testram").map(name =>
        site(GlobalAddrMap)(name).start.longValue),
      width = 8,
      operations = 1000,

View File

@ -4,13 +4,14 @@ import Chisel._
import cde.{Parameters}
import coreplex._

class GroundTestCoreplex(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplex(c)(p) {
  override lazy val module = Module(new GroundTestCoreplexModule(c, this, new GroundTestCoreplexBundle(c)(p))(p))
}
class GroundTestCoreplexBundle(c: CoreplexConfig)(implicit p: Parameters) extends BaseCoreplexBundle(c)(p)
class GroundTestCoreplexModule[+L <: GroundTestCoreplex, +B <: GroundTestCoreplexBundle](
    c: CoreplexConfig, l: L, b: => B)(implicit p: Parameters) extends BaseCoreplexModule(c, l, b)(p) with DirectConnection {
  io.success := tiles.flatMap(_.io.elements get "success").map(_.asInstanceOf[Bool]).reduce(_&&_)
}

class GroundTestCoreplex(implicit p: Parameters) extends BaseCoreplex
    with DirectConnection {
  override lazy val module = new GroundTestCoreplexModule(this, () => new GroundTestCoreplexBundle(this))
}
class GroundTestCoreplexBundle[+L <: GroundTestCoreplex](_outer: L) extends BaseCoreplexBundle(_outer)
class GroundTestCoreplexModule[+L <: GroundTestCoreplex, +B <: GroundTestCoreplexBundle[L]](_outer: L, _io: () => B) extends BaseCoreplexModule(_outer, _io)
    with DirectConnectionModule {
  io.success := tiles.flatMap(_.io.elements get "success").map(_.asInstanceOf[Bool]).reduce(_&&_)
}

View File

@ -5,6 +5,7 @@ package groundtest
object Generator extends util.GeneratorApp {
  val longName = names.topModuleProject + "." + names.configs
  generateFirrtl
  generateGraphML
  generateTestSuiteMakefrags // TODO: Needed only for legacy make targets
  generateParameterDump // TODO: Needed only for legacy make targets
}

View File

@ -71,7 +71,7 @@ class IOGetAfterPutBlockRegression(implicit p: Parameters) extends Regression()(
  io.mem.grant.ready := Bool(true)

  io.cache.req.valid := !get_sent && started
  io.cache.req.bits.addr := UInt(addrMap("io:pbus:TL2:bootrom").start)
  io.cache.req.bits.addr := UInt(addrMap("TL2:bootrom").start)
  io.cache.req.bits.typ := MT_WU
  io.cache.req.bits.cmd := M_XRD
  io.cache.req.bits.tag := UInt(0)

View File

@ -96,48 +96,47 @@ abstract class GroundTest(implicit val p: Parameters) extends Module
  val io = new GroundTestIO
}

class GroundTestTile(resetSignal: Bool)
                    (implicit val p: Parameters)
                    extends Tile(resetSignal = resetSignal)(p)
                    with HasGroundTestParameters {
  override val io = new TileIO(bc) {
    val success = Bool(OUTPUT)
  }

class GroundTestTile(implicit val p: Parameters) extends LazyTile {
  val slave = None
  lazy val module = new TileImp(this) with HasGroundTestParameters {
    val io = new TileIO(bc) {
      val success = Bool(OUTPUT)
    }

    val test = p(BuildGroundTest)(dcacheParams)

    val ptwPorts = ListBuffer.empty ++= test.io.ptw
    val memPorts = ListBuffer.empty ++= test.io.mem

    if (nCached > 0) {
      val dcache_io = HellaCache(p(DCacheKey))(dcacheParams)
      val dcacheArb = Module(new HellaCacheArbiter(nCached)(dcacheParams))

      dcacheArb.io.requestor.zip(test.io.cache).foreach {
        case (requestor, cache) =>
          val dcacheIF = Module(new SimpleHellaCacheIF()(dcacheParams))
          dcacheIF.io.requestor <> cache
          requestor <> dcacheIF.io.cache
      }
      dcache_io.cpu <> dcacheArb.io.mem
      io.cached.head <> dcache_io.mem

      // SimpleHellaCacheIF leaves invalidate_lr dangling, so we wire it to false
      dcache_io.cpu.invalidate_lr := Bool(false)
      ptwPorts += dcache_io.ptw
    }

    if (ptwPorts.size > 0) {
      val ptw = Module(new DummyPTW(ptwPorts.size))
      ptw.io.requestors <> ptwPorts
    }

    require(memPorts.size == io.uncached.size)
    if (memPorts.size > 0) {
      io.uncached <> memPorts
    }

    io.success := test.io.status.finished
  }
}

View File

@ -76,12 +76,7 @@ class AddrMap(
      var cacheable = true
      for (AddrMapEntry(name, r) <- entriesIn) {
        require (!mapping.contains(name))

        if (r.start != 0) {
          base = r.start
        } else {
          base = (base + r.size - 1) / r.size * r.size
        }

        base = r.start

        r match {
          case r: AddrMap =>

View File

@ -4,7 +4,9 @@ package rocket
import Chisel._
import junctions._
import diplomacy._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.agents._
import uncore.coherence._
import uncore.constants._
@ -125,7 +127,7 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
    require(nWays == 1)
    metaWriteArb.io.out.ready := true
    metaReadArb.io.out.ready := !metaWriteArb.io.out.valid
    val inScratchpad = addrMap(s"io:cbus:dmem${tileId}").containsAddress(s1_paddr)
    val inScratchpad = addrMap(s"TL2:dmem${tileId}").containsAddress(s1_paddr)
    val hitState = Mux(inScratchpad, ClientMetadata.onReset.onHit(M_XWR), ClientMetadata.onReset)
    (inScratchpad, hitState, L1Metadata(UInt(0), ClientMetadata.onReset))
  } else {
@ -495,60 +497,97 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
  }
}
class ScratchpadSlavePort(implicit p: Parameters) extends CoreModule()(p) {
  val io = new Bundle {
    val tl = new ClientUncachedTileLinkIO().flip
    val dmem = new HellaCacheIO

class ScratchpadSlavePort(implicit val p: Parameters) extends LazyModule with HasCoreParameters {
  val node = TLManagerNode(TLManagerPortParameters(
    Seq(TLManagerParameters(
      address            = List(AddressSet(0x80000000L, BigInt(p(DataScratchpadSize)-1))),
regionType = RegionType.UNCACHED,
executable = true,
supportsArithmetic = if (p(UseAtomics)) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsLogical = if (p(UseAtomics)) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsPutPartial = TransferSizes(1, coreDataBytes),
supportsPutFull = TransferSizes(1, coreDataBytes),
supportsGet = TransferSizes(1, coreDataBytes),
fifoId = Some(0))), // requests handled in FIFO order
beatBytes = coreDataBytes,
minLatency = 1))
// Make sure this ends up with the same name as before
override def name = "dmem0"
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val tl_in = node.bundleIn
val dmem = new HellaCacheIO
}
val tl_in = io.tl_in(0)
val edge = node.edgesIn(0)
require(usingDataScratchpad)
val s_ready :: s_wait :: s_replay :: s_grant :: Nil = Enum(UInt(), 4)
val state = Reg(init = s_ready)
when (io.dmem.resp.valid) { state := s_grant }
when (tl_in.d.fire()) { state := s_ready }
when (io.dmem.s2_nack) { state := s_replay }
when (io.dmem.req.fire()) { state := s_wait }
val acq = Reg(tl_in.a.bits)
when (io.dmem.resp.valid) { acq.data := io.dmem.resp.bits.data }
when (tl_in.a.fire()) { acq := tl_in.a.bits }
val isWrite = acq.opcode === TLMessages.PutFullData || acq.opcode === TLMessages.PutPartialData
val isRead = !edge.hasData(acq)
def formCacheReq(acq: TLBundleA) = {
val req = Wire(new HellaCacheReq)
req.cmd := MuxLookup(acq.opcode, Wire(M_XRD), Array(
TLMessages.PutFullData -> M_XWR,
TLMessages.PutPartialData -> M_XWR,
TLMessages.ArithmeticData -> MuxLookup(acq.param, Wire(M_XRD), Array(
TLAtomics.MIN -> M_XA_MIN,
TLAtomics.MAX -> M_XA_MAX,
TLAtomics.MINU -> M_XA_MINU,
TLAtomics.MAXU -> M_XA_MAXU,
TLAtomics.ADD -> M_XA_ADD)),
TLMessages.LogicalData -> MuxLookup(acq.param, Wire(M_XRD), Array(
TLAtomics.XOR -> M_XA_XOR,
TLAtomics.OR -> M_XA_OR,
TLAtomics.AND -> M_XA_AND,
TLAtomics.SWAP -> M_XA_SWAP)),
TLMessages.Get -> M_XRD))
// treat all loads as full words, so bytes appear in correct lane
req.typ := Mux(isRead, log2Ceil(coreDataBytes), acq.size)
req.addr := Mux(isRead, ~(~acq.address | (coreDataBytes-1)), acq.address)
req.tag := UInt(0)
req
}
val ready = state === s_ready || tl_in.d.fire()
io.dmem.req.valid := (tl_in.a.valid && ready) || state === s_replay
tl_in.a.ready := io.dmem.req.ready && ready
io.dmem.req.bits := formCacheReq(Mux(state === s_replay, acq, tl_in.a.bits))
// the TL data is already in the correct byte lane, but the D$
// expects right-justified store data, so that it can steer the bytes.
io.dmem.s1_data := new LoadGen(acq.size, Bool(false), acq.address(log2Ceil(coreDataBytes)-1,0), acq.data, Bool(false), coreDataBytes).data
io.dmem.s1_kill := false
io.dmem.invalidate_lr := false
// place AMO data in correct word lane
val minAMOBytes = 4
val grantData = Mux(io.dmem.resp.valid, io.dmem.resp.bits.data, acq.data)
val alignedGrantData = Mux(acq.size <= log2Ceil(minAMOBytes), Fill(coreDataBytes/minAMOBytes, grantData(8*minAMOBytes-1, 0)), grantData)
tl_in.d.valid := io.dmem.resp.valid || state === s_grant
tl_in.d.bits := Mux(isWrite,
edge.AccessAck(acq, UInt(0)),
edge.AccessAck(acq, UInt(0), UInt(0)))
tl_in.d.bits.data := alignedGrantData
// Tie off unused channels
tl_in.b.valid := Bool(false)
tl_in.c.ready := Bool(true)
tl_in.e.ready := Bool(true)
  }
val s_ready :: s_wait :: s_replay :: s_grant :: Nil = Enum(UInt(), 4)
val state = Reg(init = s_ready)
when (io.dmem.resp.valid) { state := s_grant }
when (io.tl.grant.fire()) { state := s_ready }
when (io.dmem.s2_nack) { state := s_replay }
when (io.dmem.req.fire()) { state := s_wait }
val acq = Reg(io.tl.acquire.bits)
when (io.dmem.resp.valid) { acq.data := io.dmem.resp.bits.data }
when (io.tl.acquire.fire()) { acq := io.tl.acquire.bits }
val isRead = acq.isBuiltInType(Acquire.getType)
val isWrite = acq.isBuiltInType(Acquire.putType)
assert(state === s_ready || isRead || isWrite)
require(coreDataBits == acq.tlDataBits)
require(usingDataScratchpad)
def formCacheReq(acq: Acquire) = {
val req = Wire(new HellaCacheReq)
// treat all loads as full words, so bytes appear in correct lane
req.typ := Mux(isRead, log2Ceil(acq.tlDataBytes), acq.op_size())
req.cmd := acq.op_code()
req.addr := Mux(isRead, ~(~acq.full_addr() | (acq.tlDataBytes-1)), acq.full_addr())
req.tag := UInt(0)
req
}
val ready = state === s_ready || io.tl.grant.fire()
io.dmem.req.valid := (io.tl.acquire.valid && ready) || state === s_replay
io.tl.acquire.ready := io.dmem.req.ready && ready
io.dmem.req.bits := formCacheReq(Mux(state === s_replay, acq, io.tl.acquire.bits))
// this blows. the TL data is already in the correct byte lane, but the D$
// expects right-justified store data, so that it can steer the bytes.
io.dmem.s1_data := new LoadGen(acq.op_size(), Bool(false), acq.addr_byte(), acq.data, Bool(false), acq.tlDataBytes).data
io.dmem.s1_kill := false
io.dmem.invalidate_lr := false
// place AMO data in correct word lane
val minAMOBytes = 4
val grantData = Mux(io.dmem.resp.valid, io.dmem.resp.bits.data, acq.data)
val alignedGrantData = Mux(acq.op_size() <= log2Ceil(minAMOBytes), Fill(coreDataBytes/minAMOBytes, grantData(8*minAMOBytes-1, 0)), grantData)
io.tl.grant.valid := io.dmem.resp.valid || state === s_grant
io.tl.grant.bits := Grant(
is_builtin_type = Bool(true),
g_type = acq.getBuiltInGrantType(),
client_xact_id = acq.client_xact_id,
manager_xact_id = UInt(0),
addr_beat = acq.addr_beat,
data = alignedGrantData)
}
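RocketTile (later in this diff) is what instantiates this adapter: the tile publishes an optional TLInputNode, fragments incoming transactions down to the D$ port width, and splices the adapter's HellaCache request port into the cache arbiter. Condensed from the Tile.scala changes below, for reference only:

// Condensed from the RocketTile changes later in this diff.
val slave   = if (p(DataScratchpadSize) == 0) None else Some(TLInputNode())
val scratch = if (p(DataScratchpadSize) == 0) None else Some(LazyModule(new ScratchpadSlavePort()(dcacheParams)))
(slave zip scratch) foreach { case (node, lm) => lm.node := TLFragmenter(p(XLen)/8, p(CacheBlockBytes))(node) }
// ... inside the tile's module implementation:
scratch.foreach { lm => lm.module.io.dmem +=: dcPorts }   // adapter requests go through the HellaCacheArbiter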

View File

@ -3,7 +3,9 @@
package rocket

import Chisel._
import diplomacy._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.agents._
import uncore.converters._
import uncore.devices._
@ -25,136 +27,141 @@ case class RoccParameters(
case class TileBundleConfig(
  nCachedTileLinkPorts: Int,
  nUncachedTileLinkPorts: Int,
  xLen: Int,
  hasSlavePort: Boolean)
  xLen: Int)

class TileIO(c: TileBundleConfig)(implicit p: Parameters) extends Bundle {
class TileIO(c: TileBundleConfig, node: Option[TLInwardNode] = None)(implicit p: Parameters) extends Bundle {
  val cached = Vec(c.nCachedTileLinkPorts, new ClientTileLinkIO)
  val uncached = Vec(c.nUncachedTileLinkPorts, new ClientUncachedTileLinkIO)
  val hartid = UInt(INPUT, c.xLen)
  val interrupts = new TileInterrupts().asInput
  val slave = c.hasSlavePort.option(new ClientUncachedTileLinkIO().flip)
  val slave = node.map(_.inward.bundleIn)
  val resetVector = UInt(INPUT, c.xLen)

  override def cloneType = new TileIO(c).asInstanceOf[this.type]
}

abstract class Tile(clockSignal: Clock = null, resetSignal: Bool = null)
                   (implicit p: Parameters) extends Module(Option(clockSignal), Option(resetSignal)) {

abstract class TileImp(l: LazyTile)(implicit val p: Parameters) extends LazyModuleImp(l) {
  val io: TileIO
}

abstract class LazyTile(implicit p: Parameters) extends LazyModule {
  val nCachedTileLinkPorts = p(NCachedTileLinkPorts)
  val nUncachedTileLinkPorts = p(NUncachedTileLinkPorts)
  val dcacheParams = p.alterPartial({ case CacheName => "L1D" })
  val bc = TileBundleConfig(
    nCachedTileLinkPorts = nCachedTileLinkPorts,
    nUncachedTileLinkPorts = nUncachedTileLinkPorts,
    xLen = p(XLen),
    hasSlavePort = p(DataScratchpadSize) > 0)
    xLen = p(XLen))

  val io = new TileIO(bc)
  val module: TileImp
  val slave: Option[TLInputNode]
}
class RocketTile(clockSignal: Clock = null, resetSignal: Bool = null)
                (implicit p: Parameters) extends Tile(clockSignal, resetSignal)(p) {

class RocketTile(implicit p: Parameters) extends LazyTile {
  val slave = if (p(DataScratchpadSize) == 0) None else Some(TLInputNode())
  val scratch = if (p(DataScratchpadSize) == 0) None else Some(LazyModule(new ScratchpadSlavePort()(dcacheParams)))

  (slave zip scratch) foreach { case (node, lm) => lm.node := TLFragmenter(p(XLen)/8, p(CacheBlockBytes))(node) }

  lazy val module = new TileImp(this) {
    val io = new TileIO(bc, slave)
    val buildRocc = p(BuildRoCC)
    val usingRocc = !buildRocc.isEmpty
    val nRocc = buildRocc.size
    val nFPUPorts = buildRocc.filter(_.useFPU).size

    val core = Module(new Rocket)
    val icache = Module(new Frontend()(p.alterPartial({ case CacheName => "L1I" })))
    val dcache = HellaCache(p(DCacheKey))(dcacheParams)

    val ptwPorts = collection.mutable.ArrayBuffer(icache.io.ptw, dcache.ptw)
    val dcPorts = collection.mutable.ArrayBuffer(core.io.dmem)
    val uncachedArbPorts = collection.mutable.ArrayBuffer(icache.io.mem)
    val uncachedPorts = collection.mutable.ArrayBuffer[ClientUncachedTileLinkIO]()
    val cachedPorts = collection.mutable.ArrayBuffer(dcache.mem)
    core.io.interrupts := io.interrupts
    core.io.hartid := io.hartid
    icache.io.cpu <> core.io.imem
    icache.io.resetVector := io.resetVector

    val fpuOpt = p(FPUKey).map(cfg => Module(new FPU(cfg)))
    fpuOpt.foreach(fpu => core.io.fpu <> fpu.io)

    if (usingRocc) {
      val respArb = Module(new RRArbiter(new RoCCResponse, nRocc))
      core.io.rocc.resp <> respArb.io.out

      val roccOpcodes = buildRocc.map(_.opcodes)
      val cmdRouter = Module(new RoccCommandRouter(roccOpcodes))
      cmdRouter.io.in <> core.io.rocc.cmd

      val roccs = buildRocc.zipWithIndex.map { case (accelParams, i) =>
        val rocc = accelParams.generator(p.alterPartial({
          case RoccNMemChannels => accelParams.nMemChannels
          case RoccNPTWPorts => accelParams.nPTWPorts
        }))
        val dcIF = Module(new SimpleHellaCacheIF()(dcacheParams))
        rocc.io.cmd <> cmdRouter.io.out(i)
        rocc.io.exception := core.io.rocc.exception
        dcIF.io.requestor <> rocc.io.mem
        dcPorts += dcIF.io.cache
        uncachedArbPorts += rocc.io.autl
        rocc
      }

      if (nFPUPorts > 0) {
        fpuOpt.foreach { fpu =>
          val fpArb = Module(new InOrderArbiter(new FPInput, new FPResult, nFPUPorts))
          val fp_roccs = roccs.zip(buildRocc)
            .filter { case (_, params) => params.useFPU }
            .map { case (rocc, _) => rocc.io }
          fpArb.io.in_req <> fp_roccs.map(_.fpu_req)
          fp_roccs.zip(fpArb.io.in_resp).foreach {
            case (rocc, fpu_resp) => rocc.fpu_resp <> fpu_resp
          }
          fpu.io.cp_req <> fpArb.io.out_req
          fpArb.io.out_resp <> fpu.io.cp_resp
        }
      }

      core.io.rocc.busy := cmdRouter.io.busy || roccs.map(_.io.busy).reduce(_ || _)
      core.io.rocc.interrupt := roccs.map(_.io.interrupt).reduce(_ || _)
      respArb.io.in <> roccs.map(rocc => Queue(rocc.io.resp))

      ptwPorts ++= roccs.flatMap(_.io.ptw)
      uncachedPorts ++= roccs.flatMap(_.io.utl)
    }

    val uncachedArb = Module(new ClientUncachedTileLinkIOArbiter(uncachedArbPorts.size))
    uncachedArb.io.in <> uncachedArbPorts
    uncachedArb.io.out +=: uncachedPorts

    // Connect the caches and RoCC to the outer memory system
    io.uncached <> uncachedPorts
    io.cached <> cachedPorts
    // TODO remove nCached/nUncachedTileLinkPorts parameters and these assertions
    require(uncachedPorts.size == nUncachedTileLinkPorts)
    require(cachedPorts.size == nCachedTileLinkPorts)

    if (p(UseVM)) {
      val ptw = Module(new PTW(ptwPorts.size)(dcacheParams))
      ptw.io.requestor <> ptwPorts
      ptw.io.mem +=: dcPorts
      core.io.ptw <> ptw.io.dpath
    }

    io.slave foreach { case slavePort =>
      val adapter = Module(new ScratchpadSlavePort()(dcacheParams))
      adapter.io.tl <> TileLinkFragmenter(slavePort)
      adapter.io.dmem +=: dcPorts
    }

    scratch.foreach { lm => lm.module.io.dmem +=: dcPorts }

    require(dcPorts.size == core.dcacheArbPorts)
    val dcArb = Module(new HellaCacheArbiter(dcPorts.size)(dcacheParams))
    dcArb.io.requestor <> dcPorts
    dcache.cpu <> dcArb.io.mem

    if (nFPUPorts == 0) {
      fpuOpt.foreach { fpu =>
        fpu.io.cp_req.valid := Bool(false)
        fpu.io.cp_resp.ready := Bool(false)
      }
    }
  }
}

View File

@ -15,103 +15,97 @@ import coreplex._
// the following parameters will be refactored properly with TL2
case object GlobalAddrMap extends Field[AddrMap]
case object ConfigString extends Field[String]
case object NCoreplexExtClients extends Field[Int]

/** Enable or disable monitoring of Diplomatic buses */
case object TLEmitMonitors extends Field[Bool]

/** Function for building Coreplex */
case object BuildCoreplex extends Field[(CoreplexConfig, Parameters) => BaseCoreplexModule[BaseCoreplex, BaseCoreplexBundle]]

/** Base Top with no Periphery */
abstract class BaseTop(q: Parameters) extends LazyModule {
  // the following variables will be refactored properly with TL2
  val pInterrupts = new RangeManager
  val pBusMasters = new RangeManager
  val pDevices = new ResourceManager[AddrMapEntry]

  TLImp.emitMonitors = q(TLEmitMonitors)

  // Add a SoC and peripheral bus
  val socBus = LazyModule(new TLXbar)
  val peripheryBus = LazyModule(new TLXbar)
  lazy val peripheryManagers = socBus.node.edgesIn(0).manager.managers

  lazy val c = CoreplexConfig(
    nTiles = q(NTiles),
    nExtInterrupts = pInterrupts.sum,
    nSlaves = pBusMasters.sum,
    nMemChannels = q(NMemoryChannels),
    hasSupervisor = q(UseVM)
  )

  lazy val genGlobalAddrMap = GenerateGlobalAddrMap(q, pDevices.get, peripheryManagers)
  private val qWithMap = q.alterPartial({case GlobalAddrMap => genGlobalAddrMap})

  lazy val genConfigString = GenerateConfigString(qWithMap, c, pDevices.get, peripheryManagers)
  implicit val p = qWithMap.alterPartial({
    case ConfigString => genConfigString
    case NCoreplexExtClients => pBusMasters.sum})

  val legacy = LazyModule(new TLLegacy()(p.alterPartial({ case TLId => "L2toMMIO" })))

  peripheryBus.node :=
    TLWidthWidget(p(SOCBusKey).beatBytes)(
    TLBuffer()(
    TLAtomicAutomata(arithmetic = p(PeripheryBusKey).arithAMO)(
    socBus.node)))

  socBus.node :=
    TLWidthWidget(legacy.tlDataBytes)(
    TLHintHandler()(
    legacy.node))

  TopModule.contents = Some(this)
}

abstract class BareTop[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit val q: Parameters) extends LazyModule {
  // Fill in the TL1 legacy parameters; remove these once rocket/groundtest/unittest are TL2
  val pBusMasters = new RangeManager
  lazy val legacyAddrMap = GenerateGlobalAddrMap(q, coreplex.l1tol2.node.edgesIn(0).manager.managers)
  val coreplex : C = LazyModule(_coreplex(q.alterPartial {
    case NCoreplexExtClients => pBusMasters.sum
    case GlobalAddrMap => legacyAddrMap
  }))

  TopModule.contents = Some(this)
}
abstract class BaseTopBundle(val p: Parameters) extends Bundle { abstract class BareTopBundle[+L <: BareTop[BaseCoreplex]](_outer: L) extends Bundle {
val outer = _outer
}
abstract class BareTopModule[+L <: BareTop[BaseCoreplex], +B <: BareTopBundle[L]](_outer: L, _io: () => B) extends LazyModuleImp(_outer) {
val outer = _outer
val io = _io ()
}
/** Base Top with no Periphery */
trait TopNetwork extends HasPeripheryParameters {
this: BareTop[BaseCoreplex] =>
implicit val p = q
TLImp.emitMonitors = p(TLEmitMonitors)
// Add a SoC and peripheral bus
val socBus = LazyModule(new TLXbar)
val peripheryBus = LazyModule(new TLXbar)
val intBus = LazyModule(new IntXbar)
peripheryBus.node :=
TLWidthWidget(p(SOCBusKey).beatBytes)(
TLAtomicAutomata(arithmetic = p(PeripheryBusKey).arithAMO)(
socBus.node))
}
trait TopNetworkBundle extends HasPeripheryParameters {
this: BareTopBundle[BareTop[BaseCoreplex]] =>
implicit val p = outer.q
val success = Bool(OUTPUT) val success = Bool(OUTPUT)
} }
abstract class BaseTopModule[+L <: BaseTop, +B <: BaseTopBundle]( trait TopNetworkModule extends HasPeripheryParameters {
val p: Parameters, l: L, b: => B) extends LazyModuleImp(l) { this: {
val outer: L = l val outer: BareTop[BaseCoreplex] with TopNetwork
val io: B = b val io: TopNetworkBundle
} =>
implicit val p = outer.p
  val coreplex = p(BuildCoreplex)(outer.c, p)
  val coreplexIO = Wire(coreplex.io)

  val pBus =
    Module(new TileLinkRecursiveInterconnect(1, p(GlobalAddrMap).subMap("io:pbus"))(
      p.alterPartial({ case TLId => "L2toMMIO" })))
  pBus.io.in.head <> coreplexIO.master.mmio
  outer.legacy.module.io.legacy <> pBus.port("TL2")

  println("Generated Address Map")
  for (entry <- p(GlobalAddrMap).flatten) {
    val name = entry.name
    val start = entry.region.start
    val end = entry.region.start + entry.region.size - 1
    val prot = entry.region.attr.prot
    val protStr = (if ((prot & AddrMapProt.R) > 0) "R" else "") +
                  (if ((prot & AddrMapProt.W) > 0) "W" else "") +
                  (if ((prot & AddrMapProt.X) > 0) "X" else "")
    val cacheable = if (entry.region.attr.cacheable) " [C]" else ""
    println(f"\t$name%s $start%x - $end%x, $protStr$cacheable")
  }

  println("\nGenerated Interrupt Vector")
  outer.pInterrupts.print

  println("\nGenerated Configuration String")
  println(p(ConfigString))
  ConfigStringOutput.contents = Some(p(ConfigString))

  io.success := coreplexIO.success
}

  val coreplexMem  : Vec[ClientUncachedTileLinkIO] = Wire(outer.coreplex.module.io.mem)
  val coreplexSlave: Vec[ClientUncachedTileLinkIO] = Wire(outer.coreplex.module.io.slave)
  val coreplexDebug: DebugBusIO = Wire(outer.coreplex.module.io.debug)
  val coreplexRtc  : Bool = Wire(outer.coreplex.module.io.rtcTick)

  io.success := outer.coreplex.module.io.success

  outer.coreplex.module.io.rtcTick := coreplexRtc
  coreplexRtc := Counter(p(rocketchip.RTCPeriod)).inc()
}
/** Base Top with no Periphery */
class BaseTop[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit p: Parameters) extends BareTop(_coreplex)
with TopNetwork {
override lazy val module = new BaseTopModule(this, () => new BaseTopBundle(this))
}
class BaseTopBundle[+L <: BaseTop[BaseCoreplex]](_outer: L) extends BareTopBundle(_outer)
with TopNetworkBundle
class BaseTopModule[+L <: BaseTop[BaseCoreplex], +B <: BaseTopBundle[L]](_outer: L, _io: () => B) extends BareTopModule(_outer, _io)
with TopNetworkModule
trait DirectConnection { trait DirectConnection {
val coreplexIO: BaseCoreplexBundle this: BareTop[BaseCoreplex] with TopNetwork =>
val coreplex: BaseCoreplexModule[BaseCoreplex, BaseCoreplexBundle]
coreplexIO <> coreplex.io socBus.node := coreplex.mmio
coreplex.mmioInt := intBus.intnode
}
trait DirectConnectionModule {
this: TopNetworkModule {
val outer: BaseTop[BaseCoreplex]
} =>
coreplexMem <> outer.coreplex.module.io.mem
outer.coreplex.module.io.slave <> coreplexSlave
outer.coreplex.module.io.debug <> coreplexDebug
} }
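The TopNetwork / TopNetworkBundle / TopNetworkModule traits above never extend a concrete class; each one declares, through a self-type with a structural refinement, what its eventual host must already provide (an `outer` of the right shape, an `io`). A tiny self-contained Scala illustration of that mechanism (Host, NeedsOuter and their members are invented for this sketch):

    import scala.language.reflectiveCalls

    trait Host { def outer: String }

    trait NeedsOuter {
      this: Host { def io: Int } =>          // host must be a Host and also define io
      def describe = s"$outer drives an io of width $io"
    }

    object CakeDemo extends App {
      val top = new Host with NeedsOuter { val outer = "ExampleTop"; val io = 64 }
      println(top.describe)                  // ExampleTop drives an io of width 64
    }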

@ -40,19 +40,12 @@ class BasePlatformConfig extends Config(
site(TLKey("L2toMC")).copy(dataBeats = edgeDataBeats) site(TLKey("L2toMC")).copy(dataBeats = edgeDataBeats)
case TLKey("MMIOtoEdge") => case TLKey("MMIOtoEdge") =>
site(TLKey("L2toMMIO")).copy(dataBeats = edgeDataBeats) site(TLKey("L2toMMIO")).copy(dataBeats = edgeDataBeats)
case BuildCoreplex =>
(c: CoreplexConfig, p: Parameters) => LazyModule(new DefaultCoreplex(c)(p)).module
case NExtTopInterrupts => 2 case NExtTopInterrupts => 2
case SOCBusKey => SOCBusConfig(beatBytes = site(TLKey("L2toMMIO")).dataBitsPerBeat/8) case SOCBusKey => SOCBusConfig(beatBytes = site(TLKey("L2toMMIO")).dataBitsPerBeat/8)
case PeripheryBusKey => PeripheryBusConfig(arithAMO = true, beatBytes = 4) case PeripheryBusKey => PeripheryBusConfig(arithAMO = true, beatBytes = 4)
// Note that PLIC asserts that this is > 0. // Note that PLIC asserts that this is > 0.
case AsyncDebugBus => false case AsyncDebugBus => false
case IncludeJtagDTM => false case IncludeJtagDTM => false
case AsyncMMIOChannels => false
case ExtMMIOPorts => Nil
case NExtMMIOAXIChannels => 0
case NExtMMIOAHBChannels => 0
case NExtMMIOTLChannels => 0
case AsyncBusChannels => false case AsyncBusChannels => false
case NExtBusAXIChannels => 0 case NExtBusAXIChannels => 0
case HastiId => "Ext" case HastiId => "Ext"
@ -70,7 +63,7 @@ class BasePlatformConfig extends Config(
case ExtMemSize => Dump("MEM_SIZE", 0x10000000L) case ExtMemSize => Dump("MEM_SIZE", 0x10000000L)
case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
case BuildExampleTop => case BuildExampleTop =>
(p: Parameters) => LazyModule(new ExampleTop(p)) (p: Parameters) => LazyModule(new ExampleTop(new DefaultCoreplex()(_))(p))
case SimMemLatency => 0 case SimMemLatency => 0
case _ => throw new CDEMatchError case _ => throw new CDEMatchError
} }
@ -110,17 +103,6 @@ class WithExtMemSize(n: Long) extends Config(
case _ => throw new CDEMatchError case _ => throw new CDEMatchError
} }
) )
class WithAHB extends Config(
(pname, site, here) => pname match {
case TMemoryChannels => BusType.AHB
case NExtMMIOAHBChannels => 1
})
class WithTL extends Config(
(pname, site, here) => pname match {
case TMemoryChannels => BusType.TL
case NExtMMIOTLChannels => 1
})
class WithScratchpads extends Config(new WithNMemoryChannels(0) ++ new WithDataScratchpad(16384)) class WithScratchpads extends Config(new WithNMemoryChannels(0) ++ new WithDataScratchpad(16384))
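The config classes above are CDE partial functions: each fragment answers the keys it knows and throws CDEMatchError for everything else so lookup falls through to the next fragment. A project-local override would take the same shape (WithBigExtMem and the 1 GiB value are invented examples, not part of this patch):

    class WithBigExtMem extends Config(
      (pname, site, here) => pname match {
        case ExtMemSize => Dump("MEM_SIZE", 0x40000000L)  // 1 GiB instead of the default
        case _ => throw new CDEMatchError                 // defer every other key
      })

    // Fragments compose with ++ and the left-hand fragment wins on a conflict:
    //   class BigMemConfig extends Config(new WithBigExtMem ++ new BasePlatformConfig)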

@ -9,45 +9,43 @@ import coreplex._
import rocketchip._ import rocketchip._
/** Example Top with Periphery */ /** Example Top with Periphery */
class ExampleTop(q: Parameters) extends BaseTop(q) class ExampleTop[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit p: Parameters) extends BaseTop(_coreplex)
with PeripheryBootROM with PeripheryBootROM
with PeripheryDebug with PeripheryDebug
with PeripheryExtInterrupts with PeripheryExtInterrupts
with PeripheryCoreplexLocalInterrupter
with PeripheryMasterMem with PeripheryMasterMem
with PeripheryMasterMMIO with PeripheryMasterAXI4MMIO
with PeripherySlave { with PeripherySlave
override lazy val module = Module(new ExampleTopModule(p, this, new ExampleTopBundle(p))) with DirectConnection {
override lazy val module = new ExampleTopModule(this, () => new ExampleTopBundle(this))
} }
class ExampleTopBundle(p: Parameters) extends BaseTopBundle(p) class ExampleTopBundle[+L <: ExampleTop[BaseCoreplex]](_outer: L) extends BaseTopBundle(_outer)
with PeripheryBootROMBundle with PeripheryBootROMBundle
with PeripheryDebugBundle with PeripheryDebugBundle
with PeripheryExtInterruptsBundle with PeripheryExtInterruptsBundle
with PeripheryCoreplexLocalInterrupterBundle
with PeripheryMasterMemBundle with PeripheryMasterMemBundle
with PeripheryMasterMMIOBundle with PeripheryMasterAXI4MMIOBundle
with PeripherySlaveBundle with PeripherySlaveBundle
class ExampleTopModule[+L <: ExampleTop, +B <: ExampleTopBundle](p: Parameters, l: L, b: => B) extends BaseTopModule(p, l, b) class ExampleTopModule[+L <: ExampleTop[BaseCoreplex], +B <: ExampleTopBundle[L]](_outer: L, _io: () => B) extends BaseTopModule(_outer, _io)
with PeripheryBootROMModule with PeripheryBootROMModule
with PeripheryDebugModule with PeripheryDebugModule
with PeripheryExtInterruptsModule with PeripheryExtInterruptsModule
with PeripheryCoreplexLocalInterrupterModule
with PeripheryMasterMemModule with PeripheryMasterMemModule
with PeripheryMasterMMIOModule with PeripheryMasterAXI4MMIOModule
with PeripherySlaveModule with PeripherySlaveModule
with HardwiredResetVector with HardwiredResetVector
with DirectConnection with DirectConnectionModule
/** Example Top with TestRAM */ /** Example Top with TestRAM */
class ExampleTopWithTestRAM(q: Parameters) extends ExampleTop(q) class ExampleTopWithTestRAM[+C <: BaseCoreplex](_coreplex: Parameters => C)(implicit p: Parameters) extends ExampleTop(_coreplex)
with PeripheryTestRAM { with PeripheryTestRAM {
override lazy val module = Module(new ExampleTopWithTestRAMModule(p, this, new ExampleTopWithTestRAMBundle(p))) override lazy val module = new ExampleTopWithTestRAMModule(this, () => new ExampleTopWithTestRAMBundle(this))
} }
class ExampleTopWithTestRAMBundle(p: Parameters) extends ExampleTopBundle(p) class ExampleTopWithTestRAMBundle[+L <: ExampleTopWithTestRAM[BaseCoreplex]](_outer: L) extends ExampleTopBundle(_outer)
with PeripheryTestRAMBundle with PeripheryTestRAMBundle
class ExampleTopWithTestRAMModule[+L <: ExampleTopWithTestRAM, +B <: ExampleTopWithTestRAMBundle](p: Parameters, l: L, b: => B) extends ExampleTopModule(p, l, b) class ExampleTopWithTestRAMModule[+L <: ExampleTopWithTestRAM[BaseCoreplex], +B <: ExampleTopWithTestRAMBundle[L]](_outer: L, _io: () => B) extends ExampleTopModule(_outer, _io)
with PeripheryTestRAMModule with PeripheryTestRAMModule
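Because the coreplex is now a constructor argument rather than a BuildCoreplex field, a harness can swap coreplexes without declaring a new Top class. A sketch mirroring the BuildExampleTop entry in BasePlatformConfig (DefaultCoreplex as used there; any other BaseCoreplex subclass would fit the same slot):

    val top = LazyModule(new ExampleTopWithTestRAM(new DefaultCoreplex()(_))(p))
    val dut = Module(top.module)    // as the TestHarness below does with BuildExampleTop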

@ -9,6 +9,7 @@ import junctions.NastiConstants._
import diplomacy._ import diplomacy._
import uncore.tilelink._ import uncore.tilelink._
import uncore.tilelink2._ import uncore.tilelink2._
import uncore.axi4._
import uncore.converters._ import uncore.converters._
import uncore.devices._ import uncore.devices._
import uncore.agents._ import uncore.agents._
@ -29,19 +30,12 @@ object BusType {
/** Memory channel controls */ /** Memory channel controls */
case object TMemoryChannels extends Field[BusType.EnumVal] case object TMemoryChannels extends Field[BusType.EnumVal]
/** External MMIO controls */
case object NExtMMIOAXIChannels extends Field[Int]
case object NExtMMIOAHBChannels extends Field[Int]
case object NExtMMIOTLChannels extends Field[Int]
/** External Bus controls */ /** External Bus controls */
case object NExtBusAXIChannels extends Field[Int] case object NExtBusAXIChannels extends Field[Int]
/** Async configurations */ /** Async configurations */
case object AsyncBusChannels extends Field[Boolean] case object AsyncBusChannels extends Field[Boolean]
case object AsyncDebugBus extends Field[Boolean] case object AsyncDebugBus extends Field[Boolean]
case object AsyncMemChannels extends Field[Boolean] case object AsyncMemChannels extends Field[Boolean]
case object AsyncMMIOChannels extends Field[Boolean]
/** External address map settings */
case object ExtMMIOPorts extends Field[Seq[AddrMapEntry]]
/** Specifies the size of external memory */ /** Specifies the size of external memory */
case object ExtMemSize extends Field[Long] case object ExtMemSize extends Field[Long]
/** Specifies the number of external interrupts */ /** Specifies the number of external interrupts */
@ -89,10 +83,8 @@ trait HasPeripheryParameters {
lazy val nMemAXIChannels = if (tMemChannels == BusType.AXI) nMemChannels else 0 lazy val nMemAXIChannels = if (tMemChannels == BusType.AXI) nMemChannels else 0
lazy val nMemAHBChannels = if (tMemChannels == BusType.AHB) nMemChannels else 0 lazy val nMemAHBChannels = if (tMemChannels == BusType.AHB) nMemChannels else 0
lazy val nMemTLChannels = if (tMemChannels == BusType.TL) nMemChannels else 0 lazy val nMemTLChannels = if (tMemChannels == BusType.TL) nMemChannels else 0
lazy val outerMMIOParams = p.alterPartial({ case TLId => "L2toMMIO" })
lazy val edgeSlaveParams = p.alterPartial({ case TLId => "EdgetoSlave" }) lazy val edgeSlaveParams = p.alterPartial({ case TLId => "EdgetoSlave" })
lazy val edgeMemParams = p.alterPartial({ case TLId => "MCtoEdge" }) lazy val edgeMemParams = p.alterPartial({ case TLId => "MCtoEdge" })
lazy val edgeMMIOParams = p.alterPartial({ case TLId => "MMIOtoEdge" })
lazy val peripheryBusConfig = p(PeripheryBusKey) lazy val peripheryBusConfig = p(PeripheryBusKey)
lazy val socBusConfig = p(SOCBusKey) lazy val socBusConfig = p(SOCBusKey)
lazy val cacheBlockBytes = p(CacheBlockBytes) lazy val cacheBlockBytes = p(CacheBlockBytes)
@ -100,12 +92,14 @@ trait HasPeripheryParameters {
///// /////
trait PeripheryDebug extends LazyModule { trait PeripheryDebug {
implicit val p: Parameters this: TopNetwork =>
} }
trait PeripheryDebugBundle { trait PeripheryDebugBundle {
implicit val p: Parameters this: TopNetworkBundle {
val outer: PeripheryDebug
} =>
val debug_clk = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Clock(INPUT)) val debug_clk = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Clock(INPUT))
val debug_rst = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Bool(INPUT)) val debug_rst = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Bool(INPUT))
val debug = (!p(IncludeJtagDTM)).option(new DebugBusIO()(p).flip) val debug = (!p(IncludeJtagDTM)).option(new DebugBusIO()(p).flip)
@ -113,19 +107,19 @@ trait PeripheryDebugBundle {
} }
trait PeripheryDebugModule { trait PeripheryDebugModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripheryDebug val outer: PeripheryDebug
val io: PeripheryDebugBundle val io: PeripheryDebugBundle
val coreplexIO: BaseCoreplexBundle } =>
if (p(IncludeJtagDTM)) { if (p(IncludeJtagDTM)) {
// JtagDTMWithSync is a wrapper which // JtagDTMWithSync is a wrapper which
// handles the synchronization as well. // handles the synchronization as well.
val dtm = Module (new JtagDTMWithSync()(p)) val dtm = Module (new JtagDTMWithSync()(p))
dtm.io.jtag <> io.jtag.get dtm.io.jtag <> io.jtag.get
coreplexIO.debug <> dtm.io.debug coreplexDebug <> dtm.io.debug
} else { } else {
coreplexIO.debug <> coreplexDebug <>
(if (p(AsyncDebugBus)) AsyncDebugBusFrom(io.debug_clk.get, io.debug_rst.get, io.debug.get) (if (p(AsyncDebugBus)) AsyncDebugBusFrom(io.debug_clk.get, io.debug_rst.get, io.debug.get)
else io.debug.get) else io.debug.get)
} }
@ -133,40 +127,40 @@ trait PeripheryDebugModule {
///// /////
trait PeripheryExtInterrupts extends LazyModule { trait PeripheryExtInterrupts {
implicit val p: Parameters this: TopNetwork =>
val pInterrupts: RangeManager
pInterrupts.add("ext", p(NExtTopInterrupts)) val extInterrupts = IntBlindInputNode(p(NExtTopInterrupts))
val extInterruptXing = LazyModule(new IntXing)
intBus.intnode := extInterruptXing.intnode
extInterruptXing.intnode := extInterrupts
} }
trait PeripheryExtInterruptsBundle { trait PeripheryExtInterruptsBundle {
implicit val p: Parameters this: TopNetworkBundle {
val interrupts = Vec(p(NExtTopInterrupts), Bool()).asInput val outer: PeripheryExtInterrupts
} =>
val interrupts = outer.extInterrupts.bundleIn
} }
trait PeripheryExtInterruptsModule { trait PeripheryExtInterruptsModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripheryExtInterrupts val outer: PeripheryExtInterrupts
val io: PeripheryExtInterruptsBundle val io: PeripheryExtInterruptsBundle
val coreplexIO: BaseCoreplexBundle } =>
{
val r = outer.pInterrupts.range("ext")
((r._1 until r._2) zipWithIndex) foreach { case (c, i) =>
coreplexIO.interrupts(c) := io.interrupts(i)
}
}
} }
///// /////
trait PeripheryMasterMem extends LazyModule { trait PeripheryMasterMem {
implicit val p: Parameters this: TopNetwork =>
} }
trait PeripheryMasterMemBundle extends HasPeripheryParameters { trait PeripheryMasterMemBundle {
implicit val p: Parameters this: TopNetworkBundle {
val outer: PeripheryMasterMem
} =>
val mem_clk = p(AsyncMemChannels).option(Vec(nMemChannels, Clock(INPUT))) val mem_clk = p(AsyncMemChannels).option(Vec(nMemChannels, Clock(INPUT)))
val mem_rst = p(AsyncMemChannels).option(Vec(nMemChannels, Bool (INPUT))) val mem_rst = p(AsyncMemChannels).option(Vec(nMemChannels, Bool (INPUT)))
val mem_axi = Vec(nMemAXIChannels, new NastiIO) val mem_axi = Vec(nMemAXIChannels, new NastiIO)
@ -174,13 +168,13 @@ trait PeripheryMasterMemBundle extends HasPeripheryParameters {
val mem_tl = Vec(nMemTLChannels, new ClientUncachedTileLinkIO()(edgeMemParams)) val mem_tl = Vec(nMemTLChannels, new ClientUncachedTileLinkIO()(edgeMemParams))
} }
trait PeripheryMasterMemModule extends HasPeripheryParameters { trait PeripheryMasterMemModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripheryMasterMem val outer: PeripheryMasterMem
val io: PeripheryMasterMemBundle val io: PeripheryMasterMemBundle
val coreplexIO: BaseCoreplexBundle } =>
val edgeMem = coreplexIO.master.mem.map(TileLinkWidthAdapter(_, edgeMemParams)) val edgeMem = coreplexMem.map(TileLinkWidthAdapter(_, edgeMemParams))
// Abuse the fact that zip takes the shorter of the two lists // Abuse the fact that zip takes the shorter of the two lists
((io.mem_axi zip edgeMem) zipWithIndex) foreach { case ((axi, mem), idx) => ((io.mem_axi zip edgeMem) zipWithIndex) foreach { case ((axi, mem), idx) =>
@ -204,78 +198,65 @@ trait PeripheryMasterMemModule extends HasPeripheryParameters {
///// /////
trait PeripheryMasterMMIO extends LazyModule { // PeripheryMasterAXI4MMIO is an example, make your own cake pattern like this one.
implicit val p: Parameters trait PeripheryMasterAXI4MMIO {
this: TopNetwork =>
val mmio_axi4 = AXI4BlindOutputNode(AXI4SlavePortParameters(
slaves = Seq(AXI4SlaveParameters(
address = List(AddressSet(0x60000000L, 0x1fffffffL)),
executable = true, // Can we run programs on this memory?
supportsWrite = TransferSizes(1, 256), // The slave supports 1-256 byte transfers
supportsRead = TransferSizes(1, 256),
interleavedId = Some(0))), // slave does not interleave read responses
beatBytes = 8)) // 64-bit AXI interface
mmio_axi4 :=
// AXI4Fragmenter(lite=false, maxInFlight = 20)( // beef device up to support awlen = 0xff
TLToAXI4(idBits = 4)( // use idBits = 0 for AXI4-Lite
TLWidthWidget(socBusConfig.beatBytes)( // convert width before attaching to socBus
socBus.node))
} }
trait PeripheryMasterMMIOBundle extends HasPeripheryParameters { trait PeripheryMasterAXI4MMIOBundle {
implicit val p: Parameters this: TopNetworkBundle {
val mmio_clk = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Clock(INPUT))) val outer: PeripheryMasterAXI4MMIO
val mmio_rst = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Bool (INPUT))) } =>
val mmio_axi = Vec(p(NExtMMIOAXIChannels), new NastiIO) val mmio_axi = outer.mmio_axi4.bundleOut
val mmio_ahb = Vec(p(NExtMMIOAHBChannels), new HastiMasterIO)
val mmio_tl = Vec(p(NExtMMIOTLChannels), new ClientUncachedTileLinkIO()(edgeMMIOParams))
} }
trait PeripheryMasterMMIOModule extends HasPeripheryParameters { trait PeripheryMasterAXI4MMIOModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripheryMasterMMIO val outer: PeripheryMasterAXI4MMIO
val io: PeripheryMasterMMIOBundle val io: PeripheryMasterAXI4MMIOBundle
val pBus: TileLinkRecursiveInterconnect } =>
// nothing to do
val mmio_ports = p(ExtMMIOPorts) map { port =>
TileLinkWidthAdapter(pBus.port(port.name), edgeMMIOParams)
}
val mmio_axi_start = 0
val mmio_axi_end = mmio_axi_start + p(NExtMMIOAXIChannels)
val mmio_ahb_start = mmio_axi_end
val mmio_ahb_end = mmio_ahb_start + p(NExtMMIOAHBChannels)
val mmio_tl_start = mmio_ahb_end
val mmio_tl_end = mmio_tl_start + p(NExtMMIOTLChannels)
require (mmio_tl_end == mmio_ports.size)
for (i <- 0 until mmio_ports.size) {
if (mmio_axi_start <= i && i < mmio_axi_end) {
val idx = i-mmio_axi_start
val axi_sync = PeripheryUtils.convertTLtoAXI(mmio_ports(i))
io.mmio_axi(idx) <> (
if (!p(AsyncMMIOChannels)) axi_sync
else AsyncNastiTo(io.mmio_clk.get(idx), io.mmio_rst.get(idx), axi_sync)
)
} else if (mmio_ahb_start <= i && i < mmio_ahb_end) {
val idx = i-mmio_ahb_start
io.mmio_ahb(idx) <> PeripheryUtils.convertTLtoAHB(mmio_ports(i), atomics = true)
} else if (mmio_tl_start <= i && i < mmio_tl_end) {
val idx = i-mmio_tl_start
io.mmio_tl(idx) <> TileLinkEnqueuer(mmio_ports(i), 2)
} else {
require(false, "Unconnected external MMIO port")
}
}
} }
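Per the comment above, PeripheryMasterAXI4MMIO is meant as a template: a new peripheral is added as three traits, one per layer of the cake. A sketch of a simple memory-mapped RAM in the same style as PeripheryTestRAM further down (the PeripheryMyRAM* names and the 0x70000000 address are invented):

    trait PeripheryMyRAM {
      this: TopNetwork =>
      val myram = LazyModule(new TLRAM(AddressSet(0x70000000L, 0xfff), true, peripheryBusConfig.beatBytes))
      myram.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
    }

    trait PeripheryMyRAMBundle {
      this: TopNetworkBundle {
        val outer: PeripheryMyRAM
      } =>
      // an on-chip RAM needs no top-level pins
    }

    trait PeripheryMyRAMModule {
      this: TopNetworkModule {
        val outer: PeripheryMyRAM
        val io: PeripheryMyRAMBundle
      } =>
      // nothing to wire at elaboration time
    }

The three traits would then be mixed into the corresponding ExampleTop, ExampleTopBundle and ExampleTopModule classes, exactly as the AXI4 MMIO port is above.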
///// /////
trait PeripherySlave extends LazyModule { trait PeripherySlave {
implicit val p: Parameters this: TopNetwork {
val pBusMasters: RangeManager val pBusMasters: RangeManager
} =>
if (p(NExtBusAXIChannels) > 0) pBusMasters.add("ext", 1) // NExtBusAXIChannels are arbitrated into one TL port if (p(NExtBusAXIChannels) > 0) pBusMasters.add("ext", 1) // NExtBusAXIChannels are arbitrated into one TL port
} }
trait PeripherySlaveBundle extends HasPeripheryParameters { trait PeripherySlaveBundle {
implicit val p: Parameters this: TopNetworkBundle {
val outer: PeripherySlave
} =>
val bus_clk = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Clock(INPUT))) val bus_clk = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Clock(INPUT)))
val bus_rst = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Bool (INPUT))) val bus_rst = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Bool (INPUT)))
val bus_axi = Vec(p(NExtBusAXIChannels), new NastiIO).flip val bus_axi = Vec(p(NExtBusAXIChannels), new NastiIO).flip
} }
trait PeripherySlaveModule extends HasPeripheryParameters { trait PeripherySlaveModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripherySlave val outer: PeripherySlave { val pBusMasters: RangeManager }
val io: PeripherySlaveBundle val io: PeripherySlaveBundle
val coreplexIO: BaseCoreplexBundle } =>
if (p(NExtBusAXIChannels) > 0) { if (p(NExtBusAXIChannels) > 0) {
val arb = Module(new NastiArbiter(p(NExtBusAXIChannels))) val arb = Module(new NastiArbiter(p(NExtBusAXIChannels)))
@ -290,105 +271,82 @@ trait PeripherySlaveModule extends HasPeripheryParameters {
val (r_start, r_end) = outer.pBusMasters.range("ext") val (r_start, r_end) = outer.pBusMasters.range("ext")
require(r_end - r_start == 1, "RangeManager should return 1 slot") require(r_end - r_start == 1, "RangeManager should return 1 slot")
TileLinkWidthAdapter(coreplexIO.slave(r_start), conv.io.tl) TileLinkWidthAdapter(coreplexSlave(r_start), conv.io.tl)
} }
} }
///// /////
trait PeripheryCoreplexLocalInterrupter extends LazyModule with HasPeripheryParameters { trait PeripheryBootROM {
implicit val p: Parameters this: TopNetwork =>
val peripheryBus: TLXbar
// CoreplexLocalInterrupter must be at least 64b if XLen >= 64 val bootrom_address = 0x1000
val beatBytes = max((outerMMIOParams(XLen) min 64) / 8, peripheryBusConfig.beatBytes) val bootrom_size = 0x1000
val clintConfig = CoreplexLocalInterrupterConfig(beatBytes) val bootrom = LazyModule(new TLROM(bootrom_address, bootrom_size, GenerateBootROM(p, bootrom_address), true, peripheryBusConfig.beatBytes))
val clint = LazyModule(new CoreplexLocalInterrupter(clintConfig)(outerMMIOParams))
// The periphery bus is 32-bit, so we may need to adapt its width to XLen
clint.node := TLFragmenter(beatBytes, cacheBlockBytes)(TLWidthWidget(peripheryBusConfig.beatBytes)(peripheryBus.node))
}
trait PeripheryCoreplexLocalInterrupterBundle {
implicit val p: Parameters
}
trait PeripheryCoreplexLocalInterrupterModule extends HasPeripheryParameters {
implicit val p: Parameters
val outer: PeripheryCoreplexLocalInterrupter
val io: PeripheryCoreplexLocalInterrupterBundle
val coreplexIO: BaseCoreplexBundle
outer.clint.module.io.rtcTick := Counter(p(RTCPeriod)).inc()
coreplexIO.clint <> outer.clint.module.io.tiles
}
/////
trait PeripheryBootROM extends LazyModule with HasPeripheryParameters {
implicit val p: Parameters
val peripheryBus: TLXbar
val address = 0x1000
val size = 0x1000
val bootrom = LazyModule(new TLROM(address, size, GenerateBootROM(p, address), true, peripheryBusConfig.beatBytes))
bootrom.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node) bootrom.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
} }
trait PeripheryBootROMBundle { trait PeripheryBootROMBundle {
implicit val p: Parameters this: TopNetworkBundle {
val outer: PeripheryBootROM
} =>
} }
trait PeripheryBootROMModule extends HasPeripheryParameters { trait PeripheryBootROMModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripheryBootROM val outer: PeripheryBootROM
val io: PeripheryBootROMBundle val io: PeripheryBootROMBundle
} =>
} }
///// /////
trait PeripheryTestRAM extends LazyModule with HasPeripheryParameters { trait PeripheryTestRAM {
implicit val p: Parameters this: TopNetwork =>
val peripheryBus: TLXbar
val ramBase = 0x52000000 val testram = LazyModule(new TLRAM(AddressSet(0x52000000, 0xfff), true, peripheryBusConfig.beatBytes))
val ramSize = 0x1000 testram.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
val sram = LazyModule(new TLRAM(AddressSet(ramBase, ramSize-1), true, peripheryBusConfig.beatBytes)
{ override def name = "testram" })
sram.node := TLFragmenter(peripheryBusConfig.beatBytes, cacheBlockBytes)(peripheryBus.node)
} }
trait PeripheryTestRAMBundle { trait PeripheryTestRAMBundle {
implicit val p: Parameters this: TopNetworkBundle {
val outer: PeripheryTestRAM
} =>
} }
trait PeripheryTestRAMModule extends HasPeripheryParameters { trait PeripheryTestRAMModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripheryTestRAM val outer: PeripheryTestRAM
val io: PeripheryTestRAMBundle
} =>
} }
///// /////
trait PeripheryTestBusMaster extends LazyModule { trait PeripheryTestBusMaster {
implicit val p: Parameters this: TopNetwork =>
val peripheryBus: TLXbar
val fuzzer = LazyModule(new TLFuzzer(5000)) val fuzzer = LazyModule(new TLFuzzer(5000))
peripheryBus.node := fuzzer.node peripheryBus.node := fuzzer.node
} }
trait PeripheryTestBusMasterBundle { trait PeripheryTestBusMasterBundle {
implicit val p: Parameters this: TopNetworkBundle {
val outer: PeripheryTestBusMaster
} =>
} }
trait PeripheryTestBusMasterModule { trait PeripheryTestBusMasterModule {
implicit val p: Parameters this: TopNetworkModule {
val outer: PeripheryTestBusMaster val outer: PeripheryTestBusMaster
val io: PeripheryTestBusMasterBundle
} =>
} }
///// /////
trait HardwiredResetVector { trait HardwiredResetVector {
val coreplexIO: BaseCoreplexBundle this: TopNetworkModule {
coreplexIO.resetVector := UInt(0x1000) // boot ROM val outer: BaseTop[BaseCoreplex]
} =>
outer.coreplex.module.io.resetVector := UInt(0x1000) // boot ROM
} }

@ -8,14 +8,14 @@ import junctions._
import junctions.NastiConstants._ import junctions.NastiConstants._
import util.LatencyPipe import util.LatencyPipe
case object BuildExampleTop extends Field[Parameters => ExampleTop] case object BuildExampleTop extends Field[Parameters => ExampleTop[coreplex.BaseCoreplex]]
case object SimMemLatency extends Field[Int] case object SimMemLatency extends Field[Int]
class TestHarness(q: Parameters) extends Module { class TestHarness(q: Parameters) extends Module {
val io = new Bundle { val io = new Bundle {
val success = Bool(OUTPUT) val success = Bool(OUTPUT)
} }
val dut = q(BuildExampleTop)(q).module val dut = Module(q(BuildExampleTop)(q).module)
implicit val p = dut.p implicit val p = dut.p
// This test harness isn't especially flexible yet // This test harness isn't especially flexible yet
@ -25,16 +25,12 @@ class TestHarness(q: Parameters) extends Module {
require(dut.io.mem_tl.isEmpty) require(dut.io.mem_tl.isEmpty)
require(dut.io.bus_clk.isEmpty) require(dut.io.bus_clk.isEmpty)
require(dut.io.bus_rst.isEmpty) require(dut.io.bus_rst.isEmpty)
require(dut.io.mmio_clk.isEmpty)
require(dut.io.mmio_rst.isEmpty)
require(dut.io.mmio_ahb.isEmpty)
require(dut.io.mmio_tl.isEmpty)
for (int <- dut.io.interrupts) for (int <- dut.io.interrupts(0))
int := Bool(false) int := Bool(false)
if (dut.io.mem_axi.nonEmpty) { if (dut.io.mem_axi.nonEmpty) {
val memSize = p(GlobalAddrMap)("mem").size val memSize = p(ExtMemSize)
require(memSize % dut.io.mem_axi.size == 0) require(memSize % dut.io.mem_axi.size == 0)
for (axi <- dut.io.mem_axi) { for (axi <- dut.io.mem_axi) {
val mem = Module(new SimAXIMem(memSize / dut.io.mem_axi.size)) val mem = Module(new SimAXIMem(memSize / dut.io.mem_axi.size))

@ -53,20 +53,8 @@ class GlobalVariable[T] {
} }
object GenerateGlobalAddrMap { object GenerateGlobalAddrMap {
def apply(p: Parameters, pDevicesEntries: Seq[AddrMapEntry], peripheryManagers: Seq[TLManagerParameters]) = { def apply(p: Parameters, peripheryManagers: Seq[TLManagerParameters]) = {
lazy val cBusIOAddrMap: AddrMap = { val tl2Devices = peripheryManagers.map { manager =>
val entries = collection.mutable.ArrayBuffer[AddrMapEntry]()
entries += AddrMapEntry("debug", MemSize(4096, MemAttr(AddrMapProt.RWX)))
entries += AddrMapEntry("plic", MemRange(0x0C000000, 0x4000000, MemAttr(AddrMapProt.RW)))
if (p(DataScratchpadSize) > 0) { // TODO heterogeneous tiles
require(p(NTiles) == 1) // TODO relax this
require(p(NMemoryChannels) == 0) // TODO allow both scratchpad & DRAM
entries += AddrMapEntry("dmem0", MemRange(0x80000000L, BigInt(p(DataScratchpadSize)), MemAttr(AddrMapProt.RWX)))
}
new AddrMap(entries)
}
lazy val tl2Devices = peripheryManagers.map { manager =>
val cacheable = manager.regionType match { val cacheable = manager.regionType match {
case RegionType.CACHED => true case RegionType.CACHED => true
case RegionType.TRACKED => true case RegionType.TRACKED => true
@ -84,41 +72,28 @@ object GenerateGlobalAddrMap {
} }
}.flatten }.flatten
lazy val uniquelyNamedTL2Devices = val uniquelyNamedTL2Devices =
tl2Devices.groupBy(_.name).values.map(_.zipWithIndex.map { tl2Devices.groupBy(_.name).values.map(_.zipWithIndex.map {
case (e, i) => if (i == 0) e else e.copy(name = e.name + "_" + i) case (e, i) => if (i == 0) e else e.copy(name = e.name + "_" + i)
}).flatten.toList }).flatten.toList
lazy val tl2AddrMap = new AddrMap(uniquelyNamedTL2Devices, collapse = true)
lazy val pBusIOAddrMap = new AddrMap(AddrMapEntry("TL2", tl2AddrMap) +: (p(ExtMMIOPorts) ++ pDevicesEntries), collapse = true)
val memBase = 0x80000000L val memBase = 0x80000000L
val memSize = p(ExtMemSize) val memSize = p(ExtMemSize)
Dump("MEM_BASE", memBase) Dump("MEM_BASE", memBase)
val cBus = AddrMapEntry("cbus", cBusIOAddrMap) val tl2 = AddrMapEntry("TL2", new AddrMap(uniquelyNamedTL2Devices, collapse = true))
val pBus = AddrMapEntry("pbus", pBusIOAddrMap)
val io = AddrMapEntry("io", AddrMap((cBus +: (!pBusIOAddrMap.isEmpty).option(pBus).toSeq):_*))
val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true))) val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true)))
AddrMap((io +: (p(NMemoryChannels) > 0).option(mem).toSeq):_*) AddrMap((tl2 +: (p(NMemoryChannels) > 0).option(mem).toSeq):_*)
} }
} }
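The result is a two-level AddrMap: a "TL2" subtree derived from the TileLink2 managers, plus an optional "mem" region based at 0x80000000. Downstream code looks regions up by name; a sketch of the kind of query the old address-map printout performed (illustrative only):

    val addrMap = p(GlobalAddrMap)
    if (addrMap contains "mem") {
      val mem = addrMap("mem")
      println(s"mem: 0x${mem.start.toString(16)} - 0x${(mem.start + mem.size - 1).toString(16)}")
    }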
object GenerateConfigString { object GenerateConfigString {
def apply(p: Parameters, c: CoreplexConfig, pDevicesEntries: Seq[AddrMapEntry], peripheryManagers: Seq[TLManagerParameters]) = { def apply(p: Parameters, clint: CoreplexLocalInterrupter, plic: TLPLIC, peripheryManagers: Seq[TLManagerParameters]) = {
val c = CoreplexParameters()(p)
val addrMap = p(GlobalAddrMap) val addrMap = p(GlobalAddrMap)
val plicAddr = addrMap("io:cbus:plic").start
val clint = CoreplexLocalInterrupterConfig(0, addrMap("io:pbus:TL2:clint").start)
val xLen = p(XLen)
val res = new StringBuilder val res = new StringBuilder
res append "plic {\n" res append plic.module.globalConfigString
res append s" priority 0x${plicAddr.toString(16)};\n" res append clint.module.globalConfigString
res append s" pending 0x${(plicAddr + c.plicKey.pendingBase).toString(16)};\n"
res append s" ndevs ${c.plicKey.nDevices};\n"
res append "};\n"
res append "rtc {\n"
res append s" addr 0x${clint.timeAddress.toString(16)};\n"
res append "};\n"
if (addrMap contains "mem") { if (addrMap contains "mem") {
res append "ram {\n" res append "ram {\n"
res append " 0 {\n" res append " 0 {\n"
@ -140,33 +115,12 @@ object GenerateConfigString {
res append s" $i {\n" res append s" $i {\n"
res append " 0 {\n" res append " 0 {\n"
res append s" isa $isa;\n" res append s" isa $isa;\n"
res append s" timecmp 0x${clint.timecmpAddress(i).toString(16)};\n" res append clint.module.hartConfigStrings(i)
res append s" ipi 0x${clint.msipAddress(i).toString(16)};\n" res append plic.module.hartConfigStrings(i)
res append s" plic {\n"
res append s" m {\n"
res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'M')).toString(16)};\n"
res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'M')).toString(16)};\n"
res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'M')).toString(16)};\n"
res append s" };\n"
if (c.hasSupervisor) {
res append s" s {\n"
res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'S')).toString(16)};\n"
res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'S')).toString(16)};\n"
res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'S')).toString(16)};\n"
res append s" };\n"
}
res append " };\n"
res append " };\n" res append " };\n"
res append " };\n" res append " };\n"
} }
res append "};\n" res append "};\n"
pDevicesEntries foreach { entry =>
val region = addrMap("io:pbus:" + entry.name)
res append s"${entry.name} {\n"
res append s" addr 0x${region.start.toString(16)};\n"
res append s" size 0x${region.size.toString(16)}; \n"
res append "}\n"
}
peripheryManagers.foreach { manager => res append manager.dts } peripheryManagers.foreach { manager => res append manager.dts }
res append '\u0000' res append '\u0000'
res.toString res.toString
@ -185,6 +139,6 @@ object GenerateBootROM {
require(rom.getInt(12) == 0, require(rom.getInt(12) == 0,
"Config string address position should not be occupied by code") "Config string address position should not be occupied by code")
rom.putInt(12, configStringAddr) rom.putInt(12, configStringAddr)
rom.array() ++ (p(ConfigString).getBytes.toSeq) rom.array() ++ (ConfigStringOutput.contents.get.getBytes.toSeq)
} }
} }

View File

@ -10,7 +10,7 @@ import scala.math.{min,max}
import uncore.tilelink2.{leftOR, rightOR, UIntToOH1, OH1ToOH} import uncore.tilelink2.{leftOR, rightOR, UIntToOH1, OH1ToOH}
// lite: masters all use only one ID => reads will not be interleaved // lite: masters all use only one ID => reads will not be interleaved
class AXI4Fragmenter(lite: Boolean = false, maxInFlight: Int = 32, combinational: Boolean = true) extends LazyModule class AXI4Fragmenter(lite: Boolean = false, maxInFlight: => Int = 32, combinational: Boolean = true) extends LazyModule
{ {
val maxBeats = 1 << AXI4Parameters.lenBits val maxBeats = 1 << AXI4Parameters.lenBits
def expandTransfer(x: TransferSizes, beatBytes: Int, alignment: BigInt) = def expandTransfer(x: TransferSizes, beatBytes: Int, alignment: BigInt) =
@ -287,7 +287,7 @@ class AXI4FragmenterSideband(maxInFlight: Int, flow: Boolean = false) extends Mo
object AXI4Fragmenter object AXI4Fragmenter
{ {
// applied to the AXI4 source node; y.node := AXI4Fragmenter()(x.node) // applied to the AXI4 source node; y.node := AXI4Fragmenter()(x.node)
def apply(lite: Boolean = false, maxInFlight: Int = 32, combinational: Boolean = true)(x: AXI4OutwardNode)(implicit sourceInfo: SourceInfo): AXI4OutwardNode = { def apply(lite: Boolean = false, maxInFlight: => Int = 32, combinational: Boolean = true)(x: AXI4OutwardNode)(implicit sourceInfo: SourceInfo): AXI4OutwardNode = {
val fragmenter = LazyModule(new AXI4Fragmenter(lite, maxInFlight, combinational)) val fragmenter = LazyModule(new AXI4Fragmenter(lite, maxInFlight, combinational))
fragmenter.node := x fragmenter.node := x
fragmenter.node fragmenter.node
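Usage of the companion apply: the fragmenter is interposed in front of an AXI4 slave by wrapping the node expression, exactly the shape of the (currently commented-out) line in PeripheryMasterAXI4MMIO above. A sketch with mmio_axi4 and socBus as in that trait:

    mmio_axi4 :=
      AXI4Fragmenter(lite = false, maxInFlight = 20)(   // beef the device up to awlen = 0xff
        TLToAXI4(idBits = 4)(
          TLWidthWidget(socBusConfig.beatBytes)(
            socBus.node)))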

@ -16,10 +16,13 @@ object AXI4Imp extends NodeImp[AXI4MasterPortParameters, AXI4SlavePortParameters
} }
def bundleI(ei: Seq[AXI4EdgeParameters]): Vec[AXI4Bundle] = { def bundleI(ei: Seq[AXI4EdgeParameters]): Vec[AXI4Bundle] = {
require (!ei.isEmpty) require (!ei.isEmpty)
Vec(ei.size, AXI4Bundle(ei.map(_.bundle).reduce(_.union(_)))).flip Vec(ei.size, AXI4Bundle(ei.map(_.bundle).reduce(_.union(_))))
} }
def colour = "#00ccff" // bluish def colour = "#00ccff" // bluish
override def labelI(ei: AXI4EdgeParameters) = (ei.slave.beatBytes * 8).toString
override def labelO(eo: AXI4EdgeParameters) = (eo.slave.beatBytes * 8).toString
def connect(bo: => AXI4Bundle, bi: => AXI4Bundle, ei: => AXI4EdgeParameters)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = { def connect(bo: => AXI4Bundle, bi: => AXI4Bundle, ei: => AXI4EdgeParameters)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
(None, () => { bi <> bo }) (None, () => { bi <> bo })
} }
@ -30,18 +33,23 @@ object AXI4Imp extends NodeImp[AXI4MasterPortParameters, AXI4SlavePortParameters
pu.copy(slaves = pu.slaves.map { m => m.copy (nodePath = node +: m.nodePath) }) pu.copy(slaves = pu.slaves.map { m => m.copy (nodePath = node +: m.nodePath) })
} }
// Nodes implemented inside modules
case class AXI4IdentityNode() extends IdentityNode(AXI4Imp) case class AXI4IdentityNode() extends IdentityNode(AXI4Imp)
case class AXI4OutputNode() extends OutputNode(AXI4Imp)
case class AXI4InputNode() extends InputNode(AXI4Imp)
case class AXI4MasterNode(portParams: AXI4MasterPortParameters, numPorts: Range.Inclusive = 1 to 1) case class AXI4MasterNode(portParams: AXI4MasterPortParameters, numPorts: Range.Inclusive = 1 to 1)
extends SourceNode(AXI4Imp)(portParams, numPorts) extends SourceNode(AXI4Imp)(portParams, numPorts)
case class AXI4SlaveNode(portParams: AXI4SlavePortParameters, numPorts: Range.Inclusive = 1 to 1) case class AXI4SlaveNode(portParams: AXI4SlavePortParameters, numPorts: Range.Inclusive = 1 to 1)
extends SinkNode(AXI4Imp)(portParams, numPorts) extends SinkNode(AXI4Imp)(portParams, numPorts)
case class AXI4AdapterNode( case class AXI4AdapterNode(
masterFn: Seq[AXI4MasterPortParameters] => AXI4MasterPortParameters, masterFn: Seq[AXI4MasterPortParameters] => AXI4MasterPortParameters,
slaveFn: Seq[AXI4SlavePortParameters] => AXI4SlavePortParameters, slaveFn: Seq[AXI4SlavePortParameters] => AXI4SlavePortParameters,
numMasterPorts: Range.Inclusive = 1 to 1, numMasterPorts: Range.Inclusive = 1 to 1,
numSlavePorts: Range.Inclusive = 1 to 1) numSlavePorts: Range.Inclusive = 1 to 1)
extends InteriorNode(AXI4Imp)(masterFn, slaveFn, numMasterPorts, numSlavePorts) extends InteriorNode(AXI4Imp)(masterFn, slaveFn, numMasterPorts, numSlavePorts)
// Nodes passed from an inner module
case class AXI4OutputNode() extends OutputNode(AXI4Imp)
case class AXI4InputNode() extends InputNode(AXI4Imp)
// Nodes used for external ports
case class AXI4BlindOutputNode(portParams: AXI4SlavePortParameters) extends BlindOutputNode(AXI4Imp)(portParams)
case class AXI4BlindInputNode(portParams: AXI4MasterPortParameters) extends BlindInputNode(AXI4Imp)(portParams)

@ -7,10 +7,11 @@ import diplomacy._
import regmapper._ import regmapper._
import scala.math.{min,max} import scala.math.{min,max}
class AXI4RegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true) class AXI4RegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
extends AXI4SlaveNode(AXI4SlavePortParameters( extends AXI4SlaveNode(AXI4SlavePortParameters(
Seq(AXI4SlaveParameters( Seq(AXI4SlaveParameters(
address = Seq(address), address = Seq(address),
executable = executable,
supportsWrite = TransferSizes(1, beatBytes), supportsWrite = TransferSizes(1, beatBytes),
supportsRead = TransferSizes(1, beatBytes), supportsRead = TransferSizes(1, beatBytes),
interleavedId = Some(0))), interleavedId = Some(0))),
@ -69,16 +70,16 @@ class AXI4RegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int
object AXI4RegisterNode object AXI4RegisterNode
{ {
def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true) = def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false) =
new AXI4RegisterNode(address, concurrency, beatBytes, undefZero) new AXI4RegisterNode(address, concurrency, beatBytes, undefZero, executable)
} }
// These convenience methods below combine to make it possible to create an AXI4 // These convenience methods below combine to make it possible to create an AXI4
// register mapped device from a totally abstract register mapped device. // register mapped device from a totally abstract register mapped device.
abstract class AXI4RegisterRouterBase(address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean) extends LazyModule abstract class AXI4RegisterRouterBase(address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean, executable: Boolean) extends LazyModule
{ {
val node = AXI4RegisterNode(address, concurrency, beatBytes, undefZero) val node = AXI4RegisterNode(address, concurrency, beatBytes, undefZero, executable)
val intnode = uncore.tilelink2.IntSourceNode(interrupts) val intnode = uncore.tilelink2.IntSourceNode(interrupts)
} }
@ -101,10 +102,10 @@ class AXI4RegModule[P, B <: AXI4RegBundleBase](val params: P, bundleBuilder: =>
} }
class AXI4RegisterRouter[B <: AXI4RegBundleBase, M <: LazyModuleImp] class AXI4RegisterRouter[B <: AXI4RegBundleBase, M <: LazyModuleImp]
(val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true) (val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
(bundleBuilder: AXI4RegBundleArg => B) (bundleBuilder: AXI4RegBundleArg => B)
(moduleBuilder: (=> B, AXI4RegisterRouterBase) => M) (moduleBuilder: (=> B, AXI4RegisterRouterBase) => M)
extends AXI4RegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero) extends AXI4RegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero, executable)
{ {
require (isPow2(size)) require (isPow2(size))
// require (size >= 4096) ... not absolutely required, but highly recommended // require (size >= 4096) ... not absolutely required, but highly recommended
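A concrete device built on this layer would subclass AXI4RegisterRouter, hand it a bundle builder and a module builder, and let the module body call regmap. A rough sketch (MyAXI4Timer is invented, and it assumes AXI4RegBundle/AXI4RegModule companion helpers analogous to their TileLink2 counterparts):

    class MyAXI4Timer(base: BigInt) extends AXI4RegisterRouter(base, beatBytes = 4)(
      new AXI4RegBundle((), _))(
      new AXI4RegModule((), _, _) {
        val count = Reg(init = UInt(0, width = 32))
        count := count + UInt(1)
        regmap(0x00 -> Seq(RegField.r(32, count)))   // free-running counter, read-only at offset 0
      })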

@ -4,8 +4,9 @@ package uncore.devices
import Chisel._ import Chisel._
import junctions._ import junctions._
import uncore.tilelink._
import util._ import util._
import regmapper._
import uncore.tilelink2._
import cde.{Parameters, Config, Field} import cde.{Parameters, Config, Field}
// ***************************************** // *****************************************
@ -15,6 +16,7 @@ import cde.{Parameters, Config, Field}
object DbRegAddrs{ object DbRegAddrs{
def DMRAMBASE = UInt(0x0)
def DMCONTROL = UInt(0x10) def DMCONTROL = UInt(0x10)
def DMINFO = UInt(0x11) def DMINFO = UInt(0x11)
@ -69,7 +71,7 @@ object DsbBusConsts {
// See $RISCV/riscv-tools/riscv-isa-sim/debug_rom/debug_rom.h/S // See $RISCV/riscv-tools/riscv-isa-sim/debug_rom/debug_rom.h/S
// The code assumes 64 bytes of Debug RAM. // The code assumes 64 bytes of Debug RAM.
def defaultRomContents : Array[Byte] = Array( def xlenAnyRomContents : Array[Byte] = Array(
0x6f, 0x00, 0xc0, 0x04, 0x6f, 0x00, 0xc0, 0x00, 0x13, 0x04, 0xf0, 0xff, 0x6f, 0x00, 0xc0, 0x04, 0x6f, 0x00, 0xc0, 0x00, 0x13, 0x04, 0xf0, 0xff,
0x6f, 0x00, 0x80, 0x00, 0x13, 0x04, 0x00, 0x00, 0x0f, 0x00, 0xf0, 0x0f, 0x6f, 0x00, 0x80, 0x00, 0x13, 0x04, 0x00, 0x00, 0x0f, 0x00, 0xf0, 0x0f,
0xf3, 0x24, 0x00, 0xf1, 0x63, 0xc6, 0x04, 0x00, 0x83, 0x24, 0xc0, 0x43, 0xf3, 0x24, 0x00, 0xf1, 0x63, 0xc6, 0x04, 0x00, 0x83, 0x24, 0xc0, 0x43,
@ -121,10 +123,10 @@ object DsbBusConsts {
object DsbRegAddrs{ object DsbRegAddrs{
def CLEARDEBINT = UInt(0x100) def CLEARDEBINT = 0x100
def SETHALTNOT = UInt(0x10C) def SETHALTNOT = 0x10C
def SERINFO = UInt(0x110) def SERINFO = 0x110
def SERBASE = UInt(0x114) def SERBASE = 0x114
// For each serial, there are // For each serial, there are
// 3 registers starting here: // 3 registers starting here:
// SERSEND0 // SERSEND0
@ -132,9 +134,11 @@ object DsbRegAddrs{
// SERSTATUS0 // SERSTATUS0
// ... // ...
// SERSTATUS7 // SERSTATUS7
def SERTX_OFFSET = UInt(0) def SERTX_OFFSET = 0
def SERRX_OFFSET = UInt(4) def SERRX_OFFSET = 4
def SERSTAT_OFFSET = UInt(8) def SERSTAT_OFFSET = 8
def RAMBASE = 0x400
def ROMBASE = 0x800
} }
@ -302,6 +306,24 @@ class DebugBusIO(implicit val p: cde.Parameters) extends ParameterizedBundle()(p
val resp = new DecoupledIO(new DebugBusResp).flip() val resp = new DecoupledIO(new DebugBusResp).flip()
} }
trait HasDebugModuleParameters {
val params : Parameters
implicit val p = params
val cfg = p(DMKey)
}
/** Debug Module I/O, with the exclusion of the RegisterRouter
* Access interface.
*/
trait DebugModuleBundle extends Bundle with HasDebugModuleParameters {
val db = new DebugBusIO()(p).flip()
val debugInterrupts = Vec(cfg.nComponents, Bool()).asOutput
val ndreset = Bool(OUTPUT)
val fullreset = Bool(OUTPUT)
}
// ***************************************** // *****************************************
// The Module // The Module
// //
@ -313,7 +335,7 @@ class DebugBusIO(implicit val p: cde.Parameters) extends ParameterizedBundle()(p
* DebugModule is a slave to two masters: * DebugModule is a slave to two masters:
* The Debug Bus -- implemented as a generic Decoupled IO with request * The Debug Bus -- implemented as a generic Decoupled IO with request
* and response channels * and response channels
* The System Bus -- implemented as Uncached Tile Link. * The System Bus -- implemented as generic RegisterRouter
* *
* DebugModule is responsible for holding registers, RAM, and ROM * DebugModule is responsible for holding registers, RAM, and ROM
* to support debug interactions, as well as driving interrupts * to support debug interactions, as well as driving interrupts
@ -321,10 +343,9 @@ class DebugBusIO(implicit val p: cde.Parameters) extends ParameterizedBundle()(p
* It is also responsible for some reset lines. * It is also responsible for some reset lines.
*/ */
class DebugModule ()(implicit val p:cde.Parameters) trait DebugModule extends Module with HasDebugModuleParameters with HasRegMap {
extends Module
with HasTileLinkParameters { val io: DebugModuleBundle
val cfg = p(DMKey)
//-------------------------------------------------------------- //--------------------------------------------------------------
// Import constants for shorter variable names // Import constants for shorter variable names
@ -344,6 +365,7 @@ class DebugModule ()(implicit val p:cde.Parameters)
require (cfg.hasBusMaster == false) require (cfg.hasBusMaster == false)
require (cfg.nDebugRamBytes <= 64) require (cfg.nDebugRamBytes <= 64)
require (cfg.authType == DebugModuleAuthType.None) require (cfg.authType == DebugModuleAuthType.None)
require((DbBusConsts.dbRamWordBits % 8) == 0)
//-------------------------------------------------------------- //--------------------------------------------------------------
// Private Classes (Register Fields) // Private Classes (Register Fields)
@ -403,17 +425,6 @@ class DebugModule ()(implicit val p:cde.Parameters)
} }
//--------------------------------------------------------------
// Module I/O
//--------------------------------------------------------------
val io = new Bundle {
val db = new DebugBusIO()(p).flip()
val debugInterrupts = Vec(cfg.nComponents, Bool()).asOutput
val tl = new ClientUncachedTileLinkIO().flip
val ndreset = Bool(OUTPUT)
val fullreset = Bool(OUTPUT)
}
//-------------------------------------------------------------- //--------------------------------------------------------------
// Register & Wire Declarations // Register & Wire Declarations
@ -455,46 +466,23 @@ class DebugModule ()(implicit val p:cde.Parameters)
// --- Debug RAM // --- Debug RAM
// Since the access size from Debug Bus and System Bus may not be consistent, val ramDataWidth = DbBusConsts.dbRamWordBits
// use the maximum to build the RAM, and then select as needed for the smaller val ramDataBytes = ramDataWidth / 8;
// size. val ramAddrWidth = log2Up(cfg.nDebugRamBytes / ramDataBytes)
val dbRamDataWidth = DbBusConsts.dbRamWordBits val ramMem = Reg(init = Vec.fill(cfg.nDebugRamBytes){UInt(0, width = 8)})
val sbRamDataWidth = tlDataBits
val dbRamAddrWidth = log2Up((cfg.nDebugRamBytes * 8) / dbRamDataWidth)
val sbRamAddrWidth = log2Up((cfg.nDebugRamBytes * 8) / sbRamDataWidth)
val sbRamAddrOffset = log2Up(tlDataBits/8)
val ramDataWidth = dbRamDataWidth max sbRamDataWidth val dbRamAddr = Wire(UInt(width=ramAddrWidth))
val ramAddrWidth = dbRamAddrWidth min sbRamAddrWidth
val ramMem = Mem(1 << ramAddrWidth , UInt(width=ramDataWidth))
val ramAddr = Wire(UInt(width=ramAddrWidth))
val ramRdData = Wire(UInt(width=ramDataWidth))
val ramWrData = Wire(UInt(width=ramDataWidth))
val ramWrMask = Wire(UInt(width=ramDataWidth))
val ramWrEn = Wire(Bool())
val dbRamAddr = Wire(UInt(width=dbRamAddrWidth))
val dbRamAddrValid = Wire(Bool()) val dbRamAddrValid = Wire(Bool())
val dbRamRdData = Wire (UInt(width=dbRamDataWidth)) val dbRamRdData = Wire (UInt(width=ramDataWidth))
val dbRamWrData = Wire(UInt(width=dbRamDataWidth)) val dbRamWrData = Wire(UInt(width=ramDataWidth))
val dbRamWrEn = Wire(Bool()) val dbRamWrEn = Wire(Bool())
val dbRamRdEn = Wire(Bool()) val dbRamRdEn = Wire(Bool())
val dbRamWrEnFinal = Wire(Bool()) val dbRamWrEnFinal = Wire(Bool())
val dbRamRdEnFinal = Wire(Bool()) val dbRamRdEnFinal = Wire(Bool())
require((cfg.nDebugRamBytes % ramDataBytes) == 0)
val dbRamDataOffset = log2Up(ramDataBytes)
val sbRamAddr = Wire(UInt(width=sbRamAddrWidth))
val sbRamAddrValid = Wire(Bool())
val sbRamRdData = Wire (UInt(width=sbRamDataWidth))
val sbRamWrData = Wire(UInt(width=sbRamDataWidth))
val sbRamWrEn = Wire(Bool())
val sbRamRdEn = Wire(Bool())
val sbRamWrEnFinal = Wire(Bool())
val sbRamRdEnFinal = Wire(Bool())
val sbRomRdData = Wire(UInt(width=tlDataBits))
val sbRomAddrOffset = log2Up(tlDataBits/8)
// --- Debug Bus Accesses // --- Debug Bus Accesses
@ -513,16 +501,6 @@ class DebugModule ()(implicit val p:cde.Parameters)
val rdCondWrFailure = Wire(Bool()) val rdCondWrFailure = Wire(Bool())
val dbWrNeeded = Wire(Bool()) val dbWrNeeded = Wire(Bool())
// --- System Bus Access
val sbAddr = Wire(UInt(width=sbAddrWidth))
val sbRdData = Wire(UInt(width=tlDataBits))
val sbWrData = Wire(UInt(width=tlDataBits))
val sbWrMask = Wire(UInt(width=tlDataBits))
val sbWrEn = Wire(Bool())
val sbRdEn = Wire(Bool())
val stallFromDb = Wire(Bool())
val stallFromSb = Wire(Bool())
//-------------------------------------------------------------- //--------------------------------------------------------------
// Interrupt Registers // Interrupt Registers
//-------------------------------------------------------------- //--------------------------------------------------------------
@ -622,63 +600,29 @@ class DebugModule ()(implicit val p:cde.Parameters)
HALTSUMRdData.acks := haltnotSummary HALTSUMRdData.acks := haltnotSummary
//-------------------------------------------------------------- //--------------------------------------------------------------
// Debug RAM Access (Debug Bus & System Bus) // Debug RAM Access (Debug Bus ... System Bus can override)
//-------------------------------------------------------------- //--------------------------------------------------------------
dbReq := io.db.req.bits dbReq := io.db.req.bits
// Debug Bus RAM Access // Debug Bus RAM Access
// From Specification: Debug RAM is 0x00 - 0x0F // From Specification: Debug RAM is 0x00 - 0x0F
// 0x40 - 0x6F Not Implemented // 0x40 - 0x6F Not Implemented
dbRamAddr := dbReq.addr( dbRamAddrWidth-1 , 0) dbRamAddr := dbReq.addr( ramAddrWidth-1 , 0)
dbRamWrData := dbReq.data dbRamWrData := dbReq.data
dbRamAddrValid := Bool(true) dbRamAddrValid := (dbReq.addr(3,0) <= UInt((cfg.nDebugRamBytes/ramDataBytes)))
if (dbRamAddrWidth < 4){
dbRamAddrValid := (dbReq.addr(3, dbRamAddrWidth) === UInt(0)) val dbRamRdDataFields = List.tabulate(cfg.nDebugRamBytes / ramDataBytes) { ii =>
val slice = ramMem.slice(ii * ramDataBytes, (ii+1)*ramDataBytes)
slice.reduce[UInt]{ case (x: UInt, y: UInt) => Cat(y, x)}
} }
sbRamAddr := sbAddr(sbRamAddrWidth + sbRamAddrOffset - 1, sbRamAddrOffset) dbRamRdData := dbRamRdDataFields(dbRamAddr)
sbRamWrData := sbWrData
sbRamAddrValid := Bool(true) when (dbRamWrEnFinal) {
// From Specification: Debug RAM is 0x400 - 0x4FF for (ii <- 0 until ramDataBytes) {
if ((sbRamAddrWidth + sbRamAddrOffset) < 8){ ramMem((dbRamAddr << UInt(dbRamDataOffset)) + UInt(ii)) := dbRamWrData((8*(ii+1)-1), (8*ii))
sbRamAddrValid := (sbAddr(7, sbRamAddrWidth + sbRamAddrOffset) === UInt(0)) }
} }
require (dbRamAddrWidth >= ramAddrWidth) // SB accesses less than 32 bits Not Implemented.
val dbRamWrMask = Wire(init=Vec.fill(1 << (dbRamAddrWidth - ramAddrWidth)){Fill(dbRamDataWidth, UInt(1, width=1))})
if (dbRamDataWidth < ramDataWidth){
val dbRamSel = dbRamAddr(dbRamAddrWidth - ramAddrWidth - 1 , 0)
val rdDataWords = Vec.tabulate(1 << (dbRamAddrWidth - ramAddrWidth)){ ii =>
ramRdData((ii+1)*dbRamDataWidth - 1 , ii*dbRamDataWidth)}
dbRamWrMask := Vec.fill(1 << (dbRamAddrWidth - ramAddrWidth)){UInt(0, width = dbRamDataWidth)}
dbRamWrMask(dbRamSel) := Fill(dbRamDataWidth, UInt(1, width=1))
dbRamRdData := rdDataWords(dbRamSel)
} else {
dbRamRdData := ramRdData
}
sbRamRdData := ramRdData
ramWrMask := Mux(sbRamWrEn, sbWrMask, dbRamWrMask.asUInt)
assert (!((dbRamWrEn | dbRamRdEn) & (sbRamRdEn | sbRamWrEn)), "Stall logic should have prevented concurrent SB/DB RAM Access")
// Make copies of DB RAM data before writing.
val dbRamWrDataVec = Fill(1 << (dbRamAddrWidth - ramAddrWidth), dbRamWrData)
ramWrData := Mux(sbRamWrEn,
(ramWrMask & sbRamWrData ) | (~ramWrMask & ramRdData),
(ramWrMask & dbRamWrDataVec) | (~ramWrMask & ramRdData))
ramAddr := Mux(sbRamWrEn | sbRamRdEn, sbRamAddr,
dbRamAddr >> (dbRamAddrWidth - ramAddrWidth))
ramRdData := ramMem(ramAddr)
when (ramWrEn) { ramMem(ramAddr) := ramWrData }
ramWrEn := sbRamWrEnFinal | dbRamWrEnFinal
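Note the byte order in the new read path: ramMem is a Vec of bytes, and reduce { case (x, y) => Cat(y, x) } packs byte 0 into the least-significant bits of each Debug-RAM word. The same fold in plain Scala over BigInts, for illustration only (not Chisel):

    object LittleEndianPack extends App {
      val bytes = Seq(0x11, 0x22, 0x33, 0x44).map(BigInt(_))      // ramMem slice, byte 0 first
      val (word, _) = bytes.foldLeft((BigInt(0), 0)) {
        case ((acc, shift), b) => (acc | (b << shift), shift + 8)  // Cat(y, x): new byte lands on top
      }
      println("0x" + word.toString(16))                            // 0x44332211
    }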
//-------------------------------------------------------------- //--------------------------------------------------------------
// Debug Bus Access // Debug Bus Access
@ -813,8 +757,8 @@ class DebugModule ()(implicit val p:cde.Parameters)
// ----------------------------------------- // -----------------------------------------
// DB Access State Machine Decode (Combo) // DB Access State Machine Decode (Combo)
io.db.req.ready := !stallFromSb && ((dbStateReg === s_DB_READY) || io.db.req.ready := (dbStateReg === s_DB_READY) ||
(dbStateReg === s_DB_RESP && io.db.resp.fire())) (dbStateReg === s_DB_RESP && io.db.resp.fire())
io.db.resp.valid := (dbStateReg === s_DB_RESP) io.db.resp.valid := (dbStateReg === s_DB_RESP)
io.db.resp.bits := dbRespReg io.db.resp.bits := dbRespReg
@ -844,172 +788,28 @@ class DebugModule ()(implicit val p:cde.Parameters)
// Debug ROM // Debug ROM
//-------------------------------------------------------------- //--------------------------------------------------------------
sbRomRdData := UInt(0) val romRegFields = if (cfg.hasDebugRom) {
if (cfg.hasDebugRom) { cfg.debugRomContents.get.map( x => RegField.r(8, UInt(x.toInt & 0xFF)))
// Inspired by ROMSlave } else {
val romContents = cfg.debugRomContents.get Seq(RegField(8))
val romByteWidth = tlDataBits / 8
val romRows = (romContents.size + romByteWidth - 1)/romByteWidth
val romMem = Vec.tabulate(romRows) { ii =>
val slice = romContents.slice(ii*romByteWidth, (ii+1)*romByteWidth)
UInt(slice.foldRight(BigInt(0)) { case (x,y) => ((y << 8) + (x.toInt & 0xFF))}, width = romByteWidth*8)
}
val sbRomRdAddr = Wire(UInt())
if (romRows == 1) {
sbRomRdAddr := UInt(0)
} else {
sbRomRdAddr := sbAddr(log2Up(romRows) + sbRomAddrOffset - 1, sbRomAddrOffset)
}
sbRomRdData := romMem (sbRomRdAddr)
} }
//--------------------------------------------------------------
// System Bus Access
//--------------------------------------------------------------
// Local reg mapper function : Notify when written, but give the value.
// ----------------------------------------- def wValue (n: Int, value: UInt, set: Bool) : RegField = {
// SB Access Write Decoder RegField(n, value, RegWriteFn((valid, data) => {set := valid ; value := data; Bool(true)}))
sbRamWrEn := Bool(false)
sbRamWrEnFinal := Bool(false)
SETHALTNOTWrEn := Bool(false)
CLEARDEBINTWrEn := Bool(false)
if (tlDataBits == 32) {
SETHALTNOTWrData := sbWrData
CLEARDEBINTWrData := sbWrData
when (sbAddr(11, 8) === UInt(4)){ // 0x400-0x4ff is Debug RAM
sbRamWrEn := sbWrEn
sbRamRdEn := sbRdEn
when (sbRamAddrValid) {
sbRamWrEnFinal := sbWrEn
sbRamRdEnFinal := sbRdEn
}
}.elsewhen (sbAddr === SETHALTNOT){
SETHALTNOTWrEn := sbWrEn
}.elsewhen (sbAddr === CLEARDEBINT){
CLEARDEBINTWrEn := sbWrEn
}.otherwise {
//Other registers/RAM are Not Implemented.
}
} else {
// Pick out the correct word based on the address.
val sbWrDataWords = Vec.tabulate (tlDataBits / 32) {ii => sbWrData((ii+1)*32 - 1, ii*32)}
val sbWrMaskWords = Vec.tabulate (tlDataBits / 32) {ii => sbWrMask ((ii+1)*32 -1, ii*32)}
val sbWrSelTop = log2Up(tlDataBits/8) - 1
val sbWrSelBottom = 2
SETHALTNOTWrData := sbWrDataWords(SETHALTNOT(sbWrSelTop, sbWrSelBottom))
CLEARDEBINTWrData := sbWrDataWords(CLEARDEBINT(sbWrSelTop, sbWrSelBottom))
when (sbAddr(11,8) === UInt(4)){ //0x400-0x4ff is Debug RAM
sbRamWrEn := sbWrEn
sbRamRdEn := sbRdEn
when (sbRamAddrValid){
sbRamWrEnFinal := sbWrEn
sbRamRdEnFinal := sbRdEn
}
}
SETHALTNOTWrEn := sbAddr(sbAddrWidth - 1, sbWrSelTop + 1) === SETHALTNOT(sbAddrWidth-1, sbWrSelTop + 1) &&
(sbWrMaskWords(SETHALTNOT(sbWrSelTop, sbWrSelBottom))).orR &&
sbWrEn
CLEARDEBINTWrEn := sbAddr(sbAddrWidth - 1, sbWrSelTop + 1) === CLEARDEBINT(sbAddrWidth-1, sbWrSelTop + 1) &&
(sbWrMaskWords(CLEARDEBINT(sbWrSelTop, sbWrSelBottom))).orR &&
sbWrEn
} }
// ----------------------------------------- regmap(
// SB Access Read Mux CLEARDEBINT -> Seq(wValue(sbIdWidth, CLEARDEBINTWrData, CLEARDEBINTWrEn)),
SETHALTNOT -> Seq(wValue(sbIdWidth, SETHALTNOTWrData, SETHALTNOTWrEn)),
sbRdData := UInt(0) RAMBASE -> ramMem.map(x => RegField(8, x)),
sbRamRdEn := Bool(false) ROMBASE -> romRegFields
sbRamRdEnFinal := Bool(false)
when (sbAddr(11, 8) === UInt(4)) { //0x400-0x4FF Debug RAM
sbRamRdEn := sbRdEn
when (sbRamAddrValid) {
sbRdData := sbRamRdData
sbRamRdEnFinal := sbRdEn
}
}.elsewhen (sbAddr(11,8).isOneOf(UInt(8), UInt(9))){ //0x800-0x9FF Debug ROM
if (cfg.hasDebugRom) {
sbRdData := sbRomRdData
} else {
sbRdData := UInt(0)
}
}. otherwise {
// All readable registers are Not Implemented.
sbRdData := UInt(0)
}
// -----------------------------------------
// SB Access State Machine -- based on BRAM Slave
val sbAcqReg = Reg(io.tl.acquire.bits)
val sbAcqValidReg = Reg(init = Bool(false))
val (sbReg_get :: sbReg_getblk :: sbReg_put :: sbReg_putblk :: Nil) = Seq(
Acquire.getType, Acquire.getBlockType, Acquire.putType, Acquire.putBlockType
).map(sbAcqReg.isBuiltInType _)
val sbMultibeat = sbReg_getblk & sbAcqValidReg;
val sbBeatInc1 = sbAcqReg.addr_beat + UInt(1)
val sbLast = (sbAcqReg.addr_beat === UInt(tlDataBeats - 1))
sbAddr := sbAcqReg.full_addr()
sbRdEn := (sbAcqValidReg && (sbReg_get || sbReg_getblk))
sbWrEn := (sbAcqValidReg && (sbReg_put || sbReg_putblk))
sbWrData := sbAcqReg.data
sbWrMask := sbAcqReg.full_wmask()
// -----------------------------------------
// SB Access State Machine Update (Seq)
when (io.tl.acquire.fire()){
sbAcqReg := io.tl.acquire.bits
sbAcqValidReg := Bool(true)
} .elsewhen (io.tl.grant.fire()) {
when (sbMultibeat){
sbAcqReg.addr_beat := sbBeatInc1
when (sbLast) {
sbAcqValidReg := Bool(false)
}
} . otherwise {
sbAcqValidReg := Bool(false)
}
}
io.tl.grant.valid := sbAcqValidReg
io.tl.grant.bits := Grant(
is_builtin_type = Bool(true),
g_type = sbAcqReg.getBuiltInGrantType(),
client_xact_id = sbAcqReg.client_xact_id,
manager_xact_id = UInt(0),
addr_beat = sbAcqReg.addr_beat,
data = sbRdData
) )
stallFromDb := Bool(false) // SB always wins, and DB latches its read data so it is not necessary for SB to wait
stallFromSb := sbRamRdEn || sbRamWrEn // pessimistically assume that DB/SB are going to conflict on the RAM,
// and SB doesn't latch its read data, so it is necessary for DB to hold
// off while SB is accessing the RAM and waiting to send its result.
val sbStall = (sbMultibeat & !sbLast) || (io.tl.grant.valid && !io.tl.grant.ready) || stallFromDb
io.tl.acquire.ready := !sbStall
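The new register map replaces this hand-written SB decoder: CLEARDEBINT and SETHALTNOT become RegFields built by the wValue helper shown above, which accepts every write in a single cycle, exposes the written data, and raises a side-band "written" strobe. A minimal sketch of that pattern, assuming the regmapper RegField/RegWriteFn API used elsewhere in this patch (the helper name here is illustrative only):

    // Inside the DebugModule implementation (assumes `import Chisel._` and
    // `import regmapper._` at the top of the file); `value` is a Wire and
    // `set` is a Bool strobe consumed by the halt/interrupt logic.
    def notifyingReg(n: Int, value: UInt, set: Bool): RegField =
      RegField(n, value, RegWriteFn { (valid, data) =>
        set := valid      // pulse on the cycle a bus write arrives
        value := data     // expose the written value to the rest of the module
        Bool(true)        // always ready: the write completes immediately
      })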
//--------------------------------------------------------------
// Misc. Outputs
//--------------------------------------------------------------
@@ -1019,6 +819,21 @@ class DebugModule ()(implicit val p:cde.Parameters)
} }
/** Create a concrete TL2 Slave for the DebugModule RegMapper interface.
*
*/
class TLDebugModule(address: BigInt = 0)(implicit p: Parameters)
extends TLRegisterRouter(address, beatBytes=p(rocket.XLen)/8, executable=true)(
new TLRegBundle(p, _ ) with DebugModuleBundle)(
new TLRegModule(p, _, _) with DebugModule)
/** Synchronizers for DebugBus
*
*/
object AsyncDebugBusCrossing {
// takes from_source from the 'from' clock domain to the 'to' clock domain
def apply(from_clock: Clock, from_reset: Bool, from_source: DebugBusIO, to_clock: Clock, to_reset: Bool, depth: Int = 1, sync: Int = 3) = {
@@ -1029,6 +844,7 @@ object AsyncDebugBusCrossing {
}
}
object AsyncDebugBusFrom { // OutsideClockDomain
// takes from_source from the 'from' clock domain and puts it into your clock domain
def apply(from_clock: Clock, from_reset: Bool, from_source: DebugBusIO, depth: Int = 1, sync: Int = 3): DebugBusIO = {


@@ -6,8 +6,11 @@ import Chisel._
import Chisel.ImplicitConversions._
import junctions._
import uncore.tilelink._ import diplomacy._
import regmapper._
import uncore.tilelink2._
import cde.Parameters
import scala.math.min
class GatewayPLICIO extends Bundle {
val valid = Bool(OUTPUT)
@@ -27,161 +30,183 @@ class LevelGateway extends Module {
io.plic.valid := io.interrupt && !inFlight
}
case class PLICConfig(nHartsIn: Int, supervisor: Boolean, nDevices: Int, nPriorities: Int) { object PLICConsts
def contextsPerHart = if (supervisor) 2 else 1 {
def nHarts = contextsPerHart * nHartsIn
def context(i: Int, mode: Char) = mode match {
case 'M' => i * contextsPerHart
case 'S' => require(supervisor); i * contextsPerHart + 1
}
def claimAddr(i: Int, mode: Char) = hartBase + hartOffset(context(i, mode)) + claimOffset
def threshAddr(i: Int, mode: Char) = hartBase + hartOffset(context(i, mode))
def enableAddr(i: Int, mode: Char) = enableBase + enableOffset(context(i, mode))
def size = hartBase + hartOffset(maxHarts)
def maxDevices = 1023
def maxHarts = 15872
def priorityBase = 0x0
def pendingBase = 0x1000
def enableBase = 0x2000
def hartBase = 0x200000
require(hartBase >= enableBase + enableOffset(maxHarts))
def enableOffset(i: Int) = i * ((maxDevices+7)/8)
def hartOffset(i: Int) = i * 0x1000
def claimOffset = 4
def priorityBytes = 4
require(nDevices <= maxDevices) def enableOffset(i: Int) = i * ((maxDevices+7)/8)
require(nHarts > 0 && nHarts <= maxHarts) def hartOffset(i: Int) = i * 0x1000
require(nPriorities >= 0 && nPriorities <= nDevices) def enableBase(i: Int):Int = enableOffset(i) + enableBase
def hartBase(i: Int):Int = hartOffset(i) + hartBase
def size = hartBase(maxHarts)
require(hartBase >= enableBase(maxHarts))
} }
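With the per-design configuration moved out of PLICConfig, register addresses are now pure functions of these constants plus the device's base address. A worked example in plain Scala (illustration only) for hart 0 in M-mode at the default base 0xC000000 used by TLPLIC below:

    object PlicAddrExample extends App {
      val base    = 0xC000000L
      val context = 0                                            // hart 0, M-mode
      val enable  = base + 0x2000 + context * ((1023 + 7) / 8)   // enableBase(context)
      val thresh  = base + 0x200000 + context * 0x1000           // hartBase(context)
      val claim   = thresh + 4                                   // claimOffset
      println(f"ie=0x$enable%x thresh=0x$thresh%x claim=0x$claim%x")
      // ie=0xc002000 thresh=0xc200000 claim=0xc200004
    }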
/** Platform-Level Interrupt Controller */
class PLIC(val cfg: PLICConfig)(implicit val p: Parameters) extends Module class TLPLIC(supervisor: Boolean, maxPriorities: Int, address: BigInt = 0xC000000)(implicit val p: Parameters) extends LazyModule
with HasTileLinkParameters {
with HasAddrMapParameters { val contextsPerHart = if (supervisor) 2 else 1
val io = new Bundle { require (maxPriorities >= 0)
val devices = Vec(cfg.nDevices, new GatewayPLICIO).flip
val harts = Vec(cfg.nHarts, Bool()).asOutput
val tl = new ClientUncachedTileLinkIO().flip
}
val priority = val node = TLRegisterNode(
if (cfg.nPriorities > 0) Reg(Vec(cfg.nDevices+1, UInt(width=log2Up(cfg.nPriorities+1)))) address = AddressSet(address, PLICConsts.size-1),
else Wire(init=Vec.fill(cfg.nDevices+1)(UInt(1))) beatBytes = p(rocket.XLen)/8,
val threshold = undefZero = false)
if (cfg.nPriorities > 0) Reg(Vec(cfg.nHarts, UInt(width = log2Up(cfg.nPriorities+1))))
else Wire(init=Vec.fill(cfg.nHarts)(UInt(0)))
val pending = Reg(init=Vec.fill(cfg.nDevices+1){Bool(false)})
val enables = Reg(Vec(cfg.nHarts, Vec(cfg.nDevices+1, Bool())))
for ((p, g) <- pending.tail zip io.devices) { val intnode = IntAdapterNode(
g.ready := !p numSourcePorts = 0 to 1024,
g.complete := false numSinkPorts = 0 to 1024,
when (g.valid) { p := true } sourceFn = { _ => IntSourcePortParameters(Seq(IntSourceParameters(contextsPerHart))) },
} sinkFn = { _ => IntSinkPortParameters(Seq(IntSinkParameters())) })
def findMax(x: Seq[UInt]): (UInt, UInt) = { lazy val module = new LazyModuleImp(this) {
if (x.length > 1) { val io = new Bundle {
val half = 1 << (log2Ceil(x.length) - 1) val tl_in = node.bundleIn
val lMax = findMax(x take half) val devices = intnode.bundleIn
val rMax = findMax(x drop half) val harts = intnode.bundleOut
val useLeft = lMax._1 >= rMax._1
(Mux(useLeft, lMax._1, rMax._1), Mux(useLeft, lMax._2, UInt(half) + rMax._2))
} else (x.head, UInt(0))
}
val maxDevs = Wire(Vec(cfg.nHarts, UInt(width = log2Up(pending.size))))
for (hart <- 0 until cfg.nHarts) {
val effectivePriority =
for (((p, en), pri) <- (pending zip enables(hart) zip priority).tail)
yield Cat(p && en, pri)
val (maxPri, maxDev) = findMax((UInt(1) << priority(0).getWidth) +: effectivePriority)
maxDevs(hart) := Reg(next = maxDev)
io.harts(hart) := Reg(next = maxPri) > Cat(UInt(1), threshold(hart))
}
val acq = Queue(io.tl.acquire, 1)
val read = acq.fire() && acq.bits.isBuiltInType(Acquire.getType)
val write = acq.fire() && acq.bits.isBuiltInType(Acquire.putType)
assert(!acq.fire() || read || write, "unsupported PLIC operation")
val addr = acq.bits.full_addr()(log2Up(cfg.size)-1,0)
val claimant =
if (cfg.nHarts == 1) UInt(0)
else (addr - cfg.hartBase)(log2Up(cfg.hartOffset(cfg.nHarts))-1,log2Up(cfg.hartOffset(1)))
val hart = Wire(init = claimant)
val myMaxDev = maxDevs(claimant)
val myEnables = enables(hart)
val rdata = Wire(init = UInt(0, tlDataBits))
val masked_wdata = (acq.bits.data & acq.bits.full_wmask()) | (rdata & ~acq.bits.full_wmask())
if (cfg.nDevices > 0) when (addr >= cfg.hartBase) {
val word =
if (tlDataBytes > cfg.claimOffset) UInt(0)
else addr(log2Up(cfg.claimOffset),log2Up(tlDataBytes))
rdata := Cat(myMaxDev, UInt(0, 8*cfg.priorityBytes-threshold(0).getWidth), threshold(claimant)) >> (word * tlDataBits)
when (read && addr(log2Ceil(cfg.claimOffset))) {
pending(myMaxDev) := false
} }
when (write) {
when (if (tlDataBytes > cfg.claimOffset) acq.bits.wmask()(cfg.claimOffset) else addr(log2Ceil(cfg.claimOffset))) { // Assign all the devices unique ranges
val dev = (acq.bits.data >> ((8 * cfg.claimOffset) % tlDataBits))(log2Up(pending.size)-1,0) val sources = intnode.edgesIn.map(_.source)
when (myEnables(dev)) { io.devices(dev-1).complete := true } val flatSources = (sources zip sources.map(_.num).scanLeft(0)(_+_).init).map {
}.otherwise { case (s, o) => s.sources.map(z => z.copy(range = z.range.offset(o)))
if (cfg.nPriorities > 0) threshold(claimant) := acq.bits.data }.flatten
} // Compact the interrupt vector the same way
val interrupts = (intnode.edgesIn zip io.devices).map { case (e, i) => i.take(e.source.num) }.flatten
// This flattens the harts into an MSMSMSMSMS... or MMMMM.... sequence
val harts = io.harts.flatten
println("\nInterrupt map:")
flatSources.foreach { s =>
// +1 because 0 is reserved, +1-1 because the range is half-open
println(s" [${s.range.start+1}, ${s.range.end}] => ${s.name}")
} }
}.elsewhen (addr >= cfg.enableBase) {
val enableHart = val nDevices = interrupts.size
if (cfg.nHarts > 1) (addr - cfg.enableBase)(log2Up(cfg.enableOffset(cfg.nHarts))-1,log2Up(cfg.enableOffset(1))) val nPriorities = min(maxPriorities, nDevices)
else UInt(0) val nHarts = harts.size
hart := enableHart
val word = require(nDevices <= PLICConsts.maxDevices)
if (tlDataBits >= myEnables.size) UInt(0) require(nHarts > 0 && nHarts <= PLICConsts.maxHarts)
else addr(log2Ceil((myEnables.size-1)/tlDataBits+1) + tlByteAddrBits - 1, tlByteAddrBits)
for (i <- 0 until myEnables.size by tlDataBits) { def context(i: Int, mode: Char) = mode match {
when (word === i/tlDataBits) { case 'M' => i * contextsPerHart
rdata := Cat(myEnables.slice(i, i + tlDataBits).reverse) case 'S' => require(supervisor); i * contextsPerHart + 1
for (j <- 0 until (tlDataBits min (myEnables.size - i))) {
when (write) { enables(enableHart)(i+j) := masked_wdata(j) }
}
}
} }
}.elsewhen (addr >= cfg.pendingBase) { def claimAddr(i: Int, mode: Char) = address + PLICConsts.hartBase(context(i, mode)) + PLICConsts.claimOffset
val word = def threshAddr(i: Int, mode: Char) = address + PLICConsts.hartBase(context(i, mode))
if (tlDataBytes >= pending.size) UInt(0) def enableAddr(i: Int, mode: Char) = address + PLICConsts.enableBase(context(i, mode))
else addr(log2Up(pending.size)-1,log2Up(tlDataBytes))
rdata := pending.asUInt >> (word * tlDataBits) // Create the global PLIC config string
}.otherwise { val globalConfigString = Seq(
val regsPerBeat = tlDataBytes >> log2Up(cfg.priorityBytes) s"plic {\n",
val word = s" priority 0x${address.toString(16)};\n",
if (regsPerBeat >= priority.size) UInt(0) s" pending 0x${(address + PLICConsts.pendingBase).toString(16)};\n",
else addr(log2Up(priority.size*cfg.priorityBytes)-1,log2Up(tlDataBytes)) s" ndevs ${nDevices};\n",
for (i <- 0 until priority.size by regsPerBeat) { s"};\n").mkString
when (word === i/regsPerBeat) {
rdata := Cat(priority.slice(i, i + regsPerBeat).map(p => Cat(UInt(0, 8*cfg.priorityBytes-p.getWidth), p)).reverse) // Create the per-Hart config string
for (j <- 0 until (regsPerBeat min (priority.size - i))) { val hartConfigStrings = io.harts.zipWithIndex.map { case (_, i) => (Seq(
if (cfg.nPriorities > 0) when (write) { priority(i+j) := masked_wdata >> (j * 8 * cfg.priorityBytes) } s" plic {\n",
} s" m {\n",
} s" ie 0x${enableAddr(i, 'M').toString(16)};\n",
s" thresh 0x${threshAddr(i, 'M').toString(16)};\n",
s" claim 0x${claimAddr(i, 'M').toString(16)};\n",
s" };\n") ++ (if (!supervisor) Seq() else Seq(
s" s {\n",
s" ie 0x${enableAddr(i, 'S').toString(16)};\n",
s" thresh 0x${threshAddr(i, 'S').toString(16)};\n",
s" claim 0x${claimAddr(i, 'S').toString(16)};\n",
s" };\n")) ++ Seq(
s" };\n")).mkString
} }
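Both strings end up in the config string consumed by the boot ROM. For the default base address the global fragment evaluates to the text shown in the comment below (plain Scala rendering, illustration only):

    object PlicConfigStringExample extends App {
      val address  = BigInt(0xC000000)
      val nDevices = 2
      val s = Seq(
        s"plic {\n",
        s"  priority 0x${address.toString(16)};\n",
        s"  pending 0x${(address + 0x1000).toString(16)};\n",
        s"  ndevs ${nDevices};\n",
        s"};\n").mkString
      print(s)
      // plic {
      //   priority 0xc000000;
      //   pending 0xc001000;
      //   ndevs 2;
      // };
    }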
// For now, use LevelGateways for all TL2 interrupts
val gateways = Vec(interrupts.map { case i =>
val gateway = Module(new LevelGateway)
gateway.io.interrupt := i
gateway.io.plic
})
val priority =
if (nPriorities > 0) Reg(Vec(nDevices+1, UInt(width=log2Up(nPriorities+1))))
else Wire(init=Vec.fill(nDevices+1)(UInt(1)))
val threshold =
if (nPriorities > 0) Reg(Vec(nHarts, UInt(width = log2Up(nPriorities+1))))
else Wire(init=Vec.fill(nHarts)(UInt(0)))
val pending = Reg(init=Vec.fill(nDevices+1){Bool(false)})
val enables = Reg(Vec(nHarts, Vec(nDevices+1, Bool())))
for ((p, g) <- pending.tail zip gateways) {
g.ready := !p
g.complete := false
when (g.valid) { p := true }
}
def findMax(x: Seq[UInt]): (UInt, UInt) = {
if (x.length > 1) {
val half = 1 << (log2Ceil(x.length) - 1)
val lMax = findMax(x take half)
val rMax = findMax(x drop half)
val useLeft = lMax._1 >= rMax._1
(Mux(useLeft, lMax._1, rMax._1), Mux(useLeft, lMax._2, UInt(half) | rMax._2))
} else (x.head, UInt(0))
}
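findMax builds a tournament tree over the effective priorities: it splits the sequence at the largest power of two strictly below its length, recurses on both halves, and returns both the winning value and the index of the winner, with ties going to the lower index. A plain-Scala analogue over Ints (illustration only):

    def log2Ceil(n: Int): Int = 32 - Integer.numberOfLeadingZeros(n - 1)

    def findMaxSW(x: Seq[Int]): (Int, Int) = {
      if (x.length > 1) {
        val half = 1 << (log2Ceil(x.length) - 1)
        val (lVal, lIdx) = findMaxSW(x take half)
        val (rVal, rIdx) = findMaxSW(x drop half)
        if (lVal >= rVal) (lVal, lIdx) else (rVal, half + rIdx)
      } else (x.head, 0)
    }

    // findMaxSW(Seq(3, 7, 7, 2)) == (7, 1): ties resolve to the leftmost entry.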
val maxDevs = Reg(Vec(nHarts, UInt(width = log2Up(pending.size))))
for (hart <- 0 until nHarts) {
val effectivePriority =
for (((p, en), pri) <- (pending zip enables(hart) zip priority).tail)
yield Cat(p && en, pri)
val (maxPri, maxDev) = findMax((UInt(1) << priority(0).getWidth) +: effectivePriority)
maxDevs(hart) := maxDev
harts(hart) := Reg(next = maxPri) > Cat(UInt(1), threshold(hart))
}
def priorityRegField(x: UInt) = if (nPriorities > 0) RegField(32, x) else RegField.r(32, x)
val priorityRegFields = Seq(PLICConsts.priorityBase -> priority.map(p => priorityRegField(p)))
val pendingRegFields = Seq(PLICConsts.pendingBase -> pending .map(b => RegField.r(1, b)))
val enableRegFields = enables.zipWithIndex.map { case (e, i) =>
PLICConsts.enableBase(i) -> e.map(b => RegField(1, b))
}
val hartRegFields = Seq.tabulate(nHarts) { i =>
PLICConsts.hartBase(i) -> Seq(
priorityRegField(threshold(i)),
RegField(32,
RegReadFn { valid =>
when (valid) {
pending(maxDevs(i)) := Bool(false)
maxDevs(i) := UInt(0) // flush pipeline
}
(Bool(true), maxDevs(i))
},
RegWriteFn { (valid, data) =>
when (valid && enables(i)(data)) {
gateways(data - UInt(1)).complete := Bool(true)
}
Bool(true)
}
)
)
}
node.regmap((priorityRegFields ++ pendingRegFields ++ enableRegFields ++ hartRegFields):_*)
priority(0) := 0
pending(0) := false
for (e <- enables)
e(0) := false
} }
priority(0) := 0
pending(0) := false
for (e <- enables)
e(0) := false
io.tl.grant.valid := acq.valid
acq.ready := io.tl.grant.ready
io.tl.grant.bits := Grant(
is_builtin_type = Bool(true),
g_type = acq.bits.getBuiltInGrantType(),
client_xact_id = acq.bits.client_xact_id,
manager_xact_id = UInt(0),
addr_beat = UInt(0),
data = rdata)
} }
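From software, the hart register block above implements the usual PLIC claim/complete handshake: reading the claim word returns the highest-priority pending source and clears its pending bit, and writing the source ID back signals completion to the gateway. A hypothetical driver-side sketch (readWord/writeWord stand in for whatever MMIO accessors the platform provides; not part of this patch):

    def servicePlic(claimAddr: Long,
                    readWord: Long => Int,
                    writeWord: (Long, Int) => Unit,
                    handle: Int => Unit): Unit = {
      val source = readWord(claimAddr)   // claim: hardware clears pending(maxDev)
      if (source != 0) {                 // 0 means "no interrupt pending"
        handle(source)                   // service the device
        writeWord(claimAddr, source)     // complete: gateway may raise a new interrupt
      }
    }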


@@ -6,6 +6,7 @@
import junctions._
import junctions.NastiConstants._
import regmapper._
import diplomacy._
import uncore.tilelink2._
import uncore.util._
import util._
@@ -20,22 +21,19 @@ class CoreplexLocalInterrupts extends Bundle {
val msip = Bool()
}
case class CoreplexLocalInterrupterConfig(beatBytes: Int, address: BigInt = 0x02000000) { object ClintConsts
{
def msipOffset(hart: Int) = hart * msipBytes
def msipAddress(hart: Int) = address + msipOffset(hart)
def timecmpOffset(hart: Int) = 0x4000 + hart * timecmpBytes
def timecmpAddress(hart: Int) = address + timecmpOffset(hart)
def timeOffset = 0xbff8
def timeAddress = address + timeOffset
def msipBytes = 4
def timecmpBytes = 8
def size = 0x10000
}
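The fixed offsets in ClintConsts, combined with the device base address (0x02000000 by default for CoreplexLocalInterrupter below), give each hart its msip, mtimecmp and shared mtime locations. A worked example in plain Scala (illustration only):

    object ClintAddrExample extends App {
      val base = 0x02000000L
      def msip(hart: Int)    = base + hart * 4            // msipOffset
      def timecmp(hart: Int) = base + 0x4000 + hart * 8   // timecmpOffset
      val time               = base + 0xbff8              // timeOffset
      println(f"msip0=0x${msip(0)}%x timecmp1=0x${timecmp(1)}%x mtime=0x$time%x")
      // msip0=0x2000000 timecmp1=0x2004008 mtime=0x200bff8
    }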
trait MixCoreplexLocalInterrupterParameters {
val params: (CoreplexLocalInterrupterConfig, Parameters) val params: Parameters
val c = params._1 implicit val p = params
implicit val p = params._2
}
trait CoreplexLocalInterrupterBundle extends Bundle with MixCoreplexLocalInterrupterParameters {
@@ -45,11 +43,10 @@ trait CoreplexLocalInterrupterBundle extends Bundle with MixCoreplexLocalInterru
trait CoreplexLocalInterrupterModule extends Module with HasRegMap with MixCoreplexLocalInterrupterParameters {
val io: CoreplexLocalInterrupterBundle
val address: AddressSet
val timeWidth = 64
val regWidth = 32
// demand atomic accesses for RV64
require(c.beatBytes >= (p(rocket.XLen) min timeWidth)/8)
val time = Seq.fill(timeWidth/regWidth)(Reg(init=UInt(0, width = regWidth)))
when (io.rtcTick) {
@@ -66,6 +63,15 @@ trait CoreplexLocalInterrupterModule extends Module with HasRegMap with MixCorep
tile.mtip := time.asUInt >= timecmp(i).asUInt
}
val globalConfigString = Seq(
s"rtc {\n",
s" addr 0x${(address.base + ClintConsts.timeOffset).toString(16)};\n",
s"};\n").mkString
val hartConfigStrings = (0 until p(NTiles)).map { i => Seq(
s" timecmp 0x${(address.base + ClintConsts.timecmpOffset(i)).toString(16)};\n",
s" ipi 0x${(address.base + ClintConsts.msipOffset(i)).toString(16)};\n").mkString
}
/* 0000 msip hart 0
* 0004 msip hart 1
* 4000 mtimecmp hart 0 lo
@@ -77,16 +83,16 @@ trait CoreplexLocalInterrupterModule extends Module with HasRegMap with MixCorep
*/
regmap(
0 -> makeRegFields(ipi),
c.timecmpOffset(0) -> makeRegFields(timecmp.flatten), ClintConsts.timecmpOffset(0) -> makeRegFields(timecmp.flatten),
c.timeOffset -> makeRegFields(time)) ClintConsts.timeOffset -> makeRegFields(time))
def makeRegFields(s: Seq[UInt]) = s.map(r => RegField(regWidth, r))
}
/** Power, Reset, Clock, Interrupt */
// Magic TL2 Incantation to create a TL2 Slave
class CoreplexLocalInterrupter(c: CoreplexLocalInterrupterConfig)(implicit val p: Parameters) class CoreplexLocalInterrupter(address: BigInt = 0x02000000)(implicit val p: Parameters)
extends TLRegisterRouter(c.address, 0, c.size, 0, c.beatBytes, false)( extends TLRegisterRouter(address, size = ClintConsts.size, beatBytes = p(rocket.XLen)/8, undefZero = false)(
new TLRegBundle((c, p), _) with CoreplexLocalInterrupterBundle)( new TLRegBundle(p, _) with CoreplexLocalInterrupterBundle)(
new TLRegModule((c, p), _, _) with CoreplexLocalInterrupterModule) new TLRegModule(p, _, _) with CoreplexLocalInterrupterModule)


@@ -94,7 +94,7 @@ class TLAtomicAutomata(logical: Boolean = true, arithmetic: Boolean = true, conc
val cam_free = cam_s.map(_.state === FREE)
val cam_amo = cam_s.map(_.state === AMO)
val cam_abusy = cam_s.map(e => e.state === GET || e.state === AMO) // A is blocked
val cam_dmatch = cam_s.map(e => e.state === GET || e.state === ACK) // D should inspect these entries val cam_dmatch = cam_s.map(e => e.state =/= FREE) // D should inspect these entries
// Can the manager already handle this message?
val a_size = edgeIn.size(in.a.bits)
@@ -211,7 +211,7 @@ class TLAtomicAutomata(logical: Boolean = true, arithmetic: Boolean = true, conc
val d_cam_sel_match = (d_cam_sel_raw zip cam_dmatch) map { case (a,b) => a&&b }
val d_cam_data = Mux1H(d_cam_sel_match, cam_d.map(_.data))
val d_cam_sel_bypass = if (edgeOut.manager.minLatency > 0) Bool(false) else
out.d.bits.source === in.a.bits.source && in.a.valid && out.d.valid && !a_isSupported out.d.bits.source === in.a.bits.source && in.a.valid && !a_isSupported
val d_cam_sel = (a_cam_sel_free zip d_cam_sel_match) map { case (a,d) => Mux(d_cam_sel_bypass, a, d) }
val d_cam_sel_any = d_cam_sel_bypass || d_cam_sel_match.reduce(_ || _)
val d_ackd = out.d.bits.opcode === TLMessages.AccessAckData


@@ -2,6 +2,7 @@
package uncore.tilelink2
import scala.math.min
import Chisel._
import chisel3.internal.sourceinfo.SourceInfo
import diplomacy._
@@ -9,10 +10,9 @@ import diplomacy._
// Acks Hints for managers that don't support them or Acks all Hints if !passthrough
class TLHintHandler(supportManagers: Boolean = true, supportClients: Boolean = false, passthrough: Boolean = true) extends LazyModule
{
// HintAcks can come back combinationally => minLatency=0
val node = TLAdapterNode(
clientFn = { case Seq(c) => if (!supportClients) c else c.copy(minLatency = 0, clients = c.clients .map(_.copy(supportsHint = TransferSizes(1, c.maxTransfer)))) }, clientFn = { case Seq(c) => if (!supportClients) c else c.copy(minLatency = min(1, c.minLatency), clients = c.clients .map(_.copy(supportsHint = TransferSizes(1, c.maxTransfer)))) },
managerFn = { case Seq(m) => if (!supportManagers) m else m.copy(minLatency = 0, managers = m.managers.map(_.copy(supportsHint = TransferSizes(1, m.maxTransfer)))) }) managerFn = { case Seq(m) => if (!supportManagers) m else m.copy(minLatency = min(1, m.minLatency), managers = m.managers.map(_.copy(supportsHint = TransferSizes(1, m.maxTransfer)))) })
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
@@ -46,7 +46,7 @@ class TLHintHandler(supportManagers: Boolean = true, supportClients: Boolean = f
hint.bits := edgeIn.HintAck(in.a.bits, edgeOut.manager.findIdStartFast(address))
out.a.bits := in.a.bits
TLArbiter(TLArbiter.lowestIndexFirst)(in.d, (edgeOut.numBeats1(out.d.bits), out.d), (UInt(0), hint)) TLArbiter(TLArbiter.lowestIndexFirst)(in.d, (edgeOut.numBeats1(out.d.bits), out.d), (UInt(0), Queue(hint, 1)))
} else {
out.a.valid := in.a.valid
in.a.ready := out.a.ready
@@ -69,7 +69,7 @@ class TLHintHandler(supportManagers: Boolean = true, supportClients: Boolean = f
hint.bits := edgeOut.HintAck(out.b.bits)
in.b.bits := out.b.bits
TLArbiter(TLArbiter.lowestIndexFirst)(out.c, (edgeIn.numBeats1(in.c.bits), in.c), (UInt(0), hint)) TLArbiter(TLArbiter.lowestIndexFirst)(out.c, (edgeIn.numBeats1(in.c.bits), in.c), (UInt(0), Queue(hint, 1)))
} else if (bce) {
in.b.valid := out.b.valid
out.b.ready := in.b.ready


@@ -58,10 +58,13 @@ object IntImp extends NodeImp[IntSourcePortParameters, IntSinkPortParameters, In
}
def bundleI(ei: Seq[IntEdge]): Vec[Vec[Bool]] = {
require (!ei.isEmpty)
Vec(ei.size, Vec(ei.map(_.source.num).max, Bool())).flip Vec(ei.size, Vec(ei.map(_.source.num).max, Bool()))
}
def colour = "#0000ff" // blue
override def labelI(ei: IntEdge) = ei.source.sources.map(_.range.size).sum.toString
override def labelO(eo: IntEdge) = eo.source.sources.map(_.range.size).sum.toString
def connect(bo: => Vec[Bool], bi: => Vec[Bool], ei: => IntEdge)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
(None, () => {
// Cannot use bulk connect, because the widths could differ
@@ -76,9 +79,6 @@ object IntImp extends NodeImp[IntSourcePortParameters, IntSinkPortParameters, In
}
case class IntIdentityNode() extends IdentityNode(IntImp)
case class IntOutputNode() extends OutputNode(IntImp)
case class IntInputNode() extends InputNode(IntImp)
case class IntSourceNode(num: Int) extends SourceNode(IntImp)(
IntSourcePortParameters(Seq(IntSourceParameters(num))), (if (num == 0) 0 else 1) to 1)
case class IntSinkNode() extends SinkNode(IntImp)(
@@ -91,11 +91,20 @@ case class IntAdapterNode(
numSinkPorts: Range.Inclusive = 1 to 1)
extends InteriorNode(IntImp)(sourceFn, sinkFn, numSourcePorts, numSinkPorts)
case class IntOutputNode() extends OutputNode(IntImp)
case class IntInputNode() extends InputNode(IntImp)
case class IntBlindOutputNode() extends BlindOutputNode(IntImp)(IntSinkPortParameters(Seq(IntSinkParameters())))
case class IntBlindInputNode(num: Int) extends BlindInputNode(IntImp)(IntSourcePortParameters(Seq(IntSourceParameters(num))))
case class IntInternalOutputNode() extends InternalOutputNode(IntImp)(IntSinkPortParameters(Seq(IntSinkParameters())))
case class IntInternalInputNode(num: Int) extends InternalInputNode(IntImp)(IntSourcePortParameters(Seq(IntSourceParameters(num))))
class IntXbar extends LazyModule
{
val intnode = IntAdapterNode(
numSourcePorts = 1 to 1, // does it make sense to have more than one interrupt sink?
numSinkPorts = 1 to 128, numSinkPorts = 0 to 128,
sinkFn = { _ => IntSinkPortParameters(Seq(IntSinkParameters())) },
sourceFn = { seq =>
IntSourcePortParameters((seq zip seq.map(_.num).scanLeft(0)(_+_).init).map {
@@ -113,3 +122,17 @@ class IntXbar extends LazyModule
io.out.foreach { _ := cat }
}
}
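The sourceFn above renumbers the incoming interrupt sources into one flat range by offsetting each port by the running total of the ports before it; the PLIC's flatSources uses the same scanLeft idiom. A small worked example (illustration only):

    val nums    = Seq(2, 1, 3)                    // interrupts per input port
    val offsets = nums.scanLeft(0)(_ + _).init    // => Seq(0, 2, 3)
    // port 0 keeps sources 0-1, port 1 becomes source 2, port 2 becomes sources 3-5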
class IntXing extends LazyModule
{
val intnode = IntIdentityNode()
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val in = intnode.bundleIn
val out = intnode.bundleOut
}
io.out := RegNext(RegNext(RegNext(io.in)))
}
}
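IntXing simply passes each interrupt line through three registers, the usual multi-stage synchronizer for moving a level signal across a clock boundary. A parameterized equivalent might look like the sketch below (an assumption for illustration, not part of the patch):

    import Chisel._

    class SyncLevel(sync: Int = 3) extends Module {
      val io = new Bundle {
        val in  = Bool(INPUT)
        val out = Bool(OUTPUT)
      }
      // chain `sync` registers so metastability settles before `out` is sampled
      io.out := (0 until sync).foldLeft(io.in) { case (d, _) => RegNext(d) }
    }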


@@ -38,11 +38,9 @@ class TLLegacy(implicit val p: Parameters) extends LazyModule with HasTileLinkPa
require (m.supportsPutPartial.contains(TransferSizes(1, tlDataBytes)))
require (m.supportsPutPartial.contains(TransferSizes(tlDataBeats * tlDataBytes)))
}
// Any atomic support => must support 32-bit up to beat size of all types // Any atomic support => must support 32-bit size
if (m.supportsArithmetic || m.supportsLogical) { if (m.supportsArithmetic) { require (m.supportsArithmetic.contains(TransferSizes(4))) }
require (m.supportsArithmetic.contains(TransferSizes(4, tlDataBytes))) if (m.supportsLogical) { require (m.supportsLogical .contains(TransferSizes(4))) }
require (m.supportsLogical .contains(TransferSizes(4, tlDataBytes)))
}
// We straight-up require hints
require (edge.manager.allSupportHint)
}


@@ -17,7 +17,7 @@ object TLImp extends NodeImp[TLClientPortParameters, TLManagerPortParameters, TL
}
def bundleI(ei: Seq[TLEdgeIn]): Vec[TLBundle] = {
require (!ei.isEmpty)
Vec(ei.size, TLBundle(ei.map(_.bundle).reduce(_.union(_)))).flip Vec(ei.size, TLBundle(ei.map(_.bundle).reduce(_.union(_))))
}
var emitMonitors = true
@@ -25,6 +25,9 @@ object TLImp extends NodeImp[TLClientPortParameters, TLManagerPortParameters, TL
var combinationalCheck = false
def colour = "#000000" // black
override def labelI(ei: TLEdgeIn) = (ei.manager.beatBytes * 8).toString
override def labelO(eo: TLEdgeOut) = (eo.manager.beatBytes * 8).toString
def connect(bo: => TLBundle, bi: => TLBundle, ei: => TLEdgeIn)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
val monitor = if (emitMonitors) {
Some(LazyModule(new TLMonitor(() => new TLBundleSnoop(bo.params), () => ei, sourceInfo)))
@@ -89,10 +92,8 @@ object TLImp extends NodeImp[TLClientPortParameters, TLManagerPortParameters, TL
}
}
// Nodes implemented inside modules
case class TLIdentityNode() extends IdentityNode(TLImp)
case class TLOutputNode() extends OutputNode(TLImp)
case class TLInputNode() extends InputNode(TLImp)
case class TLClientNode(portParams: TLClientPortParameters, numPorts: Range.Inclusive = 1 to 1)
extends SourceNode(TLImp)(portParams, numPorts)
case class TLManagerNode(portParams: TLManagerPortParameters, numPorts: Range.Inclusive = 1 to 1)
@@ -117,6 +118,14 @@ case class TLAdapterNode(
numManagerPorts: Range.Inclusive = 1 to 1)
extends InteriorNode(TLImp)(clientFn, managerFn, numClientPorts, numManagerPorts)
// Nodes passed from an inner module
case class TLOutputNode() extends OutputNode(TLImp)
case class TLInputNode() extends InputNode(TLImp)
// Nodes used for external ports
case class TLBlindOutputNode(portParams: TLManagerPortParameters) extends BlindOutputNode(TLImp)(portParams)
case class TLBlindInputNode(portParams: TLClientPortParameters) extends BlindInputNode(TLImp)(portParams)
/** Synthesizeable unit tests */
import unittest._
@@ -149,10 +158,13 @@ object TLAsyncImp extends NodeImp[TLAsyncClientPortParameters, TLAsyncManagerPor
}
def bundleI(ei: Seq[TLAsyncEdgeParameters]): Vec[TLAsyncBundle] = {
require (ei.size == 1)
Vec(ei.size, new TLAsyncBundle(ei(0).bundle)).flip Vec(ei.size, new TLAsyncBundle(ei(0).bundle))
}
def colour = "#ff0000" // red
override def labelI(ei: TLAsyncEdgeParameters) = ei.manager.depth.toString
override def labelO(eo: TLAsyncEdgeParameters) = eo.manager.depth.toString
def connect(bo: => TLAsyncBundle, bi: => TLAsyncBundle, ei: => TLAsyncEdgeParameters)(implicit sourceInfo: SourceInfo): (Option[LazyModule], () => Unit) = {
(None, () => { bi <> bo })
}


@@ -7,10 +7,11 @@ import diplomacy._
import regmapper._
import scala.math.{min,max}
class TLRegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true) class TLRegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
extends TLManagerNode(TLManagerPortParameters(
Seq(TLManagerParameters(
address = Seq(address),
executable = executable,
supportsGet = TransferSizes(1, beatBytes),
supportsPutPartial = TransferSizes(1, beatBytes),
supportsPutFull = TransferSizes(1, beatBytes),
@@ -70,17 +71,17 @@ class TLRegisterNode(address: AddressSet, concurrency: Int = 0, beatBytes: Int =
object TLRegisterNode
{
def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true) = def apply(address: AddressSet, concurrency: Int = 0, beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false) =
new TLRegisterNode(address, concurrency, beatBytes, undefZero) new TLRegisterNode(address, concurrency, beatBytes, undefZero, executable)
}
// These convenience methods below combine to make it possible to create a TL2
// register mapped device from a totally abstract register mapped device.
// See GPIO.scala in this directory for an example
abstract class TLRegisterRouterBase(address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean) extends LazyModule abstract class TLRegisterRouterBase(val address: AddressSet, interrupts: Int, concurrency: Int, beatBytes: Int, undefZero: Boolean, executable: Boolean) extends LazyModule
{
val node = TLRegisterNode(address, concurrency, beatBytes, undefZero) val node = TLRegisterNode(address, concurrency, beatBytes, undefZero, executable)
val intnode = IntSourceNode(interrupts)
}
@@ -99,14 +100,15 @@ class TLRegModule[P, B <: TLRegBundleBase](val params: P, bundleBuilder: => B, r
{
val io = bundleBuilder
val interrupts = if (io.interrupts.isEmpty) Vec(0, Bool()) else io.interrupts(0)
val address = router.address
def regmap(mapping: RegField.Map*) = router.node.regmap(mapping:_*)
}
class TLRegisterRouter[B <: TLRegBundleBase, M <: LazyModuleImp]
(val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true) (val base: BigInt, val interrupts: Int = 0, val size: BigInt = 4096, val concurrency: Int = 0, val beatBytes: Int = 4, undefZero: Boolean = true, executable: Boolean = false)
(bundleBuilder: TLRegBundleArg => B)
(moduleBuilder: (=> B, TLRegisterRouterBase) => M)
extends TLRegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero) extends TLRegisterRouterBase(AddressSet(base, size-1), interrupts, concurrency, beatBytes, undefZero, executable)
{
require (isPow2(size))
// require (size >= 4096) ... not absolutely required, but highly recommended


@@ -5,9 +5,11 @@ import diplomacy._
package object tilelink2
{
type TLInwardNode = InwardNodeHandle[TLClientPortParameters, TLManagerPortParameters, TLBundle]
type TLOutwardNode = OutwardNodeHandle[TLClientPortParameters, TLManagerPortParameters, TLBundle]
type TLAsyncOutwardNode = OutwardNodeHandle[TLAsyncClientPortParameters, TLAsyncManagerPortParameters, TLAsyncBundle]
type IntOutwardNode = OutwardNodeHandle[IntSourcePortParameters, IntSinkPortParameters, Vec[Bool]]
def OH1ToOH(x: UInt) = (x << 1 | UInt(1)) & ~Cat(UInt(0, width=1), x)
def OH1ToUInt(x: UInt) = OHToUInt(OH1ToOH(x))
def UIntToOH1(x: UInt, width: Int) = ~(SInt(-1, width=width).asUInt << x)(width-1, 0)


@@ -5,6 +5,7 @@ package unittest
object Generator extends util.GeneratorApp {
val longName = names.topModuleProject + "." + names.configs
generateFirrtl
generateGraphML
generateTestSuiteMakefrags // TODO: Needed only for legacy make targets
generateParameterDump // TODO: Needed only for legacy make targets
}