WIP scala compile and firrtl elaborate; monitor error
@@ -24,15 +24,12 @@ case object NTrackersPerBank extends Field[Int]
case object BankIdLSB extends Field[Int]
/** Function for building some kind of coherence manager agent */
case object BuildL2CoherenceManager extends Field[(Int, Parameters) => CoherenceAgent]
/** Function for building some kind of tile connected to a reset signal */
case object BuildTiles extends Field[Seq[Parameters => LazyTile]]
/** The file to read the BootROM contents from */
case object BootROMFile extends Field[String]

trait HasCoreplexParameters {
  implicit val p: Parameters
  lazy val nBanksPerMemChannel = p(NBanksPerMemoryChannel)
  lazy val lsb = p(BankIdLSB)
  lazy val innerParams = p.alterPartial({ case TLId => "L1toL2" })
  lazy val outerMemParams = p.alterPartial({ case TLId => "L2toMC" })
  lazy val outerMMIOParams = p.alterPartial({ case TLId => "L2toMMIO" })
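
The Field keys and the p(...) / alterPartial lookups above follow rocket-chip's configuration pattern: a Field[T] is a typed key, a Parameters object maps keys to values, and alterPartial layers overrides on top of an existing view (as in outerMMIOParams). Below is a minimal, self-contained sketch of that pattern; the Field/Parameters classes and key names here are illustrative stand-ins, not the real cde/config implementation.

object ParametersSketch extends App {
  class Field[T]
  case object NTilesKey extends Field[Int]
  case object TLIdKey   extends Field[String]

  // A toy key/value store: overlaid bindings shadow the ones beneath them.
  class Parameters(f: PartialFunction[Any, Any]) {
    def apply[T](key: Field[T]): T = f(key).asInstanceOf[T]
    def alterPartial(g: PartialFunction[Any, Any]): Parameters =
      new Parameters(g orElse f)
  }

  val base = new Parameters({
    case NTilesKey => 2
    case TLIdKey   => "L1toL2"
  })
  val mmio = base.alterPartial({ case TLIdKey => "L2toMMIO" })

  assert(base(TLIdKey) == "L1toL2")
  assert(mmio(TLIdKey) == "L2toMMIO") // overridden view, like outerMMIOParams
  assert(mmio(NTilesKey) == 2)        // untouched keys fall through to the base
}
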
@@ -138,19 +135,11 @@ trait BankedL2CoherenceManagersModule {
trait CoreplexRISCVPlatform {
  this: CoreplexNetwork =>

  // Build a set of Tiles
  val lazyTiles = p(BuildTiles) map { _(p) }
  val legacy = LazyModule(new TLLegacy()(outerMMIOParams))
  val tileIntNodes = lazyTiles.map { _ => IntInternalOutputNode() } // this should be moved into the Tile...

  val lazyTiles = List.tabulate(p(NTiles)){ i => LazyModule(new RocketTile(i)) }
  val debug = LazyModule(new TLDebugModule())
  val plic = LazyModule(new TLPLIC(hasSupervisor, maxPriorities = 7))
  val clint = LazyModule(new CoreplexLocalInterrupter)

  // Kill this once we move TL2 into rocket
  l1tol2.node :=
    TLHintHandler()(
    legacy.node)
  val tileIntNodes = lazyTiles.map { _ => IntInternalOutputNode() } // this should be moved into the Tile...

  debug.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
  plic.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
@@ -179,7 +168,11 @@ trait CoreplexRISCVPlatformModule {
  } =>

  val tiles = outer.lazyTiles.map(_.module)
  val uncoreTileIOs = (tiles zipWithIndex) map { case (tile, i) => Wire(tile.io) }

  // Remaining external coreplex signals
  outer.debug.module.io.db <> io.debug
  outer.clint.module.io.rtcTick := io.rtcTick
  io.success := Bool(false) // Coreplex doesn't know when to stop running

  println("\nGenerated Address Map")
  for (entry <- p(rocketchip.GlobalAddrMap).flatten) {
@@ -202,67 +195,6 @@ trait CoreplexRISCVPlatformModule {
    ConfigStringOutput.contents = Some(configString)
  }
  println(s"\nGenerated Configuration String\n${ConfigStringOutput.contents.get}")

  val nCachedPorts = tiles.map(tile => tile.io.cached.size).reduce(_ + _)
  val nUncachedPorts = tiles.map(tile => tile.io.uncached.size).reduce(_ + _)
  val nBanks = nMemChannels * nBanksPerMemChannel

  buildUncore(p.alterPartial({
    case HastiId => "TL"
    case TLId => "L1toL2"
    case NCachedTileLinkPorts => nCachedPorts
    case NUncachedTileLinkPorts => nUncachedPorts
  }))

  def buildUncore(implicit p: Parameters) {
    // Create a simple L1toL2 NoC between the tiles and the banks of outer memory
    // Cached ports are first in client list, making sharerToClientId just an identity function
    // addrToBank is used to hash physical addresses (of cache blocks) to banks (and thereby memory channels)
    def sharerToClientId(sharerId: UInt) = sharerId
    def addrToBank(addr: UInt): UInt = UInt(nBanks)
    val l1tol2net = Module(new PortedTileLinkCrossbar(addrToBank, sharerToClientId))

    // Create point(s) of coherence serialization
    val managerEndpoints = List.tabulate(nBanks){ id => p(BuildL2CoherenceManager)(id, p) }
    managerEndpoints.flatMap(_.incoherent).foreach(_ := Bool(false))

    val mmioManager = Module(new MMIOTileLinkManager()(p.alterPartial({
      case TLId => "L1toL2"
      case InnerTLId => "L1toL2"
      case OuterTLId => "L2toMMIO"
    })))

    // Wire the tiles to the TileLink client ports of the L1toL2 network,
    // and coherence manager(s) to the other side
    l1tol2net.io.clients_cached <> uncoreTileIOs.map(_.cached).flatten
    l1tol2net.io.clients_uncached <> uncoreTileIOs.map(_.uncached).flatten ++ io.slave
    l1tol2net.io.managers <> managerEndpoints.map(_.innerTL) :+ mmioManager.io.inner
    outer.legacy.module.io.legacy <> mmioManager.io.outer

    val mem_ic = Module(new TileLinkMemoryInterconnect(nBanksPerMemChannel, nMemChannels)(outerMemParams))

    val backendBuffering = TileLinkDepths(0,0,0,0,0)
    for ((bank, icPort) <- managerEndpoints zip mem_ic.io.in) {
      val enqueued = TileLinkEnqueuer(bank.outerTL, backendBuffering)
      icPort <> TileLinkIOUnwrapper(enqueued)
    }
  }

  // connect coreplex-internal interrupts to tiles
  for ((tile, i) <- (uncoreTileIOs zipWithIndex)) {
    tile.hartid := UInt(i)
    tile.resetVector := io.resetVector
    tile.interrupts := outer.clint.module.io.tiles(i)
    tile.interrupts.debug := outer.debug.module.io.debugInterrupts(i)
    tile.interrupts.meip := outer.tileIntNodes(i).bundleOut(0)(0)
    tile.interrupts.seip.foreach(_ := outer.tileIntNodes(i).bundleOut(0)(1))
  }

  outer.debug.module.io.db <> io.debug
  outer.clint.module.io.rtcTick := io.rtcTick

  // Coreplex doesn't know when to stop running
  io.success := Bool(false)
}

abstract class BaseCoreplex(implicit p: Parameters) extends BareCoreplex
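
The addrToBank comment in the hunk above describes hashing a cache-block address to a bank index (and thereby to a memory channel). A common way to do that, sketched below with illustrative parameter names rather than rocket-chip's actual helpers, is to slice the address bits immediately above the cache-block offset:

import chisel3._
import chisel3.util.log2Ceil

object BankHashSketch {
  // Pick a bank from the address bits just above the block offset;
  // a single bank degenerates to a constant index of zero.
  def addrToBank(addr: UInt, blockOffsetBits: Int, nBanks: Int): UInt =
    if (nBanks <= 1) 0.U
    else addr(blockOffsetBits + log2Ceil(nBanks) - 1, blockOffsetBits)
}
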
@@ -66,18 +66,7 @@ class BaseCoreplexConfig extends Config (
      Module(new L2BroadcastHub()(p.alterPartial({
        case InnerTLId => "L1toL2"
        case OuterTLId => "L2toMC" })))
    case NCachedTileLinkPorts => 1
    case NUncachedTileLinkPorts => 1
    //Tile Constants
    case BuildTiles => {
      List.tabulate(site(NTiles)){ i => (p: Parameters) =>
        LazyModule(new RocketTile()(p.alterPartial({
          case TileId => i
          case TLId => "L1toL2"
          case NUncachedTileLinkPorts => 1 + site(RoccNMemChannels)
        })))
      }
    }
    case BuildRoCC => Nil
    case RoccNMemChannels => site(BuildRoCC).map(_.nMemChannels).foldLeft(0)(_ + _)
    case RoccNPTWPorts => site(BuildRoCC).map(_.nPTWPorts).foldLeft(0)(_ + _)
@@ -108,14 +97,14 @@ class BaseCoreplexConfig extends Config (
    case LNHeaderBits => log2Ceil(site(TLKey(site(TLId))).nManagers) +
                           log2Up(site(TLKey(site(TLId))).nClients)
    case TLKey("L1toL2") => {
      val useMEI = site(NTiles) <= 1 && site(NCachedTileLinkPorts) <= 1
      val useMEI = site(NTiles) <= 1
      TileLinkParameters(
        coherencePolicy = (
          if (useMEI) new MEICoherence(site(L2DirectoryRepresentation))
          else new MESICoherence(site(L2DirectoryRepresentation))),
        nManagers = site(NBanksPerMemoryChannel)*site(NMemoryChannels) + 1 /* MMIO */,
        nCachingClients = site(NCachedTileLinkPorts),
        nCachelessClients = site(NCoreplexExtClients) + site(NUncachedTileLinkPorts),
        nCachingClients = 1,
        nCachelessClients = site(NCoreplexExtClients) + 1,
        maxClientXacts = max_int(
          // L1 cache
          site(DCacheKey).nMSHRs + 1 /* IOMSHR */,
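
The TLKey("L1toL2") binding above derives its value from other keys through site(...), which resolves lookups against the final composed configuration. A minimal sketch of that idea follows, using hypothetical key names (NTilesK, CoherenceK) rather than rocket-chip's:

object SiteSketch extends App {
  sealed trait Key
  case object NTilesK    extends Key
  case object CoherenceK extends Key

  // Each binding receives 'site', a view of the fully resolved configuration,
  // so one key's value can depend on another's.
  def bindings(site: Key => Any): PartialFunction[Key, Any] = {
    case NTilesK    => 1
    case CoherenceK =>
      if (site(NTilesK).asInstanceOf[Int] <= 1) "MEI" else "MESI"
  }

  // Tie the knot: lookups made inside bindings see the same resolved view.
  def resolved(key: Key): Any = bindings(resolved)(key)

  println(resolved(CoherenceK)) // prints "MEI" when there is at most one tile
}
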
@@ -22,23 +22,28 @@ trait BroadcastL2 {

trait DirectConnection {
  this: CoreplexNetwork with CoreplexRISCVPlatform =>
  lazyTiles.map(_.slave).flatten.foreach { scratch => scratch := cbus.node }

  lazyTiles foreach { t =>
    t.slaveNode.foreach { _ := cbus.node }
    l1tol2.node := TLBuffer(1,1,2,2,0)(TLHintHandler()(t.cachedOut))
    l1tol2.node := TLBuffer(1,0,0,2,0)(TLHintHandler()(t.uncachedOut))
  }
}

trait DirectConnectionModule {
  this: CoreplexNetworkModule with CoreplexRISCVPlatformModule =>
  this: CoreplexNetworkModule with CoreplexRISCVPlatformModule {
    val outer: CoreplexNetwork with CoreplexRISCVPlatform
    val io: CoreplexRISCVPlatformBundle
  } =>

  val tlBuffering = TileLinkDepths(1,1,2,2,0)
  val ultBuffering = UncachedTileLinkDepths(1,2)

  (tiles zip uncoreTileIOs) foreach { case (tile, uncore) =>
    (uncore.cached zip tile.io.cached) foreach { case (u, t) => u <> TileLinkEnqueuer(t, tlBuffering) }
    (uncore.uncached zip tile.io.uncached) foreach { case (u, t) => u <> TileLinkEnqueuer(t, ultBuffering) }

    tile.io.interrupts <> uncore.interrupts

    tile.io.hartid := uncore.hartid
    tile.io.resetVector := uncore.resetVector
  // connect coreplex-internal interrupts to tiles
  tiles.zipWithIndex.foreach { case (tile, i) =>
    tile.io.hartid := UInt(i)
    tile.io.resetVector := io.resetVector
    tile.io.interrupts := outer.clint.module.io.tiles(i)
    tile.io.interrupts.debug := outer.debug.module.io.debugInterrupts(i)
    tile.io.interrupts.meip := outer.tileIntNodes(i).bundleOut(0)(0)
    tile.io.interrupts.seip.foreach(_ := outer.tileIntNodes(i).bundleOut(0)(1))
  }
}

@@ -57,12 +62,24 @@ class DefaultCoreplexModule[+L <: DefaultCoreplex, +B <: DefaultCoreplexBundle[L

trait AsyncConnection {
  this: CoreplexNetwork with CoreplexRISCVPlatform =>
  val crossings = lazyTiles.map(_.slave).map(_.map { scratch =>
    val crossing = LazyModule(new TLAsyncCrossing)
    crossing.node := cbus.node
    val monitor = (scratch := crossing.node)
    (crossing, monitor)
  })

  val masterCrossings = lazyTiles.map { t =>
    t.masterNodes map { m =>
      val crossing = LazyModule(new TLAsyncCrossing)
      crossing.node := m
      val monitor = (cbus.node := crossing.node)
      (crossing, monitor)
    }
  }

  val slaveCrossings = lazyTiles.map { t =>
    t.slaveNode map { s =>
      val crossing = LazyModule(new TLAsyncCrossing)
      crossing.node := cbus.node
      val monitor = (s := crossing.node)
      (crossing, monitor)
    }
  }
}

trait AsyncConnectionBundle {
@@ -75,11 +92,24 @@ trait AsyncConnectionBundle {

trait AsyncConnectionModule {
  this: Module with CoreplexNetworkModule with CoreplexRISCVPlatformModule {
    val outer: AsyncConnection
    val io: AsyncConnectionBundle
    val outer: AsyncConnection with CoreplexNetwork with CoreplexRISCVPlatform
    val io: AsyncConnectionBundle with CoreplexNetworkBundle with CoreplexRISCVPlatformBundle
  } =>

  (outer.crossings zip io.tcrs) foreach { case (slaves, tcr) =>
  (outer.masterCrossings zip io.tcrs) foreach { case (masters, tcr) =>
    masters.foreach { case (crossing, monitor) =>
      crossing.module.io.out_clock := clock
      crossing.module.io.out_reset := reset
      crossing.module.io.in_clock := tcr.clock
      crossing.module.io.in_reset := tcr.reset
      monitor.foreach { m =>
        m.module.clock := clock
        m.module.reset := reset
      }
    }
  }

  (outer.slaveCrossings zip io.tcrs) foreach { case (slaves, tcr) =>
    slaves.foreach { case (crossing, monitor) =>
      crossing.module.io.in_clock := clock
      crossing.module.io.in_reset := reset
@@ -92,23 +122,19 @@ trait AsyncConnectionModule {
    }
  }

  (tiles, uncoreTileIOs, io.tcrs).zipped foreach { case (tile, uncore, tcr) =>
  (tiles.zipWithIndex, io.tcrs).zipped.foreach { case ((tile, i), tcr) =>
    tile.clock := tcr.clock
    tile.reset := tcr.reset

    (uncore.cached zip tile.io.cached) foreach { case (u, t) => u <> AsyncTileLinkFrom(tcr.clock, tcr.reset, t) }
    (uncore.uncached zip tile.io.uncached) foreach { case (u, t) => u <> AsyncUTileLinkFrom(tcr.clock, tcr.reset, t) }

    val ti = tile.io.interrupts
    val ui = uncore.interrupts
    ti.debug := LevelSyncTo(tcr.clock, ui.debug)
    ti.mtip := LevelSyncTo(tcr.clock, ui.mtip)
    ti.msip := LevelSyncTo(tcr.clock, ui.msip)
    ti.meip := LevelSyncTo(tcr.clock, ui.meip)
    ti.seip.foreach { _ := LevelSyncTo(tcr.clock, ui.seip.get) }
    ti.debug := LevelSyncTo(tcr.clock, outer.debug.module.io.debugInterrupts(i))
    ti.mtip := LevelSyncTo(tcr.clock, outer.clint.module.io.tiles(i).mtip)
    ti.msip := LevelSyncTo(tcr.clock, outer.clint.module.io.tiles(i).msip)
    ti.meip := LevelSyncTo(tcr.clock, outer.tileIntNodes(i).bundleOut(0)(0))
    ti.seip.foreach { _ := LevelSyncTo(tcr.clock, outer.tileIntNodes(i).bundleOut(0)(1)) }

    tile.io.hartid := uncore.hartid
    tile.io.resetVector := uncore.resetVector
    tile.io.hartid := UInt(i)
    tile.io.resetVector := io.resetVector
  }
}
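
The LevelSyncTo calls above move level-type interrupt signals into each tile's clock domain. A hedged sketch of what such a level synchronizer conceptually does, written in Chisel 3 syntax purely for illustration (this is not rocket-chip's actual LevelSyncTo):

import chisel3._

class LevelSyncSketch extends Module {
  val io = IO(new Bundle {
    val asyncIn = Input(Bool())   // level produced in another clock domain
    val syncOut = Output(Bool())  // safe to sample in this module's clock domain
  })
  // Two back-to-back registers give metastability a cycle to resolve.
  io.syncOut := RegNext(RegNext(io.asyncIn, false.B), false.B)
}
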