now able to add periphery devices through traits
Unfortunately, I had to touch a lot of code, and it wasn't really possible to split the change up into multiple commits. This commit gets rid of the "extra" infrastructure for adding periphery devices into Top.
parent 2c000a99da
commit bb3f514e8d
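For orientation, the pattern this commit moves to splits each periphery device across three small traits that get mixed into the Top: a LazyModule trait that claims address-map entries, interrupt slots, and bus-master slots up front; a Bundle trait that adds any top-level pins; and a Module trait that instantiates the device and wires it to the MMIO interconnect. The sketch below is lifted from the PeripheryTestRAM traits added further down in this diff; it is illustrative only, and the way a concrete top mixes the traits together is not shown here.

    // Sketch of the trait-based periphery pattern (taken from the PeripheryTestRAM
    // traits added in this commit; not a complete, compilable file on its own).
    trait PeripheryTestRAM extends LazyModule {
      implicit val p: Parameters
      val pDevices: ResourceManager[AddrMapEntry]

      val ramSize = 0x1000
      // Claim an address-map slot for the device when the LazyModule is built.
      pDevices.add(AddrMapEntry("testram", MemSize(ramSize, MemAttr(AddrMapProt.RW))))
    }

    trait PeripheryTestRAMBundle {
      implicit val p: Parameters   // an on-chip RAM needs no extra top-level pins
    }

    trait PeripheryTestRAMModule extends HasPeripheryParameters {
      implicit val p: Parameters
      val outer: PeripheryTestRAM
      val io: PeripheryTestRAMBundle
      val mmioNetwork: Option[TileLinkRecursiveInterconnect]

      // Instantiate the device and attach it to the MMIO port claimed above.
      val testram = Module(new TileLinkTestRAM(outer.ramSize)(innerMMIOParams))
      testram.io <> mmioNetwork.get.port("testram")
    }

A concrete top (the diff's ExampleTopWithTestRAM) then mixes PeripheryTestRAM into its LazyModule, PeripheryTestRAMBundle into its IO bundle, and PeripheryTestRAMModule into its module implementation, replacing the DeviceBlock/ExtraDevices machinery that this commit deletes.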
@@ -11,6 +11,7 @@ import uncore.devices._
 import uncore.converters._
 import rocket._
 import rocket.Util._
+import rocketchip.{GlobalAddrMap, NCoreplexExtClients}
 import scala.math.max
 import scala.collection.mutable.{LinkedHashSet, ListBuffer}
 import DefaultTestSuites._
@@ -127,7 +128,6 @@ class BaseCoreplexConfig extends Config (
 case MulDivKey => Some(MulDivConfig(mulUnroll = 8, mulEarlyOut = true, divEarlyOut = true))
 case UseAtomics => true
 case UseCompressed => true
-case PLICKey => PLICConfig(site(NTiles), site(UseVM), site(NExtInterrupts), 0)
 case DMKey => new DefaultDebugModuleConfig(site(NTiles), site(XLen))
 case NCustomMRWCSRs => 0
 case ResetVector => BigInt(0x1000)
@@ -145,7 +145,7 @@ class BaseCoreplexConfig extends Config (
 else new MESICoherence(site(L2DirectoryRepresentation))),
 nManagers = site(NBanksPerMemoryChannel)*site(NMemoryChannels) + 1 /* MMIO */,
 nCachingClients = site(NCachedTileLinkPorts),
-nCachelessClients = site(NExternalClients) + site(NUncachedTileLinkPorts),
+nCachelessClients = site(NCoreplexExtClients).get + site(NUncachedTileLinkPorts),
 maxClientXacts = max_int(
 // L1 cache
 site(DCacheKey).nMSHRs + 1 /* IOMSHR */,
@@ -176,7 +176,7 @@ class BaseCoreplexConfig extends Config (
 TileLinkParameters(
 coherencePolicy = new MICoherence(
 new NullRepresentation(site(NBanksPerMemoryChannel))),
-nManagers = site(GlobalAddrMap).subMap("io").numSlaves,
+nManagers = site(GlobalAddrMap).get.subMap("io").numSlaves,
 nCachingClients = 0,
 nCachelessClients = 1,
 maxClientXacts = 4,
@@ -194,7 +194,6 @@ class BaseCoreplexConfig extends Config (
 case CacheBlockBytes => Dump("CACHE_BLOCK_BYTES", 64)
 case CacheBlockOffsetBits => log2Up(here(CacheBlockBytes))
 case EnableL2Logging => false
-case ExtraCoreplexPorts => (p: Parameters) => new Bundle
 case RegressionTestNames => LinkedHashSet(
 "rv64ud-v-fcvt",
 "rv64ud-p-fdiv",
@@ -14,6 +14,8 @@ import rocket.Util._
 import java.nio.{ByteBuffer,ByteOrder}
 import java.nio.file.{Files, Paths}

+/** Function for building Coreplex */
+case object BuildCoreplex extends Field[(Parameters, CoreplexConfig) => Coreplex]
 /** Number of memory channels */
 case object NMemoryChannels extends Field[Int]
 /** Number of banks per memory channel */
@@ -24,24 +26,11 @@ case object BankIdLSB extends Field[Int]
 case object BuildL2CoherenceManager extends Field[(Int, Parameters) => CoherenceAgent]
 /** Function for building some kind of tile connected to a reset signal */
 case object BuildTiles extends Field[Seq[(Bool, Parameters) => Tile]]
-/** A string describing on-chip devices, readable by target software */
-case object ConfigString extends Field[Array[Byte]]
-/** Number of external interrupt sources */
-case object NExtInterrupts extends Field[Int]
-/** Interrupt controller configuration */
-case object PLICKey extends Field[PLICConfig]
 /** The file to read the BootROM contents from */
 case object BootROMFile extends Field[String]
-/** Export an external MMIO slave port */
-case object ExportMMIOPort extends Field[Boolean]
-/** Expose additional TileLink client ports */
-case object NExternalClients extends Field[Int]
-/** Extra top-level ports exported from the coreplex */
-case object ExtraCoreplexPorts extends Field[Parameters => Bundle]

 trait HasCoreplexParameters {
 implicit val p: Parameters
-lazy val nTiles = p(NTiles)
 lazy val nMemChannels = p(NMemoryChannels)
 lazy val nBanksPerMemChannel = p(NBanksPerMemoryChannel)
 lazy val nBanks = nMemChannels*nBanksPerMemChannel
@@ -49,20 +38,31 @@ trait HasCoreplexParameters {
 lazy val innerParams = p.alterPartial({ case TLId => "L1toL2" })
 lazy val outermostParams = p.alterPartial({ case TLId => "Outermost" })
 lazy val outermostMMIOParams = p.alterPartial({ case TLId => "MMIO_Outermost" })
-lazy val nExtClients = p(NExternalClients)
-lazy val exportMMIO = p(ExportMMIOPort)
+lazy val configString = p(rocketchip.ConfigString).get
+lazy val globalAddrMap = p(rocketchip.GlobalAddrMap).get
 }

-abstract class Coreplex(implicit val p: Parameters) extends Module
+case class CoreplexConfig(
+nTiles: Int,
+nExtInterrupts: Int,
+nSlaves: Int,
+hasSupervisor: Boolean,
+hasExtMMIOPort: Boolean)
+{
+val plicKey = PLICConfig(nTiles, hasSupervisor, nExtInterrupts, 0)
+}
+
+abstract class Coreplex(implicit val p: Parameters, implicit val c: CoreplexConfig) extends Module
 with HasCoreplexParameters {
-class CoreplexIO(implicit val p: Parameters) extends Bundle {
-val mem = Vec(nMemChannels, new ClientUncachedTileLinkIO()(outermostParams))
-val ext_clients = Vec(nExtClients, new ClientUncachedTileLinkIO()(innerParams)).flip
-val mmio = p(ExportMMIOPort).option(new ClientUncachedTileLinkIO()(outermostMMIOParams))
-val interrupts = Vec(p(NExtInterrupts), Bool()).asInput
+class CoreplexIO(implicit val p: Parameters, implicit val c: CoreplexConfig) extends Bundle {
+val master = new Bundle {
+val mem = Vec(nMemChannels, new ClientUncachedTileLinkIO()(outermostParams))
+val mmio = c.hasExtMMIOPort.option(new ClientUncachedTileLinkIO()(outermostMMIOParams))
+}
+val slave = Vec(c.nSlaves, new ClientUncachedTileLinkIO()(innerParams)).flip
+val interrupts = Vec(c.nExtInterrupts, Bool()).asInput
 val debug = new DebugBusIO()(p).flip
 val rtcTick = Bool(INPUT)
-val extra = p(ExtraCoreplexPorts)(p)
 val success: Option[Bool] = hasSuccessFlag.option(Bool(OUTPUT))
 }

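A short illustration of how the new CoreplexConfig is meant to be used; the field values below are made-up placeholders, and only the constructor shape and the derived plicKey come from the code above.

    // Hypothetical values for illustration; p is a cde Parameters already in scope.
    val c = CoreplexConfig(
      nTiles = 1,
      nExtInterrupts = 2,     // external interrupt wires routed into the PLIC
      nSlaves = 1,            // TileLink client ports driven by the periphery
      hasSupervisor = true,
      hasExtMMIOPort = true)

    // The PLIC configuration is now derived from these fields (c.plicKey) instead of
    // being read from a PLICKey CDE parameter.
    val coreplex = Module(new DefaultCoreplex(p, c))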
@@ -70,16 +70,16 @@ abstract class Coreplex(implicit val p: Parameters) extends Module
 val io = new CoreplexIO
 }

-class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
+class DefaultCoreplex(tp: Parameters, tc: CoreplexConfig) extends Coreplex()(tp, tc) {
 // Build a set of Tiles
-val tileResets = Wire(Vec(nTiles, Bool()))
+val tileResets = Wire(Vec(tc.nTiles, Bool()))
 val tileList = p(BuildTiles).zip(tileResets).map {
 case (tile, rst) => tile(rst, p)
 }
 val nCachedPorts = tileList.map(tile => tile.io.cached.size).reduce(_ + _)
 val nUncachedPorts = tileList.map(tile => tile.io.uncached.size).reduce(_ + _)

-printConfigString
+// Build an uncore backing the Tiles
 buildUncore(p.alterPartial({
 case HastiId => "TL"
 case TLId => "L1toL2"
@@ -87,25 +87,13 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
 case NUncachedTileLinkPorts => nUncachedPorts
 }))

-def printConfigString(implicit p: Parameters) = {
-println("Generated Address Map")
-for (entry <- p(GlobalAddrMap).flatten) {
-val name = entry.name
-val start = entry.region.start
-val end = entry.region.start + entry.region.size - 1
-println(f"\t$name%s $start%x - $end%x")
-}
-println("Generated Configuration String")
-println(new String(p(ConfigString)))
-}
-
 def buildUncore(implicit p: Parameters) = {
 // Create a simple L1toL2 NoC between the tiles and the banks of outer memory
 // Cached ports are first in client list, making sharerToClientId just an indentity function
 // addrToBank is sed to hash physical addresses (of cache blocks) to banks (and thereby memory channels)
 def sharerToClientId(sharerId: UInt) = sharerId
 def addrToBank(addr: UInt): UInt = if (nBanks == 0) UInt(0) else {
-val isMemory = p(GlobalAddrMap).isInRegion("mem", addr << log2Up(p(CacheBlockBytes)))
+val isMemory = globalAddrMap.isInRegion("mem", addr << log2Up(p(CacheBlockBytes)))
 Mux(isMemory, addr.extract(lsb + log2Ceil(nBanks) - 1, lsb), UInt(nBanks))
 }
 val preBuffering = TileLinkDepths(1,1,2,2,0)
@@ -124,7 +112,7 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
 // Wire the tiles to the TileLink client ports of the L1toL2 network,
 // and coherence manager(s) to the other side
 l1tol2net.io.clients_cached <> tileList.map(_.io.cached).flatten
-l1tol2net.io.clients_uncached <> tileList.map(_.io.uncached).flatten ++ io.ext_clients
+l1tol2net.io.clients_uncached <> tileList.map(_.io.uncached).flatten ++ io.slave
 l1tol2net.io.managers <> managerEndpoints.map(_.innerTL) :+ mmioManager.io.inner

 // Create a converter between TileLinkIO and MemIO for each channel
@@ -138,7 +126,7 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
 TileLinkWidthAdapter(icPort, unwrap.io.out)
 }

-io.mem <> mem_ic.io.out
+io.master.mem <> mem_ic.io.out

 buildMMIONetwork(ClientUncachedTileLinkEnqueuer(mmioManager.io.outer, 1))(
 p.alterPartial({case TLId => "L2toMMIO"}))
@@ -151,7 +139,10 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
 rom.order(ByteOrder.LITTLE_ENDIAN)

 // for now, have the reset vector jump straight to memory
-val memBase = (if (p(GlobalAddrMap) contains "mem") p(GlobalAddrMap)("mem") else p(GlobalAddrMap)("io:int:dmem0")).start
+val memBase = (
+if (globalAddrMap contains "mem") globalAddrMap("mem")
+else globalAddrMap("io:int:dmem0")
+).start
 val resetToMemDist = memBase - p(ResetVector)
 require(resetToMemDist == (resetToMemDist.toInt >> 12 << 12))
 val configStringAddr = p(ResetVector).toInt + rom.capacity
@@ -159,17 +150,17 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
 require(rom.getInt(12) == 0,
 "Config string address position should not be occupied by code")
 rom.putInt(12, configStringAddr)
-rom.array() ++ p(ConfigString).toSeq
+rom.array() ++ (configString.getBytes.toSeq)
 }


 def buildMMIONetwork(mmio: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
-val ioAddrMap = p(GlobalAddrMap).subMap("io")
+val ioAddrMap = globalAddrMap.subMap("io")

 val mmioNetwork = Module(new TileLinkRecursiveInterconnect(1, ioAddrMap))
 mmioNetwork.io.in.head <> mmio

-val plic = Module(new PLIC(p(PLICKey)))
+val plic = Module(new PLIC(c.plicKey))
 plic.io.tl <> mmioNetwork.port("int:plic")
 for (i <- 0 until io.interrupts.size) {
 val gateway = Module(new LevelGateway)
@@ -191,25 +182,25 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
 tile.io.prci <> prci
 }

-for (i <- 0 until nTiles) {
+for (i <- 0 until tc.nTiles) {
 prci.io.interrupts(i).meip := plic.io.harts(plic.cfg.context(i, 'M'))
 if (p(UseVM))
 prci.io.interrupts(i).seip := plic.io.harts(plic.cfg.context(i, 'S'))
 prci.io.interrupts(i).debug := debugModule.io.debugInterrupts(i)
 }

-val tileSlavePorts = (0 until nTiles) map (i => s"int:dmem$i") filter (ioAddrMap contains _)
+val tileSlavePorts = (0 until tc.nTiles) map (i => s"int:dmem$i") filter (ioAddrMap contains _)
 for ((t, m) <- (tileList.map(_.io.slave).flatten) zip (tileSlavePorts map (mmioNetwork port _)))
 t <> ClientUncachedTileLinkEnqueuer(m, 1)

 val bootROM = Module(new ROMSlave(makeBootROM()))
 bootROM.io <> mmioNetwork.port("int:bootrom")

-io.mmio.foreach { _ <> mmioNetwork.port("ext") }
+io.master.mmio.foreach { _ <> mmioNetwork.port("ext") }
 }
 }

-class GroundTestCoreplex(topParams: Parameters) extends DefaultCoreplex(topParams) {
+class GroundTestCoreplex(tp: Parameters, tc: CoreplexConfig) extends DefaultCoreplex(tp, tc) {
 override def hasSuccessFlag = true
 io.success.get := tileList.flatMap(_.io.elements get "success").map(_.asInstanceOf[Bool]).reduce(_&&_)
 }
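With the changes above, BuildCoreplex is a two-argument factory, so a platform config supplies it as shown in later hunks, and the top level calls it with both the parameters and the CoreplexConfig. The call site itself is not visible in this diff, so the last line below is an assumption about how BaseTop consumes the field.

    // Provided by a platform config (mirrors BasePlatformConfig / WithGroundTest below):
    //   case BuildCoreplex =>
    //     (p: Parameters, c: CoreplexConfig) => Module(new DefaultCoreplex(p, c))
    //
    // Consumed by the top level (assumed; not shown in this section):
    val coreplex: Coreplex = p(BuildCoreplex)(p, coreplexConfig)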
@@ -1,63 +0,0 @@
-package coreplex
-
-import Chisel._
-import cde.{Parameters, Field}
-import rocket.TileId
-import groundtest._
-import uncore.tilelink._
-import uncore.agents._
-
-case object ExportGroundTestStatus extends Field[Boolean]
-
-class DirectGroundTestCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
-// Not using the debug
-io.debug.req.ready := Bool(false)
-io.debug.resp.valid := Bool(false)
-
-require(!exportMMIO)
-require(nExtClients == 0)
-require(nMemChannels == 1)
-require(nTiles == 1)
-
-val test = p(BuildGroundTest)(outermostParams.alterPartial({
-case TileId => 0
-case CacheName => "L1D"
-}))
-require(test.io.cache.size == 0)
-require(test.io.mem.size == nBanksPerMemChannel)
-require(test.io.ptw.size == 0)
-
-val mem_ic = Module(new TileLinkMemoryInterconnect(
-nBanksPerMemChannel, nMemChannels)(outermostParams))
-
-mem_ic.io.in <> test.io.mem
-io.mem <> mem_ic.io.out
-
-if (p(ExportGroundTestStatus)) {
-val status = io.extra.asInstanceOf[GroundTestStatus]
-
-val s_running :: s_finished :: s_errored :: s_timeout :: Nil = Enum(Bits(), 4)
-val state = Reg(init = s_running)
-val error_code = Reg(status.error.bits)
-val timeout_code = Reg(status.timeout.bits)
-when (state === s_running) {
-when (test.io.status.finished) { state := s_finished }
-when (test.io.status.error.valid) {
-state := s_errored
-error_code := test.io.status.error.bits
-}
-when (test.io.status.timeout.valid) {
-state := s_timeout
-timeout_code := test.io.status.timeout.bits
-}
-}
-status.finished := (state === s_finished)
-status.error.valid := (state === s_errored)
-status.error.bits := error_code
-status.timeout.valid := (state === s_timeout)
-status.timeout.bits := timeout_code
-}
-
-override def hasSuccessFlag = true
-io.success.get := test.io.status.finished
-}
@@ -2,6 +2,7 @@ package coreplex

 import Chisel._
 import groundtest._
+import rocketchip.{GlobalAddrMap}
 import rocket._
 import uncore.tilelink._
 import uncore.coherence._
@@ -24,7 +25,7 @@ class WithComparator extends Config(
 (p: Parameters) => Module(new ComparatorCore()(p))
 case ComparatorKey => ComparatorParameters(
 targets = Seq("mem", "io:ext:testram").map(name =>
-site(GlobalAddrMap)(name).start.longValue),
+site(GlobalAddrMap).get(name).start.longValue),
 width = 8,
 operations = 1000,
 atomics = site(UseAtomics),
@@ -54,7 +55,7 @@ class WithMemtest extends Config(
 }
 case GeneratorKey => GeneratorParameters(
 maxRequests = 128,
-startAddress = site(GlobalAddrMap)("mem").start)
+startAddress = site(GlobalAddrMap).get("mem").start)
 case BuildGroundTest =>
 (p: Parameters) => Module(new GeneratorTest()(p))
 case _ => throw new CDEMatchError
@@ -114,7 +115,7 @@ class WithNastiConverterTest extends Config(
 }
 case GeneratorKey => GeneratorParameters(
 maxRequests = 128,
-startAddress = site(GlobalAddrMap)("mem").start)
+startAddress = site(GlobalAddrMap).get("mem").start)
 case BuildGroundTest =>
 (p: Parameters) => Module(new NastiConverterTest()(p))
 case _ => throw new CDEMatchError
@@ -134,7 +135,7 @@ class WithTraceGen extends Config(
 val nSets = 32 // L2 NSets
 val nWays = 1
 val blockOffset = site(CacheBlockOffsetBits)
-val baseAddr = site(GlobalAddrMap)("mem").start
+val baseAddr = site(GlobalAddrMap).get("mem").start
 val nBeats = site(MIFDataBeats)
 List.tabulate(4 * nWays) { i =>
 Seq.tabulate(nBeats) { j => (j * 8) + ((i * nSets) << blockOffset) }
@@ -156,7 +157,7 @@ class WithPCIeMockupTest extends Config(
 GroundTestTileSettings(1))
 case GeneratorKey => GeneratorParameters(
 maxRequests = 128,
-startAddress = site(GlobalAddrMap)("mem").start)
+startAddress = site(GlobalAddrMap).get("mem").start)
 case BuildGroundTest =>
 (p: Parameters) => p(TileId) match {
 case 0 => Module(new GeneratorTest()(p))
@@ -6,9 +6,9 @@ import rocket.Tile
 import uncore.tilelink.TLId
 import cde.Parameters

-class UnitTestCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
-require(!exportMMIO)
-require(nExtClients == 0)
+class UnitTestCoreplex(tp: Parameters, tc: CoreplexConfig) extends Coreplex()(tp, tc) {
+require(!tc.hasExtMMIOPort)
+require(tc.nSlaves == 0)
 require(nMemChannels == 0)

 io.debug.req.ready := Bool(false)
@@ -176,6 +176,7 @@ class TagMan(val logNumTags : Int) extends Module {

 class TraceGenerator(id: Int)
 (implicit p: Parameters) extends L1HellaCacheModule()(p)
+with HasAddrMapParameters
 with HasTraceGenParams {
 val io = new Bundle {
 val finished = Bool(OUTPUT)
@@ -197,8 +198,7 @@ class TraceGenerator(id: Int)
 // Address bag, shared by all cores, taken from module parameters.
 // In addition, there is a per-core random selection of extra addresses.

-val addrHashMap = p(GlobalAddrMap)
-val baseAddr = addrHashMap("mem").start + 0x01000000
+val baseAddr = addrMap("mem").start + 0x01000000

 val bagOfAddrs = addressBag.map(x => UInt(x, numBitsInWord))

@@ -7,13 +7,12 @@ import cde.{Parameters, Field}
 import scala.collection.mutable.HashMap

 case object PAddrBits extends Field[Int]
-case object GlobalAddrMap extends Field[AddrMap]

 trait HasAddrMapParameters {
 implicit val p: Parameters

 val paddrBits = p(PAddrBits)
-val addrMap = p(GlobalAddrMap)
+def addrMap = p(rocketchip.GlobalAddrMap).get
 }

 case class MemAttr(prot: Int, cacheable: Boolean = false)
@@ -8,6 +8,7 @@ import rocket._
 import rocket.Util._
 import uncore.agents._
 import uncore.tilelink._
+import uncore.tilelink2.{LazyModule}
 import uncore.devices._
 import uncore.converters._
 import coreplex._
@@ -17,156 +18,66 @@ import scala.collection.immutable.HashMap
 import DefaultTestSuites._
 import cde.{Parameters, Config, Dump, Knob, CDEMatchError}

-class BasePlatformConfig extends Config (
-topDefinitions = { (pname,site,here) =>
-type PF = PartialFunction[Any,Any]
-def findBy(sname:Any):Any = here[PF](site[Any](sname))(pname)
-lazy val internalIOAddrMap: AddrMap = {
-val entries = collection.mutable.ArrayBuffer[AddrMapEntry]()
-entries += AddrMapEntry("debug", MemSize(4096, MemAttr(AddrMapProt.RWX)))
-entries += AddrMapEntry("bootrom", MemSize(4096, MemAttr(AddrMapProt.RX)))
-entries += AddrMapEntry("plic", MemRange(0x40000000, 0x4000000, MemAttr(AddrMapProt.RW)))
-entries += AddrMapEntry("prci", MemSize(0x4000000, MemAttr(AddrMapProt.RW)))
-if (site(DataScratchpadSize) > 0) { // TODO heterogeneous tiles
-require(site(NTiles) == 1) // TODO relax this
-require(site(NMemoryChannels) == 0) // TODO allow both scratchpad & DRAM
-entries += AddrMapEntry("dmem0", MemRange(0x80000000L, site[Int](DataScratchpadSize), MemAttr(AddrMapProt.RWX)))
-}
-new AddrMap(entries)
-}
-lazy val externalAddrMap = new AddrMap(
-site(ExtraDevices).addrMapEntries ++ site(ExtMMIOPorts),
-start = BigInt("50000000", 16),
-collapse = true)
-lazy val globalAddrMap = {
-val memBase = 0x80000000L
-val memSize = site(ExtMemSize)
-
-val intern = AddrMapEntry("int", internalIOAddrMap)
-val extern = AddrMapEntry("ext", externalAddrMap)
-val io = AddrMapEntry("io", AddrMap((intern +: site(ExportMMIOPort).option(extern).toSeq):_*))
-val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true)))
-val addrMap = AddrMap((io +: (site(NMemoryChannels) > 0).option(mem).toSeq):_*)
-
-Dump("MEM_BASE", memBase)
-addrMap
-}
-def makeConfigString() = {
-val addrMap = globalAddrMap
-val plicAddr = addrMap("io:int:plic").start
-val prciAddr = addrMap("io:int:prci").start
-val plicInfo = site(PLICKey)
-val xLen = site(XLen)
-val res = new StringBuilder
-res append "plic {\n"
-res append s" priority 0x${plicAddr.toString(16)};\n"
-res append s" pending 0x${(plicAddr + plicInfo.pendingBase).toString(16)};\n"
-res append s" ndevs ${plicInfo.nDevices};\n"
-res append "};\n"
-res append "rtc {\n"
-res append s" addr 0x${(prciAddr + PRCI.time).toString(16)};\n"
-res append "};\n"
-if (addrMap contains "mem") {
-res append "ram {\n"
-res append " 0 {\n"
-res append s" addr 0x${addrMap("mem").start.toString(16)};\n"
-res append s" size 0x${addrMap("mem").size.toString(16)};\n"
-res append " };\n"
-res append "};\n"
-}
-res append "core {\n"
-for (i <- 0 until site(NTiles)) { // TODO heterogeneous tiles
-val isa = {
-val m = if (site(MulDivKey).nonEmpty) "m" else ""
-val a = if (site(UseAtomics)) "a" else ""
-val f = if (site(FPUKey).nonEmpty) "f" else ""
-val d = if (site(FPUKey).nonEmpty && site(XLen) > 32) "d" else ""
-val s = if (site(UseVM)) "s" else ""
-s"rv${site(XLen)}i$m$a$f$d$s"
+class BasePlatformConfig extends Config(
+topDefinitions = {
+val configString = new GlobalVariable[String]
+val globalAddrMap = new GlobalVariable[AddrMap]
+val nCoreplexExtClients = new GlobalVariable[Int]
+(pname,site,here) => {
+type PF = PartialFunction[Any,Any]
+def findBy(sname:Any):Any = here[PF](site[Any](sname))(pname)
+lazy val innerDataBits = 64
+lazy val innerDataBeats = (8 * site(CacheBlockBytes)) / innerDataBits
+pname match {
+//Memory Parameters
+case MIFTagBits => Dump("MIF_TAG_BITS", 5)
+case MIFDataBits => Dump("MIF_DATA_BITS", 64)
+case MIFAddrBits => Dump("MIF_ADDR_BITS",
+site(PAddrBits) - site(CacheBlockOffsetBits))
+case MIFDataBeats => site(CacheBlockBytes) * 8 / site(MIFDataBits)
+case NastiKey => {
+Dump("MEM_STRB_BITS", site(MIFDataBits) / 8)
+NastiParameters(
+dataBits = Dump("MEM_DATA_BITS", site(MIFDataBits)),
+addrBits = Dump("MEM_ADDR_BITS", site(PAddrBits)),
+idBits = Dump("MEM_ID_BITS", site(MIFTagBits)))
 }
-res append s" $i {\n"
-res append " 0 {\n"
-res append s" isa $isa;\n"
-res append s" timecmp 0x${(prciAddr + PRCI.timecmp(i)).toString(16)};\n"
-res append s" ipi 0x${(prciAddr + PRCI.msip(i)).toString(16)};\n"
-res append s" plic {\n"
-res append s" m {\n"
-res append s" ie 0x${(plicAddr + plicInfo.enableAddr(i, 'M')).toString(16)};\n"
-res append s" thresh 0x${(plicAddr + plicInfo.threshAddr(i, 'M')).toString(16)};\n"
-res append s" claim 0x${(plicAddr + plicInfo.claimAddr(i, 'M')).toString(16)};\n"
-res append s" };\n"
-if (site(UseVM)) {
-res append s" s {\n"
-res append s" ie 0x${(plicAddr + plicInfo.enableAddr(i, 'S')).toString(16)};\n"
-res append s" thresh 0x${(plicAddr + plicInfo.threshAddr(i, 'S')).toString(16)};\n"
-res append s" claim 0x${(plicAddr + plicInfo.claimAddr(i, 'S')).toString(16)};\n"
-res append s" };\n"
-}
-res append " };\n"
-res append " };\n"
-res append " };\n"
+case BuildCoreplex =>
+(p: Parameters, c: CoreplexConfig) => Module(new DefaultCoreplex(p, c))
+case NExtTopInterrupts => 2
+// Note that PLIC asserts that this is > 0.
+case AsyncDebugBus => false
+case IncludeJtagDTM => false
+case AsyncMMIOChannels => false
+case ExtMMIOPorts => Nil
+case NExtMMIOAXIChannels => 0
+case NExtMMIOAHBChannels => 0
+case NExtMMIOTLChannels => 0
+case AsyncBusChannels => false
+case NExtBusAXIChannels => 0
+case NCoreplexExtClients => nCoreplexExtClients
+case HastiId => "Ext"
+case HastiKey("TL") =>
+HastiParameters(
+addrBits = site(PAddrBits),
+dataBits = site(TLKey(site(TLId))).dataBits / site(TLKey(site(TLId))).dataBeats)
+case HastiKey("Ext") =>
+HastiParameters(
+addrBits = site(PAddrBits),
+dataBits = site(XLen))
+case AsyncMemChannels => false
+case NMemoryChannels => Dump("N_MEM_CHANNELS", 1)
+case TMemoryChannels => BusType.AXI
+case ExtMemSize => Dump("MEM_SIZE", 0x10000000L)
+case ConfigString => configString
+case GlobalAddrMap => globalAddrMap
+case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
+case BuildExampleTop =>
+(p: Parameters) => uncore.tilelink2.LazyModule(new ExampleTop(p))
+case _ => throw new CDEMatchError
 }
-res append "};\n"
-res append (site(ExtraDevices).makeConfigString(addrMap))
-res append '\u0000'
-res.toString.getBytes
 }
-lazy val innerDataBits = 64
-lazy val innerDataBeats = (8 * site(CacheBlockBytes)) / innerDataBits
-pname match {
-//Memory Parameters
-case MIFTagBits => Dump("MIF_TAG_BITS", 5)
-case MIFDataBits => Dump("MIF_DATA_BITS", 64)
-case MIFAddrBits => Dump("MIF_ADDR_BITS",
-site(PAddrBits) - site(CacheBlockOffsetBits))
-case MIFDataBeats => site(CacheBlockBytes) * 8 / site(MIFDataBits)
-case NastiKey => {
-Dump("MEM_STRB_BITS", site(MIFDataBits) / 8)
-NastiParameters(
-dataBits = Dump("MEM_DATA_BITS", site(MIFDataBits)),
-addrBits = Dump("MEM_ADDR_BITS", site(PAddrBits)),
-idBits = Dump("MEM_ID_BITS", site(MIFTagBits)))
-}
-case BuildCoreplex => (p: Parameters) => Module(new DefaultCoreplex(p))
-case NExtTopInterrupts => 2
-case NExtPeripheryInterrupts => site(ExtraDevices).nInterrupts
-// Note that PLIC asserts that this is > 0.
-case NExtInterrupts => site(NExtTopInterrupts) + site(NExtPeripheryInterrupts)
-case AsyncDebugBus => false
-case IncludeJtagDTM => false
-case AsyncMMIOChannels => false
-case ExtraDevices => new EmptyDeviceBlock
-case ExtraTopPorts => (p: Parameters) => new Bundle
-case ExtMMIOPorts => Nil
-case NExtMMIOAXIChannels => 0
-case NExtMMIOAHBChannels => 0
-case NExtMMIOTLChannels => 0
-case ExportMMIOPort => !externalAddrMap.isEmpty
-case AsyncBusChannels => false
-case NExtBusAXIChannels => 0
-case NExternalClients => (if (site(NExtBusAXIChannels) > 0) 1 else 0) +
-site(ExtraDevices).nClientPorts
-case ConnectExtraPorts =>
-(out: Bundle, in: Bundle, p: Parameters) => out <> in
-
-case HastiId => "Ext"
-case HastiKey("TL") =>
-HastiParameters(
-addrBits = site(PAddrBits),
-dataBits = site(TLKey(site(TLId))).dataBits / site(TLKey(site(TLId))).dataBeats)
-case HastiKey("Ext") =>
-HastiParameters(
-addrBits = site(PAddrBits),
-dataBits = site(XLen))
-case AsyncMemChannels => false
-case NMemoryChannels => Dump("N_MEM_CHANNELS", 1)
-case TMemoryChannels => BusType.AXI
-case ExtMemSize => Dump("MEM_SIZE", 0x10000000L)
-case ConfigString => makeConfigString()
-case GlobalAddrMap => globalAddrMap
-case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
-case _ => throw new CDEMatchError
-}})
+})

 class BaseConfig extends Config(new BaseCoreplexConfig ++ new BasePlatformConfig)
 class DefaultConfig extends Config(new WithBlockingL1 ++ new BaseConfig)
@@ -178,7 +89,6 @@ class DefaultBufferlessConfig extends Config(
 class FPGAConfig extends Config (
 (pname,site,here) => pname match {
 case NAcquireTransactors => 4
-case ExportGroundTestStatus => true
 case _ => throw new CDEMatchError
 }
 )
@@ -269,35 +179,12 @@ class TinyConfig extends Config(
 new WithSmallCores ++ new WithRV32 ++
 new WithStatelessBridge ++ new BaseConfig)

-class WithTestRAM extends Config(
-(pname, site, here) => pname match {
-case ExtraDevices => {
-class TestRAMDevice extends DeviceBlock {
-val ramSize = 0x1000
-def nClientPorts = 0
-def addrMapEntries = Seq(
-AddrMapEntry("testram", MemSize(ramSize, MemAttr(AddrMapProt.RW))))
-def builder(
-mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-clientPorts: Seq[ClientUncachedTileLinkIO],
-interrupts: Seq[Bool],
-extra: Bundle, p: Parameters) {
-val testram = Module(new TileLinkTestRAM(ramSize)(p))
-testram.io <> mmioPorts("testram")
-}
-}
-new TestRAMDevice
-}
-}
-)
-
 class WithAsyncDebug extends Config (
 (pname, site, here) => pname match {
 case AsyncDebugBus => true
 }
 )


 class WithJtagDTM extends Config (
 (pname, site, here) => pname match {
 case IncludeJtagDTM => true
@@ -1,68 +0,0 @@
-package rocketchip
-
-import Chisel._
-import junctions._
-import uncore.tilelink._
-import scala.collection.immutable.HashMap
-import cde.{Parameters, Field}
-
-case object ExtraTopPorts extends Field[Parameters => Bundle]
-case object ExtraDevices extends Field[DeviceBlock]
-
-abstract class DeviceBlock {
-/** How many client ports will the devices use */
-def nClientPorts: Int
-/** Address map entries for all of the devices */
-def addrMapEntries: Seq[AddrMapEntry]
-/**
-* The total number of interrupt signals coming
-* from all the devices */
-def nInterrupts : Int = 0
-
-/**
-* The function that elaborates all the extra devices and connects them
-* to the TileLink ports and extra top-level ports.
-*
-* @param mmioPorts A hashmap for the mmio ports.
-* Use the names specified in addrMapEntries to get
-* the mmio port for each device.
-* @param clientPorts All the client ports available for the devices
-* @param interrupts External interrupts from Periphery to Coreplex
-* @param extra The extra top-level IO bundle
-* @param p The CDE parameters for the devices
-*/
-def builder(
-mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-clientPorts: Seq[ClientUncachedTileLinkIO],
-interrupts : Seq[Bool],
-extra: Bundle, p: Parameters): Unit
-
-/**
-* Create the config string entry for this device that goes into the
-* Boot ROM. You generally won't need to override this
-*
-* @param fullAddrMap The full global address map
-*/
-def makeConfigString(fullAddrMap: AddrMap): String = {
-addrMapEntries.map { entry =>
-val region = fullAddrMap("io:ext:" + entry.name)
-s"${entry.name} {\n" +
-s" addr 0x${region.start.toString(16)};\n" +
-s" size 0x${region.size.toString(16)}; \n" +
-"}\n"
-}.mkString
-}
-
-
-}
-
-class EmptyDeviceBlock extends DeviceBlock {
-def nClientPorts = 0
-def addrMapEntries = Seq.empty
-
-def builder(
-mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-clientPorts: Seq[ClientUncachedTileLinkIO],
-interrupts : Seq[Bool],
-extra: Bundle, p: Parameters) {}
-}
@@ -82,5 +82,5 @@ object RocketChipGenerator extends Generator {
 writeOutputFile(td, s"$longName.prm", ParameterDump.getDump) // Parameters flagged with Dump()
 writeOutputFile(td, s"${names.configs}.knb", world.getKnobs) // Knobs for DSE
 writeOutputFile(td, s"${names.configs}.cst", world.getConstraints) // Constraints for DSE
-writeOutputFile(td, s"${names.configs}.cfg", params(ConfigString).toString) // String for software
+writeOutputFile(td, s"${names.configs}.cfg", params(ConfigString).get) // String for software
 }
@@ -37,17 +37,10 @@ case object AsyncMemChannels extends Field[Boolean]
 case object AsyncMMIOChannels extends Field[Boolean]
 /** External address map settings */
 case object ExtMMIOPorts extends Field[Seq[AddrMapEntry]]
-/** Function for building Coreplex */
-case object BuildCoreplex extends Field[Parameters => Coreplex]
-/** Function for connecting coreplex extra ports to top-level extra ports */
-case object ConnectExtraPorts extends Field[(Bundle, Bundle, Parameters) => Unit]
 /** Specifies the size of external memory */
 case object ExtMemSize extends Field[Long]
-/** Specifies the actual sorce of External Interrupts as Top and Periphery.
-* NExtInterrupts = NExtTopInterrupts + NExtPeripheryInterrupts
-**/
+/** Specifies the number of external interrupts */
 case object NExtTopInterrupts extends Field[Int]
-case object NExtPeripheryInterrupts extends Field[Int]
 /** Source of RTC. First bundle is TopIO.extra, Second bundle is periphery.io.extra **/
 case object RTCPeriod extends Field[Int]

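The periphery traits in the next hunks coordinate through two small pieces of bookkeeping: a RangeManager for interrupt and bus-master slots, and a ResourceManager for address-map entries. Condensed from the code that follows (not an independent implementation), the flow is:

    // At LazyModule construction time, each trait claims what it needs:
    pInterrupts.add("ext", p(NExtTopInterrupts))   // a block of interrupt indices
    pBusMasters.add("busmaster", 1)                // one coreplex slave port
    pDevices.add(AddrMapEntry("testram", MemSize(0x1000, MemAttr(AddrMapProt.RW))))

    // In the module-implementation trait, the claimed block is looked up and
    // exactly those indices are wired:
    val r = outer.pInterrupts.range("ext")         // (start, end) of the claimed block
    ((r._1 until r._2) zipWithIndex) foreach { case (c, i) =>
      coreplex.io.interrupts(c) := io.interrupts(i)
    }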
@ -122,29 +115,30 @@ trait PeripheryDebugModule {
|
|||||||
|
|
||||||
/////
|
/////
|
||||||
|
|
||||||
trait PeripheryInterrupt extends LazyModule {
|
trait PeripheryExtInterrupts extends LazyModule {
|
||||||
implicit val p: Parameters
|
implicit val p: Parameters
|
||||||
|
val pInterrupts: RangeManager
|
||||||
|
|
||||||
|
pInterrupts.add("ext", p(NExtTopInterrupts))
|
||||||
}
|
}
|
||||||
|
|
||||||
trait PeripheryInterruptBundle {
|
trait PeripheryExtInterruptsBundle {
|
||||||
implicit val p: Parameters
|
implicit val p: Parameters
|
||||||
val interrupts = Vec(p(NExtTopInterrupts), Bool()).asInput
|
val interrupts = Vec(p(NExtTopInterrupts), Bool()).asInput
|
||||||
}
|
}
|
||||||
|
|
||||||
trait PeripheryInterruptModule {
|
trait PeripheryExtInterruptsModule {
|
||||||
implicit val p: Parameters
|
implicit val p: Parameters
|
||||||
val outer: PeripheryInterrupt
|
val outer: PeripheryExtInterrupts
|
||||||
val io: PeripheryInterruptBundle
|
val io: PeripheryExtInterruptsBundle
|
||||||
val coreplex: Coreplex
|
val coreplex: Coreplex
|
||||||
|
|
||||||
val interrupts_periphery = Vec(p(NExtPeripheryInterrupts), Bool())
|
{
|
||||||
var interrupts_cnt = 0
|
val r = outer.pInterrupts.range("ext")
|
||||||
|
((r._1 until r._2) zipWithIndex) foreach { case (c, i) =>
|
||||||
// This places the Periphery Interrupts at Bits [0...]
|
coreplex.io.interrupts(c) := io.interrupts(i)
|
||||||
// External interrupts are at the higher Bits.
|
}
|
||||||
// This may have some implications for prioritization of the interrupts,
|
}
|
||||||
// but PLIC could do some internal swizzling in the future.
|
|
||||||
coreplex.io.interrupts <> (interrupts_periphery ++ io.interrupts)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/////
|
/////
|
||||||
@ -169,7 +163,7 @@ trait PeripheryMasterMemModule extends HasPeripheryParameters {
|
|||||||
val coreplex: Coreplex
|
val coreplex: Coreplex
|
||||||
|
|
||||||
// Abuse the fact that zip takes the shorter of the two lists
|
// Abuse the fact that zip takes the shorter of the two lists
|
||||||
((io.mem_axi zip coreplex.io.mem) zipWithIndex) foreach { case ((axi, mem), idx) =>
|
((io.mem_axi zip coreplex.io.master.mem) zipWithIndex) foreach { case ((axi, mem), idx) =>
|
||||||
val axi_sync = PeripheryUtils.convertTLtoAXI(mem)(outermostParams)
|
val axi_sync = PeripheryUtils.convertTLtoAXI(mem)(outermostParams)
|
||||||
axi_sync.ar.bits.cache := UInt("b0011")
|
axi_sync.ar.bits.cache := UInt("b0011")
|
||||||
axi_sync.aw.bits.cache := UInt("b0011")
|
axi_sync.aw.bits.cache := UInt("b0011")
|
||||||
@ -179,11 +173,11 @@ trait PeripheryMasterMemModule extends HasPeripheryParameters {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
(io.mem_ahb zip coreplex.io.mem) foreach { case (ahb, mem) =>
|
(io.mem_ahb zip coreplex.io.master.mem) foreach { case (ahb, mem) =>
|
||||||
ahb <> PeripheryUtils.convertTLtoAHB(mem, atomics = false)(outermostParams)
|
ahb <> PeripheryUtils.convertTLtoAHB(mem, atomics = false)(outermostParams)
|
||||||
}
|
}
|
||||||
|
|
||||||
(io.mem_tl zip coreplex.io.mem) foreach { case (tl, mem) =>
|
(io.mem_tl zip coreplex.io.master.mem) foreach { case (tl, mem) =>
|
||||||
tl <> ClientUncachedTileLinkEnqueuer(mem, 2)(outermostParams)
|
tl <> ClientUncachedTileLinkEnqueuer(mem, 2)(outermostParams)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -245,6 +239,9 @@ trait PeripheryMasterMMIOModule extends HasPeripheryParameters {
|
|||||||
|
|
||||||
trait PeripherySlave extends LazyModule {
|
trait PeripherySlave extends LazyModule {
|
||||||
implicit val p: Parameters
|
implicit val p: Parameters
|
||||||
|
val pBusMasters: RangeManager
|
||||||
|
|
||||||
|
if (p(NExtBusAXIChannels) > 0) pBusMasters.add("ext", 1) // NExtBusAXIChannels are arbitrated into one TL port
|
||||||
}
|
}
|
||||||
|
|
||||||
trait PeripherySlaveBundle extends HasPeripheryParameters {
|
trait PeripherySlaveBundle extends HasPeripheryParameters {
|
||||||
@ -270,7 +267,65 @@ trait PeripherySlaveModule extends HasPeripheryParameters {
|
|||||||
}
|
}
|
||||||
val conv = Module(new TileLinkIONastiIOConverter()(innerParams))
|
val conv = Module(new TileLinkIONastiIOConverter()(innerParams))
|
||||||
conv.io.nasti <> arb.io.slave
|
conv.io.nasti <> arb.io.slave
|
||||||
coreplex.io.ext_clients.head <> conv.io.tl
|
|
||||||
require(p(NExternalClients) == 1, "external devices can't slave ports. wait for tilelink2!")
|
val r = outer.pBusMasters.range("ext")
|
||||||
|
require(r._2 - r._1 == 1, "RangeManager should return 1 slot")
|
||||||
|
coreplex.io.slave(r._1) <> conv.io.tl
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/////
|
||||||
|
|
||||||
|
trait PeripheryTestRAM extends LazyModule {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val pDevices: ResourceManager[AddrMapEntry]
|
||||||
|
|
||||||
|
val ramSize = 0x1000
|
||||||
|
pDevices.add(AddrMapEntry("testram", MemSize(ramSize, MemAttr(AddrMapProt.RW))))
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestRAMBundle {
|
||||||
|
implicit val p: Parameters
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestRAMModule extends HasPeripheryParameters {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val outer: PeripheryTestRAM
|
||||||
|
val io: PeripheryTestRAMBundle
|
||||||
|
val mmioNetwork: Option[TileLinkRecursiveInterconnect]
|
||||||
|
|
||||||
|
val testram = Module(new TileLinkTestRAM(outer.ramSize)(innerMMIOParams))
|
||||||
|
testram.io <> mmioNetwork.get.port("testram")
|
||||||
|
}
|
||||||
|
|
||||||
|
/////
|
||||||
|
|
||||||
|
trait PeripheryTestBusMaster extends LazyModule {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val pBusMasters: RangeManager
|
||||||
|
val pDevices: ResourceManager[AddrMapEntry]
|
||||||
|
|
||||||
|
pBusMasters.add("busmaster", 1)
|
||||||
|
pDevices.add(AddrMapEntry("busmaster", MemSize(4096, MemAttr(AddrMapProt.RW))))
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestBusMasterBundle {
|
||||||
|
implicit val p: Parameters
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestBusMasterModule {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val outer: PeripheryTestBusMaster
|
||||||
|
val io: PeripheryTestBusMasterBundle
|
||||||
|
val mmioNetwork: Option[TileLinkRecursiveInterconnect]
|
||||||
|
val coreplex: Coreplex
|
||||||
|
|
||||||
|
val busmaster = Module(new groundtest.ExampleBusMaster()(p))
|
||||||
|
busmaster.io.mmio <> mmioNetwork.get.port("busmaster")
|
||||||
|
|
||||||
|
{
|
||||||
|
val r = outer.pBusMasters.range("busmaster")
|
||||||
|
require(r._2 - r._1 == 1, "RangeManager should return 1 slot")
|
||||||
|
coreplex.io.slave(r._1) <> busmaster.io.mem
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -26,7 +26,7 @@ class WithUnitTest extends Config(
|
|||||||
DefaultTestSuites.groundtest32
|
DefaultTestSuites.groundtest32
|
||||||
TestGeneration.addSuite(groundtest("p"))
|
TestGeneration.addSuite(groundtest("p"))
|
||||||
TestGeneration.addSuite(DefaultTestSuites.emptyBmarks)
|
TestGeneration.addSuite(DefaultTestSuites.emptyBmarks)
|
||||||
(p: Parameters) => Module(new UnitTestCoreplex(p))
|
(p: Parameters, c: CoreplexConfig) => Module(new UnitTestCoreplex(p, c))
|
||||||
}
|
}
|
||||||
case UnitTests => (testParams: Parameters) =>
|
case UnitTests => (testParams: Parameters) =>
|
||||||
JunctionsUnitTests(testParams) ++ UncoreUnitTests(testParams)
|
JunctionsUnitTests(testParams) ++ UncoreUnitTests(testParams)
|
||||||
@ -42,7 +42,8 @@ class UnitTestConfig extends Config(new WithUnitTest ++ new BaseConfig)
|
|||||||
|
|
||||||
class WithGroundTest extends Config(
|
class WithGroundTest extends Config(
|
||||||
(pname, site, here) => pname match {
|
(pname, site, here) => pname match {
|
||||||
case BuildCoreplex => (p: Parameters) => Module(new GroundTestCoreplex(p))
|
case BuildCoreplex =>
|
||||||
|
(p: Parameters, c: CoreplexConfig) => Module(new GroundTestCoreplex(p, c))
|
||||||
case TLKey("L1toL2") => {
|
case TLKey("L1toL2") => {
|
||||||
val useMEI = site(NTiles) <= 1 && site(NCachedTileLinkPorts) <= 1
|
val useMEI = site(NTiles) <= 1 && site(NCachedTileLinkPorts) <= 1
|
||||||
TileLinkParameters(
|
TileLinkParameters(
|
||||||
@ -51,7 +52,7 @@ class WithGroundTest extends Config(
|
|||||||
else new MESICoherence(site(L2DirectoryRepresentation))),
|
else new MESICoherence(site(L2DirectoryRepresentation))),
|
||||||
nManagers = site(NBanksPerMemoryChannel)*site(NMemoryChannels) + 1,
|
nManagers = site(NBanksPerMemoryChannel)*site(NMemoryChannels) + 1,
|
||||||
nCachingClients = site(NCachedTileLinkPorts),
|
nCachingClients = site(NCachedTileLinkPorts),
|
||||||
nCachelessClients = site(NExternalClients) + site(NUncachedTileLinkPorts),
|
nCachelessClients = site(NCoreplexExtClients).get + site(NUncachedTileLinkPorts),
|
||||||
maxClientXacts = ((site(DCacheKey).nMSHRs + 1) +:
|
maxClientXacts = ((site(DCacheKey).nMSHRs + 1) +:
|
||||||
site(GroundTestKey).map(_.maxXacts))
|
site(GroundTestKey).map(_.maxXacts))
|
||||||
.reduce(max(_, _)),
|
.reduce(max(_, _)),
|
||||||
@ -79,6 +80,8 @@ class WithGroundTest extends Config(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
case BuildExampleTop =>
|
||||||
|
(p: Parameters) => uncore.tilelink2.LazyModule(new ExampleTopWithTestRAM(p))
|
||||||
case FPUKey => None
|
case FPUKey => None
|
||||||
case UseAtomics => false
|
case UseAtomics => false
|
||||||
case UseCompressed => false
|
case UseCompressed => false
|
||||||
@ -89,7 +92,7 @@ class WithGroundTest extends Config(
|
|||||||
class GroundTestConfig extends Config(new WithGroundTest ++ new BaseConfig)
|
class GroundTestConfig extends Config(new WithGroundTest ++ new BaseConfig)
|
||||||
|
|
||||||
class ComparatorConfig extends Config(
|
class ComparatorConfig extends Config(
|
||||||
new WithTestRAM ++ new WithComparator ++ new GroundTestConfig)
|
new WithComparator ++ new GroundTestConfig)
|
||||||
class ComparatorL2Config extends Config(
|
class ComparatorL2Config extends Config(
|
||||||
new WithAtomics ++ new WithPrefetches ++
|
new WithAtomics ++ new WithPrefetches ++
|
||||||
new WithL2Cache ++ new ComparatorConfig)
|
new WithL2Cache ++ new ComparatorConfig)
|
||||||
@ -147,60 +150,3 @@ class MIF32BitMemtestConfig extends Config(

class PCIeMockupTestConfig extends Config(
  new WithPCIeMockupTest ++ new GroundTestConfig)

-class WithDirectGroundTest extends Config(
-  (pname, site, here) => pname match {
-    case ExportGroundTestStatus => true
-    case BuildCoreplex => (p: Parameters) => Module(new DirectGroundTestCoreplex(p))
-    case ExtraCoreplexPorts => (p: Parameters) =>
-      if (p(ExportGroundTestStatus)) new GroundTestStatus else new Bundle
-    case ExtraTopPorts => (p: Parameters) =>
-      if (p(ExportGroundTestStatus)) new GroundTestStatus else new Bundle
-    case TLKey("Outermost") => site(TLKey("L2toMC")).copy(
-      maxClientXacts = site(GroundTestKey)(0).maxXacts,
-      maxClientsPerPort = site(NBanksPerMemoryChannel),
-      dataBeats = site(MIFDataBeats))
-    case NBanksPerMemoryChannel => site(GroundTestKey)(0).uncached
-    case _ => throw new CDEMatchError
-  })
-
-class DirectGroundTestConfig extends Config(
-  new WithDirectGroundTest ++ new GroundTestConfig)
-class DirectMemtestConfig extends Config(
-  new WithDirectMemtest ++ new DirectGroundTestConfig)
-class DirectComparatorConfig extends Config(
-  new WithDirectComparator ++ new DirectGroundTestConfig)
-
-class DirectMemtestFPGAConfig extends Config(
-  new FPGAConfig ++ new DirectMemtestConfig)
-class DirectComparatorFPGAConfig extends Config(
-  new FPGAConfig ++ new DirectComparatorConfig)
-
-class WithBusMasterTest extends Config(
-  (pname, site, here) => pname match {
-    case GroundTestKey => Seq.fill(site(NTiles)) {
-      GroundTestTileSettings(uncached = 1)
-    }
-    case BuildGroundTest =>
-      (p: Parameters) => Module(new BusMasterTest()(p))
-    case ExtraDevices => {
-      class BusMasterDevice extends DeviceBlock {
-        def nClientPorts = 1
-        def addrMapEntries = Seq(
-          AddrMapEntry("busmaster", MemSize(4096, MemAttr(AddrMapProt.RW))))
-        def builder(
-          mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-          clientPorts: Seq[ClientUncachedTileLinkIO],
-          interrupts: Seq[Bool],
-          extra: Bundle, p: Parameters) {
-          val busmaster = Module(new ExampleBusMaster()(p))
-          busmaster.io.mmio <> mmioPorts("busmaster")
-          clientPorts.head <> busmaster.io.mem
-        }
-      }
-      new BusMasterDevice
-    }
-    case _ => throw new CDEMatchError
-  })
-
-class BusMasterTestConfig extends Config(new WithBusMasterTest ++ new GroundTestConfig)
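The deleted WithDirectGroundTest and WithBusMasterTest alternatives relied on the ExtraDevices / ExtraCoreplexPorts / ExtraTopPorts hooks that this commit removes. Under the trait-based scheme, a device like the bus master would instead be contributed by a periphery trait that registers itself with the bookkeeping BaseTop now carries (pDevices, pBusMasters, pInterrupts). The following is a rough sketch only; the trait name and wiring are hypothetical, not this commit's actual periphery API:

    // Hypothetical: registers the bus-master's address-map entry and its one
    // uncached client port against the BaseTop bookkeeping shown further down.
    trait PeripheryBusMaster {
      this: BaseTop =>
      pDevices.add(AddrMapEntry("busmaster", MemSize(4096, MemAttr(AddrMapProt.RW))))
      pBusMasters.add("busmaster", 1)
    }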
@ -7,11 +7,13 @@ import cde.{Parameters, Field}
import rocket.Util._
import junctions._

-class TestHarness(implicit p: Parameters) extends Module {
+case object BuildExampleTop extends Field[Parameters => ExampleTop]
+
+class TestHarness(implicit val p: Parameters) extends Module with HasAddrMapParameters {
  val io = new Bundle {
    val success = Bool(OUTPUT)
  }
-  val dut = uncore.tilelink2.LazyModule(new ExampleTop(p)).module
+  val dut = p(BuildExampleTop)(p).module

  // This test harness isn't especially flexible yet
  require(dut.io.mem_clk.isEmpty)
@ -29,7 +31,7 @@ class TestHarness(implicit p: Parameters) extends Module {
  int := false

  if (dut.io.mem_axi.nonEmpty) {
-    val memSize = p(GlobalAddrMap)("mem").size
+    val memSize = addrMap("mem").size
    require(memSize % dut.io.mem_axi.size == 0)
    for (axi <- dut.io.mem_axi)
      Module(new SimAXIMem(memSize / dut.io.mem_axi.size)).io.axi <> axi
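Because the harness now elaborates whatever top p(BuildExampleTop) returns, swapping in a different top is just a config alternative. A hedged sketch in the CDE style used elsewhere in this commit (the class name is made up):

    class WithPlainExampleTop extends Config(
      (pname, site, here) => pname match {
        case BuildExampleTop =>
          (p: Parameters) => uncore.tilelink2.LazyModule(new ExampleTop(p))
        case _ => throw new CDEMatchError
      })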
@ -3,49 +3,93 @@
package rocketchip

import Chisel._
-import cde.{Parameters}
+import cde.{Parameters, Field}
import junctions._
import uncore.tilelink._
import uncore.tilelink2.{LazyModule, LazyModuleImp}
+import uncore.devices._
+import rocket._
import rocket.Util._
import coreplex._

+// the following parameters will be refactored properly with TL2
+case object GlobalAddrMap extends Field[GlobalVariable[AddrMap]]
+case object ConfigString extends Field[GlobalVariable[String]]
+case object NCoreplexExtClients extends Field[GlobalVariable[Int]]
+
/** Base Top with no Periphery */
-abstract class BaseTop(val p: Parameters) extends LazyModule
+abstract class BaseTop(val p: Parameters) extends LazyModule {
+  // the following variables will be refactored properly with TL2
+  val pInterrupts = new RangeManager
+  val pBusMasters = new RangeManager
+  val pDevices = new ResourceManager[AddrMapEntry]
+}

class BaseTopBundle(val p: Parameters, val c: Coreplex) extends ParameterizedBundle()(p) {
  val success = c.hasSuccessFlag.option(Bool(OUTPUT))
}

-class BaseTopModule[L <: BaseTop, B <: BaseTopBundle](val p: Parameters, l: L, b: Coreplex => B) extends LazyModuleImp(l) {
-  val coreplex = p(BuildCoreplex)(p)
+class BaseTopModule[+L <: BaseTop, +B <: BaseTopBundle](val p: Parameters, l: L, b: Coreplex => B) extends LazyModuleImp(l) {
  val outer: L = l

+  val c = CoreplexConfig(
+    nTiles = p(NTiles),
+    nExtInterrupts = outer.pInterrupts.sum,
+    nSlaves = outer.pBusMasters.sum,
+    hasSupervisor = p(UseVM),
+    hasExtMMIOPort = !(outer.pDevices.get.isEmpty && p(ExtMMIOPorts).isEmpty)
+  )
+
+  p(NCoreplexExtClients).assign(outer.pBusMasters.sum)
+  p(GlobalAddrMap).assign(GenerateGlobalAddrMap(p, outer.pDevices.get))
+  p(ConfigString).assign(GenerateConfigString(p, c, outer.pDevices.get))
+
+  println("Generated Address Map")
+  for (entry <- p(GlobalAddrMap).get.flatten) {
+    val name = entry.name
+    val start = entry.region.start
+    val end = entry.region.start + entry.region.size - 1
+    println(f"\t$name%s $start%x - $end%x")
+  }
+
+  println("Generated Configuration String")
+  println(p(ConfigString).get)
+
+  val coreplex = p(BuildCoreplex)(p, c)
  val io: B = b(coreplex)

  io.success zip coreplex.io.success map { case (x, y) => x := y }
  coreplex.io.rtcTick := Counter(p(RTCPeriod)).inc()

-  val mmioNetwork = p(ExportMMIOPort).option(
-    Module(new TileLinkRecursiveInterconnect(1, p(GlobalAddrMap).subMap("io:ext"))(
+  val mmioNetwork = c.hasExtMMIOPort.option(
+    Module(new TileLinkRecursiveInterconnect(1, p(GlobalAddrMap).get.subMap("io:ext"))(
      p.alterPartial({ case TLId => "L2toMMIO" }))))
-  mmioNetwork.foreach { _.io.in.head <> coreplex.io.mmio.get }
+  mmioNetwork.foreach { _.io.in.head <> coreplex.io.master.mmio.get }
}
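The three GlobalVariable-backed keys above bridge values that are only known after the periphery traits have been elaborated back into the parameter space: assign is called exactly once here, and later users go through .get. A small standalone illustration of that contract (the value is made up):

    val nExtClients = new GlobalVariable[Int]
    // nExtClients.get           // would fail: require(assigned)
    nExtClients.assign(2)        // done exactly once, here by BaseTopModule
    assert(nExtClients.get == 2) // all later reads use .get
    // nExtClients.assign(3)     // would fail: require(!assigned)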

/** Example Top with Periphery */

class ExampleTop(p: Parameters) extends BaseTop(p)
-    with PeripheryDebug with PeripheryInterrupt
+    with PeripheryDebug with PeripheryExtInterrupts
    with PeripheryMasterMem with PeripheryMasterMMIO with PeripherySlave {
-  lazy val module = Module(new ExampleTopModule(p, this, new ExampleTopBundle(p, _)))
+  override lazy val module = Module(new ExampleTopModule(p, this, new ExampleTopBundle(p, _)))
}

class ExampleTopBundle(p: Parameters, c: Coreplex) extends BaseTopBundle(p, c)
-    with PeripheryDebugBundle with PeripheryInterruptBundle
+    with PeripheryDebugBundle with PeripheryExtInterruptsBundle
    with PeripheryMasterMemBundle with PeripheryMasterMMIOBundle with PeripherySlaveBundle

-class ExampleTopModule[L <: ExampleTop, B <: ExampleTopBundle](p: Parameters, l: L, b: Coreplex => B) extends BaseTopModule(p, l, b)
+class ExampleTopModule[+L <: ExampleTop, +B <: ExampleTopBundle](p: Parameters, l: L, b: Coreplex => B) extends BaseTopModule(p, l, b)
-    with PeripheryDebugModule with PeripheryInterruptModule
+    with PeripheryDebugModule with PeripheryExtInterruptsModule
    with PeripheryMasterMemModule with PeripheryMasterMMIOModule with PeripherySlaveModule

+/** Example Top with TestRAM */
+class ExampleTopWithTestRAM(p: Parameters) extends ExampleTop(p)
+    with PeripheryTestRAM {
+  override lazy val module = Module(new ExampleTopWithTestRAMModule(p, this, new ExampleTopWithTestRAMBundle(p, _)))
+}
+
+class ExampleTopWithTestRAMBundle(p: Parameters, c: Coreplex) extends ExampleTopBundle(p, c)
+    with PeripheryTestRAMBundle
+
+class ExampleTopWithTestRAMModule[+L <: ExampleTopWithTestRAM, +B <: ExampleTopWithTestRAMBundle](p: Parameters, l: L, b: Coreplex => B) extends ExampleTopModule(p, l, b)
+    with PeripheryTestRAMModule
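ExampleTopWithTestRAM shows the pattern for extending the top with another periphery device: subclass the three layers (LazyModule, Bundle, module implementation) and mix in the matching trait at each layer. A hedged sketch of a further custom top reusing the same TestRAM traits (the My* names are hypothetical):

    class MyTop(p: Parameters) extends ExampleTop(p)
        with PeripheryTestRAM {
      override lazy val module = Module(new MyTopModule(p, this, new MyTopBundle(p, _)))
    }

    class MyTopBundle(p: Parameters, c: Coreplex) extends ExampleTopBundle(p, c)
        with PeripheryTestRAMBundle

    class MyTopModule[+L <: MyTop, +B <: MyTopBundle](p: Parameters, l: L, b: Coreplex => B)
      extends ExampleTopModule(p, l, b) with PeripheryTestRAMModule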
src/main/scala/rocketchip/Utils.scala (new file, 149 lines)
@ -0,0 +1,149 @@
// See LICENSE for license details.

package rocketchip

import cde.{Parameters, Dump}
import junctions._
import uncore.devices._
import rocket._
import rocket.Util._
import coreplex._

class RangeManager {
  private var finalized = false
  private val l = collection.mutable.HashMap[String, Int]()
  def add(name: String, element: Int) = { require(!finalized); l += (name -> element) }
  def rangeMap = {
    finalized = true
    l map {
      var sum = 0
      x => { sum += x._2; (x._1 -> (sum - x._2, sum)) }
    }
  }
  def range(name: String) = rangeMap(name)
  def print = {
    rangeMap map { case (name, (start, end)) =>
      println(s"${name} on port ${start}-${end-1}")
    }
  }
  def sum = {
    finalized = true
    l.map(_._2).sum
  }
}

class ResourceManager[T] {
  private var finalized = false
  private val l = collection.mutable.ArrayBuffer[T]()
  def add(element: T) = { require(!finalized); l += element }
  def add(list: Seq[T]) = { require(!finalized); l ++= list }
  def get: Seq[T] = { finalized = true; l }
}

class GlobalVariable[T] {
  private var assigned = false
  private var variable: T = _
  def assign(value: T) = { require(!assigned); assigned = true; variable = value }
  def get: T = { require(assigned); variable }
}

object GenerateGlobalAddrMap {
  def apply(p: Parameters, pDevicesEntries: Seq[AddrMapEntry]) = {
    lazy val intIOAddrMap: AddrMap = {
      val entries = collection.mutable.ArrayBuffer[AddrMapEntry]()
      entries += AddrMapEntry("debug", MemSize(4096, MemAttr(AddrMapProt.RWX)))
      entries += AddrMapEntry("bootrom", MemSize(4096, MemAttr(AddrMapProt.RX)))
      entries += AddrMapEntry("plic", MemRange(0x40000000, 0x4000000, MemAttr(AddrMapProt.RW)))
      entries += AddrMapEntry("prci", MemSize(0x4000000, MemAttr(AddrMapProt.RW)))
      if (p(DataScratchpadSize) > 0) { // TODO heterogeneous tiles
        require(p(NTiles) == 1) // TODO relax this
        require(p(NMemoryChannels) == 0) // TODO allow both scratchpad & DRAM
        entries += AddrMapEntry("dmem0", MemRange(0x80000000L, BigInt(p(DataScratchpadSize)), MemAttr(AddrMapProt.RWX)))
      }
      new AddrMap(entries)
    }

    lazy val extIOAddrMap = new AddrMap(
      pDevicesEntries ++ p(ExtMMIOPorts),
      start = BigInt("50000000", 16),
      collapse = true)

    val memBase = 0x80000000L
    val memSize = p(ExtMemSize)
    Dump("MEM_BASE", memBase)

    val intern = AddrMapEntry("int", intIOAddrMap)
    val extern = AddrMapEntry("ext", extIOAddrMap)
    val io = AddrMapEntry("io", AddrMap((intern +: (!extIOAddrMap.isEmpty).option(extern).toSeq):_*))
    val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true)))
    AddrMap((io +: (p(NMemoryChannels) > 0).option(mem).toSeq):_*)
  }
}

object GenerateConfigString {
  def apply(p: Parameters, c: CoreplexConfig, pDevicesEntries: Seq[AddrMapEntry]) = {
    val addrMap = p(GlobalAddrMap).get
    val plicAddr = addrMap("io:int:plic").start
    val prciAddr = addrMap("io:int:prci").start
    val xLen = p(XLen)
    val res = new StringBuilder
    res append "plic {\n"
    res append s" priority 0x${plicAddr.toString(16)};\n"
    res append s" pending 0x${(plicAddr + c.plicKey.pendingBase).toString(16)};\n"
    res append s" ndevs ${c.plicKey.nDevices};\n"
    res append "};\n"
    res append "rtc {\n"
    res append s" addr 0x${(prciAddr + PRCI.time).toString(16)};\n"
    res append "};\n"
    if (addrMap contains "mem") {
      res append "ram {\n"
      res append " 0 {\n"
      res append s" addr 0x${addrMap("mem").start.toString(16)};\n"
      res append s" size 0x${addrMap("mem").size.toString(16)};\n"
      res append " };\n"
      res append "};\n"
    }
    res append "core {\n"
    for (i <- 0 until c.nTiles) { // TODO heterogeneous tiles
      val isa = {
        val m = if (p(MulDivKey).nonEmpty) "m" else ""
        val a = if (p(UseAtomics)) "a" else ""
        val f = if (p(FPUKey).nonEmpty) "f" else ""
        val d = if (p(FPUKey).nonEmpty && p(XLen) > 32) "d" else ""
        val s = if (c.hasSupervisor) "s" else ""
        s"rv${p(XLen)}i$m$a$f$d$s"
      }
      res append s" $i {\n"
      res append " 0 {\n"
      res append s" isa $isa;\n"
      res append s" timecmp 0x${(prciAddr + PRCI.timecmp(i)).toString(16)};\n"
      res append s" ipi 0x${(prciAddr + PRCI.msip(i)).toString(16)};\n"
      res append s" plic {\n"
      res append s" m {\n"
      res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'M')).toString(16)};\n"
      res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'M')).toString(16)};\n"
      res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'M')).toString(16)};\n"
      res append s" };\n"
      if (c.hasSupervisor) {
        res append s" s {\n"
        res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'S')).toString(16)};\n"
        res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'S')).toString(16)};\n"
        res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'S')).toString(16)};\n"
        res append s" };\n"
      }
      res append " };\n"
      res append " };\n"
      res append " };\n"
    }
    res append "};\n"
    pDevicesEntries foreach { entry =>
      val region = addrMap("io:ext:" + entry.name)
      res append s"${entry.name} {\n"
      res append s" addr 0x${region.start.toString(16)};\n"
      res append s" size 0x${region.size.toString(16)}; \n"
      res append "}\n"
    }
    res append '\u0000'
    res.toString
  }
}
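To make the bookkeeping concrete, here is a small hedged illustration of how the helpers above behave (device names and counts are made up); BaseTopModule consumes them through pInterrupts.sum, pBusMasters.sum, and pDevices.get:

    val interrupts = new RangeManager
    interrupts.add("uart", 1)   // a periphery trait would register here
    interrupts.add("spi", 4)
    interrupts.sum              // == 5, becomes nExtInterrupts in CoreplexConfig
    interrupts.range("spi")     // a contiguous (start, end) slice of those 5 lines

    val devices = new ResourceManager[AddrMapEntry]
    devices.add(AddrMapEntry("uart", MemSize(4096, MemAttr(AddrMapProt.RW))))
    devices.get                 // finalizes the list; feeds GenerateGlobalAddrMap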