Merge pull request #259 from ucb-bar/refactor-periphery

Refactor Periphery

commit 49bba961cf
@@ -11,6 +11,7 @@ import uncore.devices._
 import uncore.converters._
 import rocket._
 import rocket.Util._
+import rocketchip.{GlobalAddrMap, NCoreplexExtClients}
 import scala.math.max
 import scala.collection.mutable.{LinkedHashSet, ListBuffer}
 import DefaultTestSuites._
@@ -127,7 +128,6 @@ class BaseCoreplexConfig extends Config (
     case MulDivKey => Some(MulDivConfig(mulUnroll = 8, mulEarlyOut = true, divEarlyOut = true))
     case UseAtomics => true
     case UseCompressed => true
-    case PLICKey => PLICConfig(site(NTiles), site(UseVM), site(NExtInterrupts), 0)
     case DMKey => new DefaultDebugModuleConfig(site(NTiles), site(XLen))
     case NCustomMRWCSRs => 0
     case ResetVector => BigInt(0x1000)
@@ -145,7 +145,7 @@ class BaseCoreplexConfig extends Config (
         else new MESICoherence(site(L2DirectoryRepresentation))),
       nManagers = site(NBanksPerMemoryChannel)*site(NMemoryChannels) + 1 /* MMIO */,
       nCachingClients = site(NCachedTileLinkPorts),
-      nCachelessClients = site(NExternalClients) + site(NUncachedTileLinkPorts),
+      nCachelessClients = site(NCoreplexExtClients).get + site(NUncachedTileLinkPorts),
       maxClientXacts = max_int(
         // L1 cache
         site(DCacheKey).nMSHRs + 1 /* IOMSHR */,
@@ -176,7 +176,7 @@ class BaseCoreplexConfig extends Config (
       TileLinkParameters(
         coherencePolicy = new MICoherence(
           new NullRepresentation(site(NBanksPerMemoryChannel))),
-        nManagers = site(GlobalAddrMap).subMap("io").numSlaves,
+        nManagers = site(GlobalAddrMap).get.subMap("io").numSlaves,
         nCachingClients = 0,
         nCachelessClients = 1,
         maxClientXacts = 4,
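Note: the `.get` pattern above recurs throughout this diff. `GlobalAddrMap` (and, below, `ConfigString` and `NCoreplexExtClients`) now live in the `rocketchip` package and are published by the platform level during elaboration, so consumers unwrap the published value instead of reading a plain Field. A before/after contrast, with both forms taken from this hunk:

```scala
// Before: the address map was an eagerly computed Field value.
val nManagersOld = site(GlobalAddrMap).subMap("io").numSlaves
// After: the platform publishes the map late; readers unwrap it with .get.
val nManagersNew = site(GlobalAddrMap).get.subMap("io").numSlaves
```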
@@ -194,7 +194,6 @@ class BaseCoreplexConfig extends Config (
     case CacheBlockBytes => Dump("CACHE_BLOCK_BYTES", 64)
     case CacheBlockOffsetBits => log2Up(here(CacheBlockBytes))
     case EnableL2Logging => false
-    case ExtraCoreplexPorts => (p: Parameters) => new Bundle
     case RegressionTestNames => LinkedHashSet(
       "rv64ud-v-fcvt",
       "rv64ud-p-fdiv",
@@ -14,6 +14,8 @@ import rocket.Util._
 import java.nio.{ByteBuffer,ByteOrder}
 import java.nio.file.{Files, Paths}
 
+/** Function for building Coreplex */
+case object BuildCoreplex extends Field[(Parameters, CoreplexConfig) => Coreplex]
 /** Number of memory channels */
 case object NMemoryChannels extends Field[Int]
 /** Number of banks per memory channel */
@@ -24,24 +26,11 @@ case object BankIdLSB extends Field[Int]
 case object BuildL2CoherenceManager extends Field[(Int, Parameters) => CoherenceAgent]
 /** Function for building some kind of tile connected to a reset signal */
 case object BuildTiles extends Field[Seq[(Bool, Parameters) => Tile]]
-/** A string describing on-chip devices, readable by target software */
-case object ConfigString extends Field[Array[Byte]]
-/** Number of external interrupt sources */
-case object NExtInterrupts extends Field[Int]
-/** Interrupt controller configuration */
-case object PLICKey extends Field[PLICConfig]
 /** The file to read the BootROM contents from */
 case object BootROMFile extends Field[String]
-/** Export an external MMIO slave port */
-case object ExportMMIOPort extends Field[Boolean]
-/** Expose additional TileLink client ports */
-case object NExternalClients extends Field[Int]
-/** Extra top-level ports exported from the coreplex */
-case object ExtraCoreplexPorts extends Field[Parameters => Bundle]
 
 trait HasCoreplexParameters {
   implicit val p: Parameters
-  lazy val nTiles = p(NTiles)
   lazy val nMemChannels = p(NMemoryChannels)
   lazy val nBanksPerMemChannel = p(NBanksPerMemoryChannel)
   lazy val nBanks = nMemChannels*nBanksPerMemChannel
@@ -49,20 +38,31 @@ trait HasCoreplexParameters {
   lazy val innerParams = p.alterPartial({ case TLId => "L1toL2" })
   lazy val outermostParams = p.alterPartial({ case TLId => "Outermost" })
   lazy val outermostMMIOParams = p.alterPartial({ case TLId => "MMIO_Outermost" })
-  lazy val nExtClients = p(NExternalClients)
-  lazy val exportMMIO = p(ExportMMIOPort)
+  lazy val configString = p(rocketchip.ConfigString).get
+  lazy val globalAddrMap = p(rocketchip.GlobalAddrMap).get
 }
 
-abstract class Coreplex(implicit val p: Parameters) extends Module
+case class CoreplexConfig(
+    nTiles: Int,
+    nExtInterrupts: Int,
+    nSlaves: Int,
+    hasSupervisor: Boolean,
+    hasExtMMIOPort: Boolean)
+{
+  val plicKey = PLICConfig(nTiles, hasSupervisor, nExtInterrupts, 0)
+}
+
+abstract class Coreplex(implicit val p: Parameters, implicit val c: CoreplexConfig) extends Module
     with HasCoreplexParameters {
-  class CoreplexIO(implicit val p: Parameters) extends Bundle {
-    val mem = Vec(nMemChannels, new ClientUncachedTileLinkIO()(outermostParams))
-    val ext_clients = Vec(nExtClients, new ClientUncachedTileLinkIO()(innerParams)).flip
-    val mmio = p(ExportMMIOPort).option(new ClientUncachedTileLinkIO()(outermostMMIOParams))
-    val interrupts = Vec(p(NExtInterrupts), Bool()).asInput
+  class CoreplexIO(implicit val p: Parameters, implicit val c: CoreplexConfig) extends Bundle {
+    val master = new Bundle {
+      val mem = Vec(nMemChannels, new ClientUncachedTileLinkIO()(outermostParams))
+      val mmio = c.hasExtMMIOPort.option(new ClientUncachedTileLinkIO()(outermostMMIOParams))
+    }
+    val slave = Vec(c.nSlaves, new ClientUncachedTileLinkIO()(innerParams)).flip
+    val interrupts = Vec(c.nExtInterrupts, Bool()).asInput
     val debug = new DebugBusIO()(p).flip
     val rtcTick = Bool(INPUT)
-    val extra = p(ExtraCoreplexPorts)(p)
     val success: Option[Bool] = hasSuccessFlag.option(Bool(OUTPUT))
   }
 
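The new `CoreplexConfig` gathers the structural knobs that were previously scattered CDE fields (`NExtInterrupts`, `PLICKey`, `ExportMMIOPort`, `NExternalClients`) into one explicit constructor argument. A hedged sketch of how a top level might instantiate a coreplex under the new scheme (the literal values are illustrative, not taken from any real config):

```scala
// Illustrative only; BuildCoreplex and CoreplexConfig are as defined in this diff.
val c = CoreplexConfig(
  nTiles = 1,
  nExtInterrupts = 2,
  nSlaves = 0,
  hasSupervisor = true,
  hasExtMMIOPort = true)
val coreplex = p(BuildCoreplex)(p, c) // the lambda wraps the coreplex in Module(...)
```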
@@ -70,16 +70,16 @@ abstract class Coreplex(implicit val p: Parameters) extends Module
   val io = new CoreplexIO
 }
 
-class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
+class DefaultCoreplex(tp: Parameters, tc: CoreplexConfig) extends Coreplex()(tp, tc) {
   // Build a set of Tiles
-  val tileResets = Wire(Vec(nTiles, Bool()))
+  val tileResets = Wire(Vec(tc.nTiles, Bool()))
   val tileList = p(BuildTiles).zip(tileResets).map {
     case (tile, rst) => tile(rst, p)
   }
   val nCachedPorts = tileList.map(tile => tile.io.cached.size).reduce(_ + _)
   val nUncachedPorts = tileList.map(tile => tile.io.uncached.size).reduce(_ + _)
 
-  printConfigString
+  // Build an uncore backing the Tiles
   buildUncore(p.alterPartial({
     case HastiId => "TL"
     case TLId => "L1toL2"
@@ -87,25 +87,13 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
     case NUncachedTileLinkPorts => nUncachedPorts
   }))
 
-  def printConfigString(implicit p: Parameters) = {
-    println("Generated Address Map")
-    for (entry <- p(GlobalAddrMap).flatten) {
-      val name = entry.name
-      val start = entry.region.start
-      val end = entry.region.start + entry.region.size - 1
-      println(f"\t$name%s $start%x - $end%x")
-    }
-    println("Generated Configuration String")
-    println(new String(p(ConfigString)))
-  }
-
   def buildUncore(implicit p: Parameters) = {
     // Create a simple L1toL2 NoC between the tiles and the banks of outer memory
     // Cached ports are first in client list, making sharerToClientId just an indentity function
     // addrToBank is sed to hash physical addresses (of cache blocks) to banks (and thereby memory channels)
     def sharerToClientId(sharerId: UInt) = sharerId
     def addrToBank(addr: UInt): UInt = if (nBanks == 0) UInt(0) else {
-      val isMemory = p(GlobalAddrMap).isInRegion("mem", addr << log2Up(p(CacheBlockBytes)))
+      val isMemory = globalAddrMap.isInRegion("mem", addr << log2Up(p(CacheBlockBytes)))
       Mux(isMemory, addr.extract(lsb + log2Ceil(nBanks) - 1, lsb), UInt(nBanks))
     }
     val preBuffering = TileLinkDepths(1,1,2,2,0)
@@ -124,7 +112,7 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
     // Wire the tiles to the TileLink client ports of the L1toL2 network,
     // and coherence manager(s) to the other side
     l1tol2net.io.clients_cached <> tileList.map(_.io.cached).flatten
-    l1tol2net.io.clients_uncached <> tileList.map(_.io.uncached).flatten ++ io.ext_clients
+    l1tol2net.io.clients_uncached <> tileList.map(_.io.uncached).flatten ++ io.slave
     l1tol2net.io.managers <> managerEndpoints.map(_.innerTL) :+ mmioManager.io.inner
 
     // Create a converter between TileLinkIO and MemIO for each channel
@@ -138,7 +126,7 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
       TileLinkWidthAdapter(icPort, unwrap.io.out)
     }
 
-    io.mem <> mem_ic.io.out
+    io.master.mem <> mem_ic.io.out
 
     buildMMIONetwork(ClientUncachedTileLinkEnqueuer(mmioManager.io.outer, 1))(
       p.alterPartial({case TLId => "L2toMMIO"}))
@@ -151,7 +139,10 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
     rom.order(ByteOrder.LITTLE_ENDIAN)
 
     // for now, have the reset vector jump straight to memory
-    val memBase = (if (p(GlobalAddrMap) contains "mem") p(GlobalAddrMap)("mem") else p(GlobalAddrMap)("io:int:dmem0")).start
+    val memBase = (
+      if (globalAddrMap contains "mem") globalAddrMap("mem")
+      else globalAddrMap("io:int:dmem0")
+    ).start
     val resetToMemDist = memBase - p(ResetVector)
     require(resetToMemDist == (resetToMemDist.toInt >> 12 << 12))
     val configStringAddr = p(ResetVector).toInt + rom.capacity
@@ -159,17 +150,17 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
     require(rom.getInt(12) == 0,
       "Config string address position should not be occupied by code")
     rom.putInt(12, configStringAddr)
-    rom.array() ++ p(ConfigString).toSeq
+    rom.array() ++ (configString.getBytes.toSeq)
   }
 
 
   def buildMMIONetwork(mmio: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
-    val ioAddrMap = p(GlobalAddrMap).subMap("io")
+    val ioAddrMap = globalAddrMap.subMap("io")
 
     val mmioNetwork = Module(new TileLinkRecursiveInterconnect(1, ioAddrMap))
     mmioNetwork.io.in.head <> mmio
 
-    val plic = Module(new PLIC(p(PLICKey)))
+    val plic = Module(new PLIC(c.plicKey))
     plic.io.tl <> mmioNetwork.port("int:plic")
     for (i <- 0 until io.interrupts.size) {
       val gateway = Module(new LevelGateway)
@@ -191,25 +182,25 @@ class DefaultCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
       tile.io.prci <> prci
     }
 
-    for (i <- 0 until nTiles) {
+    for (i <- 0 until tc.nTiles) {
       prci.io.interrupts(i).meip := plic.io.harts(plic.cfg.context(i, 'M'))
       if (p(UseVM))
         prci.io.interrupts(i).seip := plic.io.harts(plic.cfg.context(i, 'S'))
       prci.io.interrupts(i).debug := debugModule.io.debugInterrupts(i)
     }
 
-    val tileSlavePorts = (0 until nTiles) map (i => s"int:dmem$i") filter (ioAddrMap contains _)
+    val tileSlavePorts = (0 until tc.nTiles) map (i => s"int:dmem$i") filter (ioAddrMap contains _)
     for ((t, m) <- (tileList.map(_.io.slave).flatten) zip (tileSlavePorts map (mmioNetwork port _)))
       t <> ClientUncachedTileLinkEnqueuer(m, 1)
 
     val bootROM = Module(new ROMSlave(makeBootROM()))
     bootROM.io <> mmioNetwork.port("int:bootrom")
 
-    io.mmio.foreach { _ <> mmioNetwork.port("ext") }
+    io.master.mmio.foreach { _ <> mmioNetwork.port("ext") }
   }
 }
 
-class GroundTestCoreplex(topParams: Parameters) extends DefaultCoreplex(topParams) {
+class GroundTestCoreplex(tp: Parameters, tc: CoreplexConfig) extends DefaultCoreplex(tp, tc) {
   override def hasSuccessFlag = true
   io.success.get := tileList.flatMap(_.io.elements get "success").map(_.asInstanceOf[Bool]).reduce(_&&_)
 }
@@ -1,63 +0,0 @@
-package coreplex
-
-import Chisel._
-import cde.{Parameters, Field}
-import rocket.TileId
-import groundtest._
-import uncore.tilelink._
-import uncore.agents._
-
-case object ExportGroundTestStatus extends Field[Boolean]
-
-class DirectGroundTestCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
-  // Not using the debug
-  io.debug.req.ready := Bool(false)
-  io.debug.resp.valid := Bool(false)
-
-  require(!exportMMIO)
-  require(nExtClients == 0)
-  require(nMemChannels == 1)
-  require(nTiles == 1)
-
-  val test = p(BuildGroundTest)(outermostParams.alterPartial({
-    case TileId => 0
-    case CacheName => "L1D"
-  }))
-  require(test.io.cache.size == 0)
-  require(test.io.mem.size == nBanksPerMemChannel)
-  require(test.io.ptw.size == 0)
-
-  val mem_ic = Module(new TileLinkMemoryInterconnect(
-    nBanksPerMemChannel, nMemChannels)(outermostParams))
-
-  mem_ic.io.in <> test.io.mem
-  io.mem <> mem_ic.io.out
-
-  if (p(ExportGroundTestStatus)) {
-    val status = io.extra.asInstanceOf[GroundTestStatus]
-
-    val s_running :: s_finished :: s_errored :: s_timeout :: Nil = Enum(Bits(), 4)
-    val state = Reg(init = s_running)
-    val error_code = Reg(status.error.bits)
-    val timeout_code = Reg(status.timeout.bits)
-    when (state === s_running) {
-      when (test.io.status.finished) { state := s_finished }
-      when (test.io.status.error.valid) {
-        state := s_errored
-        error_code := test.io.status.error.bits
-      }
-      when (test.io.status.timeout.valid) {
-        state := s_timeout
-        timeout_code := test.io.status.timeout.bits
-      }
-    }
-    status.finished := (state === s_finished)
-    status.error.valid := (state === s_errored)
-    status.error.bits := error_code
-    status.timeout.valid := (state === s_timeout)
-    status.timeout.bits := timeout_code
-  }
-
-  override def hasSuccessFlag = true
-  io.success.get := test.io.status.finished
-}
@@ -2,6 +2,7 @@ package coreplex
 
 import Chisel._
 import groundtest._
+import rocketchip.{GlobalAddrMap}
 import rocket._
 import uncore.tilelink._
 import uncore.coherence._
@@ -24,7 +25,7 @@ class WithComparator extends Config(
       (p: Parameters) => Module(new ComparatorCore()(p))
     case ComparatorKey => ComparatorParameters(
       targets = Seq("mem", "io:ext:testram").map(name =>
-        site(GlobalAddrMap)(name).start.longValue),
+        site(GlobalAddrMap).get(name).start.longValue),
       width = 8,
       operations = 1000,
       atomics = site(UseAtomics),
@@ -54,7 +55,7 @@ class WithMemtest extends Config(
     }
     case GeneratorKey => GeneratorParameters(
       maxRequests = 128,
-      startAddress = site(GlobalAddrMap)("mem").start)
+      startAddress = site(GlobalAddrMap).get("mem").start)
     case BuildGroundTest =>
       (p: Parameters) => Module(new GeneratorTest()(p))
     case _ => throw new CDEMatchError
@@ -114,7 +115,7 @@ class WithNastiConverterTest extends Config(
     }
     case GeneratorKey => GeneratorParameters(
       maxRequests = 128,
-      startAddress = site(GlobalAddrMap)("mem").start)
+      startAddress = site(GlobalAddrMap).get("mem").start)
     case BuildGroundTest =>
       (p: Parameters) => Module(new NastiConverterTest()(p))
     case _ => throw new CDEMatchError
@@ -134,7 +135,7 @@ class WithTraceGen extends Config(
       val nSets = 32 // L2 NSets
       val nWays = 1
       val blockOffset = site(CacheBlockOffsetBits)
-      val baseAddr = site(GlobalAddrMap)("mem").start
+      val baseAddr = site(GlobalAddrMap).get("mem").start
       val nBeats = site(MIFDataBeats)
       List.tabulate(4 * nWays) { i =>
         Seq.tabulate(nBeats) { j => (j * 8) + ((i * nSets) << blockOffset) }
@@ -156,7 +157,7 @@ class WithPCIeMockupTest extends Config(
       GroundTestTileSettings(1))
     case GeneratorKey => GeneratorParameters(
       maxRequests = 128,
-      startAddress = site(GlobalAddrMap)("mem").start)
+      startAddress = site(GlobalAddrMap).get("mem").start)
     case BuildGroundTest =>
       (p: Parameters) => p(TileId) match {
         case 0 => Module(new GeneratorTest()(p))
@@ -6,9 +6,9 @@ import rocket.Tile
 import uncore.tilelink.TLId
 import cde.Parameters
 
-class UnitTestCoreplex(topParams: Parameters) extends Coreplex()(topParams) {
-  require(!exportMMIO)
-  require(nExtClients == 0)
+class UnitTestCoreplex(tp: Parameters, tc: CoreplexConfig) extends Coreplex()(tp, tc) {
+  require(!tc.hasExtMMIOPort)
+  require(tc.nSlaves == 0)
   require(nMemChannels == 0)
 
   io.debug.req.ready := Bool(false)
@@ -176,6 +176,7 @@ class TagMan(val logNumTags : Int) extends Module {
 
 class TraceGenerator(id: Int)
     (implicit p: Parameters) extends L1HellaCacheModule()(p)
+    with HasAddrMapParameters
     with HasTraceGenParams {
   val io = new Bundle {
     val finished = Bool(OUTPUT)
@@ -197,8 +198,7 @@ class TraceGenerator(id: Int)
   // Address bag, shared by all cores, taken from module parameters.
   // In addition, there is a per-core random selection of extra addresses.
 
-  val addrHashMap = p(GlobalAddrMap)
-  val baseAddr = addrHashMap("mem").start + 0x01000000
+  val baseAddr = addrMap("mem").start + 0x01000000
 
   val bagOfAddrs = addressBag.map(x => UInt(x, numBitsInWord))
 
@@ -7,13 +7,12 @@ import cde.{Parameters, Field}
 import scala.collection.mutable.HashMap
 
 case object PAddrBits extends Field[Int]
-case object GlobalAddrMap extends Field[AddrMap]
 
 trait HasAddrMapParameters {
   implicit val p: Parameters
 
   val paddrBits = p(PAddrBits)
-  val addrMap = p(GlobalAddrMap)
+  def addrMap = p(rocketchip.GlobalAddrMap).get
 }
 
 case class MemAttr(prot: Int, cacheable: Boolean = false)
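Note the `val`-to-`def` change in `HasAddrMapParameters`: the global map is now set by the platform after many users of this trait have been constructed, so `addrMap` must be re-read at each use rather than captured once at trait-initialization time; a `val` would call `.get` before the value had been published.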
@@ -8,6 +8,7 @@ import rocket._
 import rocket.Util._
 import uncore.agents._
 import uncore.tilelink._
+import uncore.tilelink2.{LazyModule}
 import uncore.devices._
 import uncore.converters._
 import coreplex._
@@ -17,157 +18,66 @@ import scala.collection.immutable.HashMap
 import DefaultTestSuites._
 import cde.{Parameters, Config, Dump, Knob, CDEMatchError}
 
-class BasePlatformConfig extends Config (
-  topDefinitions = { (pname,site,here) =>
-    type PF = PartialFunction[Any,Any]
-    def findBy(sname:Any):Any = here[PF](site[Any](sname))(pname)
-    lazy val internalIOAddrMap: AddrMap = {
-      val entries = collection.mutable.ArrayBuffer[AddrMapEntry]()
-      entries += AddrMapEntry("debug", MemSize(4096, MemAttr(AddrMapProt.RWX)))
-      entries += AddrMapEntry("bootrom", MemSize(4096, MemAttr(AddrMapProt.RX)))
-      entries += AddrMapEntry("plic", MemRange(0x40000000, 0x4000000, MemAttr(AddrMapProt.RW)))
-      entries += AddrMapEntry("prci", MemSize(0x4000000, MemAttr(AddrMapProt.RW)))
-      if (site(DataScratchpadSize) > 0) { // TODO heterogeneous tiles
-        require(site(NTiles) == 1) // TODO relax this
-        require(site(NMemoryChannels) == 0) // TODO allow both scratchpad & DRAM
-        entries += AddrMapEntry("dmem0", MemRange(0x80000000L, site[Int](DataScratchpadSize), MemAttr(AddrMapProt.RWX)))
-      }
-      new AddrMap(entries)
-    }
-    lazy val externalAddrMap = new AddrMap(
-      site(ExtraDevices).addrMapEntries ++ site(ExtMMIOPorts),
-      start = BigInt("50000000", 16),
-      collapse = true)
-    lazy val globalAddrMap = {
-      val memBase = 0x80000000L
-      val memSize = site(ExtMemSize)
-
-      val intern = AddrMapEntry("int", internalIOAddrMap)
-      val extern = AddrMapEntry("ext", externalAddrMap)
-      val io = AddrMapEntry("io", AddrMap((intern +: site(ExportMMIOPort).option(extern).toSeq):_*))
-      val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true)))
-      val addrMap = AddrMap((io +: (site(NMemoryChannels) > 0).option(mem).toSeq):_*)
-
-      Dump("MEM_BASE", memBase)
-      addrMap
-    }
-    def makeConfigString() = {
-      val addrMap = globalAddrMap
-      val plicAddr = addrMap("io:int:plic").start
-      val prciAddr = addrMap("io:int:prci").start
-      val plicInfo = site(PLICKey)
-      val xLen = site(XLen)
-      val res = new StringBuilder
-      res append "plic {\n"
-      res append s"  priority 0x${plicAddr.toString(16)};\n"
-      res append s"  pending 0x${(plicAddr + plicInfo.pendingBase).toString(16)};\n"
-      res append s"  ndevs ${plicInfo.nDevices};\n"
-      res append "};\n"
-      res append "rtc {\n"
-      res append s"  addr 0x${(prciAddr + PRCI.time).toString(16)};\n"
-      res append "};\n"
-      if (addrMap contains "mem") {
-        res append "ram {\n"
-        res append "  0 {\n"
-        res append s"    addr 0x${addrMap("mem").start.toString(16)};\n"
-        res append s"    size 0x${addrMap("mem").size.toString(16)};\n"
-        res append "  };\n"
-        res append "};\n"
-      }
-      res append "core {\n"
-      for (i <- 0 until site(NTiles)) { // TODO heterogeneous tiles
-        val isa = {
-          val m = if (site(MulDivKey).nonEmpty) "m" else ""
-          val a = if (site(UseAtomics)) "a" else ""
-          val f = if (site(FPUKey).nonEmpty) "f" else ""
-          val d = if (site(FPUKey).nonEmpty && site(XLen) > 32) "d" else ""
-          val s = if (site(UseVM)) "s" else ""
-          s"rv${site(XLen)}i$m$a$f$d$s"
-        }
-        res append s"  $i {\n"
-        res append "    0 {\n"
-        res append s"      isa $isa;\n"
-        res append s"      timecmp 0x${(prciAddr + PRCI.timecmp(i)).toString(16)};\n"
-        res append s"      ipi 0x${(prciAddr + PRCI.msip(i)).toString(16)};\n"
-        res append s"      plic {\n"
-        res append s"        m {\n"
-        res append s"          ie 0x${(plicAddr + plicInfo.enableAddr(i, 'M')).toString(16)};\n"
-        res append s"          thresh 0x${(plicAddr + plicInfo.threshAddr(i, 'M')).toString(16)};\n"
-        res append s"          claim 0x${(plicAddr + plicInfo.claimAddr(i, 'M')).toString(16)};\n"
-        res append s"        };\n"
-        if (site(UseVM)) {
-          res append s"        s {\n"
-          res append s"          ie 0x${(plicAddr + plicInfo.enableAddr(i, 'S')).toString(16)};\n"
-          res append s"          thresh 0x${(plicAddr + plicInfo.threshAddr(i, 'S')).toString(16)};\n"
-          res append s"          claim 0x${(plicAddr + plicInfo.claimAddr(i, 'S')).toString(16)};\n"
-          res append s"        };\n"
-        }
-        res append "      };\n"
-        res append "    };\n"
-        res append "  };\n"
-      }
-      res append "};\n"
-      res append (site(ExtraDevices).makeConfigString(addrMap))
-      res append '\u0000'
-      res.toString.getBytes
-    }
-    lazy val innerDataBits = 64
-    lazy val innerDataBeats = (8 * site(CacheBlockBytes)) / innerDataBits
-    pname match {
-      //Memory Parameters
-      case MIFTagBits => Dump("MIF_TAG_BITS", 5)
-      case MIFDataBits => Dump("MIF_DATA_BITS", 64)
-      case MIFAddrBits => Dump("MIF_ADDR_BITS",
-        site(PAddrBits) - site(CacheBlockOffsetBits))
-      case MIFDataBeats => site(CacheBlockBytes) * 8 / site(MIFDataBits)
-      case NastiKey => {
-        Dump("MEM_STRB_BITS", site(MIFDataBits) / 8)
-        NastiParameters(
-          dataBits = Dump("MEM_DATA_BITS", site(MIFDataBits)),
-          addrBits = Dump("MEM_ADDR_BITS", site(PAddrBits)),
-          idBits = Dump("MEM_ID_BITS", site(MIFTagBits)))
-      }
-      case BuildCoreplex => (p: Parameters) => Module(new DefaultCoreplex(p))
-      case NExtTopInterrupts => 2
-      case NExtPeripheryInterrupts => site(ExtraDevices).nInterrupts
-      // Note that PLIC asserts that this is > 0.
-      case NExtInterrupts => site(NExtTopInterrupts) + site(NExtPeripheryInterrupts)
-      case AsyncDebugBus => false
-      case IncludeJtagDTM => false
-      case AsyncMMIOChannels => false
-      case ExtraDevices => new EmptyDeviceBlock
-      case ExtraTopPorts => (p: Parameters) => new Bundle
-      case ExtMMIOPorts => Nil
-      case NExtMMIOAXIChannels => 0
-      case NExtMMIOAHBChannels => 0
-      case NExtMMIOTLChannels => 0
-      case ExportMMIOPort => !externalAddrMap.isEmpty
-      case AsyncBusChannels => false
-      case NExtBusAXIChannels => 0
-      case NExternalClients => (if (site(NExtBusAXIChannels) > 0) 1 else 0) +
-        site(ExtraDevices).nClientPorts
-      case ConnectExtraPorts =>
-        (out: Bundle, in: Bundle, p: Parameters) => out <> in
-
-      case HastiId => "Ext"
-      case HastiKey("TL") =>
-        HastiParameters(
-          addrBits = site(PAddrBits),
-          dataBits = site(TLKey(site(TLId))).dataBits / site(TLKey(site(TLId))).dataBeats)
-      case HastiKey("Ext") =>
-        HastiParameters(
-          addrBits = site(PAddrBits),
-          dataBits = site(XLen))
-      case AsyncMemChannels => false
-      case NMemoryChannels => Dump("N_MEM_CHANNELS", 1)
-      case TMemoryChannels => BusType.AXI
-      case ExtMemSize => Dump("MEM_SIZE", 0x10000000L)
-      case ConfigString => makeConfigString()
-      case GlobalAddrMap => globalAddrMap
-      case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
-      case RTCTick => (p: Parameters, t_io: Bundle, p_io:Bundle) => Counter(p(RTCPeriod)).inc()
-      case _ => throw new CDEMatchError
-  }})
+class BasePlatformConfig extends Config(
+  topDefinitions = {
+    val configString = new GlobalVariable[String]
+    val globalAddrMap = new GlobalVariable[AddrMap]
+    val nCoreplexExtClients = new GlobalVariable[Int]
+    (pname,site,here) => {
+      type PF = PartialFunction[Any,Any]
+      def findBy(sname:Any):Any = here[PF](site[Any](sname))(pname)
+      lazy val innerDataBits = 64
+      lazy val innerDataBeats = (8 * site(CacheBlockBytes)) / innerDataBits
+      pname match {
+        //Memory Parameters
+        case MIFTagBits => Dump("MIF_TAG_BITS", 5)
+        case MIFDataBits => Dump("MIF_DATA_BITS", 64)
+        case MIFAddrBits => Dump("MIF_ADDR_BITS",
+          site(PAddrBits) - site(CacheBlockOffsetBits))
+        case MIFDataBeats => site(CacheBlockBytes) * 8 / site(MIFDataBits)
+        case NastiKey => {
+          Dump("MEM_STRB_BITS", site(MIFDataBits) / 8)
+          NastiParameters(
+            dataBits = Dump("MEM_DATA_BITS", site(MIFDataBits)),
+            addrBits = Dump("MEM_ADDR_BITS", site(PAddrBits)),
+            idBits = Dump("MEM_ID_BITS", site(MIFTagBits)))
+        }
+        case BuildCoreplex =>
+          (p: Parameters, c: CoreplexConfig) => Module(new DefaultCoreplex(p, c))
+        case NExtTopInterrupts => 2
+          // Note that PLIC asserts that this is > 0.
+        case AsyncDebugBus => false
+        case IncludeJtagDTM => false
+        case AsyncMMIOChannels => false
+        case ExtMMIOPorts => Nil
+        case NExtMMIOAXIChannels => 0
+        case NExtMMIOAHBChannels => 0
+        case NExtMMIOTLChannels => 0
+        case AsyncBusChannels => false
+        case NExtBusAXIChannels => 0
+        case NCoreplexExtClients => nCoreplexExtClients
+        case HastiId => "Ext"
+        case HastiKey("TL") =>
+          HastiParameters(
+            addrBits = site(PAddrBits),
+            dataBits = site(TLKey(site(TLId))).dataBits / site(TLKey(site(TLId))).dataBeats)
+        case HastiKey("Ext") =>
+          HastiParameters(
+            addrBits = site(PAddrBits),
+            dataBits = site(XLen))
+        case AsyncMemChannels => false
+        case NMemoryChannels => Dump("N_MEM_CHANNELS", 1)
+        case TMemoryChannels => BusType.AXI
+        case ExtMemSize => Dump("MEM_SIZE", 0x10000000L)
+        case ConfigString => configString
+        case GlobalAddrMap => globalAddrMap
+        case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
+        case BuildExampleTop =>
+          (p: Parameters) => uncore.tilelink2.LazyModule(new ExampleTop(p))
+        case _ => throw new CDEMatchError
+      }
+    }
+  })
 
 class BaseConfig extends Config(new BaseCoreplexConfig ++ new BasePlatformConfig)
 class DefaultConfig extends Config(new WithBlockingL1 ++ new BaseConfig)
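The `lazy val` address-map and config-string machinery is gone from the config object; the platform instead publishes these values through `GlobalVariable` cells that the config returns by reference and that consumers unwrap with `.get`. `GlobalVariable` itself is not shown in this diff, so the following is only a minimal sketch of the contract its uses here imply (set once during elaboration, read afterwards):

```scala
// Assumption: a write-once cell, named after its uses in this diff.
class GlobalVariable[T] {
  private var assigned = false
  private var variable: T = _
  def set(value: T): Unit = {
    require(!assigned, "GlobalVariable may only be set once")
    variable = value
    assigned = true
  }
  def get: T = {
    require(assigned, "GlobalVariable.get called before set")
    variable
  }
}
```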
@@ -179,7 +89,6 @@ class DefaultBufferlessConfig extends Config(
 class FPGAConfig extends Config (
   (pname,site,here) => pname match {
     case NAcquireTransactors => 4
-    case ExportGroundTestStatus => true
     case _ => throw new CDEMatchError
   }
 )
@@ -270,35 +179,12 @@ class TinyConfig extends Config(
   new WithSmallCores ++ new WithRV32 ++
   new WithStatelessBridge ++ new BaseConfig)
 
-class WithTestRAM extends Config(
-  (pname, site, here) => pname match {
-    case ExtraDevices => {
-      class TestRAMDevice extends DeviceBlock {
-        val ramSize = 0x1000
-        def nClientPorts = 0
-        def addrMapEntries = Seq(
-          AddrMapEntry("testram", MemSize(ramSize, MemAttr(AddrMapProt.RW))))
-        def builder(
-          mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-          clientPorts: Seq[ClientUncachedTileLinkIO],
-          interrupts: Seq[Bool],
-          extra: Bundle, p: Parameters) {
-          val testram = Module(new TileLinkTestRAM(ramSize)(p))
-          testram.io <> mmioPorts("testram")
-        }
-      }
-      new TestRAMDevice
-    }
-  }
-)
-
 class WithAsyncDebug extends Config (
   (pname, site, here) => pname match {
     case AsyncDebugBus => true
   }
 )
 
 
 class WithJtagDTM extends Config (
   (pname, site, here) => pname match {
     case IncludeJtagDTM => true
@@ -1,68 +0,0 @@
-package rocketchip
-
-import Chisel._
-import junctions._
-import uncore.tilelink._
-import scala.collection.immutable.HashMap
-import cde.{Parameters, Field}
-
-case object ExtraTopPorts extends Field[Parameters => Bundle]
-case object ExtraDevices extends Field[DeviceBlock]
-
-abstract class DeviceBlock {
-  /** How many client ports will the devices use */
-  def nClientPorts: Int
-  /** Address map entries for all of the devices */
-  def addrMapEntries: Seq[AddrMapEntry]
-  /**
-   * The total number of interrupt signals coming
-   * from all the devices */
-  def nInterrupts : Int = 0
-
-  /**
-   * The function that elaborates all the extra devices and connects them
-   * to the TileLink ports and extra top-level ports.
-   *
-   * @param mmioPorts A hashmap for the mmio ports.
-   *                  Use the names specified in addrMapEntries to get
-   *                  the mmio port for each device.
-   * @param clientPorts All the client ports available for the devices
-   * @param interrupts External interrupts from Periphery to Coreplex
-   * @param extra The extra top-level IO bundle
-   * @param p The CDE parameters for the devices
-   */
-  def builder(
-    mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-    clientPorts: Seq[ClientUncachedTileLinkIO],
-    interrupts : Seq[Bool],
-    extra: Bundle, p: Parameters): Unit
-
-  /**
-   * Create the config string entry for this device that goes into the
-   * Boot ROM. You generally won't need to override this
-   *
-   * @param fullAddrMap The full global address map
-   */
-  def makeConfigString(fullAddrMap: AddrMap): String = {
-    addrMapEntries.map { entry =>
-      val region = fullAddrMap("io:ext:" + entry.name)
-      s"${entry.name} {\n" +
-      s"  addr 0x${region.start.toString(16)};\n" +
-      s"  size 0x${region.size.toString(16)}; \n" +
-      "}\n"
-    }.mkString
-  }
-
-
-}
-
-class EmptyDeviceBlock extends DeviceBlock {
-  def nClientPorts = 0
-  def addrMapEntries = Seq.empty
-
-  def builder(
-    mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-    clientPorts: Seq[ClientUncachedTileLinkIO],
-    interrupts : Seq[Bool],
-    extra: Bundle, p: Parameters) {}
-}
@@ -82,5 +82,5 @@ object RocketChipGenerator extends Generator {
   writeOutputFile(td, s"$longName.prm", ParameterDump.getDump) // Parameters flagged with Dump()
   writeOutputFile(td, s"${names.configs}.knb", world.getKnobs) // Knobs for DSE
   writeOutputFile(td, s"${names.configs}.cst", world.getConstraints) // Constraints for DSE
-  writeOutputFile(td, s"${names.configs}.cfg", params(ConfigString).toString) // String for software
+  writeOutputFile(td, s"${names.configs}.cfg", params(ConfigString).get) // String for software
 }

src/main/scala/rocketchip/Periphery.scala (new file, +331)
@@ -0,0 +1,331 @@
+// See LICENSE for license details.
+
+package rocketchip
+
+import Chisel._
+import cde.{Parameters, Field}
+import junctions._
+import uncore.tilelink._
+import uncore.tilelink2.{LazyModule, LazyModuleImp}
+import uncore.converters._
+import uncore.devices._
+import uncore.util._
+import rocket.Util._
+import coreplex._
+
+/** Options for memory bus interface */
+object BusType {
+  sealed trait EnumVal
+  case object AXI extends EnumVal
+  case object AHB extends EnumVal
+  case object TL extends EnumVal
+  val busTypes = Seq(AXI, AHB, TL)
+}
+
+/** Memory channel controls */
+case object TMemoryChannels extends Field[BusType.EnumVal]
+/** External MMIO controls */
+case object NExtMMIOAXIChannels extends Field[Int]
+case object NExtMMIOAHBChannels extends Field[Int]
+case object NExtMMIOTLChannels extends Field[Int]
+/** External Bus controls */
+case object NExtBusAXIChannels extends Field[Int]
+/** Async configurations */
+case object AsyncBusChannels extends Field[Boolean]
+case object AsyncDebugBus extends Field[Boolean]
+case object AsyncMemChannels extends Field[Boolean]
+case object AsyncMMIOChannels extends Field[Boolean]
+/** External address map settings */
+case object ExtMMIOPorts extends Field[Seq[AddrMapEntry]]
+/** Specifies the size of external memory */
+case object ExtMemSize extends Field[Long]
+/** Specifies the number of external interrupts */
+case object NExtTopInterrupts extends Field[Int]
+/** Source of RTC. First bundle is TopIO.extra, Second bundle is periphery.io.extra **/
+case object RTCPeriod extends Field[Int]
+
+object PeripheryUtils {
+  def addQueueAXI(source: NastiIO)(implicit p: Parameters) = {
+    val sink = Wire(new NastiIO)
+    sink.ar <> Queue(source.ar, 1)
+    sink.aw <> Queue(source.aw, 1)
+    sink.w <> Queue(source.w)
+    source.r <> Queue(sink.r)
+    source.b <> Queue(sink.b, 1)
+    sink
+  }
+  def convertTLtoAXI(tl: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
+    val bridge = Module(new NastiIOTileLinkIOConverter())
+    bridge.io.tl <> tl
+    addQueueAXI(bridge.io.nasti)
+  }
+  def convertTLtoAHB(tl: ClientUncachedTileLinkIO, atomics: Boolean)(implicit p: Parameters) = {
+    val bridge = Module(new AHBBridge(atomics))
+    bridge.io.tl <> tl
+    bridge.io.ahb
+  }
+}
+
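The `PeripheryUtils` helpers wrap the TileLink-to-bus bridges and put single-entry queues on the AXI request channels to cut combinational paths at the chip boundary. A usage sketch (the surrounding port names are illustrative, not from this diff):

```scala
// Illustrative: exporting one coreplex memory channel as AXI, as
// PeripheryMasterMemModule does later in this file.
val axi = PeripheryUtils.convertTLtoAXI(coreplex.io.master.mem(0))(outermostParams)
io.mem_axi(0) <> axi
```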
+/** Utility trait for quick access to some relevant parameters */
+trait HasPeripheryParameters {
+  implicit val p: Parameters
+  lazy val tMemChannels = p(TMemoryChannels)
+  lazy val nMemChannels = p(NMemoryChannels)
+  lazy val nMemAXIChannels = if (tMemChannels == BusType.AXI) nMemChannels else 0
+  lazy val nMemAHBChannels = if (tMemChannels == BusType.AHB) nMemChannels else 0
+  lazy val nMemTLChannels = if (tMemChannels == BusType.TL) nMemChannels else 0
+  lazy val innerParams = p.alterPartial({ case TLId => "L1toL2" })
+  lazy val innerMMIOParams = p.alterPartial({ case TLId => "L2toMMIO" })
+  lazy val outermostParams = p.alterPartial({ case TLId => "Outermost" })
+  lazy val outermostMMIOParams = p.alterPartial({ case TLId => "MMIO_Outermost" })
+}
+
+/////
+
+trait PeripheryDebug extends LazyModule {
+  implicit val p: Parameters
+}
+
+trait PeripheryDebugBundle {
+  implicit val p: Parameters
+  val debug_clk = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Clock(INPUT))
+  val debug_rst = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Bool(INPUT))
+  val debug = (!p(IncludeJtagDTM)).option(new DebugBusIO()(p).flip)
+  val jtag = p(IncludeJtagDTM).option(new JTAGIO(true).flip)
+}
+
+trait PeripheryDebugModule {
+  implicit val p: Parameters
+  val outer: PeripheryDebug
+  val io: PeripheryDebugBundle
+  val coreplex: Coreplex
+
+  if (p(IncludeJtagDTM)) {
+    // JtagDTMWithSync is a wrapper which
+    // handles the synchronization as well.
+    val dtm = Module (new JtagDTMWithSync()(p))
+    dtm.io.jtag <> io.jtag.get
+    coreplex.io.debug <> dtm.io.debug
+  } else {
+    coreplex.io.debug <>
+      (if (p(AsyncDebugBus)) AsyncDebugBusFrom(io.debug_clk.get, io.debug_rst.get, io.debug.get)
+       else io.debug.get)
+  }
+}
+
+/////
+
+trait PeripheryExtInterrupts extends LazyModule {
+  implicit val p: Parameters
+  val pInterrupts: RangeManager
+
+  pInterrupts.add("ext", p(NExtTopInterrupts))
+}
+
+trait PeripheryExtInterruptsBundle {
+  implicit val p: Parameters
+  val interrupts = Vec(p(NExtTopInterrupts), Bool()).asInput
+}
+
+trait PeripheryExtInterruptsModule {
+  implicit val p: Parameters
+  val outer: PeripheryExtInterrupts
+  val io: PeripheryExtInterruptsBundle
+  val coreplex: Coreplex
+
+  {
+    val r = outer.pInterrupts.range("ext")
+    ((r._1 until r._2) zipWithIndex) foreach { case (c, i) =>
+      coreplex.io.interrupts(c) := io.interrupts(i)
+    }
+  }
+}
+
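Producers reserve contiguous index ranges from a shared `RangeManager` in the lazy (outer) phase, then look their range back up when wiring the module. `RangeManager` is not part of this diff; a minimal sketch of the contract these traits rely on (assumed, first-come first-served allocation):

```scala
// Assumption: allocates [start, end) slots in registration order.
class RangeManager {
  private val entries = scala.collection.mutable.ListBuffer[(String, Int)]()
  def add(name: String, len: Int): Unit = entries += (name -> len)
  def range(name: String): (Int, Int) = {
    val (before, rest) = entries.span(_._1 != name)
    require(rest.nonEmpty, s"no range registered under $name")
    val start = before.map(_._2).sum
    (start, start + rest.head._2)
  }
}
```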
+/////
+
+trait PeripheryMasterMem extends LazyModule {
+  implicit val p: Parameters
+}
+
+trait PeripheryMasterMemBundle extends HasPeripheryParameters {
+  implicit val p: Parameters
+  val mem_clk = p(AsyncMemChannels).option(Vec(nMemChannels, Clock(INPUT)))
+  val mem_rst = p(AsyncMemChannels).option(Vec(nMemChannels, Bool (INPUT)))
+  val mem_axi = Vec(nMemAXIChannels, new NastiIO)
+  val mem_ahb = Vec(nMemAHBChannels, new HastiMasterIO)
+  val mem_tl = Vec(nMemTLChannels, new ClientUncachedTileLinkIO()(outermostParams))
+}
+
+trait PeripheryMasterMemModule extends HasPeripheryParameters {
+  implicit val p: Parameters
+  val outer: PeripheryMasterMem
+  val io: PeripheryMasterMemBundle
+  val coreplex: Coreplex
+
+  // Abuse the fact that zip takes the shorter of the two lists
+  ((io.mem_axi zip coreplex.io.master.mem) zipWithIndex) foreach { case ((axi, mem), idx) =>
+    val axi_sync = PeripheryUtils.convertTLtoAXI(mem)(outermostParams)
+    axi_sync.ar.bits.cache := UInt("b0011")
+    axi_sync.aw.bits.cache := UInt("b0011")
+    axi <> (
+      if (!p(AsyncMemChannels)) axi_sync
+      else AsyncNastiTo(io.mem_clk.get(idx), io.mem_rst.get(idx), axi_sync)
+    )
+  }
+
+  (io.mem_ahb zip coreplex.io.master.mem) foreach { case (ahb, mem) =>
+    ahb <> PeripheryUtils.convertTLtoAHB(mem, atomics = false)(outermostParams)
+  }
+
+  (io.mem_tl zip coreplex.io.master.mem) foreach { case (tl, mem) =>
+    tl <> ClientUncachedTileLinkEnqueuer(mem, 2)(outermostParams)
+  }
+}
+
+/////
+
+trait PeripheryMasterMMIO extends LazyModule {
+  implicit val p: Parameters
+}
+
+trait PeripheryMasterMMIOBundle extends HasPeripheryParameters {
+  implicit val p: Parameters
+  val mmio_clk = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Clock(INPUT)))
+  val mmio_rst = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Bool (INPUT)))
+  val mmio_axi = Vec(p(NExtMMIOAXIChannels), new NastiIO)
+  val mmio_ahb = Vec(p(NExtMMIOAHBChannels), new HastiMasterIO)
+  val mmio_tl = Vec(p(NExtMMIOTLChannels), new ClientUncachedTileLinkIO()(outermostMMIOParams))
+}
+
+trait PeripheryMasterMMIOModule extends HasPeripheryParameters {
+  implicit val p: Parameters
+  val outer: PeripheryMasterMMIO
+  val io: PeripheryMasterMMIOBundle
+  val mmioNetwork: Option[TileLinkRecursiveInterconnect]
+
+  val mmio_ports = p(ExtMMIOPorts) map { port =>
+    TileLinkWidthAdapter(mmioNetwork.get.port(port.name), "MMIO_Outermost")
+  }
+
+  val mmio_axi_start = 0
+  val mmio_axi_end = mmio_axi_start + p(NExtMMIOAXIChannels)
+  val mmio_ahb_start = mmio_axi_end
+  val mmio_ahb_end = mmio_ahb_start + p(NExtMMIOAHBChannels)
+  val mmio_tl_start = mmio_ahb_end
+  val mmio_tl_end = mmio_tl_start + p(NExtMMIOTLChannels)
+  require (mmio_tl_end == mmio_ports.size)
+
+  for (i <- 0 until mmio_ports.size) {
+    if (mmio_axi_start <= i && i < mmio_axi_end) {
+      val idx = i-mmio_axi_start
+      val axi_sync = PeripheryUtils.convertTLtoAXI(mmio_ports(i))(outermostMMIOParams)
+      io.mmio_axi(idx) <> (
+        if (!p(AsyncMMIOChannels)) axi_sync
+        else AsyncNastiTo(io.mmio_clk.get(idx), io.mmio_rst.get(idx), axi_sync)
+      )
+    } else if (mmio_ahb_start <= i && i < mmio_ahb_end) {
+      val idx = i-mmio_ahb_start
+      io.mmio_ahb(idx) <> PeripheryUtils.convertTLtoAHB(mmio_ports(i), atomics = true)(outermostMMIOParams)
+    } else if (mmio_tl_start <= i && i < mmio_tl_end) {
+      val idx = i-mmio_tl_start
+      io.mmio_tl(idx) <> ClientUncachedTileLinkEnqueuer(mmio_ports(i), 2)(outermostMMIOParams)
+    } else {
+      require(false, "Unconnected external MMIO port")
+    }
+  }
+}
+
+/////
+
+trait PeripherySlave extends LazyModule {
+  implicit val p: Parameters
+  val pBusMasters: RangeManager
+
+  if (p(NExtBusAXIChannels) > 0) pBusMasters.add("ext", 1) // NExtBusAXIChannels are arbitrated into one TL port
+}
+
+trait PeripherySlaveBundle extends HasPeripheryParameters {
+  implicit val p: Parameters
+  val bus_clk = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Clock(INPUT)))
+  val bus_rst = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Bool (INPUT)))
+  val bus_axi = Vec(p(NExtBusAXIChannels), new NastiIO).flip
+}
+
+trait PeripherySlaveModule extends HasPeripheryParameters {
+  implicit val p: Parameters
+  val outer: PeripherySlave
+  val io: PeripherySlaveBundle
+  val coreplex: Coreplex
+
+  if (p(NExtBusAXIChannels) > 0) {
+    val arb = Module(new NastiArbiter(p(NExtBusAXIChannels)))
+    ((io.bus_axi zip arb.io.master) zipWithIndex) foreach { case ((bus, port), idx) =>
+      port <> (
+        if (!p(AsyncBusChannels)) bus
+        else AsyncNastiFrom(io.bus_clk.get(idx), io.bus_rst.get(idx), bus)
+      )
+    }
+    val conv = Module(new TileLinkIONastiIOConverter()(innerParams))
+    conv.io.nasti <> arb.io.slave
+
+    val r = outer.pBusMasters.range("ext")
+    require(r._2 - r._1 == 1, "RangeManager should return 1 slot")
+    coreplex.io.slave(r._1) <> conv.io.tl
+  }
+}
+
|
/////
|
||||||
|
|
||||||
|
trait PeripheryTestRAM extends LazyModule {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val pDevices: ResourceManager[AddrMapEntry]
|
||||||
|
|
||||||
|
val ramSize = 0x1000
|
||||||
|
pDevices.add(AddrMapEntry("testram", MemSize(ramSize, MemAttr(AddrMapProt.RW))))
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestRAMBundle {
|
||||||
|
implicit val p: Parameters
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestRAMModule extends HasPeripheryParameters {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val outer: PeripheryTestRAM
|
||||||
|
val io: PeripheryTestRAMBundle
|
||||||
|
val mmioNetwork: Option[TileLinkRecursiveInterconnect]
|
||||||
|
|
||||||
|
val testram = Module(new TileLinkTestRAM(outer.ramSize)(innerMMIOParams))
|
||||||
|
testram.io <> mmioNetwork.get.port("testram")
|
||||||
|
}
|
||||||
|
|
||||||
|
/////
|
||||||
|
|
||||||
|
trait PeripheryTestBusMaster extends LazyModule {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val pBusMasters: RangeManager
|
||||||
|
val pDevices: ResourceManager[AddrMapEntry]
|
||||||
|
|
||||||
|
pBusMasters.add("busmaster", 1)
|
||||||
|
pDevices.add(AddrMapEntry("busmaster", MemSize(4096, MemAttr(AddrMapProt.RW))))
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestBusMasterBundle {
|
||||||
|
implicit val p: Parameters
|
||||||
|
}
|
||||||
|
|
||||||
|
trait PeripheryTestBusMasterModule {
|
||||||
|
implicit val p: Parameters
|
||||||
|
val outer: PeripheryTestBusMaster
|
||||||
|
val io: PeripheryTestBusMasterBundle
|
||||||
|
val mmioNetwork: Option[TileLinkRecursiveInterconnect]
|
||||||
|
val coreplex: Coreplex
|
||||||
|
|
||||||
|
val busmaster = Module(new groundtest.ExampleBusMaster()(p))
|
||||||
|
busmaster.io.mmio <> mmioNetwork.get.port("busmaster")
|
||||||
|
|
||||||
|
{
|
||||||
|
val r = outer.pBusMasters.range("busmaster")
|
||||||
|
require(r._2 - r._1 == 1, "RangeManager should return 1 slot")
|
||||||
|
coreplex.io.slave(r._1) <> busmaster.io.mem
|
||||||
|
}
|
||||||
|
}
|
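Each peripheral above follows the same three-trait cake pattern: a LazyModule trait that claims address-map and bus resources during the first elaboration phase, a Bundle trait that adds any top-level IO, and a Module trait that instantiates and wires the hardware. As a rough sketch of how a new device would plug in (the PeripheryMyDevice name, the "mydevice" key, and the 4 KiB size are hypothetical; the body mirrors PeripheryTestRAM above):

trait PeripheryMyDevice extends LazyModule {
  implicit val p: Parameters
  val pDevices: ResourceManager[AddrMapEntry]
  // claim a 4 KiB MMIO window; the entry name keys the interconnect port lookup below
  pDevices.add(AddrMapEntry("mydevice", MemSize(0x1000, MemAttr(AddrMapProt.RW))))
}

trait PeripheryMyDeviceBundle {
  implicit val p: Parameters // no dedicated top-level pins in this sketch
}

trait PeripheryMyDeviceModule extends HasPeripheryParameters {
  implicit val p: Parameters
  val outer: PeripheryMyDevice
  val mmioNetwork: Option[TileLinkRecursiveInterconnect]
  // hypothetical device body; any TileLink slave would do in place of the test RAM
  val mydevice = Module(new TileLinkTestRAM(0x1000)(innerMMIOParams))
  mydevice.io <> mmioNetwork.get.port("mydevice")
}
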
@ -1,307 +0,0 @@
// See LICENSE for license details.

package rocketchip

import Chisel._
import cde.{Parameters, Field}
import junctions._
import uncore.tilelink._
import uncore.devices._
import uncore.util._
import rocket.Util._
import uncore.converters._
import uncore.coherence.{InnerTLId, OuterTLId}
import rocket._
import coreplex._
import scala.collection.immutable.HashMap

/** Top-level parameters of RocketChip, values set in e.g. PublicConfigs.scala */

/** Options for memory bus interface */
object BusType {
  sealed trait EnumVal
  case object AXI extends EnumVal
  case object AHB extends EnumVal
  case object TL extends EnumVal
  val busTypes = Seq(AXI, AHB, TL)
}

/** Memory channel controls */
case object TMemoryChannels extends Field[BusType.EnumVal]
/** External MMIO controls */
case object NExtMMIOAXIChannels extends Field[Int]
case object NExtMMIOAHBChannels extends Field[Int]
case object NExtMMIOTLChannels extends Field[Int]
/** External Bus controls */
case object NExtBusAXIChannels extends Field[Int]
/** Async configurations */
case object AsyncBusChannels extends Field[Boolean]
case object AsyncDebugBus extends Field[Boolean]
case object AsyncMemChannels extends Field[Boolean]
case object AsyncMMIOChannels extends Field[Boolean]
/** External address map settings */
case object ExtMMIOPorts extends Field[Seq[AddrMapEntry]]
/** Function for building Coreplex */
case object BuildCoreplex extends Field[Parameters => Coreplex]
/** Function for connecting coreplex extra ports to top-level extra ports */
case object ConnectExtraPorts extends Field[(Bundle, Bundle, Parameters) => Unit]
/** Specifies the size of external memory */
case object ExtMemSize extends Field[Long]
/** Specifies the actual source of external interrupts as Top and Periphery.
  * NExtInterrupts = NExtTopInterrupts + NExtPeripheryInterrupts
  **/
case object NExtTopInterrupts extends Field[Int]
case object NExtPeripheryInterrupts extends Field[Int]
/** Source of RTC. First bundle is TopIO.extra, second bundle is periphery.io.extra **/
case object RTCTick extends Field[(Parameters, Bundle, Bundle) => Bool]
case object RTCPeriod extends Field[Int]

/** Utility trait for quick access to some relevant parameters */
trait HasTopLevelParameters {
  implicit val p: Parameters
  lazy val tMemChannels = p(TMemoryChannels)
  lazy val nMemChannels = p(NMemoryChannels)
  lazy val nMemAXIChannels = if (tMemChannels == BusType.AXI) nMemChannels else 0
  lazy val nMemAHBChannels = if (tMemChannels == BusType.AHB) nMemChannels else 0
  lazy val nMemTLChannels = if (tMemChannels == BusType.TL) nMemChannels else 0
  lazy val innerParams = p.alterPartial({ case TLId => "L1toL2" })
  lazy val outermostParams = p.alterPartial({ case TLId => "Outermost" })
  lazy val outermostMMIOParams = p.alterPartial({ case TLId => "MMIO_Outermost" })
  lazy val exportMMIO = p(ExportMMIOPort)
}

class MemBackupCtrlIO extends Bundle {
  val en = Bool(INPUT)
  val in_valid = Bool(INPUT)
  val out_ready = Bool(INPUT)
  val out_valid = Bool(OUTPUT)
}

/** Top-level io for the chip */
class BasicTopIO(implicit val p: Parameters) extends ParameterizedBundle()(p)
  with HasTopLevelParameters

class TopIO(implicit p: Parameters) extends BasicTopIO()(p) {
  val mem_clk = p(AsyncMemChannels).option(Vec(nMemChannels, Clock(INPUT)))
  val mem_rst = p(AsyncMemChannels).option(Vec(nMemChannels, Bool (INPUT)))
  val mem_axi = Vec(nMemAXIChannels, new NastiIO)
  val mem_ahb = Vec(nMemAHBChannels, new HastiMasterIO)
  val mem_tl = Vec(nMemTLChannels, new ClientUncachedTileLinkIO()(outermostParams))
  val interrupts = Vec(p(NExtTopInterrupts), Bool()).asInput
  val bus_clk = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Clock(INPUT)))
  val bus_rst = p(AsyncBusChannels).option(Vec(p(NExtBusAXIChannels), Bool (INPUT)))
  val bus_axi = Vec(p(NExtBusAXIChannels), new NastiIO).flip
  val mmio_clk = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Clock(INPUT)))
  val mmio_rst = p(AsyncMMIOChannels).option(Vec(p(NExtMMIOAXIChannels), Bool (INPUT)))
  val mmio_axi = Vec(p(NExtMMIOAXIChannels), new NastiIO)
  val mmio_ahb = Vec(p(NExtMMIOAHBChannels), new HastiMasterIO)
  val mmio_tl = Vec(p(NExtMMIOTLChannels), new ClientUncachedTileLinkIO()(outermostMMIOParams))
  val debug_clk = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Clock(INPUT))
  val debug_rst = (p(AsyncDebugBus) && !p(IncludeJtagDTM)).option(Bool(INPUT))
  val debug = (!p(IncludeJtagDTM)).option(new DebugBusIO()(p).flip)
  val jtag = p(IncludeJtagDTM).option(new JTAGIO(true).flip)
  val extra = p(ExtraTopPorts)(p)
}

object TopUtils {
  // Connect two Nasti interfaces with queues in-between
  def connectNasti(outer: NastiIO, inner: NastiIO)(implicit p: Parameters) {
    val mifDataBeats = p(MIFDataBeats)
    outer.ar <> Queue(inner.ar, 1)
    outer.aw <> Queue(inner.aw, 1)
    outer.w  <> Queue(inner.w)
    inner.r  <> Queue(outer.r)
    inner.b  <> Queue(outer.b, 1)
  }
  def connectTilelinkNasti(nasti: NastiIO, tl: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
    val conv = Module(new NastiIOTileLinkIOConverter())
    conv.io.tl <> tl
    TopUtils.connectNasti(nasti, conv.io.nasti)
  }
  def connectTilelinkAhb(ahb: HastiMasterIO, tl: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
    val bridge = Module(new AHBBridge(true))
    bridge.io.tl <> tl
    bridge.io.ahb
  }
  def connectTilelink(
      outer: ClientUncachedTileLinkIO, inner: ClientUncachedTileLinkIO)(implicit p: Parameters) = {
    outer.acquire <> Queue(inner.acquire)
    inner.grant <> Queue(outer.grant)
  }
}

/** Top-level module for the chip */
//TODO: Remove this wrapper once multichannel DRAM controller is provided
class Top(topParams: Parameters) extends Module with HasTopLevelParameters {
  implicit val p = topParams

  val coreplex = p(BuildCoreplex)(p)
  val periphery = Module(new Periphery()(innerParams))

  val io = new TopIO {
    val success = coreplex.hasSuccessFlag.option(Bool(OUTPUT))
  }
  io.success zip coreplex.io.success map { case (x, y) => x := y }

  if (exportMMIO) { periphery.io.mmio_in.get <> coreplex.io.mmio.get }
  periphery.io.mem_in <> coreplex.io.mem
  coreplex.io.ext_clients <> periphery.io.clients_out

  if (p(IncludeJtagDTM)) {
    // JtagDTMWithSync is a wrapper which
    // handles the synchronization as well.
    val jtag_dtm = Module(new JtagDTMWithSync()(p))
    jtag_dtm.io.jtag <> io.jtag.get
    coreplex.io.debug <> jtag_dtm.io.debug
  } else {
    coreplex.io.debug <>
      (if (p(AsyncDebugBus))
        AsyncDebugBusFrom(io.debug_clk.get, io.debug_rst.get, io.debug.get)
      else io.debug.get)
  }

  def asyncAxiTo(clocks: Seq[Clock], resets: Seq[Bool], inner_axis: Seq[NastiIO]): Seq[NastiIO] =
    (clocks, resets, inner_axis).zipped.map {
      case (clk, rst, in_axi) => AsyncNastiTo(clk, rst, in_axi)
    }

  def asyncAxiFrom(clocks: Seq[Clock], resets: Seq[Bool], outer_axis: Seq[NastiIO]): Seq[NastiIO] =
    (clocks, resets, outer_axis).zipped.map {
      case (clk, rst, out_axi) => AsyncNastiFrom(clk, rst, out_axi)
    }

  io.mmio_axi <>
    (if (p(AsyncMMIOChannels))
      asyncAxiTo(io.mmio_clk.get, io.mmio_rst.get, periphery.io.mmio_axi)
    else periphery.io.mmio_axi)
  io.mmio_ahb <> periphery.io.mmio_ahb
  io.mmio_tl <> periphery.io.mmio_tl

  io.mem_axi <>
    (if (p(AsyncMemChannels))
      asyncAxiTo(io.mem_clk.get, io.mem_rst.get, periphery.io.mem_axi)
    else periphery.io.mem_axi)
  io.mem_ahb <> periphery.io.mem_ahb
  io.mem_tl <> periphery.io.mem_tl

  periphery.io.bus_axi <>
    (if (p(AsyncBusChannels))
      asyncAxiFrom(io.bus_clk.get, io.bus_rst.get, io.bus_axi)
    else io.bus_axi)

  // This places the periphery interrupts at bits [0...];
  // top-level interrupts are at the higher bits.
  // This may have some implications for prioritization of the interrupts,
  // but the PLIC could do some internal swizzling in the future.
  coreplex.io.interrupts <> (periphery.io.interrupts ++ io.interrupts)

  io.extra <> periphery.io.extra

  coreplex.io.rtcTick := p(RTCTick)(p, io.extra, periphery.io.extra)

  p(ConnectExtraPorts)(io.extra, coreplex.io.extra, p)
}

class Periphery(implicit val p: Parameters) extends Module
    with HasTopLevelParameters {
  val io = new Bundle {
    val mem_in = Vec(nMemChannels, new ClientUncachedTileLinkIO()(outermostParams)).flip
    val clients_out = Vec(p(NExternalClients), new ClientUncachedTileLinkIO()(innerParams))
    val mmio_in = exportMMIO.option(new ClientUncachedTileLinkIO()(outermostMMIOParams).flip)
    val mem_axi = Vec(nMemAXIChannels, new NastiIO)
    val mem_ahb = Vec(nMemAHBChannels, new HastiMasterIO)
    val mem_tl = Vec(nMemTLChannels, new ClientUncachedTileLinkIO()(outermostParams))
    val bus_axi = Vec(p(NExtBusAXIChannels), new NastiIO).flip
    val mmio_axi = Vec(p(NExtMMIOAXIChannels), new NastiIO)
    val mmio_ahb = Vec(p(NExtMMIOAHBChannels), new HastiMasterIO)
    val mmio_tl = Vec(p(NExtMMIOTLChannels), new ClientUncachedTileLinkIO()(outermostMMIOParams))
    val interrupts = Vec(p(NExtPeripheryInterrupts), Bool()).asOutput
    val extra = p(ExtraTopPorts)(p)
  }

  if (io.bus_axi.size > 0) {
    val conv = Module(new TileLinkIONastiIOConverter)
    val arb = Module(new NastiArbiter(io.bus_axi.size))
    arb.io.master <> io.bus_axi
    conv.io.nasti <> arb.io.slave
    io.clients_out.head <> conv.io.tl
  }

  def connectExternalMMIO(ports: Seq[ClientUncachedTileLinkIO])(implicit p: Parameters) {
    val mmio_axi_start = 0
    val mmio_axi_end   = mmio_axi_start + p(NExtMMIOAXIChannels)
    val mmio_ahb_start = mmio_axi_end
    val mmio_ahb_end   = mmio_ahb_start + p(NExtMMIOAHBChannels)
    val mmio_tl_start  = mmio_ahb_end
    val mmio_tl_end    = mmio_tl_start + p(NExtMMIOTLChannels)
    require (mmio_tl_end == ports.size)

    for (i <- 0 until ports.size) {
      if (mmio_axi_start <= i && i < mmio_axi_end) {
        TopUtils.connectTilelinkNasti(io.mmio_axi(i-mmio_axi_start), ports(i))
      } else if (mmio_ahb_start <= i && i < mmio_ahb_end) {
        val ahbBridge = Module(new AHBBridge(true))
        io.mmio_ahb(i-mmio_ahb_start) <> ahbBridge.io.ahb
        ahbBridge.io.tl <> ports(i)
      } else if (mmio_tl_start <= i && i < mmio_tl_end) {
        TopUtils.connectTilelink(io.mmio_tl(i-mmio_tl_start), ports(i))
      } else {
        require(false, "Unconnected external MMIO port")
      }
    }
  }

  def buildMMIONetwork(implicit p: Parameters) = {
    val extAddrMap = p(GlobalAddrMap).subMap("io:ext")

    val mmioNetwork = Module(new TileLinkRecursiveInterconnect(1, extAddrMap))
    mmioNetwork.io.in.head <> io.mmio_in.get

    val extraDevices = p(ExtraDevices)

    val deviceMMIO = HashMap.newBuilder[String, ClientUncachedTileLinkIO]
    for ((entry, i) <- extraDevices.addrMapEntries.zipWithIndex)
      deviceMMIO += (entry.name -> mmioNetwork.port(entry.name))

    val deviceClients = if (io.bus_axi.size > 0) io.clients_out.tail else io.clients_out
    require(deviceClients.size == extraDevices.nClientPorts)

    val buildParams = p.alterPartial({
      case InnerTLId => "L2toMMIO" // Device MMIO port
      case OuterTLId => "L1toL2"   // Device client port
    })

    extraDevices.builder(deviceMMIO.result(), deviceClients,
                         io.interrupts, io.extra, buildParams)

    val ext = p(ExtMMIOPorts).map(
      port => TileLinkWidthAdapter(mmioNetwork.port(port.name), "MMIO_Outermost"))
    connectExternalMMIO(ext)(outermostMMIOParams)
  }

  if (exportMMIO) {
    buildMMIONetwork(p.alterPartial({case TLId => "L2toMMIO"}))
  }

  for ((nasti, tl) <- io.mem_axi zip io.mem_in) {
    TopUtils.connectTilelinkNasti(nasti, tl)(outermostParams)
    // Memory cache type should be normal non-cacheable bufferable
    // TODO why is this happening here? Would 0000 (device) be OK instead?
    nasti.ar.bits.cache := UInt("b0011")
    nasti.aw.bits.cache := UInt("b0011")
  }

  // Abuse the fact that zip takes the shorter of the two lists
  for ((ahb, tl) <- io.mem_ahb zip io.mem_in) {
    val bridge = Module(new AHBBridge(false)) // no atomics
    ahb <> bridge.io.ahb
    bridge.io.tl <> tl
  }

  for ((mem_tl, tl) <- io.mem_tl zip io.mem_in) {
    TopUtils.connectTilelink(mem_tl, tl)
  }
}

@ -26,7 +26,7 @@ class WithUnitTest extends Config(
      DefaultTestSuites.groundtest32
      TestGeneration.addSuite(groundtest("p"))
      TestGeneration.addSuite(DefaultTestSuites.emptyBmarks)
-     (p: Parameters) => Module(new UnitTestCoreplex(p))
+     (p: Parameters, c: CoreplexConfig) => Module(new UnitTestCoreplex(p, c))
    }
    case UnitTests => (testParams: Parameters) =>
      JunctionsUnitTests(testParams) ++ UncoreUnitTests(testParams)
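The recurring change in these config hunks is the shape of BuildCoreplex: the coreplex constructor now also receives the CoreplexConfig computed by the top level (tile, interrupt, and slave counts) rather than deriving everything from Parameters alone. A config fragment that overrides it would now look roughly like this (WithMyCoreplex and MyCoreplex are hypothetical names; the pattern mirrors the WithUnitTest change above):

class WithMyCoreplex extends Config(
  (pname, site, here) => pname match {
    // new shape: the top level passes in the CoreplexConfig it computed
    case BuildCoreplex =>
      (p: Parameters, c: CoreplexConfig) => Module(new MyCoreplex(p, c))
    case _ => throw new CDEMatchError
  })
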
@ -42,7 +42,8 @@ class UnitTestConfig extends Config(new WithUnitTest ++ new BaseConfig)

class WithGroundTest extends Config(
  (pname, site, here) => pname match {
-   case BuildCoreplex => (p: Parameters) => Module(new GroundTestCoreplex(p))
+   case BuildCoreplex =>
+     (p: Parameters, c: CoreplexConfig) => Module(new GroundTestCoreplex(p, c))
    case TLKey("L1toL2") => {
      val useMEI = site(NTiles) <= 1 && site(NCachedTileLinkPorts) <= 1
      TileLinkParameters(
@ -51,7 +52,7 @@ class WithGroundTest extends Config(
          else new MESICoherence(site(L2DirectoryRepresentation))),
        nManagers = site(NBanksPerMemoryChannel)*site(NMemoryChannels) + 1,
        nCachingClients = site(NCachedTileLinkPorts),
-       nCachelessClients = site(NExternalClients) + site(NUncachedTileLinkPorts),
+       nCachelessClients = site(NCoreplexExtClients).get + site(NUncachedTileLinkPorts),
        maxClientXacts = ((site(DCacheKey).nMSHRs + 1) +:
          site(GroundTestKey).map(_.maxXacts))
          .reduce(max(_, _)),
@ -79,6 +80,8 @@ class WithGroundTest extends Config(
        }
      }
    }
+   case BuildExampleTop =>
+     (p: Parameters) => uncore.tilelink2.LazyModule(new ExampleTopWithTestRAM(p))
    case FPUKey => None
    case UseAtomics => false
    case UseCompressed => false
@ -89,7 +92,7 @@ class WithGroundTest extends Config(
class GroundTestConfig extends Config(new WithGroundTest ++ new BaseConfig)

class ComparatorConfig extends Config(
-  new WithTestRAM ++ new WithComparator ++ new GroundTestConfig)
+  new WithComparator ++ new GroundTestConfig)
class ComparatorL2Config extends Config(
  new WithAtomics ++ new WithPrefetches ++
  new WithL2Cache ++ new ComparatorConfig)
@ -147,60 +150,3 @@ class MIF32BitMemtestConfig extends Config(

class PCIeMockupTestConfig extends Config(
  new WithPCIeMockupTest ++ new GroundTestConfig)
-
-class WithDirectGroundTest extends Config(
-  (pname, site, here) => pname match {
-    case ExportGroundTestStatus => true
-    case BuildCoreplex => (p: Parameters) => Module(new DirectGroundTestCoreplex(p))
-    case ExtraCoreplexPorts => (p: Parameters) =>
-      if (p(ExportGroundTestStatus)) new GroundTestStatus else new Bundle
-    case ExtraTopPorts => (p: Parameters) =>
-      if (p(ExportGroundTestStatus)) new GroundTestStatus else new Bundle
-    case TLKey("Outermost") => site(TLKey("L2toMC")).copy(
-      maxClientXacts = site(GroundTestKey)(0).maxXacts,
-      maxClientsPerPort = site(NBanksPerMemoryChannel),
-      dataBeats = site(MIFDataBeats))
-    case NBanksPerMemoryChannel => site(GroundTestKey)(0).uncached
-    case _ => throw new CDEMatchError
-  })
-
-class DirectGroundTestConfig extends Config(
-  new WithDirectGroundTest ++ new GroundTestConfig)
-class DirectMemtestConfig extends Config(
-  new WithDirectMemtest ++ new DirectGroundTestConfig)
-class DirectComparatorConfig extends Config(
-  new WithDirectComparator ++ new DirectGroundTestConfig)
-
-class DirectMemtestFPGAConfig extends Config(
-  new FPGAConfig ++ new DirectMemtestConfig)
-class DirectComparatorFPGAConfig extends Config(
-  new FPGAConfig ++ new DirectComparatorConfig)
-
-class WithBusMasterTest extends Config(
-  (pname, site, here) => pname match {
-    case GroundTestKey => Seq.fill(site(NTiles)) {
-      GroundTestTileSettings(uncached = 1)
-    }
-    case BuildGroundTest =>
-      (p: Parameters) => Module(new BusMasterTest()(p))
-    case ExtraDevices => {
-      class BusMasterDevice extends DeviceBlock {
-        def nClientPorts = 1
-        def addrMapEntries = Seq(
-          AddrMapEntry("busmaster", MemSize(4096, MemAttr(AddrMapProt.RW))))
-        def builder(
-            mmioPorts: HashMap[String, ClientUncachedTileLinkIO],
-            clientPorts: Seq[ClientUncachedTileLinkIO],
-            interrupts: Seq[Bool],
-            extra: Bundle, p: Parameters) {
-          val busmaster = Module(new ExampleBusMaster()(p))
-          busmaster.io.mmio <> mmioPorts("busmaster")
-          clientPorts.head <> busmaster.io.mem
-        }
-      }
-      new BusMasterDevice
-    }
-    case _ => throw new CDEMatchError
-  })
-
-class BusMasterTestConfig extends Config(new WithBusMasterTest ++ new GroundTestConfig)
@ -7,11 +7,13 @@ import cde.{Parameters, Field}
import rocket.Util._
import junctions._

-class TestHarness(implicit p: Parameters) extends Module {
+case object BuildExampleTop extends Field[Parameters => ExampleTop]
+
+class TestHarness(implicit val p: Parameters) extends Module with HasAddrMapParameters {
   val io = new Bundle {
     val success = Bool(OUTPUT)
   }
-  val dut = Module(new Top(p))
+  val dut = p(BuildExampleTop)(p).module

   // This test harness isn't especially flexible yet
   require(dut.io.mem_clk.isEmpty)
@ -24,13 +26,12 @@ class TestHarness(implicit p: Parameters) extends Module {
   require(dut.io.mmio_rst.isEmpty)
   require(dut.io.mmio_ahb.isEmpty)
   require(dut.io.mmio_tl.isEmpty)
-  require(dut.io.extra.elements.isEmpty)

   for (int <- dut.io.interrupts)
     int := false

   if (dut.io.mem_axi.nonEmpty) {
-    val memSize = p(GlobalAddrMap)("mem").size
+    val memSize = addrMap("mem").size
     require(memSize % dut.io.mem_axi.size == 0)
     for (axi <- dut.io.mem_axi)
       Module(new SimAXIMem(memSize / dut.io.mem_axi.size)).io.axi <> axi
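Since the harness now builds the DUT through the new BuildExampleTop field, any top-level config must bind that field. A minimal sketch (the WithExampleTop name is hypothetical; the binding mirrors the one added to WithGroundTest above):

class WithExampleTop extends Config(
  (pname, site, here) => pname match {
    case BuildExampleTop =>
      (p: Parameters) => uncore.tilelink2.LazyModule(new ExampleTop(p))
    case _ => throw new CDEMatchError
  })
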
src/main/scala/rocketchip/Top.scala (new file, 98 lines)
@ -0,0 +1,98 @@
// See LICENSE for license details.

package rocketchip

import Chisel._
import cde.{Parameters, Field}
import junctions._
import uncore.tilelink._
import uncore.tilelink2.{LazyModule, LazyModuleImp}
import uncore.devices._
import rocket._
import rocket.Util._
import coreplex._

// the following parameters will be refactored properly with TL2
case object GlobalAddrMap extends Field[GlobalVariable[AddrMap]]
case object ConfigString extends Field[GlobalVariable[String]]
case object NCoreplexExtClients extends Field[GlobalVariable[Int]]

/** Base Top with no Periphery */
abstract class BaseTop(val p: Parameters) extends LazyModule {
  // the following variables will be refactored properly with TL2
  val pInterrupts = new RangeManager
  val pBusMasters = new RangeManager
  val pDevices = new ResourceManager[AddrMapEntry]
}

class BaseTopBundle(val p: Parameters, val c: Coreplex) extends ParameterizedBundle()(p) {
  val success = c.hasSuccessFlag.option(Bool(OUTPUT))
}

class BaseTopModule[+L <: BaseTop, +B <: BaseTopBundle](val p: Parameters, l: L, b: Coreplex => B) extends LazyModuleImp(l) {
  val outer: L = l

  val c = CoreplexConfig(
    nTiles = p(NTiles),
    nExtInterrupts = outer.pInterrupts.sum,
    nSlaves = outer.pBusMasters.sum,
    hasSupervisor = p(UseVM),
    hasExtMMIOPort = !(outer.pDevices.get.isEmpty && p(ExtMMIOPorts).isEmpty)
  )

  def genGlobalAddrMap = GenerateGlobalAddrMap(p, outer.pDevices.get)
  def genConfigString = GenerateConfigString(p, c, outer.pDevices.get)

  p(NCoreplexExtClients).assign(outer.pBusMasters.sum)
  p(GlobalAddrMap).assign(genGlobalAddrMap)
  p(ConfigString).assign(genConfigString)

  println("Generated Address Map")
  for (entry <- p(GlobalAddrMap).get.flatten) {
    val name = entry.name
    val start = entry.region.start
    val end = entry.region.start + entry.region.size - 1
    println(f"\t$name%s $start%x - $end%x")
  }

  println("Generated Configuration String")
  println(p(ConfigString).get)

  val coreplex = p(BuildCoreplex)(p, c)
  val io: B = b(coreplex)

  io.success zip coreplex.io.success map { case (x, y) => x := y }
  coreplex.io.rtcTick := Counter(p(RTCPeriod)).inc()

  val mmioNetwork = c.hasExtMMIOPort.option(
    Module(new TileLinkRecursiveInterconnect(1, p(GlobalAddrMap).get.subMap("io:ext"))(
      p.alterPartial({ case TLId => "L2toMMIO" }))))
  mmioNetwork.foreach { _.io.in.head <> coreplex.io.master.mmio.get }
}

/** Example Top with Periphery */
class ExampleTop(p: Parameters) extends BaseTop(p)
    with PeripheryDebug with PeripheryExtInterrupts
    with PeripheryMasterMem with PeripheryMasterMMIO with PeripherySlave {
  override lazy val module = Module(new ExampleTopModule(p, this, new ExampleTopBundle(p, _)))
}

class ExampleTopBundle(p: Parameters, c: Coreplex) extends BaseTopBundle(p, c)
    with PeripheryDebugBundle with PeripheryExtInterruptsBundle
    with PeripheryMasterMemBundle with PeripheryMasterMMIOBundle with PeripherySlaveBundle

class ExampleTopModule[+L <: ExampleTop, +B <: ExampleTopBundle](p: Parameters, l: L, b: Coreplex => B) extends BaseTopModule(p, l, b)
    with PeripheryDebugModule with PeripheryExtInterruptsModule
    with PeripheryMasterMemModule with PeripheryMasterMMIOModule with PeripherySlaveModule

/** Example Top with TestRAM */
class ExampleTopWithTestRAM(p: Parameters) extends ExampleTop(p)
    with PeripheryTestRAM {
  override lazy val module = Module(new ExampleTopWithTestRAMModule(p, this, new ExampleTopWithTestRAMBundle(p, _)))
}

class ExampleTopWithTestRAMBundle(p: Parameters, c: Coreplex) extends ExampleTopBundle(p, c)
  with PeripheryTestRAMBundle

class ExampleTopWithTestRAMModule[+L <: ExampleTopWithTestRAM, +B <: ExampleTopWithTestRAMBundle](p: Parameters, l: L, b: Coreplex => B) extends ExampleTopModule(p, l, b)
  with PeripheryTestRAMModule

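The tops are now LazyModules, so elaboration is two-phase: constructing the LazyModule runs the resource-claiming traits (pDevices, pBusMasters, pInterrupts), and touching .module elaborates the actual hardware with those totals available to CoreplexConfig. A minimal sketch of the driver-side flow (mirroring what TestHarness does through BuildExampleTop):

// phase 1: resource claims run in the BaseTop constructor
val top = uncore.tilelink2.LazyModule(new ExampleTopWithTestRAM(p))
// phase 2: hardware elaboration; CoreplexConfig is computed from the claims
val dut = top.module
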
src/main/scala/rocketchip/Utils.scala (new file, 149 lines)
@ -0,0 +1,149 @@
// See LICENSE for license details.

package rocketchip

import cde.{Parameters, Dump}
import junctions._
import uncore.devices._
import rocket._
import rocket.Util._
import coreplex._

class RangeManager {
  private var finalized = false
  private val l = collection.mutable.HashMap[String, Int]()
  def add(name: String, element: Int) = { require(!finalized); l += (name -> element) }
  def rangeMap = {
    finalized = true
    l map {
      var sum = 0
      x => { sum += x._2; (x._1 -> (sum - x._2, sum)) }
    }
  }
  def range(name: String) = rangeMap(name)
  def print = {
    rangeMap map { case (name, (start, end)) =>
      println(s"${name} on port ${start}-${end-1}")
    }
  }
  def sum = {
    finalized = true
    l.map(_._2).sum
  }
}

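RangeManager implements a simple claim-then-freeze protocol: peripherals add named slot counts during the first elaboration phase, and the first query (rangeMap, range, or sum) freezes the allocation so any later claim fails fast. A small illustrative use (slot ordering follows the underlying HashMap, so the exact indices shown are not guaranteed):

val masters = new RangeManager
masters.add("ext", 1)        // e.g. PeripherySlave's arbitrated AXI port
masters.add("busmaster", 1)  // e.g. PeripheryTestBusMaster
val total = masters.sum      // freezes the manager; total == 2
val (lo, hi) = masters.range("busmaster") // a (start, end) slice, e.g. (1, 2)
// masters.add("late", 1)    // would now fail the require(!finalized)
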
class ResourceManager[T] {
  private var finalized = false
  private val l = collection.mutable.ArrayBuffer[T]()
  def add(element: T) = { require(!finalized); l += element }
  def add(list: Seq[T]) = { require(!finalized); l ++= list }
  def get: Seq[T] = { finalized = true; l }
}

class GlobalVariable[T] {
  private var assigned = false
  private var variable: T = _
  def assign(value: T) = { require(!assigned); assigned = true; variable = value }
  def get: T = { require(assigned); variable }
}

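GlobalVariable is a write-once cell used to pass elaboration-time results (address map, config string, external client count) into parameter lookups until the TL2 refactor lands: assign exactly once, before any get. A minimal sketch:

val nExtClients = new GlobalVariable[Int]
nExtClients.assign(2)        // done once, by BaseTopModule
val n = nExtClients.get      // == 2; get before assign fails the require
// nExtClients.assign(3)     // a second assign would also fail
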
object GenerateGlobalAddrMap {
  def apply(p: Parameters, pDevicesEntries: Seq[AddrMapEntry]) = {
    lazy val intIOAddrMap: AddrMap = {
      val entries = collection.mutable.ArrayBuffer[AddrMapEntry]()
      entries += AddrMapEntry("debug", MemSize(4096, MemAttr(AddrMapProt.RWX)))
      entries += AddrMapEntry("bootrom", MemSize(4096, MemAttr(AddrMapProt.RX)))
      entries += AddrMapEntry("plic", MemRange(0x40000000, 0x4000000, MemAttr(AddrMapProt.RW)))
      entries += AddrMapEntry("prci", MemSize(0x4000000, MemAttr(AddrMapProt.RW)))
      if (p(DataScratchpadSize) > 0) { // TODO heterogeneous tiles
        require(p(NTiles) == 1) // TODO relax this
        require(p(NMemoryChannels) == 0) // TODO allow both scratchpad & DRAM
        entries += AddrMapEntry("dmem0", MemRange(0x80000000L, BigInt(p(DataScratchpadSize)), MemAttr(AddrMapProt.RWX)))
      }
      new AddrMap(entries)
    }

    lazy val extIOAddrMap = new AddrMap(
      pDevicesEntries ++ p(ExtMMIOPorts),
      start = BigInt("50000000", 16),
      collapse = true)

    val memBase = 0x80000000L
    val memSize = p(ExtMemSize)
    Dump("MEM_BASE", memBase)

    val intern = AddrMapEntry("int", intIOAddrMap)
    val extern = AddrMapEntry("ext", extIOAddrMap)
    val io = AddrMapEntry("io", AddrMap((intern +: (!extIOAddrMap.isEmpty).option(extern).toSeq):_*))
    val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true)))
    AddrMap((io +: (p(NMemoryChannels) > 0).option(mem).toSeq):_*)
  }
}

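For a default single-tile config with one memory channel and only the testram external device, the map printed by BaseTopModule comes out roughly as below (illustrative only; exact bounds depend on ExtMemSize, here assumed 256 MiB, and on which devices are present):

Generated Address Map
	io:int:debug 0 - fff
	io:int:bootrom 1000 - 1fff
	io:int:plic 40000000 - 43ffffff
	io:int:prci 44000000 - 47ffffff
	io:ext:testram 50000000 - 50000fff
	mem 80000000 - 8fffffff
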
object GenerateConfigString {
  def apply(p: Parameters, c: CoreplexConfig, pDevicesEntries: Seq[AddrMapEntry]) = {
    val addrMap = p(GlobalAddrMap).get
    val plicAddr = addrMap("io:int:plic").start
    val prciAddr = addrMap("io:int:prci").start
    val xLen = p(XLen)
    val res = new StringBuilder
    res append "plic {\n"
    res append s"  priority 0x${plicAddr.toString(16)};\n"
    res append s"  pending 0x${(plicAddr + c.plicKey.pendingBase).toString(16)};\n"
    res append s"  ndevs ${c.plicKey.nDevices};\n"
    res append "};\n"
    res append "rtc {\n"
    res append s"  addr 0x${(prciAddr + PRCI.time).toString(16)};\n"
    res append "};\n"
    if (addrMap contains "mem") {
      res append "ram {\n"
      res append "  0 {\n"
      res append s"    addr 0x${addrMap("mem").start.toString(16)};\n"
      res append s"    size 0x${addrMap("mem").size.toString(16)};\n"
      res append "  };\n"
      res append "};\n"
    }
    res append "core {\n"
    for (i <- 0 until c.nTiles) { // TODO heterogeneous tiles
      val isa = {
        val m = if (p(MulDivKey).nonEmpty) "m" else ""
        val a = if (p(UseAtomics)) "a" else ""
        val f = if (p(FPUKey).nonEmpty) "f" else ""
        val d = if (p(FPUKey).nonEmpty && p(XLen) > 32) "d" else ""
        val s = if (c.hasSupervisor) "s" else ""
        s"rv${p(XLen)}i$m$a$f$d$s"
      }
      res append s"  $i {\n"
      res append  "    0 {\n"
      res append s"      isa $isa;\n"
      res append s"      timecmp 0x${(prciAddr + PRCI.timecmp(i)).toString(16)};\n"
      res append s"      ipi 0x${(prciAddr + PRCI.msip(i)).toString(16)};\n"
      res append s"      plic {\n"
      res append s"        m {\n"
      res append s"         ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'M')).toString(16)};\n"
      res append s"         thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'M')).toString(16)};\n"
      res append s"         claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'M')).toString(16)};\n"
      res append s"        };\n"
      if (c.hasSupervisor) {
        res append s"        s {\n"
        res append s"         ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'S')).toString(16)};\n"
        res append s"         thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'S')).toString(16)};\n"
        res append s"         claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'S')).toString(16)};\n"
        res append s"        };\n"
      }
      res append  "      };\n"
      res append  "    };\n"
      res append  "  };\n"
    }
    res append "};\n"
    pDevicesEntries foreach { entry =>
      val region = addrMap("io:ext:" + entry.name)
      res append s"${entry.name} {\n"
      res append s"  addr 0x${region.start.toString(16)};\n"
      res append s"  size 0x${region.size.toString(16)}; \n"
      res append "}\n"
    }
    res append '\u0000'
    res.toString
  }
}
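The resulting config string is a null-terminated, brace-delimited text blob consumed by the boot ROM and tooling. For a single rv64 core with supervisor support it is shaped roughly like this (all addresses illustrative, derived from the plic/prci bases sketched above; the PRCI and PLIC offsets are assumptions, not values confirmed by this diff):

plic {
  priority 0x40000000;   /* plicAddr */
  pending 0x40001000;    /* plicAddr + pendingBase (illustrative) */
  ndevs 2;
};
rtc {
  addr 0x4400bff8;       /* prciAddr + PRCI.time (illustrative) */
};
ram {
  0 {
    addr 0x80000000;
    size 0x10000000;
  };
};
core {
  0 {
    0 {
      isa rv64imafds;
      timecmp 0x44004000; /* prciAddr + PRCI.timecmp(0) (illustrative) */
      ipi 0x44000000;     /* prciAddr + PRCI.msip(0) (illustrative) */
      plic { ... };
    };
  };
};
testram {
  addr 0x50000000;
  size 0x1000;
};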