Merge pull request #777 from freechipsproject/print-axi-ids
coreplex: Improve memory map and AXI ID map output
commit 63b1f4f047
@@ -1 +1 @@
Subproject commit 562806aa9e95c4095fcb38c5a34421b6ddc6d3fe
Subproject commit d824c60c9643973e0ae9cddc5007b3d9592f8a52

@@ -115,22 +115,47 @@ trait CoreplexNetworkModule extends HasCoreplexParameters {
val io: CoreplexNetworkBundle

println("Generated Address Map")
val ranges = outer.topManagers.get.flatMap { manager =>
val prot = (if (manager.supportsGet) "R" else "") +
(if (manager.supportsPutFull) "W" else "") +
(if (manager.executable) "X" else "") +
(if (manager.supportsAcquireB) " [C]" else "")
AddressRange.fromSets(manager.address).map { r =>
(manager.name, r.base, r.base+r.size, prot)
private val aw = (outer.p(rocket.PAddrBits)-1)/4 + 1
private val fmt = s"\t%${aw}x - %${aw}x %c%c%c%c %s"

private def collect(path: List[String], value: ResourceValue): List[(String, ResourceAddress)] = {
value match {
case r: ResourceAddress => List((path(1), r))
case ResourceMap(value, _) => value.toList.flatMap { case (key, seq) => seq.flatMap(r => collect(key :: path, r)) }
case _ => Nil
}
}
val aw = (outer.p(rocket.PAddrBits)-1)/4 + 1
val nw = ranges.map(_._1.length).max
val fmt = s"\t%${nw}s %${aw}x - %${aw}x, %s"
ranges.sortWith(_._2 < _._2).foreach { case (name, start, end, prot) =>
println(fmt.format(name, start, end, prot))
private val ranges = collect(Nil, outer.bindingTree).groupBy(_._2).toList.flatMap { case (key, seq) =>
AddressRange.fromSets(key.address).map { r => (r, key.r, key.w, key.x, key.c, seq.map(_._1)) }
}.sortBy(_._1)
private val json = ranges.map { case (range, r, w, x, c, names) =>
println(fmt.format(
range.base,
range.base+range.size,
if (r) 'R' else ' ',
if (w) 'W' else ' ',
if (x) 'X' else ' ',
if (c) 'C' else ' ',
names.mkString(", ")))
s"""{"base":[${range.base}],"size":[${range.size}],"r":[$r],"w":[$w],"x":[$x],"c":[$c],"names":[${names.map('"'+_+'"').mkString(",")}]}"""
}
println("")
ElaborationArtefacts.add("memmap.json", s"""{"mapping":[${json.mkString(",")}]}""")

// Confirm that all of memory was described by DTS
private val dtsRanges = AddressRange.unify(ranges.map(_._1))
private val allRanges = AddressRange.unify(outer.topManagers.get.flatMap { m => AddressRange.fromSets(m.address) })

if (dtsRanges != allRanges) {
println("Address map described by DTS differs from physical implementation:")
AddressRange.subtract(allRanges, dtsRanges).foreach { case r =>
println(s"\texists, but undescribed by DTS: ${r}")
}
AddressRange.subtract(dtsRanges, allRanges).foreach { case r =>
println(s"\tdoes not exist, but described by DTS: ${r}")
}
println("")
}
}

/////

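For readers following the new output path, the sketch below is a minimal, self-contained Scala model of the grouping, printing, and JSON emission above. The Entry case class and the sample devices are hypothetical stand-ins for diplomacy's ResourceAddress ranges and DTS names; only the groupBy/sort/format/JSON shape mirrors the committed code.

object MemMapSketch extends App {
  // Hypothetical stand-in for one DTS-described range: base, size, permissions, cacheability, device name
  case class Entry(base: Long, size: Long, r: Boolean, w: Boolean, x: Boolean, c: Boolean, name: String)

  val entries = Seq(                               // made-up sample devices
    Entry(0x80000000L, 0x10000000L, true, true, true,  true,  "memory@80000000"),
    Entry(0x54000000L, 0x1000L,     true, true, false, false, "serial@54000000"),
    Entry(0x80000000L, 0x10000000L, true, true, true,  true,  "memory-alias"))

  // Group devices that share a range and permissions, as the coreplex code above does
  val ranges = entries.groupBy(e => (e.base, e.size, e.r, e.w, e.x, e.c)).toList
    .map { case ((base, size, r, w, x, c), es) => (base, size, r, w, x, c, es.map(_.name)) }
    .sortBy(_._1)

  val aw  = 8                                      // hex digits per address (PAddrBits/4 in the real code)
  val fmt = s"\t%${aw}x - %${aw}x %c%c%c%c %s"
  val json = ranges.map { case (base, size, r, w, x, c, names) =>
    println(fmt.format(base, base + size,
      if (r) 'R' else ' ', if (w) 'W' else ' ', if (x) 'X' else ' ', if (c) 'C' else ' ',
      names.mkString(", ")))
    s"""{"base":[$base],"size":[$size],"r":[$r],"w":[$w],"x":[$x],"c":[$c],"names":[${names.map(n => "\"" + n + "\"").mkString(",")}]}"""
  }
  println(s"""{"mapping":[${json.mkString(",")}]}""")   // same shape as the memmap.json artefact
}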
@@ -26,9 +26,9 @@ object JSON
}

private def helper(res: ResourceValue)(implicit path: Map[String, String]): Seq[String] = res match {
case ResourceAddress(address, r, w, x) =>
case ResourceAddress(address, r, w, x, c) =>
AddressRange.fromSets(address).map { case AddressRange(base, size) =>
s"""{"base":${base},"size":${size},"r":${r},"w":${w},"x":${x}}"""}
s"""{"base":${base},"size":${size},"r":${r},"w":${w},"x":${x},"c":${c}}"""}
case ResourceMapping(address, offset) =>
AddressRange.fromSets(address).map { case AddressRange(base, size) =>
s"""{"base":${base},"size":${size},"offset":${offset}}"""}

@@ -3,7 +3,6 @@
package diplomacy

import Chisel._
import scala.math.max

/** Options for memory regions */
object RegionType {
@@ -110,6 +109,14 @@ case class AddressRange(base: BigInt, size: BigInt) extends Ordered[AddressRange
Some(AddressRange(obase, oend-obase))
}
}

private def helper(base: BigInt, end: BigInt) =
if (base < end) Seq(AddressRange(base, end-base)) else Nil
def subtract(x: AddressRange) =
helper(base, end min x.base) ++ helper(base max x.end, end)

// We always want to see things in hex
override def toString() = "AddressRange(0x%x, 0x%x)".format(base, size)
}

// AddressSets specify the address space managed by the manager
@@ -197,6 +204,9 @@ object AddressRange
}
}.reverse
}
// Set subtraction... O(n*n) b/c I am lazy
def subtract(from: Seq[AddressRange], take: Seq[AddressRange]): Seq[AddressRange] =
take.foldLeft(from) { case (left, r) => left.flatMap { _.subtract(r) } }
}

object AddressSet

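To make the new subtraction concrete, here is a self-contained sketch of the same two-piece split. AddrRange is a simplified stand-in for diplomacy's AddressRange; the helper/subtract logic and the foldLeft set subtraction follow the lines added above.

object RangeSubtractSketch extends App {
  case class AddrRange(base: BigInt, size: BigInt) {
    def end: BigInt = base + size
    private def piece(lo: BigInt, hi: BigInt): Seq[AddrRange] =
      if (lo < hi) Seq(AddrRange(lo, hi - lo)) else Nil
    // Remove the overlap with x, keeping whatever survives on either side
    def subtract(x: AddrRange): Seq[AddrRange] =
      piece(base, end min x.base) ++ piece(base max x.end, end)
  }

  // Set subtraction over sequences: the same O(n*n) fold as the diff
  def subtract(from: Seq[AddrRange], take: Seq[AddrRange]): Seq[AddrRange] =
    take.foldLeft(from) { case (left, r) => left.flatMap(_.subtract(r)) }

  // [0x1000, 0x3000) minus [0x1800, 0x2000) leaves a piece on each side
  println(subtract(Seq(AddrRange(0x1000, 0x2000)), Seq(AddrRange(0x1800, 0x800))))
  // prints List(AddrRange(4096,2048), AddrRange(8192,4096))
}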
@@ -7,7 +7,7 @@ import config._
import scala.collection.immutable.{ListMap,SortedMap}

sealed trait ResourceValue
final case class ResourceAddress(address: Seq[AddressSet], r: Boolean, w: Boolean, x: Boolean) extends ResourceValue
final case class ResourceAddress(address: Seq[AddressSet], r: Boolean, w: Boolean, x: Boolean, c: Boolean) extends ResourceValue
final case class ResourceMapping(address: Seq[AddressSet], offset: BigInt) extends ResourceValue
final case class ResourceInt(value: BigInt) extends ResourceValue
final case class ResourceString(value: String) extends ResourceValue

@@ -69,7 +69,7 @@ abstract class GroundTest(implicit val p: Parameters) extends Module
class GroundTestTile(implicit p: Parameters) extends LazyModule
with HasGroundTestParameters {
val slave = None
val dcacheOpt = tileParams.dcache.map { dc => HellaCache(dc.nMSHRs == 0) }
val dcacheOpt = tileParams.dcache.map { dc => HellaCache(0, dc.nMSHRs == 0) }
val ucLegacy = LazyModule(new TLLegacy)

val masterNode = TLOutputNode()

@@ -39,7 +39,7 @@ class DCacheDataArray(implicit p: Parameters) extends L1HellaCacheModule()(p) {
}
}

class DCache(val scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) extends HellaCache()(p) {
class DCache(hartid: Int, val scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) extends HellaCache(hartid)(p) {
override lazy val module = new DCacheModule(this)
}

@@ -147,20 +147,23 @@ class HellaCacheIO(implicit p: Parameters) extends CoreBundle()(p) {

/** Base classes for Diplomatic TL2 HellaCaches */

abstract class HellaCache(implicit p: Parameters) extends LazyModule {
abstract class HellaCache(hartid: Int)(implicit p: Parameters) extends LazyModule {
private val cfg = p(TileKey).dcache.get
val firstMMIO = max(1, cfg.nMSHRs)

val node = TLClientNode(Seq(TLClientPortParameters(
clients = cfg.scratch.map { _ => Seq(
TLClientParameters(
name = s"Core ${hartid} DCache MMIO",
sourceId = IdRange(0, cfg.nMMIOs),
requestFifo = true))
} getOrElse { Seq(
TLClientParameters(
name = s"Core ${hartid} DCache",
sourceId = IdRange(0, firstMMIO),
supportsProbe = TransferSizes(1, cfg.blockBytes)),
TLClientParameters(
name = s"Core ${hartid} DCache MMIO",
sourceId = IdRange(firstMMIO, firstMMIO+cfg.nMMIOs),
requestFifo = true))
},
@@ -186,9 +189,9 @@ class HellaCacheModule(outer: HellaCache) extends LazyModuleImp(outer)
}

object HellaCache {
def apply(blocking: Boolean, scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) = {
if (blocking) LazyModule(new DCache(scratch))
else LazyModule(new NonBlockingDCache)
def apply(hartid: Int, blocking: Boolean, scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) = {
if (blocking) LazyModule(new DCache(hartid, scratch))
else LazyModule(new NonBlockingDCache(hartid))
}
}

@@ -198,8 +201,9 @@ trait HasHellaCache extends HasTileLinkMasterPort with HasTileParameters {
val module: HasHellaCacheModule
implicit val p: Parameters
def findScratchpadFromICache: Option[AddressSet]
val hartid: Int
var nDCachePorts = 0
val dcache = HellaCache(tileParams.dcache.get.nMSHRs == 0, findScratchpadFromICache _)
val dcache = HellaCache(hartid, tileParams.dcache.get.nMSHRs == 0, findScratchpadFromICache _)
tileBus.node := dcache.node
}

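A small, REPL-sized illustration (hypothetical values, plain Scala rather than diplomacy parameters) of how the D$ source-ID space is now carved up and labelled per hart:

// firstMMIO mirrors max(1, cfg.nMSHRs) above; nMSHRs/nMMIOs here are made-up inputs.
def dcacheClients(hartid: Int, nMSHRs: Int, nMMIOs: Int): Seq[(String, Range)] = {
  val firstMMIO = math.max(1, nMSHRs)
  Seq(
    (s"Core ${hartid} DCache",      0         until firstMMIO),
    (s"Core ${hartid} DCache MMIO", firstMMIO until firstMMIO + nMMIOs))
}
// dcacheClients(0, nMSHRs = 2, nMMIOs = 1)
//   == Seq(("Core 0 DCache", 0 until 2), ("Core 0 DCache MMIO", 2 until 3))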
@@ -36,7 +36,7 @@ class ICacheReq(implicit p: Parameters) extends CoreBundle()(p) with HasL1ICache
class ICache(val latency: Int, val hartid: Int)(implicit p: Parameters) extends LazyModule
with HasRocketCoreParameters {
lazy val module = new ICacheModule(this)
val masterNode = TLClientNode(TLClientParameters(sourceId = IdRange(0,1)))
val masterNode = TLClientNode(TLClientParameters(name = s"Core ${hartid} ICache"))

val icacheParams = tileParams.icache.get
val size = icacheParams.nSets * icacheParams.nWays * icacheParams.blockBytes

@@ -660,7 +660,7 @@ class DataArray(implicit p: Parameters) extends L1HellaCacheModule()(p) {
io.write.ready := Bool(true)
}

class NonBlockingDCache(implicit p: Parameters) extends HellaCache()(p) {
class NonBlockingDCache(hartid: Int)(implicit p: Parameters) extends HellaCache(hartid)(p) {
override lazy val module = new NonBlockingDCacheModule(this)
}

@@ -241,7 +241,8 @@ trait PeripherySlaveAXI4 extends HasTopLevelNetworks {
private val config = p(ExtIn)
val l2FrontendAXI4Node = AXI4BlindInputNode(Seq(AXI4MasterPortParameters(
masters = Seq(AXI4MasterParameters(
id = IdRange(0, 1 << config.idBits))))))
name = "AXI4 periphery",
id = IdRange(0, 1 << config.idBits))))))

private val fifoBits = 1
fsb.node :=
@@ -311,6 +312,7 @@ trait PeripherySlaveTL extends HasTopLevelNetworks {
private val config = p(ExtIn)
val l2FrontendTLNode = TLBlindInputNode(Seq(TLClientPortParameters(
clients = Seq(TLClientParameters(
name = "TL periph",
sourceId = IdRange(0, 1 << config.idBits))))))

fsb.node :=

@@ -49,7 +49,9 @@ class SimAXIMem(channels: Int, forceSize: BigInt = 0)(implicit p: Parameters) ex
require(totalSize % channels == 0)

val node = AXI4BlindInputNode(Seq.fill(channels) {
AXI4MasterPortParameters(Seq(AXI4MasterParameters(IdRange(0, 1 << config.idBits))))})
AXI4MasterPortParameters(Seq(AXI4MasterParameters(
name = "dut",
id = IdRange(0, 1 << config.idBits))))})

for (i <- 0 until channels) {
val sram = LazyModule(new AXI4RAM(AddressSet(0, size-1), beatBytes = config.beatBytes))

@@ -52,10 +52,8 @@ case class AHBSlavePortParameters(
}

case class AHBMasterParameters(
name: String,
nodePath: Seq[BaseNode] = Seq())
{
val name = nodePath.lastOption.map(_.lazyModule.name).getOrElse("disconnected")
}

case class AHBMasterPortParameters(
masters: Seq[AHBMasterParameters])

@@ -11,7 +11,7 @@ import uncore.tilelink2._

case class AHBToTLNode() extends MixedAdapterNode(AHBImp, TLImp)(
dFn = { case AHBMasterPortParameters(masters) =>
TLClientPortParameters(clients = masters.map { m =>
TLClientParameters(nodePath = m.nodePath)
TLClientParameters(name = m.name, nodePath = m.nodePath)
})
},
uFn = { mp => AHBSlavePortParameters(

@@ -42,10 +42,8 @@ case class APBSlavePortParameters(
}

case class APBMasterParameters(
name: String,
nodePath: Seq[BaseNode] = Seq())
{
val name = nodePath.lastOption.map(_.lazyModule.name).getOrElse("disconnected")
}

case class APBMasterPortParameters(
masters: Seq[APBMasterParameters])

@@ -16,15 +16,19 @@ class AXI4IdIndexer(idBits: Int)(implicit p: Parameters) extends LazyModule
masterFn = { mp =>
// Create one new "master" per ID
val masters = Array.tabulate(1 << idBits) { i => AXI4MasterParameters(
name = "",
id = IdRange(i, i+1),
aligned = true,
maxFlight = Some(0))
}
// Accumulate the names of masters we squish
val names = Array.fill(1 << idBits) { new scala.collection.mutable.HashSet[String]() }
// Squash the information from original masters into new ID masters
mp.masters.foreach { m =>
for (i <- m.id.start until m.id.end) {
val j = i % (1 << idBits)
val old = masters(j)
names(j) += m.name
masters(j) = old.copy(
aligned = old.aligned && m.aligned,
maxFlight = old.maxFlight.flatMap { o => m.maxFlight.map { n => o+n } })
@@ -32,7 +36,7 @@ class AXI4IdIndexer(idBits: Int)(implicit p: Parameters) extends LazyModule
}
mp.copy(
userBits = mp.userBits + max(0, log2Ceil(mp.endId) - idBits),
masters = masters)
masters = masters.zipWithIndex.map { case (m,i) => m.copy(name = names(i).toList.mkString(", "))})
},
slaveFn = { sp => sp
})

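A software-only sketch of the squashing above, assuming a simplified Master record in place of AXI4MasterParameters; it only demonstrates how many-to-few ID folding accumulates readable names per compressed ID.

object IdIndexSketch extends App {
  import scala.collection.mutable
  // Hypothetical master description: a name plus a half-open ID range [idStart, idEnd)
  case class Master(name: String, idStart: Int, idEnd: Int)

  // Fold arbitrary masters onto (1 << idBits) AXI IDs, recording who shares each ID,
  // mirroring the names(j) += m.name bookkeeping in AXI4IdIndexer
  def squashIds(masters: Seq[Master], idBits: Int): Seq[(Int, String)] = {
    val names = Array.fill(1 << idBits)(mutable.LinkedHashSet.empty[String])
    for (m <- masters; i <- m.idStart until m.idEnd)
      names(i % (1 << idBits)) += m.name
    names.zipWithIndex.map { case (set, id) => (id, set.mkString(", ")) }.toVector
  }

  println(squashIds(Seq(Master("icache", 0, 1), Master("dcache", 1, 3), Master("fuzzer", 3, 4)), idBits = 1))
  // prints Vector((0,icache, dcache), (1,dcache, fuzzer))
}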
@@ -58,12 +58,12 @@ case class AXI4SlavePortParameters(
}

case class AXI4MasterParameters(
name: String,
id: IdRange = IdRange(0, 1),
aligned: Boolean = false,
maxFlight: Option[Int] = None, // None = infinite, else is a per-ID cap
nodePath: Seq[BaseNode] = Seq())
{
val name = nodePath.lastOption.map(_.lazyModule.name).getOrElse("disconnected")
maxFlight.foreach { m => require (m >= 0) }
}

@@ -13,9 +13,10 @@ case class AXI4ToTLNode() extends MixedAdapterNode(AXI4Imp, TLImp)(
masters.foreach { m => require (m.maxFlight.isDefined, "AXI4 must include a transaction maximum per ID to convert to TL") }
val maxFlight = masters.map(_.maxFlight.get).max
TLClientPortParameters(
clients = masters.flatMap { m =>
clients = masters.filter(_.maxFlight != Some(0)).flatMap { m =>
for (id <- m.id.start until m.id.end)
yield TLClientParameters(
name = s"${m.name} ID#${id}",
sourceId = IdRange(id * maxFlight*2, (id+1) * maxFlight*2), // R+W ids are distinct
nodePath = m.nodePath,
requestFifo = true)

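The sourceId arithmetic above may deserve a worked example (values are made up): each incoming AXI ID is given 2*maxFlight TL source IDs, so reads and writes in flight under one AXI ID never share a source.

// REPL-sized check of the allocation in AXI4ToTL: with maxFlight = 2,
// AXI ID 0 owns TL sources [0,4), ID 1 owns [4,8), ID 2 owns [8,12), ...
val maxFlight = 2
val sourceRanges = (0 until 3).map(id => id -> (id * maxFlight * 2, (id + 1) * maxFlight * 2))
// == Vector((0,(0,4)), (1,(4,8)), (2,(8,12)))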
@@ -1093,7 +1093,7 @@ class ClockedDMIIO(implicit val p: Parameters) extends ParameterizedBundle()(p){

class DMIToTL(implicit p: Parameters) extends LazyModule {

val node = TLClientNode(TLClientParameters())
val node = TLClientNode(TLClientParameters("debug"))

lazy val module = new LazyModuleImp(this) {
val io = new Bundle {

@@ -15,6 +15,7 @@ class TLBroadcast(lineBytes: Int, numTrackers: Int = 4, bufferless: Boolean = fa
val node = TLAdapterNode(
clientFn = { cp =>
cp.copy(clients = Seq(TLClientParameters(
name = "TLBroadcast",
sourceId = IdRange(0, 1 << log2Ceil(cp.endSourceId*4)))))
},
managerFn = { mp =>

@@ -46,7 +46,8 @@ class TLFragmenter(val minSize: Int, val maxSize: Int, val alwaysMin: Boolean =
// We require that all the responses are mutually FIFO
// Thus we need to compact all of the masters into one big master
clientFn = { c => c.copy(clients = Seq(TLClientParameters(
sourceId = IdRange(0, c.endSourceId << addedBits),
name = "TLFragmenter",
sourceId = IdRange(0, c.endSourceId << addedBits),
requestFifo = true))) },
managerFn = { m => m.copy(managers = m.managers.map(mapManager)) })

@@ -85,7 +85,9 @@ class TLFuzzer(
noModify: Boolean = false,
overrideAddress: Option[AddressSet] = None)(implicit p: Parameters) extends LazyModule
{
val node = TLClientNode(TLClientParameters(sourceId = IdRange(0,inFlight)))
val node = TLClientNode(TLClientParameters(
name = "Fuzzer",
sourceId = IdRange(0,inFlight)))

lazy val module = new LazyModuleImp(this) {
val io = new Bundle {

@@ -12,6 +12,7 @@ class TLLegacy(implicit p: Parameters) extends LazyModule with HasTileLinkParame
{
// TL legacy clients don't support anything fancy
val node = TLClientNode(TLClientParameters(
name = "TLLegacy",
sourceId = IdRange(0, 1 << tlClientXactIdBits)))

lazy val module = new LazyModuleImp(this) with HasTileLinkParameters {

@@ -60,7 +60,8 @@ case class TLManagerParameters(
ResourceAddress(address,
r = supportsAcquireB || supportsGet,
w = supportsAcquireT || supportsPutFull,
x = executable)
x = executable,
c = supportsAcquireB)
}
}

@@ -166,6 +167,7 @@ case class TLManagerPortParameters(
}

case class TLClientParameters(
name: String,
sourceId: IdRange = IdRange(0,1),
nodePath: Seq[BaseNode] = Seq(),
requestFifo: Boolean = false, // only a request, not a requirement
@@ -196,8 +198,6 @@ case class TLClientParameters(
supportsGet.max,
supportsPutFull.max,
supportsPutPartial.max).max

val name = nodePath.lastOption.map(_.lazyModule.name).getOrElse("disconnected")
}

case class TLClientPortParameters(

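To connect this to the printed map: the sketch below (plain Scala with hypothetical flags, not the real TLManagerParameters) shows how the R/W/X/C legend and the new c field both fall out of the same support signals.

object LegendSketch extends App {
  // Hypothetical capability flags for one manager
  case class ManagerCaps(supportsAcquireB: Boolean, supportsAcquireT: Boolean,
                         supportsGet: Boolean, supportsPutFull: Boolean, executable: Boolean)

  def legend(m: ManagerCaps): String = Seq(
    if (m.supportsAcquireB || m.supportsGet)     'R' else ' ',
    if (m.supportsAcquireT || m.supportsPutFull) 'W' else ' ',
    if (m.executable)                            'X' else ' ',
    if (m.supportsAcquireB)                      'C' else ' ').mkString

  println(legend(ManagerCaps(true, true, true, true, true)))     // "RWXC" (e.g. cached DRAM)
  println(legend(ManagerCaps(false, false, true, true, false)))  // "RW  " (e.g. an MMIO device)
}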
@@ -14,7 +14,9 @@ class TLSourceShrinker(maxInFlight: Int)(implicit p: Parameters) extends LazyMod
require (maxInFlight > 0)

// The SourceShrinker completely destroys all FIFO property guarantees
private val client = TLClientParameters(sourceId = IdRange(0, maxInFlight))
private val client = TLClientParameters(
name = "TLSourceShrinker",
sourceId = IdRange(0, maxInFlight))
val node = TLAdapterNode(
// We erase all client information since we crush the source Ids
clientFn = { _ => TLClientPortParameters(clients = Seq(client)) },

@@ -12,7 +12,7 @@ import AHBParameters._

case class TLToAHBNode() extends MixedAdapterNode(TLImp, AHBImp)(
dFn = { case TLClientPortParameters(clients, unsafeAtomics, minLatency) =>
val masters = clients.map { case c => AHBMasterParameters(nodePath = c.nodePath) }
val masters = clients.map { case c => AHBMasterParameters(name = c.name, nodePath = c.nodePath) }
AHBMasterPortParameters(masters)
},
uFn = { case AHBSlavePortParameters(slaves, beatBytes) =>

@@ -12,7 +12,7 @@ import APBParameters._

case class TLToAPBNode() extends MixedAdapterNode(TLImp, APBImp)(
dFn = { case TLClientPortParameters(clients, unsafeAtomics, minLatency) =>
val masters = clients.map { case c => APBMasterParameters(nodePath = c.nodePath) }
val masters = clients.map { case c => APBMasterParameters(name = c.name, nodePath = c.nodePath) }
APBMasterPortParameters(masters)
},
uFn = { case APBSlavePortParameters(slaves, beatBytes) =>

@@ -6,16 +6,18 @@ import Chisel._
import chisel3.internal.sourceinfo.SourceInfo
import config._
import diplomacy._
import util.PositionalMultiQueue
import util.ElaborationArtefacts
import uncore.axi4._
import scala.math.{min, max}

case class TLToAXI4Node(beatBytes: Int) extends MixedAdapterNode(TLImp, AXI4Imp)(
dFn = { p =>
val idSize = p.clients.map { c => if (c.requestFifo) 1 else c.sourceId.size }
val clients = p.clients.sortWith(TLToAXI4.sortByType _)
val idSize = clients.map { c => if (c.requestFifo) 1 else c.sourceId.size }
val idStart = idSize.scanLeft(0)(_+_).init
val masters = ((idStart zip idSize) zip p.clients) map { case ((start, size), c) =>
val masters = ((idStart zip idSize) zip clients) map { case ((start, size), c) =>
AXI4MasterParameters(
name = c.name,
id = IdRange(start, start+size),
aligned = true,
maxFlight = Some(if (c.requestFifo) c.sourceId.size else 1),
@@ -41,7 +43,7 @@ case class TLToAXI4Node(beatBytes: Int) extends MixedAdapterNode(TLImp, AXI4Imp)
minLatency = p.minLatency)
})

class TLToAXI4(beatBytes: Int, combinational: Boolean = true)(implicit p: Parameters) extends LazyModule
class TLToAXI4(beatBytes: Int, combinational: Boolean = true, adapterName: Option[String] = None)(implicit p: Parameters) extends LazyModule
{
val node = TLToAXI4Node(beatBytes)

@@ -58,15 +60,29 @@ class TLToAXI4(beatBytes: Int, combinational: Boolean = true)(implicit p: Parame
require (slaves(0).interleavedId.isDefined)
slaves.foreach { s => require (s.interleavedId == slaves(0).interleavedId) }

val axiDigits = String.valueOf(edgeOut.master.endId-1).length()
val tlDigits = String.valueOf(edgeIn.client.endSourceId-1).length()

// Construct the source=>ID mapping table
adapterName.foreach { n => println(s"$n AXI4-ID <= TL-Source mapping:") }
val idTable = Wire(Vec(edgeIn.client.endSourceId, out.aw.bits.id))
var idCount = Array.fill(edgeOut.master.endId) { 0 }
(edgeIn.client.clients zip edgeOut.master.masters) foreach { case (c, m) =>
val maps = (edgeIn.client.clients.sortWith(TLToAXI4.sortByType) zip edgeOut.master.masters) flatMap { case (c, m) =>
for (i <- 0 until c.sourceId.size) {
val id = m.id.start + (if (c.requestFifo) 0 else i)
idTable(c.sourceId.start + i) := UInt(id)
idCount(id) = idCount(id) + 1
}
adapterName.map { n =>
val fmt = s"\t[%${axiDigits}d, %${axiDigits}d) <= [%${tlDigits}d, %${tlDigits}d) %s%s"
println(fmt.format(m.id.start, m.id.end, c.sourceId.start, c.sourceId.end, c.name, if (c.supportsProbe) " CACHE" else ""))
s"""{"axi4-id":[${m.id.start},${m.id.end}],"tilelink-id":[${c.sourceId.start},${c.sourceId.end}],"master":["${c.name}"],"cache":[${!(!c.supportsProbe)}]}"""
}
}

adapterName.foreach { n =>
println("")
ElaborationArtefacts.add(s"${n}.axi4.json", s"""{"mapping":[${maps.mkString(",")}]}""")
}

// We need to keep the following state from A => D: (addr_lo, size, source)
@@ -203,9 +219,17 @@ class TLToAXI4(beatBytes: Int, combinational: Boolean = true)(implicit p: Parame
object TLToAXI4
{
// applied to the TL source node; y.node := TLToAXI4(beatBytes)(x.node)
def apply(beatBytes: Int, combinational: Boolean = true)(x: TLOutwardNode)(implicit p: Parameters, sourceInfo: SourceInfo): AXI4OutwardNode = {
val axi4 = LazyModule(new TLToAXI4(beatBytes, combinational))
def apply(beatBytes: Int, combinational: Boolean = true, adapterName: Option[String] = None)(x: TLOutwardNode)(implicit p: Parameters, sourceInfo: SourceInfo): AXI4OutwardNode = {
val axi4 = LazyModule(new TLToAXI4(beatBytes, combinational, adapterName))
axi4.node := x
axi4.node
}

def sortByType(a: TLClientParameters, b: TLClientParameters): Boolean = {
if ( a.supportsProbe && !b.supportsProbe) return false
if (!a.supportsProbe && b.supportsProbe) return true
if ( a.requestFifo && !b.requestFifo ) return false
if (!a.requestFifo && b.requestFifo ) return true
return false
}
}

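Finally, a self-contained model of the ID assignment that the new printout and the axi4.json artefact describe. Client here is a hypothetical stand-in for TLClientParameters; the sort order and the FIFO-clients-share-one-ID packing follow the code above.

object Axi4IdMapSketch extends App {
  // Hypothetical stand-in for TLClientParameters: name, number of TL sources, FIFO request, probe support
  case class Client(name: String, sources: Int, requestFifo: Boolean, supportsProbe: Boolean)

  // Same ordering as TLToAXI4.sortByType: plain masters first, then FIFO masters, caches last
  def sortByType(a: Client, b: Client): Boolean = {
    if ( a.supportsProbe && !b.supportsProbe) return false
    if (!a.supportsProbe &&  b.supportsProbe) return true
    if ( a.requestFifo   && !b.requestFifo  ) return false
    if (!a.requestFifo   &&  b.requestFifo  ) return true
    false
  }

  // FIFO clients collapse onto a single AXI ID; everyone else gets one ID per TL source
  def axi4IdMap(clients: Seq[Client]): Seq[(String, Int, Int)] = {
    val sorted  = clients.sortWith(sortByType)
    val idSize  = sorted.map { c => if (c.requestFifo) 1 else c.sources }
    val idStart = idSize.scanLeft(0)(_ + _).init
    sorted.zip(idStart).zip(idSize).map { case ((c, start), size) => (c.name, start, start + size) }
  }

  axi4IdMap(Seq(
    Client("Core 0 DCache", 2, requestFifo = false, supportsProbe = true),
    Client("Core 0 ICache", 1, requestFifo = false, supportsProbe = false),
    Client("TLLegacy",      4, requestFifo = true,  supportsProbe = false)))
    .foreach { case (name, lo, hi) => println(f"[$lo%2d, $hi%2d) <= $name") }
  // [ 0,  1) <= Core 0 ICache
  // [ 1,  2) <= TLLegacy
  // [ 2,  4) <= Core 0 DCache
}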
@@ -81,7 +81,7 @@ class TLXbar(policy: TLArbiter.Policy = TLArbiter.lowestIndexFirst)(implicit p:
// Find a good mask for address decoding
val port_addrs = node.edgesOut.map(_.manager.managers.map(_.address).flatten)
val routingMask = AddressDecoder(port_addrs)
val route_addrs = port_addrs.map(_.map(_.widen(~routingMask)).distinct)
val route_addrs = port_addrs.map(seq => AddressSet.unify(seq.map(_.widen(~routingMask)).distinct))
val outputPorts = route_addrs.map(seq => (addr: UInt) => seq.map(_.contains(addr)).reduce(_ || _))

// Print the mapping