TileLink refactor; TileLinkPorts now available. L2 banks no longer have unique ids (suitable for hierarchical P&R).
This commit is contained in:
parent ce3271aef2
commit ba7a8b1752
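Before the diff: a minimal, hypothetical sketch (not part of the commit) of what the refactor means for a user of these interfaces. It assumes the post-refactor definitions introduced below (ClientUncachedTileLinkIO, ClientTileLinkIO, TileLinkIOWrapper, L2HellaCacheBank) and an enclosing Chisel 2 Parameters context; ExampleTop is an invented name.

```scala
import Chisel._
import uncore._

// Hedged sketch, not from the commit: assumes the post-refactor uncore
// definitions in this diff and an enclosing Parameters context supplying
// the TileLink fields they read.
class ExampleTop extends Module {
  val io = new Bundle {
    // Clients now expose headerless ports; network headers (and Finish
    // handling) are added at the level that instantiates clients and network.
    val client = new ClientUncachedTileLinkIO().flip
  }
  // L2 banks no longer take a bankId, so every instance elaborates
  // identically -- the property that makes hierarchical P&R practical.
  val bank = Module(new L2HellaCacheBank)
  // A headerless client port can still be widened to the probe/release
  // capable interface via the wrapper factory shown in the diff.
  val widened: ClientTileLinkIO = TileLinkIOWrapper(io.client)
}
```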
@@ -25,16 +25,16 @@ object DataQueueLocation {
   }
 }
 
-class L2BroadcastHub(bankId: Int) extends ManagerCoherenceAgent
+class L2BroadcastHub extends ManagerCoherenceAgent
     with BroadcastHubParameters {
   val internalDataBits = new DataQueueLocation().getWidth
   val inStoreQueue :: inVolWBQueue :: inClientReleaseQueue :: Nil = Enum(UInt(), nDataQueueLocations)
 
   // Create SHRs for outstanding transactions
-  val trackerList = (0 until nReleaseTransactors).map(id =>
-    Module(new BroadcastVoluntaryReleaseTracker(id, bankId), {case TLDataBits => internalDataBits})) ++
-    (nReleaseTransactors until nTransactors).map(id =>
-      Module(new BroadcastAcquireTracker(id, bankId), {case TLDataBits => internalDataBits}))
+  val trackerList = (0 until nReleaseTransactors).map(id =>
+    Module(new BroadcastVoluntaryReleaseTracker(id), {case TLDataBits => internalDataBits})) ++
+    (nReleaseTransactors until nTransactors).map(id =>
+      Module(new BroadcastAcquireTracker(id), {case TLDataBits => internalDataBits}))
 
   // Propagate incoherence flags
   trackerList.map(_.io.incoherent := io.incoherent.toBits)
@@ -61,7 +61,7 @@ class L2BroadcastHub(bankId: Int) extends ManagerCoherenceAgent
   trackerAcquireIOs.zipWithIndex.foreach {
     case(tracker, i) =>
       tracker.bits := io.inner.acquire.bits
-      tracker.bits.payload.data := DataQueueLocation(sdq_alloc_id, inStoreQueue).toBits
+      tracker.bits.data := DataQueueLocation(sdq_alloc_id, inStoreQueue).toBits
       tracker.valid := io.inner.acquire.valid && !block_acquires && (acquire_idx === UInt(i))
   }
@@ -82,7 +82,7 @@ class L2BroadcastHub(bankId: Int) extends ManagerCoherenceAgent
     case(tracker, i) =>
       tracker.valid := io.inner.release.valid && (release_idx === UInt(i))
      tracker.bits := io.inner.release.bits
-      tracker.bits.payload.data := DataQueueLocation(rel_data_cnt,
+      tracker.bits.data := DataQueueLocation(rel_data_cnt,
                                      (if(i < nReleaseTransactors) inVolWBQueue
                                       else inClientReleaseQueue)).toBits
   }
@@ -91,17 +91,17 @@ class L2BroadcastHub(bankId: Int) extends ManagerCoherenceAgent
 
   // Wire probe requests and grant reply to clients, finish acks from clients
   // Note that we bypass the Grant data subbundles
-  io.inner.grant.bits.payload.data := io.outer.grant.bits.data
-  io.inner.grant.bits.payload.addr_beat := io.outer.grant.bits.addr_beat
+  io.inner.grant.bits.data := io.outer.grant.bits.data
+  io.inner.grant.bits.addr_beat := io.outer.grant.bits.addr_beat
   doOutputArbitration(io.inner.grant, trackerList.map(_.io.inner.grant))
   doOutputArbitration(io.inner.probe, trackerList.map(_.io.inner.probe))
   doInputRouting(io.inner.finish, trackerList.map(_.io.inner.finish))
 
   // Create an arbiter for the one memory port
-  val outer_arb = Module(new HeaderlessUncachedTileLinkIOArbiter(trackerList.size),
+  val outer_arb = Module(new ClientUncachedTileLinkIOArbiter(trackerList.size),
     { case TLId => params(OuterTLId)
       case TLDataBits => internalDataBits })
-  outer_arb.io.in zip trackerList map { case(arb, t) => arb <> t.io.outer }
+  outer_arb.io.in <> trackerList.map(_.io.outer)
   // Get the pending data out of the store data queue
   val outer_data_ptr = new DataQueueLocation().fromBits(outer_arb.io.out.acquire.bits.data)
   val is_in_sdq = outer_data_ptr.loc === inStoreQueue
@@ -124,12 +124,11 @@ class BroadcastXactTracker extends XactTracker {
   val io = new ManagerXactTrackerIO
 }
 
-class BroadcastVoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends BroadcastXactTracker {
+class BroadcastVoluntaryReleaseTracker(trackerId: Int) extends BroadcastXactTracker {
   val s_idle :: s_outer :: s_grant :: s_ack :: Nil = Enum(UInt(), 4)
   val state = Reg(init=s_idle)
 
-  val xact_src = Reg(io.inner.release.bits.header.src.clone)
-  val xact = Reg(Bundle(new Release, { case TLId => params(InnerTLId); case TLDataBits => 0 }))
+  val xact = Reg(Bundle(new ReleaseFromSrc, { case TLId => params(InnerTLId); case TLDataBits => 0 }))
   val data_buffer = Vec.fill(innerDataBeats){ Reg(io.irel().data.clone) }
   val coh = ManagerMetadata.onReset
 
@@ -150,9 +149,7 @@ class BroadcastVoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends Broa
   io.inner.grant.valid := Bool(false)
   io.inner.finish.ready := Bool(false)
 
-  io.inner.grant.bits.header.src := UInt(bankId)
-  io.inner.grant.bits.header.dst := xact_src
-  io.inner.grant.bits.payload := coh.makeGrant(xact, UInt(trackerId))
+  io.inner.grant.bits := coh.makeGrant(xact, UInt(trackerId))
 
   //TODO: Use io.outer.release instead?
   io.outer.acquire.bits := Bundle(
@@ -175,7 +172,6 @@ class BroadcastVoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends Broa
     is(s_idle) {
       io.inner.release.ready := Bool(true)
       when( io.inner.release.valid ) {
-        xact_src := io.inner.release.bits.header.src
        xact := io.irel()
        data_buffer(UInt(0)) := io.irel().data
        collect_irel_data := io.irel().hasMultibeatData()
@@ -207,12 +203,11 @@ class BroadcastVoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends Broa
   }
 }
 
-class BroadcastAcquireTracker(trackerId: Int, bankId: Int) extends BroadcastXactTracker {
+class BroadcastAcquireTracker(trackerId: Int) extends BroadcastXactTracker {
   val s_idle :: s_probe :: s_mem_read :: s_mem_write :: s_make_grant :: s_mem_resp :: s_ack :: Nil = Enum(UInt(), 7)
   val state = Reg(init=s_idle)
 
-  val xact_src = Reg(io.inner.acquire.bits.header.src.clone)
-  val xact = Reg(Bundle(new Acquire, { case TLId => params(InnerTLId); case TLDataBits => 0 }))
+  val xact = Reg(Bundle(new AcquireFromSrc, { case TLId => params(InnerTLId); case TLDataBits => 0 }))
   val data_buffer = Vec.fill(innerDataBeats){ Reg(io.iacq().data.clone) }
   val coh = ManagerMetadata.onReset
 
@@ -225,9 +220,9 @@ class BroadcastAcquireTracker(trackerId: Int, bankId: Int) extends BroadcastXact
   val pending_probes = Reg(init=Bits(0, width = io.inner.tlNCoherentClients))
   val curr_p_id = PriorityEncoder(pending_probes)
   val full_sharers = coh.full()
-  val probe_self = io.inner.acquire.bits.payload.requiresSelfProbe()
-  val mask_self_true = UInt(UInt(1) << io.inner.acquire.bits.header.src, width = io.inner.tlNCoherentClients)
-  val mask_self_false = ~UInt(UInt(1) << io.inner.acquire.bits.header.src, width = io.inner.tlNCoherentClients)
+  val probe_self = io.inner.acquire.bits.requiresSelfProbe()
+  val mask_self_true = UInt(UInt(1) << io.inner.acquire.bits.client_id, width = io.inner.tlNCoherentClients)
+  val mask_self_false = ~UInt(UInt(1) << io.inner.acquire.bits.client_id, width = io.inner.tlNCoherentClients)
   val mask_self = Mux(probe_self, full_sharers | mask_self_true, full_sharers & mask_self_false)
   val mask_incoherent = mask_self & ~io.incoherent.toBits
 
@@ -272,21 +267,17 @@ class BroadcastAcquireTracker(trackerId: Int, bankId: Int) extends BroadcastXact
   io.outer.grant.ready := Bool(false)
 
   io.inner.probe.valid := Bool(false)
-  io.inner.probe.bits.header.src := UInt(bankId)
-  io.inner.probe.bits.header.dst := curr_p_id
-  io.inner.probe.bits.payload := coh.makeProbe(xact)
+  io.inner.probe.bits := coh.makeProbe(curr_p_id, xact)
 
   io.inner.grant.valid := Bool(false)
-  io.inner.grant.bits.header.src := UInt(bankId)
-  io.inner.grant.bits.header.dst := xact_src
-  io.inner.grant.bits.payload := coh.makeGrant(xact, UInt(trackerId)) // Data bypassed in parent
+  io.inner.grant.bits := coh.makeGrant(xact, UInt(trackerId)) // Data bypassed in parent
 
   io.inner.acquire.ready := Bool(false)
   io.inner.release.ready := Bool(false)
   io.inner.finish.ready := Bool(false)
 
   assert(!(state != s_idle && collect_iacq_data && io.inner.acquire.fire() &&
-    io.inner.acquire.bits.header.src != xact_src),
+    io.iacq().client_id != xact.client_id),
     "AcquireTracker accepted data beat from different network source than initial request.")
 
   assert(!(state != s_idle && collect_iacq_data && io.inner.acquire.fire() &&
@@ -316,7 +307,6 @@ class BroadcastAcquireTracker(trackerId: Int, bankId: Int) extends BroadcastXact
     is(s_idle) {
       io.inner.acquire.ready := Bool(true)
       when(io.inner.acquire.valid) {
-        xact_src := io.inner.acquire.bits.header.src
        xact := io.iacq()
        data_buffer(UInt(0)) := io.iacq().data
        collect_iacq_data := io.iacq().hasMultibeatData()
@@ -188,7 +188,7 @@ abstract class L2HellaCacheModule extends Module with L2HellaCacheParameters {
       ins: Seq[DecoupledIO[T]]) {
     val arb = Module(new RRArbiter(out.bits.clone, ins.size))
     out <> arb.io.out
-    arb.io.in zip ins map { case (a, in) => a <> in }
+    arb.io.in <> ins
   }
 
   def doInternalInputRouting[T <: HasL2Id](in: ValidIO[T], outs: Seq[ValidIO[T]]) {
@@ -342,14 +342,14 @@ class L2SecondaryMissInfo extends TLBundle
   with HasTileLinkBeatId
   with HasClientTransactionId
 
-class L2HellaCacheBank(bankId: Int) extends HierarchicalCoherenceAgent
+class L2HellaCacheBank extends HierarchicalCoherenceAgent
     with L2HellaCacheParameters {
   require(isPow2(nSets))
   require(isPow2(nWays))
 
   val meta = Module(new L2MetadataArray) // TODO: add delay knob
   val data = Module(new L2DataArray(1))
-  val tshrfile = Module(new TSHRFile(bankId))
+  val tshrfile = Module(new TSHRFile)
   tshrfile.io.inner <> io.inner
   io.outer <> tshrfile.io.outer
   io.incoherent <> tshrfile.io.incoherent
@@ -362,24 +362,21 @@ class TSHRFileIO extends HierarchicalTLIO {
   val data = new L2DataRWIO
 }
 
-class TSHRFile(bankId: Int) extends L2HellaCacheModule
+class TSHRFile extends L2HellaCacheModule
     with HasCoherenceAgentWiringHelpers {
   val io = new TSHRFileIO
 
   // Create TSHRs for outstanding transactions
-  val trackerList = (0 until nReleaseTransactors).map { id =>
-    Module(new L2VoluntaryReleaseTracker(id, bankId))
-  } ++ (nReleaseTransactors until nTransactors).map { id =>
-    Module(new L2AcquireTracker(id, bankId))
-  }
+  val trackerList = (0 until nReleaseTransactors).map(id => Module(new L2VoluntaryReleaseTracker(id))) ++
+    (nReleaseTransactors until nTransactors).map(id => Module(new L2AcquireTracker(id)))
 
   // WritebackUnit evicts data from L2, including invalidating L1s
-  val wb = Module(new L2WritebackUnit(nTransactors, bankId))
+  val wb = Module(new L2WritebackUnit(nTransactors))
   doInternalOutputArbitration(wb.io.wb.req, trackerList.map(_.io.wb.req))
   doInternalInputRouting(wb.io.wb.resp, trackerList.map(_.io.wb.resp))
 
   // Propagate incoherence flags
-  (trackerList.map(_.io.incoherent) :+ wb.io.incoherent).map( _ := io.incoherent.toBits)
+  (trackerList.map(_.io.incoherent) :+ wb.io.incoherent) foreach { _ := io.incoherent.toBits }
 
   // Handle acquire transaction initiation
   val trackerAcquireIOs = trackerList.map(_.io.inner.acquire)
@@ -419,8 +416,8 @@ class TSHRFile(bankId: Int) extends L2HellaCacheModule
 
   // Create an arbiter for the one memory port
   val outerList = trackerList.map(_.io.outer) :+ wb.io.outer
-  val outer_arb = Module(new HeaderlessTileLinkIOArbiter(outerList.size))(outerTLParams)
-  outerList zip outer_arb.io.in map { case(out, arb) => out <> arb }
+  val outer_arb = Module(new ClientTileLinkIOArbiter(outerList.size))(outerTLParams)
+  outer_arb.io.in <> outerList
   io.outer <> outer_arb.io.out
 
   // Wire local memory arrays
@@ -480,21 +477,20 @@ abstract class L2XactTracker extends XactTracker with L2HellaCacheParameters {
   def dropPendingBitInternal[T <: HasL2BeatAddr] (in: ValidIO[T]) =
     ~Fill(in.bits.refillCycles, in.valid) | ~UIntToOH(in.bits.addr_beat)
 
-  def addPendingBitWhenBeatHasPartialWritemask(in: DecoupledIO[LogicalNetworkIO[Acquire]]): UInt = {
-    val a = in.bits.payload
+  def addPendingBitWhenBeatHasPartialWritemask(in: DecoupledIO[AcquireFromSrc]): UInt = {
+    val a = in.bits
     val isPartial = a.wmask() != Acquire.fullWriteMask
-    addPendingBitWhenBeat(in.fire() && isPartial && Bool(ignoresWriteMask), in.bits.payload)
+    addPendingBitWhenBeat(in.fire() && isPartial && Bool(ignoresWriteMask), a)
   }
 }
 
-class L2VoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
+class L2VoluntaryReleaseTracker(trackerId: Int) extends L2XactTracker {
   val io = new L2XactTrackerIO
 
   val s_idle :: s_meta_read :: s_meta_resp :: s_data_write :: s_meta_write :: s_inner_grant :: s_inner_finish :: Nil = Enum(UInt(), 7)
   val state = Reg(init=s_idle)
 
-  val xact_src = Reg(io.inner.release.bits.header.src.clone)
-  val xact = Reg(Bundle(new Release, { case TLId => params(InnerTLId); case TLDataBits => 0 }))
+  val xact = Reg(Bundle(new ReleaseFromSrc, { case TLId => params(InnerTLId); case TLDataBits => 0 }))
   val xact_tag_match = Reg{ Bool() }
   val xact_old_meta = Reg{ new L2Metadata }
   val xact_way_en = Reg{ Bits(width = nWays) }
@@ -520,9 +516,7 @@ class L2VoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends L2XactTrack
   io.inner.grant.valid := Bool(false)
   io.inner.finish.ready := Bool(false)
 
-  io.inner.grant.bits.header.src := UInt(bankId)
-  io.inner.grant.bits.header.dst := xact_src
-  io.inner.grant.bits.payload := coh.inner.makeGrant(xact, UInt(trackerId))
+  io.inner.grant.bits := coh.inner.makeGrant(xact, UInt(trackerId))
 
   io.data.read.valid := Bool(false)
   io.data.write.valid := Bool(false)
@@ -541,7 +535,7 @@ class L2VoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends L2XactTrack
   io.meta.write.bits.idx := xact.addr_block(idxMSB,idxLSB)
   io.meta.write.bits.way_en := xact_way_en
   io.meta.write.bits.data.tag := xact.addr_block >> UInt(idxBits)
-  io.meta.write.bits.data.coh.inner := xact_old_meta.coh.inner.onRelease(xact, xact_src)
+  io.meta.write.bits.data.coh.inner := xact_old_meta.coh.inner.onRelease(xact)
   io.meta.write.bits.data.coh.outer := xact_old_meta.coh.outer.onHit(M_XWR) // WB is a write
   io.wb.req.valid := Bool(false)
 
@@ -558,7 +552,6 @@ class L2VoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends L2XactTrack
     is(s_idle) {
       io.inner.release.ready := Bool(true)
       when( io.inner.release.valid ) {
-        xact_src := io.inner.release.bits.header.src
        xact := io.irel()
        data_buffer(io.irel().addr_beat) := io.irel().data
        collect_irel_data := io.irel().hasMultibeatData()
@@ -608,15 +601,14 @@ class L2VoluntaryReleaseTracker(trackerId: Int, bankId: Int) extends L2XactTrack
   }
 }
 
 
-class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
+class L2AcquireTracker(trackerId: Int) extends L2XactTracker {
   val io = new L2XactTrackerIO
 
   val s_idle :: s_meta_read :: s_meta_resp :: s_wb_req :: s_wb_resp :: s_inner_probe :: s_outer_acquire :: s_busy :: s_meta_write :: Nil = Enum(UInt(), 9)
   val state = Reg(init=s_idle)
 
   // State holding transaction metadata
-  val xact_src = Reg(io.inner.acquire.bits.header.src.clone)
-  val xact = Reg(Bundle(new Acquire, { case TLId => params(InnerTLId) }))
+  val xact = Reg(Bundle(new AcquireFromSrc, { case TLId => params(InnerTLId) }))
   val data_buffer = Vec.fill(innerDataBeats){ Reg(init=UInt(0, width = innerDataBits)) }
   val wmask_buffer = Vec.fill(innerDataBeats){ Reg(init=UInt(0,width = innerDataBits/8)) }
   val xact_tag_match = Reg{ Bool() }
@@ -631,16 +623,23 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
   // TODO add ignt.dst <- iacq.src
 
   // State holding progress made on processing this transaction
-  val iacq_data_done =
-    connectIncomingDataBeatCounter(io.inner.acquire)
-  val pending_irels =
-    connectTwoWayBeatCounter(io.inner.tlNCoherentClients, io.inner.probe, io.inner.release)._1
+  val iacq_data_done = connectIncomingDataBeatCounter(io.inner.acquire)
+  val pending_irels = connectTwoWayBeatCounter(
+    max = io.inner.tlNCoherentClients,
+    up = io.inner.probe,
+    down = io.inner.release)._1
   val (pending_ognts, oacq_data_idx, oacq_data_done, ognt_data_idx, ognt_data_done) =
-    connectHeaderlessTwoWayBeatCounter(1, io.outer.acquire, io.outer.grant, xact.addr_beat)
-  val (ignt_data_idx, ignt_data_done) =
-    connectOutgoingDataBeatCounter(io.inner.grant, ignt_q.io.deq.bits.addr_beat)
-  val pending_ifins =
-    connectTwoWayBeatCounter(nSecondaryMisses, io.inner.grant, io.inner.finish, (g: Grant) => g.requiresAck())._1
+    connectTwoWayBeatCounter(
+      max = 1,
+      up = io.outer.acquire,
+      down = io.outer.grant,
+      beat = xact.addr_beat)
+  val (ignt_data_idx, ignt_data_done) = connectOutgoingDataBeatCounter(io.inner.grant, ignt_q.io.deq.bits.addr_beat)
+  val pending_ifins = connectTwoWayBeatCounter(
+    max = nSecondaryMisses,
+    up = io.inner.grant,
+    down = io.inner.finish,
+    track = (g: Grant) => g.requiresAck())._1
   val pending_puts = Reg(init=Bits(0, width = io.inner.tlDataBeats))
   val pending_iprbs = Reg(init = Bits(0, width = io.inner.tlNCoherentClients))
   val pending_reads = Reg(init=Bits(0, width = io.inner.tlDataBeats))
@@ -695,9 +694,9 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
   def mergeDataInternal[T <: HasL2Data with HasL2BeatAddr](in: ValidIO[T]) {
     when(in.valid) { mergeData(rowBits)(in.bits.addr_beat, in.bits.data) }
   }
-  def mergeDataInner[T <: HasTileLinkData with HasTileLinkBeatId](in: DecoupledIO[LogicalNetworkIO[T]]) {
-    when(in.fire() && in.bits.payload.hasData()) {
-      mergeData(innerDataBits)(in.bits.payload.addr_beat, in.bits.payload.data)
+  def mergeDataInner[T <: HasTileLinkData with HasTileLinkBeatId](in: DecoupledIO[T]) {
+    when(in.fire() && in.bits.hasData()) {
+      mergeData(innerDataBits)(in.bits.addr_beat, in.bits.data)
     }
   }
   def mergeDataOuter[T <: HasTileLinkData with HasTileLinkBeatId](in: DecoupledIO[T]) {
@@ -713,7 +712,7 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
   // and Puts-under-Put from the same client
   val can_merge_iacq_get = (xact.isBuiltInType(Acquire.getType) &&
                              io.iacq().isBuiltInType(Acquire.getType)) &&
-                           xact_src === io.inner.acquire.bits.header.src && //TODO
+                           xact.client_id === io.iacq().client_id && //TODO remove
                            xact.conflicts(io.iacq()) &&
                            state != s_idle && state != s_meta_write &&
                            !all_pending_done &&
@@ -728,7 +727,7 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
                              io.iacq().isBuiltInType(Acquire.putType)) ||
                             (xact.isBuiltInType(Acquire.putBlockType) &&
                              io.iacq().isBuiltInType(Acquire.putBlockType))) &&
-                           xact_src === io.inner.acquire.bits.header.src && //TODO
+                           xact.client_id === io.iacq().client_id && //TODO remove
                            xact.conflicts(io.iacq()) &&
                            state != s_idle && state != s_meta_write &&
                            !all_pending_done &&
@@ -749,17 +748,13 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
   pending_iprbs := pending_iprbs & dropPendingBitAtDest(io.inner.probe)
   val curr_probe_dst = PriorityEncoder(pending_iprbs)
   io.inner.probe.valid := state === s_inner_probe && pending_iprbs.orR
-  io.inner.probe.bits.header.src := UInt(bankId)
-  io.inner.probe.bits.header.dst := curr_probe_dst
-  io.inner.probe.bits.payload := pending_coh.inner.makeProbe(xact)
+  io.inner.probe.bits := pending_coh.inner.makeProbe(curr_probe_dst, xact)
 
   // Handle incoming releases from clients, which may reduce sharer counts
   // and/or write back dirty data
   io.inner.release.ready := state === s_inner_probe
   val pending_coh_on_irel = HierarchicalMetadata(
-    pending_coh.inner.onRelease( // Drop sharer
-      incoming = io.irel(),
-      src = io.inner.release.bits.header.src),
+    pending_coh.inner.onRelease(io.irel()), // Drop sharer
     Mux(io.irel().hasData(), // Dirty writeback
       pending_coh.outer.onHit(M_XWR),
       pending_coh.outer))
@@ -804,23 +799,18 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
                           ignt_q.io.deq.valid &&
                           (!io.ignt().hasData() ||
                             pending_ignt_data(ignt_data_idx))
-  io.inner.grant.bits.header.src := UInt(bankId)
-  io.inner.grant.bits.header.dst := xact_src // TODO: ignt_q.io.deq.bits.src
-  io.inner.grant.bits.payload := pending_coh.inner.makeGrant(
-    acq = xact,
-    manager_xact_id = UInt(trackerId),
-    addr_beat = ignt_data_idx,
-    data = Mux(xact.is(Acquire.putAtomicType),
-      amo_result,
-      data_buffer(ignt_data_idx)))
-  // TODO: improve the ManagerMetadata.makeGrant to deal with possibility of
-  // multiple client transaction ids from merged secondary misses
-  io.ignt().client_xact_id := ignt_q.io.deq.bits.client_xact_id
+  io.inner.grant.bits := pending_coh.inner.makeGrant(
+    dst = xact.client_id, // TODO: ignt_q.io.deq.bits.src
+    acq = xact,
+    client_xact_id = ignt_q.io.deq.bits.client_xact_id,
+    manager_xact_id = UInt(trackerId),
+    addr_beat = ignt_data_idx,
+    data = Mux(xact.is(Acquire.putAtomicType),
+      amo_result,
+      data_buffer(ignt_data_idx)))
 
   val pending_coh_on_ignt = HierarchicalMetadata(
-    pending_coh.inner.onGrant(
-      outgoing = io.ignt(),
-      dst = io.inner.grant.bits.header.dst),
+    pending_coh.inner.onGrant(io.ignt()),
     Mux(ognt_data_done,
       pending_coh_on_ognt.outer,
       pending_coh.outer))
@@ -904,7 +894,6 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
 
   // State machine updates and transaction handler metadata intialization
   when(state === s_idle && io.inner.acquire.valid) {
-    xact_src := io.inner.acquire.bits.header.src
    xact := io.iacq()
    xact.data := UInt(0)
    pending_puts := Mux( // Make sure to collect all data from a PutBlock
@@ -943,8 +932,8 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
     val full_sharers = coh.inner.full()
     val mask_self = Mux(
       xact.requiresSelfProbe(),
-      coh.inner.full() | UIntToOH(xact_src),
-      coh.inner.full() & ~UIntToOH(xact_src))
+      coh.inner.full() | UIntToOH(xact.client_id),
+      coh.inner.full() & ~UIntToOH(xact.client_id))
     val mask_incoherent = mask_self & ~io.incoherent.toBits
     pending_iprbs := mask_incoherent
   }
@@ -984,7 +973,7 @@ class L2AcquireTracker(trackerId: Int, bankId: Int) extends L2XactTracker {
 
   // Checks for illegal behavior
   assert(!(state != s_idle && io.inner.acquire.fire() &&
-    io.inner.acquire.bits.header.src != xact_src),
+    io.inner.acquire.bits.client_id != xact.client_id),
     "AcquireTracker accepted data beat from different network source than initial request.")
 }
 
@@ -1007,7 +996,7 @@ class L2WritebackUnitIO extends HierarchicalXactTrackerIO {
   val data = new L2DataRWIO
 }
 
-class L2WritebackUnit(trackerId: Int, bankId: Int) extends L2XactTracker {
+class L2WritebackUnit(trackerId: Int) extends L2XactTracker {
   val io = new L2WritebackUnitIO
 
   val s_idle :: s_inner_probe :: s_data_read :: s_data_resp :: s_outer_release :: s_outer_grant :: s_wb_resp :: Nil = Enum(UInt(), 7)
@@ -1031,9 +1020,7 @@ class L2WritebackUnit(trackerId: Int, bankId: Int) extends L2XactTracker {
   val (read_data_cnt, read_data_done) = connectInternalDataBeatCounter(io.data.read)
   val resp_data_done = connectInternalDataBeatCounter(io.data.resp)
 
-  val pending_icoh_on_irel = xact_coh.inner.onRelease(
-    incoming = io.irel(),
-    src = io.inner.release.bits.header.src)
+  val pending_icoh_on_irel = xact_coh.inner.onRelease(io.irel())
   val pending_ocoh_on_irel = xact_coh.outer.onHit(M_XWR) // WB is a write
   val pending_ofin_on_ognt = io.ognt().makeFinish()
 
@@ -1054,10 +1041,7 @@ class L2WritebackUnit(trackerId: Int, bankId: Int) extends L2XactTracker {
   io.outer.grant.ready := Bool(false) // default
 
   io.inner.probe.valid := Bool(false)
-  io.inner.probe.bits.header.src := UInt(bankId)
-  io.inner.probe.bits.header.dst := curr_probe_dst
-  io.inner.probe.bits.payload :=
-    xact_coh.inner.makeProbeForVoluntaryWriteback(xact_addr_block)
+  io.inner.probe.bits := xact_coh.inner.makeProbeForVoluntaryWriteback(curr_probe_dst, xact_addr_block)
 
   io.inner.grant.valid := Bool(false)
   io.inner.acquire.ready := Bool(false)
@@ -60,7 +60,7 @@ class SCRIO extends HTIFBundle {
 class HTIFModuleIO extends HTIFBundle {
   val host = new HostIO
   val cpu = Vec.fill(nCores){new HTIFIO}.flip
-  val mem = new HeaderlessUncachedTileLinkIO
+  val mem = new ClientUncachedTileLinkIO
   val scr = new SCRIO
 }
 
@@ -18,16 +18,26 @@ case object MIFDataBits extends Field[Int]
 case object MIFTagBits extends Field[Int]
 case object MIFDataBeats extends Field[Int]
 
-trait HasMemData extends Bundle {
-  val data = Bits(width = params(MIFDataBits))
+trait MIFParameters extends UsesParameters {
+  val mifTagBits = params(MIFTagBits)
+  val mifAddrBits = params(MIFAddrBits)
+  val mifDataBits = params(MIFDataBits)
+  val mifDataBeats = params(MIFDataBeats)
 }
 
+abstract class MIFBundle extends Bundle with MIFParameters
+abstract class MIFModule extends Module with MIFParameters
+
+trait HasMemData extends MIFBundle {
+  val data = Bits(width = mifDataBits)
+}
+
-trait HasMemAddr extends Bundle {
-  val addr = UInt(width = params(MIFAddrBits))
+trait HasMemAddr extends MIFBundle {
+  val addr = UInt(width = mifAddrBits)
 }
 
-trait HasMemTag extends Bundle {
-  val tag = UInt(width = params(MIFTagBits))
+trait HasMemTag extends MIFBundle {
+  val tag = UInt(width = mifTagBits)
 }
 
 class MemReqCmd extends HasMemAddr with HasMemTag {
@@ -56,7 +66,7 @@ class MemSerializedIO(w: Int) extends Bundle
   val resp = Valid(Bits(width = w)).flip
 }
 
-class MemSerdes(w: Int) extends Module
+class MemSerdes(w: Int) extends MIFModule
 {
   val io = new Bundle {
     val wide = new MemIO().flip
@@ -65,7 +75,6 @@ class MemSerdes(w: Int) extends Module
   val abits = io.wide.req_cmd.bits.toBits.getWidth
   val dbits = io.wide.req_data.bits.toBits.getWidth
   val rbits = io.wide.resp.bits.getWidth
-  val dbeats = params(MIFDataBeats)
 
   val out_buf = Reg(Bits())
   val in_buf = Reg(Bits())
@@ -73,7 +82,7 @@ class MemSerdes(w: Int) extends Module
   val s_idle :: s_read_addr :: s_write_addr :: s_write_idle :: s_write_data :: Nil = Enum(UInt(), 5)
   val state = Reg(init=s_idle)
   val send_cnt = Reg(init=UInt(0, log2Up((max(abits, dbits)+w-1)/w)))
-  val data_send_cnt = Reg(init=UInt(0, log2Up(dbeats)))
+  val data_send_cnt = Reg(init=UInt(0, log2Up(mifDataBeats)))
   val adone = io.narrow.req.ready && send_cnt === UInt((abits-1)/w)
   val ddone = io.narrow.req.ready && send_cnt === UInt((dbits-1)/w)
 
@@ -109,12 +118,12 @@ class MemSerdes(w: Int) extends Module
   }
   when (state === s_write_data && ddone) {
     data_send_cnt := data_send_cnt + UInt(1)
-    state := Mux(data_send_cnt === UInt(dbeats-1), s_idle, s_write_idle)
+    state := Mux(data_send_cnt === UInt(mifDataBeats-1), s_idle, s_write_idle)
     send_cnt := UInt(0)
   }
 
   val recv_cnt = Reg(init=UInt(0, log2Up((rbits+w-1)/w)))
-  val data_recv_cnt = Reg(init=UInt(0, log2Up(dbeats)))
+  val data_recv_cnt = Reg(init=UInt(0, log2Up(mifDataBeats)))
   val resp_val = Reg(init=Bool(false))
 
   resp_val := Bool(false)
@@ -143,11 +152,11 @@ class MemDesser(w: Int) extends Module // test rig side
   val abits = io.wide.req_cmd.bits.toBits.getWidth
   val dbits = io.wide.req_data.bits.toBits.getWidth
   val rbits = io.wide.resp.bits.getWidth
-  val dbeats = params(MIFDataBeats)
+  val mifDataBeats = params(MIFDataBeats)
 
   require(dbits >= abits && rbits >= dbits)
   val recv_cnt = Reg(init=UInt(0, log2Up((rbits+w-1)/w)))
-  val data_recv_cnt = Reg(init=UInt(0, log2Up(dbeats)))
+  val data_recv_cnt = Reg(init=UInt(0, log2Up(mifDataBeats)))
   val adone = io.narrow.req.valid && recv_cnt === UInt((abits-1)/w)
   val ddone = io.narrow.req.valid && recv_cnt === UInt((dbits-1)/w)
   val rdone = io.narrow.resp.valid && recv_cnt === UInt((rbits-1)/w)
@@ -175,13 +184,13 @@ class MemDesser(w: Int) extends Module // test rig side
   }
   when (state === s_data && io.wide.req_data.ready) {
     state := s_data_recv
-    when (data_recv_cnt === UInt(dbeats-1)) {
+    when (data_recv_cnt === UInt(mifDataBeats-1)) {
       state := s_cmd_recv
     }
     data_recv_cnt := data_recv_cnt + UInt(1)
   }
   when (rdone) { // state === s_reply
-    when (data_recv_cnt === UInt(dbeats-1)) {
+    when (data_recv_cnt === UInt(mifDataBeats-1)) {
       state := s_cmd_recv
     }
     recv_cnt := UInt(0)
@@ -195,7 +204,7 @@ class MemDesser(w: Int) extends Module // test rig side
   io.wide.req_data.valid := state === s_data
   io.wide.req_data.bits.data := in_buf >> UInt(((rbits+w-1)/w - (dbits+w-1)/w)*w)
 
-  val dataq = Module(new Queue(new MemResp, dbeats))
+  val dataq = Module(new Queue(new MemResp, mifDataBeats))
   dataq.io.enq <> io.wide.resp
   dataq.io.deq.ready := recv_cnt === UInt((rbits-1)/w)
 
@@ -203,19 +212,12 @@ class MemDesser(w: Int) extends Module // test rig side
   io.narrow.resp.bits := dataq.io.deq.bits.toBits >> (recv_cnt * UInt(w))
 }
 
-//Adapter between a TileLinkIO and a UncachedTileLinkIO, merges voluntary
-
-
 //Adapter betweewn an UncachedTileLinkIO and a mem controller MemIO
-class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
+class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule with MIFParameters {
   val io = new Bundle {
-    val tl = new TileLinkIO().flip
+    val tl = new ManagerTileLinkIO
     val mem = new MemIO
   }
-  val mifTagBits = params(MIFTagBits)
-  val mifAddrBits = params(MIFAddrBits)
-  val mifDataBits = params(MIFDataBits)
-  val mifDataBeats = params(MIFDataBeats)
   val dataBits = tlDataBits*tlDataBeats
   val dstIdBits = params(LNHeaderBits)
   require(tlDataBits*tlDataBeats == mifDataBits*mifDataBeats, "Data sizes between LLC and MC don't agree")
@@ -227,12 +229,12 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
   io.tl.finish.ready := Bool(true)
   io.mem.resp.ready := Bool(false)
 
-  val gnt_arb = Module(new Arbiter(new LogicalNetworkIO(new Grant), 2))
+  val gnt_arb = Module(new Arbiter(new GrantToDst, 2))
   io.tl.grant <> gnt_arb.io.out
 
   val dst_off = dstIdBits + tlClientXactIdBits
-  val acq_has_data = io.tl.acquire.bits.payload.hasData()
-  val rel_has_data = io.tl.release.bits.payload.hasData()
+  val acq_has_data = io.tl.acquire.bits.hasData()
+  val rel_has_data = io.tl.release.bits.hasData()
 
   // Decompose outgoing TL Acquires into MemIO cmd and data
   val active_out = Reg(init=Bool(false))
@@ -248,13 +250,12 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
   val make_grant_ack = Reg(init=Bool(false))
 
   gnt_arb.io.in(1).valid := Bool(false)
-  gnt_arb.io.in(1).bits.payload := Grant(
-    is_builtin_type = Bool(true),
-    g_type = Mux(data_from_rel, Grant.voluntaryAckType, Grant.putAckType),
-    client_xact_id = tag_out >> UInt(1),
-    manager_xact_id = UInt(0))
-  gnt_arb.io.in(1).bits.header.dst := (if(dstIdBits > 0) tag_out(dst_off, tlClientXactIdBits + 1) else UInt(0))
-  gnt_arb.io.in(1).bits.header.src := UInt(0)
+  gnt_arb.io.in(1).bits := Grant(
+    dst = (if(dstIdBits > 0) tag_out(dst_off, tlClientXactIdBits + 1) else UInt(0)),
+    is_builtin_type = Bool(true),
+    g_type = Mux(data_from_rel, Grant.voluntaryAckType, Grant.putAckType),
+    client_xact_id = tag_out >> UInt(1),
+    manager_xact_id = UInt(0))
 
   if(tlDataBits != mifDataBits || tlDataBeats != mifDataBeats) {
     val mem_cmd_q = Module(new Queue(new MemReqCmd, qDepth))
@@ -263,7 +264,7 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
     mem_data_q.io.enq.valid := Bool(false)
     val (mif_cnt_out, mif_wrap_out) = Counter(mem_data_q.io.enq.fire(), mifDataBeats)
     val mif_done_out = Reg(init=Bool(false))
-    val tl_buf_out = Vec.fill(tlDataBeats){ Reg(io.tl.acquire.bits.payload.data.clone) }
+    val tl_buf_out = Vec.fill(tlDataBeats){ Reg(io.tl.acquire.bits.data.clone) }
     val mif_buf_out = Vec.fill(mifDataBeats){ new MemData }
     mif_buf_out := mif_buf_out.fromBits(tl_buf_out.toBits)
     val mif_prog_out = (mif_cnt_out+UInt(1, width = log2Up(mifDataBeats+1)))*UInt(mifDataBits)
@@ -275,27 +276,27 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
       when(io.tl.release.valid) {
         active_out := Bool(true)
         cmd_sent_out := Bool(false)
-        tag_out := Cat(io.tl.release.bits.header.src,
-                       io.tl.release.bits.payload.client_xact_id,
-                       io.tl.release.bits.payload.isVoluntary())
-        addr_out := io.tl.release.bits.payload.addr_block
+        tag_out := Cat(io.tl.release.bits.client_id,
+                       io.tl.release.bits.client_xact_id,
+                       io.tl.release.bits.isVoluntary())
+        addr_out := io.tl.release.bits.addr_block
         has_data := rel_has_data
         data_from_rel := Bool(true)
-        make_grant_ack := io.tl.release.bits.payload.requiresAck()
+        make_grant_ack := io.tl.release.bits.requiresAck()
         tl_done_out := tl_wrap_out
-        tl_buf_out(tl_cnt_out) := io.tl.release.bits.payload.data
+        tl_buf_out(tl_cnt_out) := io.tl.release.bits.data
       } .elsewhen(io.tl.acquire.valid) {
         active_out := Bool(true)
         cmd_sent_out := Bool(false)
-        tag_out := Cat(io.tl.release.bits.header.src,
-                       io.tl.acquire.bits.payload.client_xact_id,
-                       io.tl.acquire.bits.payload.isBuiltInType())
-        addr_out := io.tl.acquire.bits.payload.addr_block
+        tag_out := Cat(io.tl.release.bits.client_id,
+                       io.tl.acquire.bits.client_xact_id,
+                       io.tl.acquire.bits.isBuiltInType())
+        addr_out := io.tl.acquire.bits.addr_block
         has_data := acq_has_data
         data_from_rel := Bool(false)
         make_grant_ack := acq_has_data
         tl_done_out := tl_wrap_out
-        tl_buf_out(tl_cnt_out) := io.tl.acquire.bits.payload.data
+        tl_buf_out(tl_cnt_out) := io.tl.acquire.bits.data
       }
     }
     when(active_out) {
@@ -306,8 +307,8 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
         io.tl.acquire.ready := Bool(true)
         when(io.tl.acquire.valid) {
           tl_buf_out(tl_cnt_out) := Mux(data_from_rel,
-            io.tl.release.bits.payload.data,
-            io.tl.acquire.bits.payload.data)
+            io.tl.release.bits.data,
+            io.tl.acquire.bits.data)
         }
       }
       when(!mif_done_out) {
@@ -338,8 +339,8 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
     io.mem.req_cmd.bits.tag := tag_out
     io.mem.req_cmd.bits.addr := addr_out
     io.mem.req_data.bits.data := Mux(data_from_rel,
-      io.tl.release.bits.payload.data,
-      io.tl.acquire.bits.payload.data)
+      io.tl.release.bits.data,
+      io.tl.acquire.bits.data)
     when(!active_out){
       io.tl.release.ready := io.mem.req_data.ready
       io.tl.acquire.ready := io.mem.req_data.ready && !io.tl.release.valid
@@ -352,12 +353,12 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
         tl_done_out := tl_wrap_out
         when(io.tl.release.valid) {
           data_from_rel := Bool(true)
-          make_grant_ack := io.tl.release.bits.payload.requiresAck()
-          io.mem.req_data.bits.data := io.tl.release.bits.payload.data
-          val tag = Cat(io.tl.release.bits.header.src,
-                        io.tl.release.bits.payload.client_xact_id,
-                        io.tl.release.bits.payload.isVoluntary())
-          val addr = io.tl.release.bits.payload.addr_block
+          make_grant_ack := io.tl.release.bits.requiresAck()
+          io.mem.req_data.bits.data := io.tl.release.bits.data
+          val tag = Cat(io.tl.release.bits.client_id,
+                        io.tl.release.bits.client_xact_id,
+                        io.tl.release.bits.isVoluntary())
+          val addr = io.tl.release.bits.addr_block
          io.mem.req_cmd.bits.tag := tag
          io.mem.req_cmd.bits.addr := addr
          io.mem.req_cmd.bits.rw := rel_has_data
@@ -367,12 +368,12 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
         } .elsewhen(io.tl.acquire.valid) {
           data_from_rel := Bool(false)
           make_grant_ack := acq_has_data // i.e. is it a Put
-          io.mem.req_data.bits.data := io.tl.acquire.bits.payload.data
+          io.mem.req_data.bits.data := io.tl.acquire.bits.data
           io.mem.req_cmd.bits.rw := acq_has_data
-          val tag = Cat(io.tl.acquire.bits.header.src,
-                        io.tl.acquire.bits.payload.client_xact_id,
-                        io.tl.acquire.bits.payload.isBuiltInType())
-          val addr = io.tl.acquire.bits.payload.addr_block
+          val tag = Cat(io.tl.acquire.bits.client_id,
+                        io.tl.acquire.bits.client_xact_id,
+                        io.tl.acquire.bits.isBuiltInType())
+          val addr = io.tl.acquire.bits.addr_block
          io.mem.req_cmd.bits.tag := tag
          io.mem.req_cmd.bits.addr := addr
          io.mem.req_cmd.bits.rw := acq_has_data
@@ -407,28 +408,25 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
 
   // Aggregate incoming MemIO responses into TL Grants
   val active_in = Reg(init=Bool(false))
-  val (tl_cnt_in, tl_wrap_in) = Counter(io.tl.grant.fire() && io.tl.grant.bits.payload.hasMultibeatData(), tlDataBeats)
+  val (tl_cnt_in, tl_wrap_in) = Counter(io.tl.grant.fire() && io.tl.grant.bits.hasMultibeatData(), tlDataBeats)
   val tag_in = Reg(UInt(width = mifTagBits))
 
   if(tlDataBits != mifDataBits || tlDataBeats != mifDataBeats) {
     val (mif_cnt_in, mif_wrap_in) = Counter(io.mem.resp.fire(), mifDataBeats) // TODO: Assumes all resps have data
     val mif_done_in = Reg(init=Bool(false))
     val mif_buf_in = Vec.fill(mifDataBeats){ Reg(new MemData) }
-    val tl_buf_in = Vec.fill(tlDataBeats){ io.tl.acquire.bits.payload.data.clone }
+    val tl_buf_in = Vec.fill(tlDataBeats){ io.tl.acquire.bits.data.clone }
     tl_buf_in := tl_buf_in.fromBits(mif_buf_in.toBits)
     val tl_prog_in = (tl_cnt_in+UInt(1, width = log2Up(tlDataBeats+1)))*UInt(tlDataBits)
     val mif_prog_in = mif_cnt_in*UInt(mifDataBits)
-    gnt_arb.io.in(0).bits.payload := Grant(
-      is_builtin_type = tag_in(0),
-      g_type = Mux(tag_in(0),
-        Grant.getDataBlockType,
-        UInt(0)), // TODO: Assumes MI or MEI protocol
-      client_xact_id = tag_in >> UInt(1),
-      manager_xact_id = UInt(0),
-      addr_beat = tl_cnt_in,
-      data = tl_buf_in(tl_cnt_in))
-    gnt_arb.io.in(0).bits.header.dst := (if(dstIdBits > 0) tag_in(dst_off, tlClientXactIdBits + 1) else UInt(0))
-    gnt_arb.io.in(0).bits.header.src := UInt(0)
+    gnt_arb.io.in(0).bits := Grant(
+      dst = (if(dstIdBits > 0) tag_in(dst_off, tlClientXactIdBits + 1) else UInt(0)),
+      is_builtin_type = tag_in(0),
+      g_type = Mux(tag_in(0), Grant.getDataBlockType, UInt(0)), // TODO: Assumes MI or MEI protocol
+      client_xact_id = tag_in >> UInt(1),
+      manager_xact_id = UInt(0),
+      addr_beat = tl_cnt_in,
+      data = tl_buf_in(tl_cnt_in))
 
     when(!active_in) {
       io.mem.resp.ready := Bool(true)
@@ -453,17 +451,14 @@ class MemIOTileLinkIOConverter(qDepth: Int) extends TLModule {
   } else { // Don't generate all the uneeded data buffers and flow resp
     gnt_arb.io.in(0).valid := io.mem.resp.valid
     io.mem.resp.ready := gnt_arb.io.in(0).ready
-    gnt_arb.io.in(0).bits.payload := Grant(
-      is_builtin_type = io.mem.resp.bits.tag(0),
-      g_type = Mux(io.mem.resp.bits.tag(0),
-        Grant.getDataBlockType,
-        UInt(0)), // TODO: Assumes MI or MEI protocol
-      client_xact_id = io.mem.resp.bits.tag >> UInt(1),
-      manager_xact_id = UInt(0),
-      addr_beat = tl_cnt_in,
-      data = io.mem.resp.bits.data)
-    gnt_arb.io.in(0).bits.header.dst := (if(dstIdBits > 0) io.mem.resp.bits.tag(dst_off, tlClientXactIdBits + 1) else UInt(0))
-    gnt_arb.io.in(0).bits.header.src := UInt(0)
+    gnt_arb.io.in(0).bits := Grant(
+      dst = (if(dstIdBits > 0) io.mem.resp.bits.tag(dst_off, tlClientXactIdBits + 1) else UInt(0)),
+      is_builtin_type = io.mem.resp.bits.tag(0),
+      g_type = Mux(io.mem.resp.bits.tag(0), Grant.getDataBlockType, UInt(0)), // TODO: Assumes MI or MEI protocol
+      client_xact_id = io.mem.resp.bits.tag >> UInt(1),
+      manager_xact_id = UInt(0),
+      addr_beat = tl_cnt_in,
+      data = io.mem.resp.bits.data)
   }
 }
 
@@ -522,28 +517,28 @@ object HellaQueue
   }
 }
 
-class MemPipeIOMemIOConverter(numRequests: Int, refillCycles: Int) extends Module {
+class MemPipeIOMemIOConverter(numRequests: Int) extends MIFModule {
   val io = new Bundle {
     val cpu = new MemIO().flip
     val mem = new MemPipeIO
   }
 
-  val numEntries = numRequests * refillCycles
+  val numEntries = numRequests * mifDataBeats
   val size = log2Down(numEntries) + 1
 
   val inc = Bool()
   val dec = Bool()
   val count = Reg(init=UInt(numEntries, size))
-  val watermark = count >= UInt(refillCycles)
+  val watermark = count >= UInt(mifDataBeats)
 
   when (inc && !dec) {
     count := count + UInt(1)
   }
   when (!inc && dec) {
-    count := count - UInt(refillCycles)
+    count := count - UInt(mifDataBeats)
   }
   when (inc && dec) {
-    count := count - UInt(refillCycles-1)
+    count := count - UInt(mifDataBeats-1)
   }
 
   val cmdq_mask = io.cpu.req_cmd.bits.rw || watermark
@@ -573,17 +568,17 @@ class MemPipeIOMemIOConverter(numRequests: Int, refillCycles: Int) extends Modul
   dec := io.mem.req_cmd.fire() && !io.mem.req_cmd.bits.rw
 }
 
-class MemPipeIOTileLinkIOConverter(outstanding: Int, refillCycles: Int) extends Module {
+class MemPipeIOTileLinkIOConverter(outstanding: Int) extends MIFModule {
   val io = new Bundle {
-    val tl = new TileLinkIO().flip
+    val tl = new ManagerTileLinkIO
     val mem = new MemPipeIO
   }
 
   val a = Module(new MemIOTileLinkIOConverter(1))
-  val b = Module(new MemPipeIOMemIOConverter(outstanding, refillCycles))
+  val b = Module(new MemPipeIOMemIOConverter(outstanding))
   a.io.tl <> io.tl
   b.io.cpu.req_cmd <> Queue(a.io.mem.req_cmd, 2, pipe=true)
-  b.io.cpu.req_data <> Queue(a.io.mem.req_data, refillCycles, pipe=true)
+  b.io.cpu.req_data <> Queue(a.io.mem.req_data, mifDataBeats, pipe=true)
   a.io.mem.resp <> b.io.cpu.resp
   b.io.mem <> io.mem
 }
 
@@ -108,17 +108,18 @@ class ManagerMetadata extends CoherenceMetadata {
   def requiresProbesOnVoluntaryWriteback(dummy: Int = 0): Bool =
     co.requiresProbes(M_FLUSH, this)
 
-  def makeProbe(cmd: UInt, addr_block: UInt): Probe =
-    Bundle(Probe(co.getProbeType(cmd, this), addr_block), { case TLId => id })
+  def makeProbe(dst: UInt, cmd: UInt, addr_block: UInt): ProbeToDst =
+    Bundle(Probe(dst, co.getProbeType(cmd, this), addr_block), { case TLId => id })
 
-  def makeProbe(acq: Acquire): Probe =
-    Bundle(Probe(co.getProbeType(acq, this), acq.addr_block), { case TLId => id })
+  def makeProbe(dst: UInt, acq: Acquire): ProbeToDst =
+    Bundle(Probe(dst, co.getProbeType(acq, this), acq.addr_block), { case TLId => id })
 
-  def makeProbeForVoluntaryWriteback(addr_block: UInt): Probe =
-    makeProbe(M_FLUSH, addr_block)
+  def makeProbeForVoluntaryWriteback(dst: UInt, addr_block: UInt): ProbeToDst =
+    makeProbe(dst, M_FLUSH, addr_block)
 
-  def makeGrant(rel: Release, manager_xact_id: UInt): Grant = {
+  def makeGrant(rel: ReleaseFromSrc, manager_xact_id: UInt): GrantToDst = {
     Bundle(Grant(
+      dst = rel.client_id,
       is_builtin_type = Bool(true),
       g_type = Grant.voluntaryAckType,
       client_xact_id = rel.client_xact_id,
@@ -126,11 +127,12 @@ class ManagerMetadata extends CoherenceMetadata {
   }
 
   def makeGrant(
-      acq: Acquire,
+      acq: AcquireFromSrc,
       manager_xact_id: UInt,
       addr_beat: UInt = UInt(0),
-      data: UInt = UInt(0)): Grant = {
+      data: UInt = UInt(0)): GrantToDst = {
     Bundle(Grant(
+      dst = acq.client_id,
       is_builtin_type = acq.isBuiltInType(),
       g_type = Mux(acq.isBuiltInType(),
         acq.getBuiltInGrantType(),
@@ -141,11 +143,24 @@ class ManagerMetadata extends CoherenceMetadata {
       data = data), { case TLId => id })
   }
 
-  def onRelease(incoming: Release, src: UInt): ManagerMetadata =
-    Bundle(co.managerMetadataOnRelease(incoming, src, this), { case TLId => id })
+  def makeGrant(
+      dst: UInt,
+      acq: AcquireFromSrc,
+      client_xact_id: UInt,
+      manager_xact_id: UInt,
+      addr_beat: UInt,
+      data: UInt): GrantToDst = {
+    val g = makeGrant(acq, manager_xact_id, addr_beat, data)
+    g.client_id := dst
+    g.client_xact_id := client_xact_id
+    g
+  }
+
+  def onRelease(incoming: ReleaseFromSrc): ManagerMetadata =
+    Bundle(co.managerMetadataOnRelease(incoming, incoming.client_id, this), { case TLId => id })
 
-  def onGrant(outgoing: Grant, dst: UInt): ManagerMetadata =
-    Bundle(co.managerMetadataOnGrant(outgoing, dst, this), { case TLId => id })
+  def onGrant(outgoing: GrantToDst): ManagerMetadata =
+    Bundle(co.managerMetadataOnGrant(outgoing, outgoing.client_id, this), { case TLId => id })
 }
 
 object ManagerMetadata {
 
@@ -23,12 +23,14 @@ case object TLDataBits extends Field[Int]
 case object TLDataBeats extends Field[Int]
 case object TLNetworkIsOrderedP2P extends Field[Boolean]
 
-abstract trait TileLinkParameters extends UsesParameters {
+trait TileLinkParameters extends UsesParameters {
   val tlCoh = params(TLCoherencePolicy)
   val tlNManagers = params(TLNManagers)
   val tlNClients = params(TLNClients)
   val tlNCoherentClients = params(TLNCoherentClients)
   val tlNIncoherentClients = params(TLNIncoherentClients)
+  val tlClientIdBits = log2Up(tlNClients)
+  val tlManagerIdBits = log2Up(tlNManagers)
   val tlMaxClientXacts = params(TLMaxClientXacts)
   val tlMaxClientPorts = params(TLMaxClientPorts)
   val tlMaxManagerXacts = params(TLMaxManagerXacts)
@@ -98,6 +100,10 @@ trait HasTileLinkData extends HasTileLinkBeatId {
   def hasMultibeatData(dummy: Int = 0): Bool
 }
 
+trait HasClientId extends TLBundle {
+  val client_id = UInt(width = tlClientIdBits)
+}
+
 // Actual TileLink channel bundle definitions
 
 class Acquire extends ClientToManagerChannel
@@ -350,12 +356,19 @@ class Probe extends ManagerToClientChannel
 }
 
 object Probe {
-  def apply(p_type: UInt, addr_block: UInt) = {
+  def apply(p_type: UInt, addr_block: UInt): Probe = {
     val prb = new Probe
     prb.p_type := p_type
     prb.addr_block := addr_block
     prb
   }
+  def apply(dst: UInt, p_type: UInt, addr_block: UInt): ProbeToDst = {
+    val prb = new ProbeToDst
+    prb.client_id := dst
+    prb.p_type := p_type
+    prb.addr_block := addr_block
+    prb
+  }
 }
 
 class Release extends ClientToManagerChannel
@@ -435,8 +448,8 @@ object Grant {
       g_type: UInt,
       client_xact_id: UInt,
       manager_xact_id: UInt,
-      addr_beat: UInt = UInt(0),
-      data: UInt = UInt(0)): Grant = {
+      addr_beat: UInt,
+      data: UInt): Grant = {
     val gnt = new Grant
     gnt.is_builtin_type := is_builtin_type
     gnt.g_type := g_type
@@ -446,6 +459,25 @@ object Grant {
     gnt.data := data
     gnt
   }
+
+  def apply(
+      dst: UInt,
+      is_builtin_type: Bool,
+      g_type: UInt,
+      client_xact_id: UInt,
+      manager_xact_id: UInt,
+      addr_beat: UInt = UInt(0),
+      data: UInt = UInt(0)): GrantToDst = {
+    val gnt = new GrantToDst
+    gnt.client_id := dst
+    gnt.is_builtin_type := is_builtin_type
+    gnt.g_type := g_type
+    gnt.client_xact_id := client_xact_id
+    gnt.manager_xact_id := manager_xact_id
+    gnt.addr_beat := addr_beat
+    gnt.data := data
+    gnt
+  }
 }
 
 class Finish extends ClientToManagerChannel with HasManagerTransactionId {
@@ -453,7 +485,14 @@ class Finish extends ClientToManagerChannel with HasManagerTransactionId {
   def hasMultibeatData(dummy: Int = 0) = Bool(false)
 }
 
-// Complete IO definitions for two types of TileLink clients
+// These subtypes include a field for the source or destination ClientId
+class AcquireFromSrc extends Acquire with HasClientId
+class ProbeToDst extends Probe with HasClientId
+class ReleaseFromSrc extends Release with HasClientId
+class GrantToDst extends Grant with HasClientId
+
+// Complete IO definitions for two types of TileLink clients, including
+// networking headers
 class UncachedTileLinkIO extends TLBundle {
   val acquire = new DecoupledIO(new LogicalNetworkIO(new Acquire))
   val grant = new DecoupledIO(new LogicalNetworkIO(new Grant)).flip
@@ -479,28 +518,36 @@ class TileLinkIOWrapper extends TLModule {
   io.out.release.valid := Bool(false)
 }
 
-// This version of TileLinkIO does not contain network headers for packets
-// that originate in the Clients (i.e. Acquire and Release). These headers
-// are provided in the top-level that instantiates the clients and network.
+// This version of TileLinkIO does not contain network headers. The headers
+// are provided in the top-level that instantiates the clients and network,
+// probably using a TileLinkClientPort module.
 // By eliding the header subbundles within the clients we can enable
 // hierarchical P&R while minimizing unconnected port errors in GDS.
 // Secondly, this version of the interface elides Finish messages, with the
 // assumption that a FinishUnit has been coupled to the TileLinkIO port
 // to deal with acking received Grants.
-class HeaderlessUncachedTileLinkIO extends TLBundle {
+class ClientUncachedTileLinkIO extends TLBundle {
   val acquire = new DecoupledIO(new Acquire)
   val grant = new DecoupledIO(new Grant).flip
 }
 
-class HeaderlessTileLinkIO extends HeaderlessUncachedTileLinkIO {
+class ClientTileLinkIO extends ClientUncachedTileLinkIO {
   val probe = new DecoupledIO(new Probe).flip
   val release = new DecoupledIO(new Release)
 }
 
-class HeaderlessTileLinkIOWrapper extends TLModule {
+class ManagerTileLinkIO extends TLBundle {
+  val acquire = new DecoupledIO(new AcquireFromSrc).flip
+  val grant = new DecoupledIO(new GrantToDst)
+  val finish = new DecoupledIO(new Finish).flip
+  val probe = new DecoupledIO(new ProbeToDst)
+  val release = new DecoupledIO(new ReleaseFromSrc).flip
+}
+
+class ClientTileLinkIOWrapper extends TLModule {
   val io = new Bundle {
-    val in = new HeaderlessUncachedTileLinkIO().flip
-    val out = new HeaderlessTileLinkIO
+    val in = new ClientUncachedTileLinkIO().flip
+    val out = new ClientTileLinkIO
   }
   io.out.acquire <> io.in.acquire
   io.out.grant <> io.in.grant
@@ -509,17 +556,17 @@ class HeaderlessTileLinkIOWrapper extends TLModule {
 }
 
 object TileLinkIOWrapper {
-  def apply(utl: HeaderlessUncachedTileLinkIO, p: Parameters): HeaderlessTileLinkIO = {
-    val conv = Module(new HeaderlessTileLinkIOWrapper)(p)
+  def apply(utl: ClientUncachedTileLinkIO, p: Parameters): ClientTileLinkIO = {
+    val conv = Module(new ClientTileLinkIOWrapper)(p)
     conv.io.in <> utl
     conv.io.out
   }
-  def apply(utl: HeaderlessUncachedTileLinkIO): HeaderlessTileLinkIO = {
-    val conv = Module(new HeaderlessTileLinkIOWrapper)
+  def apply(utl: ClientUncachedTileLinkIO): ClientTileLinkIO = {
+    val conv = Module(new ClientTileLinkIOWrapper)
     conv.io.in <> utl
     conv.io.out
   }
-  def apply(tl: HeaderlessTileLinkIO): HeaderlessTileLinkIO = tl
+  def apply(tl: ClientTileLinkIO): ClientTileLinkIO = tl
   def apply(utl: UncachedTileLinkIO, p: Parameters): TileLinkIO = {
     val conv = Module(new TileLinkIOWrapper)(p)
     conv.io.in <> utl
@@ -533,89 +580,6 @@ object TileLinkIOWrapper {
   def apply(tl: TileLinkIO): TileLinkIO = tl
 }
 
-trait HasDataBeatCounters {
-  type HasBeat = TileLinkChannel with HasTileLinkBeatId
-  type HasClientId = TileLinkChannel with HasClientTransactionId
-  type LNAcquire = LogicalNetworkIO[Acquire]
-  type LNRelease = LogicalNetworkIO[Release]
-  type LNGrant = LogicalNetworkIO[Grant]
-
-  def connectDataBeatCounter[S <: TileLinkChannel : ClassTag](inc: Bool, data: S, beat: UInt) = {
-    val multi = data.hasMultibeatData()
-    val (multi_cnt, multi_done) = Counter(inc && multi, data.tlDataBeats)
-    val cnt = Mux(multi, multi_cnt, beat)
-    val done = Mux(multi, multi_done, inc)
-    (cnt, done)
-  }
-
-  def connectOutgoingDataBeatCounter[T <: Data : TypeTag](
-      in: DecoupledIO[T],
-      beat: UInt = UInt(0)): (UInt, Bool) = {
-    in.bits match {
-      case p: TileLinkChannel if typeTag[T].tpe <:< typeTag[TileLinkChannel].tpe =>
-        connectDataBeatCounter(in.fire(), p, beat)
-      case ln: LNGrant if typeTag[T].tpe <:< typeTag[LNGrant].tpe =>
-        connectDataBeatCounter(in.fire(), ln.payload, beat)
-      case _ => { require(false, "Don't know how to connect a beat counter to " + typeTag[T].tpe); (UInt(0), Bool(false))}
-    }
-  }
-
-  def connectIncomingDataBeatCounters[T <: HasClientId : ClassTag](
-      in: DecoupledIO[LogicalNetworkIO[T]],
-      entries: Int,
-      getId: LogicalNetworkIO[T] => UInt): Vec[Bool] = {
-    Vec((0 until entries).map { i =>
-      connectDataBeatCounter(in.fire() && getId(in.bits) === UInt(i), in.bits.payload, UInt(0))._2
-    })
-  }
-
-  def connectIncomingDataBeatCounter[T <: Data : TypeTag](in: DecoupledIO[T]): Bool = {
-    in.bits match {
-      case p: TileLinkChannel if typeTag[T].tpe <:< typeTag[TileLinkChannel].tpe =>
-        connectDataBeatCounter(in.fire(), p, UInt(0))._2
-      case ln: LNAcquire if typeTag[T].tpe =:= typeTag[LNAcquire].tpe =>
-        connectDataBeatCounter(in.fire(), ln.payload, UInt(0))._2
-      case ln: LNRelease if typeTag[T].tpe =:= typeTag[LNRelease].tpe =>
-        connectDataBeatCounter(in.fire(), ln.payload, UInt(0))._2
-      case ln: LNGrant if typeTag[T].tpe =:= typeTag[LNGrant].tpe =>
-        connectDataBeatCounter(in.fire(), ln.payload, UInt(0))._2
-      case _ => { require(false, "Don't know how to connect a beat counter to " + typeTag[T].tpe); Bool(false)}
-    }
-  }
-
-  def connectHeaderlessTwoWayBeatCounter[ T <: TileLinkChannel : ClassTag, S <: TileLinkChannel : ClassTag](
-      max: Int,
-      up: DecoupledIO[T],
-      down: DecoupledIO[S],
-      beat: UInt): (Bool, UInt, Bool, UInt, Bool) = {
-    val cnt = Reg(init = UInt(0, width = log2Up(max+1)))
-    val (up_idx, do_inc) = connectDataBeatCounter(up.fire(), up.bits, beat)
-    val (down_idx, do_dec) = connectDataBeatCounter(down.fire(), down.bits, beat)
-    //Module.assert(!(do_dec && cnt === UInt(0)), "Decrementing 2way beat counter before ever incrementing")
-    cnt := Mux(do_dec,
-      Mux(do_inc, cnt, cnt - UInt(1)),
-      Mux(do_inc, cnt + UInt(1), cnt))
-    (cnt > UInt(0), up_idx, do_inc, down_idx, do_dec)
-  }
-
-  def connectTwoWayBeatCounter[ T <: TileLinkChannel : ClassTag, S <: TileLinkChannel : ClassTag](
-      max: Int,
-      up: DecoupledIO[LogicalNetworkIO[T]],
-      down: DecoupledIO[LogicalNetworkIO[S]],
-      inc: T => Bool = (t: T) => Bool(true),
-      dec: S => Bool = (s: S) => Bool(true)): (Bool, UInt, Bool, UInt, Bool) = {
-    val cnt = Reg(init = UInt(0, width = log2Up(max+1)))
-    val (up_idx, up_done) = connectDataBeatCounter(up.fire(), up.bits.payload, UInt(0))
-    val (down_idx, down_done) = connectDataBeatCounter(down.fire(), down.bits.payload, UInt(0))
-    val do_inc = up_done && inc(up.bits.payload)
-    val do_dec = down_done && dec(down.bits.payload)
-    cnt := Mux(do_dec,
-      Mux(do_inc, cnt, cnt - UInt(1)),
-      Mux(do_inc, cnt + UInt(1), cnt))
-    (cnt > UInt(0), up_idx, up_done, down_idx, down_done)
-  }
-}
-
 class FinishQueueEntry extends TLBundle {
   val fin = new Finish
   val dst = UInt(width = log2Up(params(LNEndpoints)))
@ -623,8 +587,7 @@ class FinishQueueEntry extends TLBundle {
|
||||
|
||||
class FinishQueue(entries: Int) extends Queue(new FinishQueueEntry, entries)
|
||||
|
||||
class FinishUnit(srcId: Int = 0, outstanding: Int = 2) extends TLModule
|
||||
with HasDataBeatCounters {
|
||||
class FinishUnit(srcId: Int = 0, outstanding: Int = 2) extends TLModule with HasDataBeatCounters {
|
||||
val io = new Bundle {
|
||||
val grant = Decoupled(new LogicalNetworkIO(new Grant)).flip
|
||||
val refill = Decoupled(new Grant)
|
||||
@ -645,12 +608,12 @@ class FinishUnit(srcId: Int = 0, outstanding: Int = 2) extends TLModule
|
||||
// a multibeat Grant. But Grants from multiple managers or transactions may
|
||||
// get interleaved, so we could need a counter for each.
|
||||
val done = if(tlNetworkDoesNotInterleaveBeats) {
|
||||
connectIncomingDataBeatCounter(io.grant)
|
||||
connectIncomingDataBeatCounterWithHeader(io.grant)
|
||||
} else {
|
||||
val entries = 1 << tlClientXactIdBits
|
||||
def getId(g: LogicalNetworkIO[Grant]) = g.payload.client_xact_id
|
||||
assert(getId(io.grant.bits) <= UInt(entries), "Not enough grant beat counters, only " + entries + " entries.")
|
||||
connectIncomingDataBeatCounters(io.grant, entries, getId).reduce(_||_)
|
||||
connectIncomingDataBeatCountersWithHeader(io.grant, entries, getId).reduce(_||_)
|
||||
}
|
||||
val q = Module(new FinishQueue(outstanding))
|
||||
q.io.enq.valid := io.grant.fire() && g.requiresAck() && (!g.hasMultibeatData() || done)
|
||||
@ -670,30 +633,7 @@ class FinishUnit(srcId: Int = 0, outstanding: Int = 2) extends TLModule
|
||||
}
|
||||
}
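The comment above is the crux of the per-id counter arrangement: if the network can interleave beats from different Grants, one shared counter would signal completion at the wrong time. An illustrative plain-Scala model of that reasoning follows; it is not part of the commit, all names are hypothetical, and tlDataBeats = 4 is assumed.

object InterleavedBeatsExample {
  def main(args: Array[String]): Unit = {
    val tlDataBeats = 4
    // Beats from two interleaved multibeat Grants, tagged by client_xact_id:
    val beats = Seq(0, 1, 0, 1, 0, 0, 1, 1)
    val counters = scala.collection.mutable.Map(0 -> 0, 1 -> 0)
    val done = beats.map { id =>
      counters(id) += 1                     // one counter per transaction id
      val d = counters(id) == tlDataBeats
      if (d) counters(id) = 0
      d
    }
    // Each message finishes exactly once, on its own fourth beat.
    assert(done.count(identity) == 2)
    assert(done(5) && done(7))              // ids 0 and 1 complete at indices 5 and 7
    println("per-id counters complete correctly")
  }
}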

object TileLinkHeaderOverwriter {
  def apply[T <: ClientToManagerChannel](
      in: DecoupledIO[LogicalNetworkIO[T]],
      clientId: Int,
      passThrough: Boolean): DecoupledIO[LogicalNetworkIO[T]] = {
    val out = in.clone.asDirectionless
    out.bits.payload := in.bits.payload
    out.bits.header.src := UInt(clientId)
    out.bits.header.dst := (if(passThrough) in.bits.header.dst else UInt(0))
    out.valid := in.valid
    in.ready := out.ready
    out
  }

  def apply[T <: ClientToManagerChannel with HasCacheBlockAddress](
      in: DecoupledIO[LogicalNetworkIO[T]],
      clientId: Int,
      nBanks: Int,
      addrConvert: UInt => UInt): DecoupledIO[LogicalNetworkIO[T]] = {
    val out: DecoupledIO[LogicalNetworkIO[T]] = apply(in, clientId, false)
    out.bits.header.dst := addrConvert(in.bits.payload.addr_block)
    out
  }

object ClientTileLinkHeaderCreator {
  def apply[T <: ClientToManagerChannel with HasCacheBlockAddress : ClassTag](
      in: DecoupledIO[T],
      clientId: Int,
@ -708,9 +648,24 @@ object TileLinkHeaderOverwriter {
  }
}

class TileLinkNetworkPort(clientId: Int, addrConvert: UInt => UInt) extends TLModule {
object ManagerTileLinkHeaderCreator {
  def apply[T <: ManagerToClientChannel with HasClientId : ClassTag](
      in: DecoupledIO[T],
      managerId: Int,
      idConvert: UInt => UInt): DecoupledIO[LogicalNetworkIO[T]] = {
    val out = new DecoupledIO(new LogicalNetworkIO(in.bits.clone)).asDirectionless
    out.bits.payload := in.bits
    out.bits.header.src := UInt(managerId)
    out.bits.header.dst := idConvert(in.bits.client_id)
    out.valid := in.valid
    in.ready := out.ready
    out
  }
}

class ClientTileLinkNetworkPort(clientId: Int, addrConvert: UInt => UInt) extends TLModule {
  val io = new Bundle {
    val client = new HeaderlessTileLinkIO().flip
    val client = new ClientTileLinkIO().flip
    val network = new TileLinkIO
  }

@ -718,8 +673,8 @@ class TileLinkNetworkPort(clientId: Int, addrConvert: UInt => UInt) extends TLMo
  finisher.io.grant <> io.network.grant
  io.network.finish <> finisher.io.finish

  val acq_with_header = TileLinkHeaderOverwriter(io.client.acquire, clientId, addrConvert)
  val rel_with_header = TileLinkHeaderOverwriter(io.client.release, clientId, addrConvert)
  val acq_with_header = ClientTileLinkHeaderCreator(io.client.acquire, clientId, addrConvert)
  val rel_with_header = ClientTileLinkHeaderCreator(io.client.release, clientId, addrConvert)
  val prb_without_header = DecoupledLogicalNetworkIOUnwrapper(io.network.probe)
  val gnt_without_header = finisher.io.refill

@ -731,58 +686,63 @@ class TileLinkNetworkPort(clientId: Int, addrConvert: UInt => UInt) extends TLMo
  io.client.grant <> gnt_without_header
}

object TileLinkNetworkPort {
  def apply[T <: Data](
      client: HeaderlessTileLinkIO,
      clientId: Int = 0,
      addrConvert: UInt => UInt = u => UInt(0))(implicit p: Parameters): TileLinkIO = {
    val port = Module(new TileLinkNetworkPort(clientId, addrConvert))(p)
    port.io.client <> client
    port.io.network
class ManagerTileLinkNetworkPort(managerId: Int, idConvert: UInt => UInt) extends TLModule {
  val io = new Bundle {
    val manager = new ManagerTileLinkIO().flip
    val network = new TileLinkIO().flip
  }
  io.network.grant <> ManagerTileLinkHeaderCreator(io.manager.grant, managerId, (u: UInt) => u)
  io.network.probe <> ManagerTileLinkHeaderCreator(io.manager.probe, managerId, idConvert)
  io.manager.acquire.bits.client_id := io.network.acquire.bits.header.src
  io.manager.acquire <> DecoupledLogicalNetworkIOUnwrapper(io.network.acquire)
  io.manager.release.bits.client_id := io.network.release.bits.header.src
  io.manager.release <> DecoupledLogicalNetworkIOUnwrapper(io.network.release)
  io.manager.finish <> DecoupledLogicalNetworkIOUnwrapper(io.network.finish)
}
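A hypothetical wiring sketch for the two new port classes. The names tile, hub, and the conversion functions here are assumptions for illustration, not part of the commit.

// Client side: stamp headers onto headerless client channels; this toy
// example routes every Acquire/Release to network endpoint 0.
val clientPort = Module(new ClientTileLinkNetworkPort(clientId = 0, addrConvert = (addr: UInt) => UInt(0)))
clientPort.io.client <> tile.io.cached   // assumed ClientTileLinkIO on a tile

// Manager side: stamp manager headers and recover client ids from headers.
val managerPort = Module(new ManagerTileLinkNetworkPort(managerId = 1, idConvert = (id: UInt) => id))
managerPort.io.manager <> hub.io.inner   // assumed ManagerTileLinkIO on an agent

// clientPort.io.network and managerPort.io.network then meet in the interconnect.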

class TileLinkEnqueuer(depths: (Int, Int, Int, Int, Int)) extends Module {
case class TileLinkDepths(acq: Int, prb: Int, rel: Int, gnt: Int, fin: Int)

class TileLinkEnqueuer(depths: TileLinkDepths) extends Module {
  val io = new Bundle {
    val client = new TileLinkIO().flip
    val manager = new TileLinkIO
  }
  io.manager.acquire <> (if(depths._1 > 0) Queue(io.client.acquire, depths._1) else io.client.acquire)
  io.client.probe <> (if(depths._2 > 0) Queue(io.manager.probe, depths._2) else io.manager.probe)
  io.manager.release <> (if(depths._3 > 0) Queue(io.client.release, depths._3) else io.client.release)
  io.client.grant <> (if(depths._4 > 0) Queue(io.manager.grant, depths._4) else io.manager.grant)
  io.manager.finish <> (if(depths._5 > 0) Queue(io.client.finish, depths._5) else io.client.finish)
  io.manager.acquire <> (if(depths.acq > 0) Queue(io.client.acquire, depths.acq) else io.client.acquire)
  io.client.probe <> (if(depths.prb > 0) Queue(io.manager.probe, depths.prb) else io.manager.probe)
  io.manager.release <> (if(depths.rel > 0) Queue(io.client.release, depths.rel) else io.client.release)
  io.client.grant <> (if(depths.gnt > 0) Queue(io.manager.grant, depths.gnt) else io.manager.grant)
  io.manager.finish <> (if(depths.fin > 0) Queue(io.client.finish, depths.fin) else io.client.finish)
}

object TileLinkEnqueuer {
  def apply(in: TileLinkIO, depths: (Int, Int, Int, Int, Int))(p: Parameters): TileLinkIO = {
  def apply(in: TileLinkIO, depths: TileLinkDepths)(p: Parameters): TileLinkIO = {
    val t = Module(new TileLinkEnqueuer(depths))(p)
    t.io.client <> in
    t.io.manager
  }
  def apply(in: TileLinkIO, depth: Int)(p: Parameters): TileLinkIO = {
    apply(in, (depth, depth, depth, depth, depth))(p)
    apply(in, TileLinkDepths(depth, depth, depth, depth, depth))(p)
  }
}
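A usage sketch showing why the named case class reads better than the old positional tuple. This is hypothetical: tl stands for some TileLinkIO and p for the current Parameters.

// Buffer only the acquire and release channels by one entry each:
val buffered = TileLinkEnqueuer(tl, TileLinkDepths(acq = 1, prb = 0, rel = 1, gnt = 0, fin = 0))(p)
// Or buffer every channel uniformly:
val buffered2 = TileLinkEnqueuer(tl, depth = 1)(p)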

abstract trait HasArbiterTypes {
/** Utility functions for constructing TileLinkIO arbiters */
trait TileLinkArbiterLike extends TileLinkParameters {
  // Some shorthand type variables
  type ManagerSourcedWithId = ManagerToClientChannel with HasClientTransactionId
  type ClientSourcedWithId = ClientToManagerChannel with HasClientTransactionId
  type ClientSourcedWithIdAndData = ClientToManagerChannel with
                                    HasClientTransactionId with
                                    HasTileLinkData
}
  type ClientSourcedWithIdAndData = ClientToManagerChannel with HasClientTransactionId with HasTileLinkData

// Utility functions for constructing TileLinkIO arbiters
trait TileLinkArbiterLike extends HasArbiterTypes with TileLinkParameters{
  val arbN: Int
  // These are filled in depending on whether the arbiter mucks with the
  // client ids and then needs to revert them on the way back
  val arbN: Int // The number of ports on the client side

  // These abstract funcs are filled in depending on whether the arbiter mucks with the
  // outgoing client ids to track sourcing and then needs to revert them on the way back
  def clientSourcedClientXactId(in: ClientSourcedWithId, id: Int): Bits
  def managerSourcedClientXactId(in: ManagerSourcedWithId): Bits
  def arbIdx(in: ManagerSourcedWithId): UInt

  def hookupClientSource[M <: ClientSourcedWithIdAndData : ClassTag](
  // The following functions are all wiring helpers for each of the different types of TileLink channels

  def hookupClientSource[M <: ClientSourcedWithIdAndData](
      clts: Seq[DecoupledIO[LogicalNetworkIO[M]]],
      mngr: DecoupledIO[LogicalNetworkIO[M]]) {
    def hasData(m: LogicalNetworkIO[M]) = m.payload.hasMultibeatData()
@ -796,7 +756,7 @@ trait TileLinkArbiterLike extends HasArbiterTypes with TileLinkParameters{
    arb.io.out <> mngr
  }

  def hookupClientSourceHeaderless[M <: ClientSourcedWithIdAndData : ClassTag](
  def hookupClientSourceHeaderless[M <: ClientSourcedWithIdAndData](
      clts: Seq[DecoupledIO[M]],
      mngr: DecoupledIO[M]) {
    def hasData(m: M) = m.hasMultibeatData()
@ -854,23 +814,20 @@ trait TileLinkArbiterLike extends HasArbiterTypes with TileLinkParameters{
    }
  }

  def hookupManagerSourceBroadcast[M <: Data](
      clts: Seq[DecoupledIO[M]],
      mngr: DecoupledIO[M]) {
  def hookupManagerSourceBroadcast[M <: Data](clts: Seq[DecoupledIO[M]], mngr: DecoupledIO[M]) {
    clts.map{ _.valid := mngr.valid }
    clts.map{ _.bits := mngr.bits }
    mngr.ready := clts.map(_.ready).reduce(_&&_)
  }

  def hookupFinish[M <: LogicalNetworkIO[Finish] : ClassTag](
      clts: Seq[DecoupledIO[M]],
      mngr: DecoupledIO[M]) {
  def hookupFinish[M <: LogicalNetworkIO[Finish]](clts: Seq[DecoupledIO[M]], mngr: DecoupledIO[M]) {
    val arb = Module(new RRArbiter(mngr.bits.clone, arbN))
    arb.io.in zip clts map { case (arb, req) => arb <> req }
    arb.io.in <> clts
    arb.io.out <> mngr
  }
}

/** Abstract base class for any arbiters that have UncachedTileLinkIOs */
abstract class UncachedTileLinkIOArbiter(val arbN: Int) extends Module with TileLinkArbiterLike {
  val io = new Bundle {
    val in = Vec.fill(arbN){new UncachedTileLinkIO}.flip
@ -881,6 +838,7 @@ abstract class UncachedTileLinkIOArbiter(val arbN: Int) extends Module with Tile
  hookupManagerSourceWithId(io.in.map(_.grant), io.out.grant)
}

/** Abstract base class for any arbiters that have cached TileLinkIOs */
abstract class TileLinkIOArbiter(val arbN: Int) extends Module with TileLinkArbiterLike {
  val io = new Bundle {
    val in = Vec.fill(arbN){new TileLinkIO}.flip
@ -893,31 +851,7 @@ abstract class TileLinkIOArbiter(val arbN: Int) extends Module with TileLinkArbi
  hookupManagerSourceWithId(io.in.map(_.grant), io.out.grant)
}

class HeaderlessUncachedTileLinkIOArbiter(val arbN: Int) extends Module
    with TileLinkArbiterLike
    with AppendsArbiterId {
  val io = new Bundle {
    val in = Vec.fill(arbN){new HeaderlessUncachedTileLinkIO}.flip
    val out = new HeaderlessUncachedTileLinkIO
  }
  hookupClientSourceHeaderless(io.in.map(_.acquire), io.out.acquire)
  hookupManagerSourceHeaderlessWithId(io.in.map(_.grant), io.out.grant)
}

class HeaderlessTileLinkIOArbiter(val arbN: Int) extends Module
    with TileLinkArbiterLike
    with AppendsArbiterId {
  val io = new Bundle {
    val in = Vec.fill(arbN){new HeaderlessTileLinkIO}.flip
    val out = new HeaderlessTileLinkIO
  }
  hookupClientSourceHeaderless(io.in.map(_.acquire), io.out.acquire)
  hookupClientSourceHeaderless(io.in.map(_.release), io.out.release)
  hookupManagerSourceBroadcast(io.in.map(_.probe), io.out.probe)
  hookupManagerSourceHeaderlessWithId(io.in.map(_.grant), io.out.grant)
}

// Appends the port index of the arbiter to the client_xact_id
/** Appends the port index of the arbiter to the client_xact_id */
trait AppendsArbiterId extends TileLinkArbiterLike {
  def clientSourcedClientXactId(in: ClientSourcedWithId, id: Int) =
    Cat(in.client_xact_id, UInt(id, log2Up(arbN)))
@ -926,24 +860,125 @@ trait AppendsArbiterId extends TileLinkArbiterLike {
  def arbIdx(in: ManagerSourcedWithId) = in.client_xact_id(log2Up(arbN)-1,0).toUInt
}
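To make the id plumbing concrete, here is a plain-Scala worked example of AppendsArbiterId's arithmetic. It is illustrative only: ordinary Scala rather than Chisel, and not part of the commit.

object AppendsArbiterIdExample {
  def main(args: Array[String]): Unit = {
    val arbN = 4
    val portBits = 2                    // log2Up(arbN)
    val clientXactId = 0x5              // 0b101, id chosen by the client
    val port = 2                        // arbiter input port index
    val outgoing = (clientXactId << portBits) | port   // Cat(xact_id, port)
    assert(outgoing == 0x16)            // 0b10110
    // On the response path the arbiter splits the fields back apart:
    assert((outgoing & (arbN - 1)) == port)            // arbIdx
    assert((outgoing >> portBits) == clientXactId)     // managerSourcedClientXactId
    println("id math checks out")
  }
}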

// Uses the client_xact_id as is (assumes it has been set to port index)
/** Uses the client_xact_id as is (assumes it has been set to port index) */
trait PassesId extends TileLinkArbiterLike {
  def clientSourcedClientXactId(in: ClientSourcedWithId, id: Int) = in.client_xact_id
  def managerSourcedClientXactId(in: ManagerSourcedWithId) = in.client_xact_id
  def arbIdx(in: ManagerSourcedWithId) = in.client_xact_id
}

// Overwrites some default client_xact_id with the port idx
/** Overwrites some default client_xact_id with the port idx */
trait UsesNewId extends TileLinkArbiterLike {
  def clientSourcedClientXactId(in: ClientSourcedWithId, id: Int) = UInt(id, log2Up(arbN))
  def managerSourcedClientXactId(in: ManagerSourcedWithId) = UInt(0)
  def arbIdx(in: ManagerSourcedWithId) = in.client_xact_id
}

// Mix-in id generation traits to make concrete arbiter classes
// Now we can mix in the various id-generation traits to make concrete arbiter classes
class UncachedTileLinkIOArbiterThatAppendsArbiterId(val n: Int) extends UncachedTileLinkIOArbiter(n) with AppendsArbiterId
class UncachedTileLinkIOArbiterThatPassesId(val n: Int) extends UncachedTileLinkIOArbiter(n) with PassesId
class UncachedTileLinkIOArbiterThatUsesNewId(val n: Int) extends UncachedTileLinkIOArbiter(n) with UsesNewId
class TileLinkIOArbiterThatAppendsArbiterId(val n: Int) extends TileLinkIOArbiter(n) with AppendsArbiterId
class TileLinkIOArbiterThatPassesId(val n: Int) extends TileLinkIOArbiter(n) with PassesId
class TileLinkIOArbiterThatUsesNewId(val n: Int) extends TileLinkIOArbiter(n) with UsesNewId

/** Concrete uncached client-side arbiter that appends the arbiter's port id to client_xact_id */
class ClientUncachedTileLinkIOArbiter(val arbN: Int) extends Module with TileLinkArbiterLike with AppendsArbiterId {
  val io = new Bundle {
    val in = Vec.fill(arbN){new ClientUncachedTileLinkIO}.flip
    val out = new ClientUncachedTileLinkIO
  }
  hookupClientSourceHeaderless(io.in.map(_.acquire), io.out.acquire)
  hookupManagerSourceHeaderlessWithId(io.in.map(_.grant), io.out.grant)
}

/** Concrete client-side arbiter that appends the arbiter's port id to client_xact_id */
class ClientTileLinkIOArbiter(val arbN: Int) extends Module with TileLinkArbiterLike with AppendsArbiterId {
  val io = new Bundle {
    val in = Vec.fill(arbN){new ClientTileLinkIO}.flip
    val out = new ClientTileLinkIO
  }
  hookupClientSourceHeaderless(io.in.map(_.acquire), io.out.acquire)
  hookupClientSourceHeaderless(io.in.map(_.release), io.out.release)
  hookupManagerSourceBroadcast(io.in.map(_.probe), io.out.probe)
  hookupManagerSourceHeaderlessWithId(io.in.map(_.grant), io.out.grant)
}
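A hypothetical sketch of the cached arbiter in use, funneling two cached clients into one outgoing port. The icache/dcache port names are assumptions for illustration.

// client_xact_ids are widened by log2Up(2) = 1 bit, recording which input
// port each transaction came from so Grants can be routed back.
val arb = Module(new ClientTileLinkIOArbiter(2))
arb.io.in(0) <> icache.io.tl
arb.io.in(1) <> dcache.io.tl
io.outer <> arb.io.out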

/** Utility trait containing wiring functions to keep track of how many data beats have
  * been sent or received over a particular TileLinkChannel or pair of channels.
  *
  * Won't count message types that don't have data.
  * Used in XactTrackers and FinishUnit.
  */
trait HasDataBeatCounters {
  type HasBeat = TileLinkChannel with HasTileLinkBeatId

  /** Returns the current count on this channel and when a message is done
    * @param inc increment the counter (usually .valid or .fire())
    * @param data the actual channel data
    * @param beat count to return for single-beat messages
    */
  def connectDataBeatCounter[S <: TileLinkChannel](inc: Bool, data: S, beat: UInt) = {
    val multi = data.hasMultibeatData()
    val (multi_cnt, multi_done) = Counter(inc && multi, data.tlDataBeats)
    val cnt = Mux(multi, multi_cnt, beat)
    val done = Mux(multi, multi_done, inc)
    (cnt, done)
  }

  /** Counter for beats on outgoing DecoupledIOs */
  def connectOutgoingDataBeatCounter[T <: TileLinkChannel](in: DecoupledIO[T], beat: UInt = UInt(0)): (UInt, Bool) =
    connectDataBeatCounter(in.fire(), in.bits, beat)

  /** Returns done but not cnt. Use the addr_beat subbundle instead of cnt for beats on
    * incoming channels in case of network reordering.
    */
  def connectIncomingDataBeatCounter[T <: TileLinkChannel](in: DecoupledIO[T]): Bool =
    connectDataBeatCounter(in.fire(), in.bits, UInt(0))._2

  /** Counter for beats on incoming DecoupledIO[LogicalNetworkIO[]]s; returns done */
  def connectIncomingDataBeatCounterWithHeader[T <: TileLinkChannel](in: DecoupledIO[LogicalNetworkIO[T]]): Bool =
    connectDataBeatCounter(in.fire(), in.bits.payload, UInt(0))._2

  /** If the network might interleave beats from different messages, we need a Vec of counters,
    * one for every outstanding message id that might be interleaved.
    *
    * @param getId mapping from Message to counter id
    */
  def connectIncomingDataBeatCountersWithHeader[T <: TileLinkChannel with HasClientTransactionId](
      in: DecoupledIO[LogicalNetworkIO[T]],
      entries: Int,
      getId: LogicalNetworkIO[T] => UInt): Vec[Bool] = {
    Vec((0 until entries).map { i =>
      connectDataBeatCounter(in.fire() && getId(in.bits) === UInt(i), in.bits.payload, UInt(0))._2
    })
  }

  /** Provides counters on two channels, as well as a meta-counter that tracks how many
    * messages have been sent over the up channel but not yet responded to over the down channel
    *
    * @param max max number of outstanding ups with no down
    * @param up outgoing channel
    * @param down incoming channel
    * @param beat overrides cnts on single-beat messages
    * @param track whether up's message should be tracked
    * @return a tuple containing whether there are outstanding messages, up's count,
    *         up's done, down's count, down's done
    */
  def connectTwoWayBeatCounter[T <: TileLinkChannel, S <: TileLinkChannel](
      max: Int,
      up: DecoupledIO[T],
      down: DecoupledIO[S],
      beat: UInt = UInt(0),
      track: T => Bool = (t: T) => Bool(true)): (Bool, UInt, Bool, UInt, Bool) = {
    val cnt = Reg(init = UInt(0, width = log2Up(max+1)))
    val (up_idx, up_done) = connectDataBeatCounter(up.fire(), up.bits, beat)
    val (down_idx, down_done) = connectDataBeatCounter(down.fire(), down.bits, beat)
    val do_inc = up_done && track(up.bits)
    val do_dec = down_done
    cnt := Mux(do_dec,
            Mux(do_inc, cnt, cnt - UInt(1)),
            Mux(do_inc, cnt + UInt(1), cnt))
    (cnt > UInt(0), up_idx, up_done, down_idx, down_done)
  }
}
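A usage sketch of the consolidated connectTwoWayBeatCounter. The context is hypothetical: it assumes an outer ClientUncachedTileLinkIO named io.outer and a bound nSecondaryMisses on outstanding Acquires.

// True whenever at least one Acquire has gone out whose Grant has not yet
// fully returned; the per-channel idx/done outputs are ignored here.
val (acquires_pending, _, _, _, _) = connectTwoWayBeatCounter(
  max = nSecondaryMisses,
  up = io.outer.acquire,
  down = io.outer.grant)
// 'acquires_pending' can gate a tracker's state machine until all Grants land.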

@ -32,19 +32,19 @@ abstract class CoherenceAgentBundle extends Bundle with CoherenceAgentParameters
abstract class CoherenceAgentModule extends Module with CoherenceAgentParameters

trait HasCoherenceAgentWiringHelpers {
  def doOutputArbitration[T <: TileLinkChannel : ClassTag](
      out: DecoupledIO[LogicalNetworkIO[T]],
      ins: Seq[DecoupledIO[LogicalNetworkIO[T]]]) {
    def lock(o: LogicalNetworkIO[T]) = o.payload.hasMultibeatData()
    val arb = Module(new LockingRRArbiter( out.bits.clone, ins.size, out.bits.payload.tlDataBeats, lock _))
  def doOutputArbitration[T <: TileLinkChannel](
      out: DecoupledIO[T],
      ins: Seq[DecoupledIO[T]]) {
    def lock(o: T) = o.hasMultibeatData()
    val arb = Module(new LockingRRArbiter(out.bits.clone, ins.size, out.bits.tlDataBeats, lock _))
    out <> arb.io.out
    arb.io.in zip ins map { case (a, in) => a <> in }
    arb.io.in <> ins
  }

  def doInputRouting[T <: HasManagerTransactionId](
      in: DecoupledIO[LogicalNetworkIO[T]],
      outs: Seq[DecoupledIO[LogicalNetworkIO[T]]]) {
    val idx = in.bits.payload.manager_xact_id
      in: DecoupledIO[T],
      outs: Seq[DecoupledIO[T]]) {
    val idx = in.bits.manager_xact_id
    outs.map(_.bits := in.bits)
    outs.zipWithIndex.map { case (o,i) => o.valid := in.valid && idx === UInt(i) }
    in.ready := Vec(outs.map(_.ready)).read(idx)
@ -52,23 +52,23 @@ trait HasCoherenceAgentWiringHelpers {
}

trait HasInnerTLIO extends CoherenceAgentBundle {
  val inner = Bundle(new TileLinkIO)(innerTLParams).flip
  val inner = Bundle(new ManagerTileLinkIO)(innerTLParams)
  val incoherent = Vec.fill(inner.tlNCoherentClients){Bool()}.asInput
  def iacq(dummy: Int = 0) = inner.acquire.bits.payload
  def iprb(dummy: Int = 0) = inner.probe.bits.payload
  def irel(dummy: Int = 0) = inner.release.bits.payload
  def ignt(dummy: Int = 0) = inner.grant.bits.payload
  def ifin(dummy: Int = 0) = inner.finish.bits.payload
  def iacq(dummy: Int = 0) = inner.acquire.bits
  def iprb(dummy: Int = 0) = inner.probe.bits
  def irel(dummy: Int = 0) = inner.release.bits
  def ignt(dummy: Int = 0) = inner.grant.bits
  def ifin(dummy: Int = 0) = inner.finish.bits
}

trait HasUncachedOuterTLIO extends CoherenceAgentBundle {
  val outer = Bundle(new HeaderlessUncachedTileLinkIO)(outerTLParams)
  val outer = Bundle(new ClientUncachedTileLinkIO)(outerTLParams)
  def oacq(dummy: Int = 0) = outer.acquire.bits
  def ognt(dummy: Int = 0) = outer.grant.bits
}

trait HasCachedOuterTLIO extends CoherenceAgentBundle {
  val outer = Bundle(new HeaderlessTileLinkIO)(outerTLParams)
  val outer = Bundle(new ClientTileLinkIO)(outerTLParams)
  def oacq(dummy: Int = 0) = outer.acquire.bits
  def oprb(dummy: Int = 0) = outer.probe.bits
  def orel(dummy: Int = 0) = outer.release.bits
@ -78,8 +78,8 @@ trait HasCachedOuterTLIO extends CoherenceAgentBundle {
class ManagerTLIO extends HasInnerTLIO with HasUncachedOuterTLIO

abstract class CoherenceAgent extends CoherenceAgentModule {
  def innerTL: TileLinkIO
  def outerTL: HeaderlessTileLinkIO
  def innerTL: ManagerTileLinkIO
  def outerTL: ClientTileLinkIO
  def incoherent: Vec[Bool]
}

@ -109,49 +109,25 @@ trait HasTrackerConflictIO extends Bundle {
class ManagerXactTrackerIO extends ManagerTLIO with HasTrackerConflictIO
class HierarchicalXactTrackerIO extends HierarchicalTLIO with HasTrackerConflictIO

abstract class XactTracker extends CoherenceAgentModule
    with HasDataBeatCounters {
abstract class XactTracker extends CoherenceAgentModule with HasDataBeatCounters {
  def addPendingBitWhenBeat[T <: HasBeat](inc: Bool, in: T): UInt =
    Fill(in.tlDataBeats, inc) & UIntToOH(in.addr_beat)
  def dropPendingBitWhenBeat[T <: HasBeat](dec: Bool, in: T): UInt =
    ~Fill(in.tlDataBeats, dec) | ~UIntToOH(in.addr_beat)

  def addPendingBitWhenBeatHasData[T <: Data : TypeTag](in: DecoupledIO[T]): UInt = {
    in.bits match {
      case p: HasBeat if typeTag[T].tpe <:< typeTag[HasBeat].tpe =>
        addPendingBitWhenBeat(in.fire() && p.hasData(), p)
      case ln: LNAcquire if typeTag[T].tpe <:< typeTag[LNAcquire].tpe =>
        addPendingBitWhenBeat(in.fire() && ln.payload.hasData(), ln.payload)
      case ln: LNRelease if typeTag[T].tpe <:< typeTag[LNRelease].tpe =>
        addPendingBitWhenBeat(in.fire() && ln.payload.hasData(), ln.payload)
      case ln: LNGrant if typeTag[T].tpe <:< typeTag[LNGrant].tpe =>
        addPendingBitWhenBeat(in.fire() && ln.payload.hasData(), ln.payload)
      case _ => { require(false, "Don't know how track beats of " + typeTag[T].tpe); UInt(0) }
    }
  }
  def addPendingBitWhenBeatHasData[T <: HasBeat](in: DecoupledIO[T]): UInt =
    addPendingBitWhenBeat(in.fire() && in.bits.hasData(), in.bits)

  def addPendingBitWhenBeatIsGetOrAtomic(in: DecoupledIO[LogicalNetworkIO[Acquire]]): UInt = {
    val a = in.bits.payload
  def addPendingBitWhenBeatIsGetOrAtomic(in: DecoupledIO[AcquireFromSrc]): UInt = {
    val a = in.bits
    val isGetOrAtomic = a.isBuiltInType() &&
      (Vec(Acquire.getType, Acquire.getBlockType, Acquire.putAtomicType).contains(a.a_type))
    addPendingBitWhenBeat(in.fire() && isGetOrAtomic, in.bits.payload)
      (Vec(Acquire.getType, Acquire.getBlockType, Acquire.putAtomicType).contains(a.a_type))
    addPendingBitWhenBeat(in.fire() && isGetOrAtomic, a)
  }

  def dropPendingBitWhenBeatHasData[T <: Data : TypeTag](in: DecoupledIO[T]): UInt = {
    in.bits match {
      case p: HasBeat if typeTag[T].tpe <:< typeTag[HasBeat].tpe =>
        dropPendingBitWhenBeat(in.fire() && p.hasData(), p)
      case ln: LNAcquire if typeTag[T].tpe <:< typeTag[LNAcquire].tpe =>
        dropPendingBitWhenBeat(in.fire() && ln.payload.hasData(), ln.payload)
      case ln: LNRelease if typeTag[T].tpe <:< typeTag[LNRelease].tpe =>
        dropPendingBitWhenBeat(in.fire() && ln.payload.hasData(), ln.payload)
      case ln: LNGrant if typeTag[T].tpe <:< typeTag[LNGrant].tpe =>
        dropPendingBitWhenBeat(in.fire() && ln.payload.hasData(), ln.payload)
      case _ => { require(false, "Don't know how track beats of " + typeTag[T].tpe); UInt(0) }
    }
  }
  def dropPendingBitWhenBeatHasData[T <: HasBeat](in: DecoupledIO[T]): UInt =
    dropPendingBitWhenBeat(in.fire() && in.bits.hasData(), in.bits)

  def dropPendingBitAtDest(in: DecoupledIO[LogicalNetworkIO[Probe]]): UInt = {
    ~Fill(in.bits.payload.tlNCoherentClients, in.fire()) | ~UIntToOH(in.bits.header.dst)
  }
  def dropPendingBitAtDest(in: DecoupledIO[ProbeToDst]): UInt =
    ~Fill(in.bits.tlNCoherentClients, in.fire()) | ~UIntToOH(in.bits.client_id)
}
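Finally, a plain-Scala model of the pending-bit helpers above (illustrative only; tlDataBeats = 4 assumed): addPendingBitWhenBeat produces bits to OR into a pending vector, while dropPendingBitWhenBeat produces a mask to AND against it.

object PendingBitsExample {
  def main(args: Array[String]): Unit = {
    val tlDataBeats = 4
    // Fill(tlDataBeats, inc) & UIntToOH(beat): one-hot bit to set when inc
    def addBits(inc: Boolean, beat: Int): Int = if (inc) 1 << beat else 0
    // ~Fill(tlDataBeats, dec) | ~UIntToOH(beat): all ones except beat when dec
    def dropMask(dec: Boolean, beat: Int): Int = if (dec) ~(1 << beat) else -1
    var pending = 0
    pending |= addBits(inc = true, beat = 2)   // beat 2 arrives with data
    assert(pending == 0x4)
    pending &= dropMask(dec = true, beat = 2)  // beat 2 is consumed
    assert(pending == 0x0)
    println("pending-bit bookkeeping checks out")
  }
}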
|
||||
|
Loading…
Reference in New Issue
Block a user