Get rid of NASTI memory interconnects
These were made for a previous Hurricane tapeout, but we are now doing all of the memory routing in TileLink, so they are no longer needed.
This commit is contained in:
parent
7d93fd3bfc
commit
c77c244016
@ -1,109 +0,0 @@
|
|||||||
package junctions
|
|
||||||
|
|
||||||
import Chisel._
|
|
||||||
import cde.Parameters
|
|
||||||
|
|
||||||
/** Test driver for a NASTI demultiplexer: for each of the `n` demux outputs
  * in turn, issues one 8-byte write carrying the output's index, then one
  * 8-byte read, asserting that the data read back equals the index written.
  *
  * @param n number of demux endpoints to exercise
  */
class NastiDemuxDriver(n: Int)(implicit p: Parameters) extends Module {
  val io = new Bundle {
    val start = Bool(INPUT)              // pulse high to begin the test sequence
    val finished = Bool(OUTPUT)          // asserted once all n endpoints pass
    val nasti = new NastiIO              // master port to drive into the demux
    val select = UInt(OUTPUT, log2Up(n)) // routing select, mirrors internal reg
  }

  // Per endpoint: write address -> write data -> write response ->
  // read address -> read response; s_done after the last endpoint's read.
  val (s_idle :: s_write_addr :: s_write_data :: s_write_resp ::
       s_read_addr :: s_read_resp :: s_done :: Nil) = Enum(Bits(), 7)
  val state = Reg(init = s_idle)

  // Index of the endpoint currently being exercised.
  val select = Reg(init = UInt(0, log2Up(n)))

  when (state === s_idle && io.start) { state := s_write_addr }
  when (io.nasti.aw.fire()) { state := s_write_data }
  when (io.nasti.w.fire()) { state := s_write_resp }
  when (io.nasti.b.fire()) { state := s_read_addr }
  when (io.nasti.ar.fire()) { state := s_read_resp }
  when (io.nasti.r.fire()) {
    when (select === UInt(n - 1)) {
      state := s_done
    } .otherwise {
      // Move on to the next endpoint and repeat the write/read sequence.
      select := select + UInt(1)
      state := s_write_addr
    }
  }

  io.nasti.aw.valid := (state === s_write_addr)
  io.nasti.aw.bits := NastiWriteAddressChannel(
    id = UInt(0),
    addr = UInt(0),
    size = UInt("b011")) // size 3 => 2^3 = 8 bytes per beat
  io.nasti.w.valid := (state === s_write_data)
  // Written data is the endpoint index, so the readback check below is exact.
  io.nasti.w.bits := NastiWriteDataChannel(data = select)
  io.nasti.b.ready := (state === s_write_resp)
  io.nasti.ar.valid := (state === s_read_addr)
  io.nasti.ar.bits := NastiReadAddressChannel(
    id = UInt(0),
    addr = UInt(0),
    size = UInt("b011"))
  io.nasti.r.ready := (state === s_read_resp)

  io.finished := (state === s_done)
  io.select := select

  // Read data must echo the index previously written to this endpoint.
  assert(!io.nasti.r.valid || io.nasti.r.bits.data === select,
    "NASTI DeMux test: Read data did not match")
}
|
|
||||||
|
|
||||||
/** Minimal single-register NASTI slave used as a demux test endpoint.
  * Accepts exactly one write (latching its data), then exactly one read
  * (returning the latched data), then parks in `s_done` and deasserts all
  * ready/valid outputs. */
class NastiDemuxSlave(implicit p: Parameters) extends NastiModule()(p) {
  val io = (new NastiIO).flip

  val (s_write_wait :: s_write_data :: s_write_resp ::
       s_read_wait :: s_read_resp :: s_done :: Nil) = Enum(Bits(), 6)
  val state = Reg(init = s_write_wait)

  val value = Reg(UInt(width = 64))        // data latched by the single write
  val id = Reg(UInt(width = nastiXIdBits)) // transaction id echoed in responses

  when (io.aw.fire()) {
    id := io.aw.bits.id
    state := s_write_data
  }

  when (io.w.fire()) {
    value := io.w.bits.data
    state := s_write_resp
  }

  when (io.b.fire()) { state := s_read_wait }

  when (io.ar.fire()) {
    id := io.ar.bits.id
    state := s_read_resp
  }

  when (io.r.fire()) { state := s_done }

  // Each channel is ready/valid only in its dedicated state, enforcing the
  // strict one-write-then-one-read protocol.
  io.aw.ready := (state === s_write_wait)
  io.w.ready := (state === s_write_data)
  io.b.valid := (state === s_write_resp)
  io.b.bits := NastiWriteResponseChannel(id = id)
  io.ar.ready := (state === s_read_wait)
  io.r.valid := (state === s_read_resp)
  io.r.bits := NastiReadDataChannel(id = id, data = value)
}
|
|
||||||
|
|
||||||
/** Unit test: wires a NastiDemuxDriver through a NastiMemoryDemux to
  * `nSlaves` NastiDemuxSlave endpoints. Finishes when the driver has
  * successfully written to and read back from every endpoint. */
class NastiMemoryDemuxTest(implicit p: Parameters) extends unittest.UnitTest {
  val nSlaves = 4

  val driver = Module(new NastiDemuxDriver(nSlaves))
  driver.io.start := io.start
  io.finished := driver.io.finished

  val demux = Module(new NastiMemoryDemux(nSlaves))
  demux.io.master <> driver.io.nasti
  // The driver tells the demux which endpoint it is currently targeting.
  demux.io.select := driver.io.select

  for (i <- 0 until nSlaves) {
    val slave = Module(new NastiDemuxSlave)
    slave.io <> demux.io.slaves(i)
  }
}
|
|
@ -553,171 +553,6 @@ class NastiRecursiveInterconnect(val nMasters: Int, addrMap: AddrMap)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Address-slicing helper for an `nChannels`-way interleaved memory system.
  * The channel-select field sits directly above the per-block byte offset
  * (one block = MIFDataBits * MIFDataBeats / 8 bytes), so consecutive
  * blocks map to different channels.
  *
  * @param nChannels number of memory channels being interleaved across
  */
class ChannelHelper(nChannels: Int)
    (implicit val p: Parameters) extends HasNastiParameters {

  val dataBytes = p(MIFDataBits) * p(MIFDataBeats) / 8 // bytes per memory block
  val chanSelBits = log2Ceil(nChannels)                // width of select field
  val selOffset = log2Up(dataBytes)                    // LSB of select field
  val blockOffset = selOffset + chanSelBits            // MSB+1 of select field

  /** Extracts the channel index from a full address (0 when single-channel). */
  def getSelect(addr: UInt) =
    if (nChannels > 1) addr(blockOffset - 1, selOffset) else UInt(0)

  /** Squeezes the channel-select bits out of a full address, yielding the
    * per-channel (compacted) address; identity when single-channel. */
  def getAddr(addr: UInt) =
    if (nChannels > 1)
      Cat(addr(nastiXAddrBits - 1, blockOffset), addr(selOffset - 1, 0))
    else addr
}
|
|
||||||
|
|
||||||
/** Interconnect routing `nBanksPerChannel * nChannels` bank masters down to
  * `nChannels` memory-channel slaves. Each channel gets an arbiter over its
  * (strided) set of banks, and the channel-select bits are removed from
  * outgoing request addresses via ChannelHelper. */
class NastiMemoryInterconnect(
    nBanksPerChannel: Int, nChannels: Int)
    (implicit p: Parameters) extends NastiInterconnect()(p) {

  val nBanks = nBanksPerChannel * nChannels
  val nMasters = nBanks
  val nSlaves = nChannels

  val chanHelper = new ChannelHelper(nChannels)
  // Bulk-connect the whole channel, then override the request addresses:
  // Chisel's last-connect semantics make the getAddr assignments win.
  def connectChannel(outer: NastiIO, inner: NastiIO) {
    outer <> inner
    outer.ar.bits.addr := chanHelper.getAddr(inner.ar.bits.addr)
    outer.aw.bits.addr := chanHelper.getAddr(inner.aw.bits.addr)
  }

  for (i <- 0 until nChannels) {
    /* Bank assignments to channels are strided so that consecutive banks
     * map to different channels. That way, consecutive cache lines also
     * map to different channels */
    val banks = (i until nBanks by nChannels).map(j => io.masters(j))

    val channelArb = Module(new NastiArbiter(nBanksPerChannel))
    channelArb.io.master <> banks
    connectChannel(io.slaves(i), channelArb.io.slave)
  }
}
|
|
||||||
|
|
||||||
/** Allows users to switch between various memory configurations. Note that
  * this is a dangerous operation: not only does switching the select input to
  * this module violate Nasti, it also causes the memory of the machine to
  * become garbled. It's expected that select only changes at boot time, as
  * part of the memory controller configuration. */
class NastiMemorySelectorIO(val nBanks: Int, val maxMemChannels: Int, nConfigs: Int)
    (implicit p: Parameters)
    extends NastiInterconnectIO(nBanks, maxMemChannels) {
  // Chooses which of the nConfigs channel layouts is active.
  val select = UInt(INPUT, width = log2Up(nConfigs))
  override def cloneType =
    new NastiMemorySelectorIO(nMasters, nSlaves, nConfigs).asInstanceOf[this.type]
}
|
|
||||||
|
|
||||||
/** Selectable memory interconnect: instantiates one NastiMemoryInterconnect
  * per entry in `configs` (each entry is a channel count) and muxes all of
  * them onto a single master/slave port set based on `io.select`. See the
  * warning on NastiMemorySelectorIO: select must only change at boot time.
  *
  * @param nBanks         total number of bank masters
  * @param maxMemChannels widest channel configuration supported (slave count)
  * @param configs        channel counts of the selectable configurations
  */
class NastiMemorySelector(nBanks: Int, maxMemChannels: Int, configs: Seq[Int])
    (implicit p: Parameters)
    extends NastiInterconnect()(p) {
  val nMasters = nBanks
  val nSlaves = maxMemChannels
  val nConfigs = configs.size

  override lazy val io = new NastiMemorySelectorIO(nBanks, maxMemChannels, nConfigs)

  // Conditionally forwards one decoupled channel; when `active` is false the
  // defaults wired up by disconnectMaster/disconnectSlave remain in effect.
  def muxOnSelect(up: DecoupledIO[Bundle], dn: DecoupledIO[Bundle], active: Bool): Unit = {
    when (active) { dn.bits := up.bits }
    when (active) { up.ready := dn.ready }
    when (active) { dn.valid := up.valid }
  }

  // Request channels (aw/w/ar) flow up->dn; response channels (b/r) dn->up.
  def muxOnSelect(up: NastiIO, dn: NastiIO, active: Bool): Unit = {
    muxOnSelect(up.aw, dn.aw, active)
    muxOnSelect(up.w, dn.w, active)
    muxOnSelect(dn.b, up.b, active)
    muxOnSelect(up.ar, dn.ar, active)
    muxOnSelect(dn.r, up.r, active)
  }

  def muxOnSelect(up: Vec[NastiIO], dn: Vec[NastiIO], active: Bool) : Unit = {
    for (i <- 0 until up.size)
      muxOnSelect(up(i), dn(i), active)
  }

  /* Disconnects a vector of Nasti ports, which involves setting them to
   * invalid. Due to Chisel reasons, we need to also set the bits to 0 (since
   * there can't be any unconnected inputs). */
  def disconnectSlave(slave: Vec[NastiIO]) = {
    slave.foreach{ m =>
      m.aw.valid := Bool(false)
      m.aw.bits := m.aw.bits.fromBits( UInt(0) )
      m.w.valid := Bool(false)
      m.w.bits := m.w.bits.fromBits( UInt(0) )
      m.b.ready := Bool(false)
      m.ar.valid := Bool(false)
      m.ar.bits := m.ar.bits.fromBits( UInt(0) )
      m.r.ready := Bool(false)
    }
  }

  def disconnectMaster(master: Vec[NastiIO]) = {
    master.foreach{ m =>
      m.aw.ready := Bool(false)
      m.w.ready := Bool(false)
      m.b.valid := Bool(false)
      m.b.bits := m.b.bits.fromBits( UInt(0) )
      m.ar.ready := Bool(false)
      m.r.valid := Bool(false)
      m.r.bits := m.r.bits.fromBits( UInt(0) )
    }
  }

  /* Provides default wires on all our outputs. */
  disconnectMaster(io.masters)
  disconnectSlave(io.slaves)

  /* Constructs interconnects for each of the layouts suggested by the
   * configuration and switches between them based on the select input. */
  configs.zipWithIndex.foreach{ case (nChannels, select) =>
    val nBanksPerChannel = nBanks / nChannels
    val ic = Module(new NastiMemoryInterconnect(nBanksPerChannel, nChannels))
    // Tie off the inactive interconnect's ports, then overlay the live
    // connections only when this config is selected (last connect wins).
    disconnectMaster(ic.io.slaves)
    disconnectSlave(ic.io.masters)
    muxOnSelect(io.masters, ic.io.masters, io.select === UInt(select))
    muxOnSelect(ic.io.slaves, io.slaves, io.select === UInt(select))
  }
}
|
|
||||||
|
|
||||||
/** Routes a single NASTI master to one of `nRoutes` slave ports chosen by
  * `io.select`. Unselected slaves see valid/ready deasserted; the master's
  * response channels carry zeroed defaults when no slave responds. The
  * select input is assumed stable across outstanding transactions. */
class NastiMemoryDemux(nRoutes: Int)(implicit p: Parameters) extends NastiModule()(p) {
  val io = new Bundle {
    val master = (new NastiIO).flip
    val slaves = Vec(nRoutes, new NastiIO)
    val select = UInt(INPUT, log2Up(nRoutes)) // which slave port is active
  }

  // Request channel (master -> slave idx): valid gated by select; ready
  // forwarded back only when selected (overriding the defaults below).
  def connectReqChannel[T <: Data](idx: Int, out: DecoupledIO[T], in: DecoupledIO[T]) {
    out.valid := in.valid && io.select === UInt(idx)
    out.bits := in.bits
    when (io.select === UInt(idx)) { in.ready := out.ready }
  }

  // Response channel (slave idx -> master): mirror image of the above.
  def connectRespChannel[T <: Data](idx: Int, out: DecoupledIO[T], in: DecoupledIO[T]) {
    when (io.select === UInt(idx)) { out.valid := in.valid }
    when (io.select === UInt(idx)) { out.bits := in.bits }
    in.ready := out.ready && io.select === UInt(idx)
  }

  // Defaults on the master port; per-slave `when` connections above override
  // these for the selected route (Chisel last-connect semantics).
  io.master.ar.ready := Bool(false)
  io.master.aw.ready := Bool(false)
  io.master.w.ready := Bool(false)
  io.master.r.valid := Bool(false)
  io.master.r.bits := NastiReadDataChannel(id = UInt(0), data = UInt(0))
  io.master.b.valid := Bool(false)
  io.master.b.bits := NastiWriteResponseChannel(id = UInt(0))

  io.slaves.zipWithIndex.foreach { case (slave, i) =>
    connectReqChannel(i, slave.ar, io.master.ar)
    connectReqChannel(i, slave.aw, io.master.aw)
    connectReqChannel(i, slave.w, io.master.w)
    connectRespChannel(i, io.master.r, slave.r)
    connectRespChannel(i, io.master.b, slave.b)
  }
}
|
|
||||||
|
|
||||||
object AsyncNastiCrossing {
|
object AsyncNastiCrossing {
|
||||||
// takes from_source from the 'from' clock domain to the 'to' clock domain
|
// takes from_source from the 'from' clock domain to the 'to' clock domain
|
||||||
def apply(from_clock: Clock, from_reset: Bool, from_source: NastiIO, to_clock: Clock, to_reset: Bool, depth: Int = 8, sync: Int = 3) = {
|
def apply(from_clock: Clock, from_reset: Bool, from_source: NastiIO, to_clock: Clock, to_reset: Bool, depth: Int = 8, sync: Int = 3) = {
|
||||||
|
@ -12,7 +12,6 @@ class WithJunctionsUnitTests extends Config(
|
|||||||
case rocket.XLen => 64
|
case rocket.XLen => 64
|
||||||
case UnitTests => (p: Parameters) => Seq(
|
case UnitTests => (p: Parameters) => Seq(
|
||||||
Module(new junctions.MultiWidthFifoTest),
|
Module(new junctions.MultiWidthFifoTest),
|
||||||
Module(new junctions.NastiMemoryDemuxTest()(p)),
|
|
||||||
Module(new junctions.HastiTest()(p)))
|
Module(new junctions.HastiTest()(p)))
|
||||||
case _ => throw new CDEMatchError
|
case _ => throw new CDEMatchError
|
||||||
})
|
})
|
||||||
|
Loading…
Reference in New Issue
Block a user