Clean up some zero-width wire cases using UInt.extract

commit d78f1aacd0 (parent da512d4230)
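Every hunk below removes the same hand-rolled guard: a conditional that falls back to UInt(0) whenever an address slice would be zero bits wide. That guard now lives in a single UInt.extract helper, added to util.scala at the end of this diff. A minimal sketch of the before/after shape, assuming a UInt named addr and integer bounds hi and lo that are not tied to any particular call site:

    // before: each call site guards against the empty slice itself
    val offsetOld = if (hi >= lo) addr(hi, lo) else UInt(0)
    // after: extract returns UInt(0) on its own when hi == lo-1,
    // i.e. when the requested slice has zero width
    val offsetNew = addr.extract(hi, lo)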
@@ -216,9 +216,7 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   dataArb.io.in(0).bits.addr := Mux(pstore2_valid, pstore2_addr, pstore1_addr)
   dataArb.io.in(0).bits.way_en := Mux(pstore2_valid, pstore2_way, pstore1_way)
   dataArb.io.in(0).bits.wdata := Fill(rowWords, Mux(pstore2_valid, pstore2_storegen_data, pstore1_storegen_data))
-  val pstore_mask_shift =
-    if (rowOffBits > offsetlsb) Mux(pstore2_valid, pstore2_addr, pstore1_addr)(rowOffBits-1,offsetlsb) << wordOffBits
-    else UInt(0)
+  val pstore_mask_shift = Mux(pstore2_valid, pstore2_addr, pstore1_addr).extract(rowOffBits-1,offsetlsb) << wordOffBits
   dataArb.io.in(0).bits.wmask := Mux(pstore2_valid, pstore2_storegen_mask, pstore1_storegen.mask) << pstore_mask_shift

   // store->load RAW hazard detection
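In configurations where a row holds only one word, rowOffBits equals offsetlsb, the requested slice is empty, and extract returns UInt(0); the wmask shift then degenerates to exactly what the removed else branch produced. A plain-Scala width check with hypothetical values, not the real cache parameters:

    def sliceWidth(hi: Int, lo: Int) = hi - lo + 1
    val rowOffBits = 3; val offsetlsb = 3               // one word per row (assumed)
    assert(sliceWidth(rowOffBits - 1, offsetlsb) == 0)  // zero width: extract yields UInt(0)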
@@ -247,9 +245,7 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
     addr_byte = s2_req.addr(beatOffBits-1, 0),
     operand_size = s2_req.typ,
     alloc = Bool(false))
-  val uncachedPutOffset = // TODO zero-width
-    if (beatBytes > wordBytes) s2_req.addr(beatOffBits-1, wordOffBits)
-    else UInt(0)
+  val uncachedPutOffset = s2_req.addr.extract(beatOffBits-1, wordOffBits)
   val uncachedPutMessage = Put(
     client_xact_id = UInt(0),
     addr_block = s2_req.addr(paddrBits-1, blockOffBits),
@@ -402,9 +398,7 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   }

   // load data subword mux/sign extension
-  val s2_word_idx = // TODO zero-width
-    if (rowBits > wordBits) s2_req.addr(log2Up(rowBits/8)-1, log2Up(wordBytes))
-    else UInt(0)
+  val s2_word_idx = s2_req.addr.extract(log2Up(rowBits/8)-1, log2Up(wordBytes))
   val s2_data_word = s2_data >> Cat(s2_word_idx, UInt(0, log2Up(coreDataBits)))
   val loadgen = new LoadGen(s2_req.typ, s2_req.addr, s2_data_word, s2_sc, wordBytes)
   io.cpu.resp.bits.data := loadgen.data | s2_sc_fail
@@ -115,17 +115,13 @@ class Frontend(implicit p: Parameters) extends CoreModule()(p) with HasL1CachePa
   io.cpu.npc := Mux(io.cpu.req.valid, io.cpu.req.bits.pc, npc)

   require(fetchWidth * coreInstBytes <= rowBytes)
-  val fetch_data = // TODO zero-width
-    if (fetchWidth * coreInstBytes == rowBytes) icache.io.resp.bits.datablock
-    else icache.io.resp.bits.datablock >> (s2_pc(log2Up(rowBytes)-1,log2Up(fetchWidth*coreInstBytes)) << log2Up(fetchWidth*coreInstBits))
+  val fetch_data = icache.io.resp.bits.datablock >> (s2_pc.extract(log2Up(rowBytes)-1,log2Up(fetchWidth*coreInstBytes)) << log2Up(fetchWidth*coreInstBits))

   for (i <- 0 until fetchWidth) {
     io.cpu.resp.bits.data(i) := fetch_data(i*coreInstBits+coreInstBits-1, i*coreInstBits)
   }

-  val all_ones = UInt((1 << (fetchWidth+1))-1)
-  val msk_pc = if (fetchWidth == 1) all_ones else all_ones << s2_pc(log2Up(fetchWidth) -1+2,2)
-  io.cpu.resp.bits.mask := msk_pc
+  io.cpu.resp.bits.mask := UInt((1 << fetchWidth)-1) << s2_pc.extract(log2Up(fetchWidth)+log2Up(coreInstBytes)-1, log2Up(coreInstBytes))
   io.cpu.resp.bits.xcpt_if := s2_xcpt_if
   io.cpu.resp.bits.replay := s2_speculative && !icache.io.resp.valid && !s2_xcpt_if

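The new mask expression folds the old all_ones/msk_pc pair into one line: the extracted PC bits pick the instruction slot the fetch starts at, and shifting a field of fetchWidth ones by that slot marks it and the later slots as valid (the destination mask is fetchWidth bits wide, so the higher bits fall away). A worked example in plain Scala with made-up parameters (fetchWidth = 4, coreInstBytes = 4), not the actual configuration:

    def log2Up(x: Int) = math.max(1, BigInt(x - 1).bitLength)   // Chisel-2 style log2Up
    val fetchWidth = 4; val coreInstBytes = 4
    val hi = log2Up(fetchWidth) + log2Up(coreInstBytes) - 1      // PC bit 3
    val lo = log2Up(coreInstBytes)                               // PC bit 2
    val pc = 0x108                                               // hypothetical fetch PC
    val slot = (pc >> lo) & ((1 << (hi - lo + 1)) - 1)           // third slot of the group
    val mask = (((1 << fetchWidth) - 1) << slot) & ((1 << fetchWidth) - 1)
    assert(slot == 2 && mask == 0xc)                             // slots 2 and 3 stay valid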
@@ -72,7 +72,7 @@ class ICache(latency: Int)(implicit p: Parameters) extends CoreModule()(p) with
   val refill_tag = refill_addr(tagBits+untagBits-1,untagBits)

   val narrow_grant = FlowThroughSerializer(io.mem.grant, refillCyclesPerBeat)
-  val (refill_cnt, refill_wrap) = Counter(narrow_grant.fire(), refillCycles) //TODO Zero width wire
+  val (refill_cnt, refill_wrap) = Counter(narrow_grant.fire(), refillCycles)
   val refill_done = state === s_refill && refill_wrap
   narrow_grant.ready := Bool(true)

@@ -116,10 +116,9 @@ class ICache(latency: Int)(implicit p: Parameters) extends CoreModule()(p) with
     val wen = narrow_grant.valid && repl_way === UInt(i)
     when (wen) {
       val e_d = code.encode(narrow_grant.bits.data).toUInt
-      if(refillCycles > 1) data_array.write(Cat(s1_idx, refill_cnt), e_d)
-      else data_array.write(s1_idx, e_d)
+      data_array.write((s1_idx << log2Ceil(refillCycles)) | refill_cnt, e_d)
     }
-    val s0_raddr = s0_vaddr(untagBits-1,blockOffBits-(if(refillCycles > 1) refill_cnt.getWidth else 0))
+    val s0_raddr = s0_vaddr(untagBits-1,blockOffBits-log2Ceil(refillCycles))
     s1_dout(i) := data_array.read(s0_raddr, !wen && s0_valid)
   }

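The write index and read address here rely on log2Ceil(1) being 0, so the refillCycles == 1 case needs no special branch: the shift is a no-op, the OR with a counter that never leaves zero is the identity, and blockOffBits-log2Ceil(refillCycles) reduces to the old "- 0". The MSHR io.refill.addr hunk further down uses the same identity. A plain-Scala sanity check of the arithmetic (values are illustrative only):

    def log2Ceil(x: Int) = BigInt(x - 1).bitLength
    val s1_idx = 0x2a
    // refillCycles == 1: same address the removed else-branch wrote to
    assert(((s1_idx << log2Ceil(1)) | 0) == s1_idx)
    // refillCycles == 4: same as the removed Cat(s1_idx, refill_cnt) with a 2-bit count
    val refill_cnt = 3
    assert(((s1_idx << log2Ceil(4)) | refill_cnt) == s1_idx * 4 + refill_cnt)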
@@ -164,9 +164,7 @@ class IOMSHR(id: Int)(implicit p: Parameters) extends L1HellaCacheModule()(p) {
     val replay_next = Bool(OUTPUT)
   }

-  def beatOffset(addr: UInt) = // TODO zero-width
-    if (beatOffBits > wordOffBits) addr(beatOffBits - 1, wordOffBits)
-    else UInt(0)
+  def beatOffset(addr: UInt) = addr.extract(beatOffBits - 1, wordOffBits)

   def wordFromBeat(addr: UInt, dat: UInt) = {
     val shift = Cat(beatOffset(addr), UInt(0, wordOffBits + log2Up(wordBytes)))
@@ -300,7 +298,7 @@ class MSHR(id: Int)(implicit p: Parameters) extends L1HellaCacheModule()(p) {
                 (Vec(s_refill_req, s_refill_resp).contains(state) &&
                   !cmd_requires_second_acquire))
   val gnt_multi_data = io.mem_grant.bits.hasMultibeatData()
-  val (refill_cnt, refill_count_done) = Counter(io.mem_grant.valid && gnt_multi_data, refillCycles) // TODO: Zero width?
+  val (refill_cnt, refill_count_done) = Counter(io.mem_grant.valid && gnt_multi_data, refillCycles)
   val refill_done = io.mem_grant.valid && (!gnt_multi_data || refill_count_done)

   val rpq = Module(new Queue(new ReplayInternal, p(ReplayQueueDepth)))
@@ -373,7 +371,7 @@ class MSHR(id: Int)(implicit p: Parameters) extends L1HellaCacheModule()(p) {

   io.idx_match := (state =/= s_invalid) && idx_match
   io.refill.way_en := req.way_en
-  io.refill.addr := (if(refillCycles > 1) Cat(req_idx, refill_cnt) else req_idx) << rowOffBits
+  io.refill.addr := ((req_idx << log2Ceil(refillCycles)) | refill_cnt) << rowOffBits
   io.tag := req.addr >> untagBits
   io.req_pri_rdy := state === s_invalid
   io.req_sec_rdy := sec_rdy && rpq.io.enq.ready
@@ -963,11 +961,7 @@ class HellaCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   }

   writeArb.io.in(0).bits.addr := s3_req.addr
-  val rowIdx =
-    if (rowOffBits > offsetlsb) s3_req.addr(rowOffBits-1,offsetlsb).toUInt
-    else UInt(0)
-  val rowWMask = UInt(1) << (if(rowOffBits > offsetlsb) rowIdx else UInt(0))
-  writeArb.io.in(0).bits.wmask := rowWMask
+  writeArb.io.in(0).bits.wmask := UIntToOH(s3_req.addr.extract(rowOffBits-1,offsetlsb))
   writeArb.io.in(0).bits.data := Fill(rowWords, s3_req.data)
   writeArb.io.in(0).valid := s3_valid
   writeArb.io.in(0).bits.way_en := s3_way
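UIntToOH subsumes both arms of the removed conditional: when rowOffBits > offsetlsb it one-hot encodes the row word index, and when the slice is empty extract gives UInt(0), whose one-hot encoding is UInt(1), exactly the old UInt(1) << UInt(0). The behaviour being assumed, written out in plain Scala:

    def uintToOH(idx: Int) = 1 << idx      // what UIntToOH computes for an in-range index
    assert(uintToOH(0) == 1)               // empty slice -> write mask 0b1
    assert(uintToOH(5) == 0x20)            // word 5 of the row -> bit 5 of the mask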
@@ -14,10 +14,16 @@ object Util {
   implicit def booleanToBool(x: Boolean): Bits = Bool(x)
   implicit def intSeqToUIntSeq(x: Seq[Int]): Seq[UInt] = x.map(UInt(_))
   implicit def wcToUInt(c: WideCounter): UInt = c.value
-  implicit def sextToConv(x: UInt) = new AnyRef {
+
+  implicit class UIntToAugmentedUInt(val x: UInt) extends AnyVal {
     def sextTo(n: Int): UInt =
       if (x.getWidth == n) x
       else Cat(Fill(n - x.getWidth, x(x.getWidth-1)), x)
+
+    def extract(hi: Int, lo: Int): UInt = {
+      if (hi == lo-1) UInt(0)
+      else x(hi, lo)
+    }
   }

   implicit def booleanToIntConv(x: Boolean) = new AnyRef {
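With object Util's members imported, every UInt picks up the two helpers; because UIntToAugmentedUInt extends AnyVal it compiles to static method calls instead of allocating a wrapper per use, which is why it replaces the old implicit-def-to-AnyRef pattern. A hedged usage sketch with made-up widths, meant to live inside a Chisel module that already does import Util._:

    val byteAddr = UInt(width = 8)
    byteAddr.extract(7, 4)        // ordinary slice, same as byteAddr(7, 4)
    byteAddr.extract(3, 4)        // hi == lo-1: empty slice, quietly becomes UInt(0)
    byteAddr.sextTo(16)           // sign-extend the 8-bit value to 16 bits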