Don't implicitly create Vecs, since they're heavyweight
parent 8bd7e3932b
commit 25fdabdd59
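Every hunk below applies the same change: stop relying on the implicit Seq => Vec conversion (deleted from Util.scala at the end of this diff) and keep values as plain Scala Seqs, so no Vec aggregate is elaborated just to concatenate bits or to select one element. A minimal sketch of the call-site shape, in Chisel 2 style, with illustrative names standing in for the Util.scala helpers:

    import Chisel._

    // Minimal sketch (illustrative names) of the pattern changed throughout this
    // diff. Both operations used to go through an implicit Seq => Vec conversion;
    // the Seq-level forms elaborate no Vec at all.
    object VecFreeSketch {
      // was: Vec(regs.map(_ === probe)).toBits
      def matchBits(regs: Seq[UInt], probe: UInt): UInt =
        Cat(regs.map(_ === probe).reverse)   // element 0 ends up in bit 0, like Vec.toBits

      // was: Vec(words)(sel); the Util.scala helper below builds a binary Mux tree,
      // written here as a simple one-hot select for clarity.
      def select(words: Seq[UInt], sel: UInt): UInt =
        words.zipWithIndex.map { case (w, i) => Mux(sel === UInt(i), w, UInt(0)) }
             .reduce(_ | _)
    }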
@@ -157,7 +157,7 @@ class BTB(implicit p: Parameters) extends BtbModule {
   private def page(addr: UInt) = addr >> matchBits
   private def pageMatch(addr: UInt) = {
     val p = page(addr)
-    Vec(pages.map(_ === p)).toBits
+    pages.map(_ === p).toBits
   }
   private def tagMatch(addr: UInt, pgMatch: UInt) = {
     val idxMatch = idxs.map(_ === addr(matchBits-1,0))
@@ -116,7 +116,7 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {

   val s1_paddr = Cat(tlb.io.resp.ppn, s1_req.addr(pgIdxBits-1,0))
   val s1_tag = Mux(s1_probe, probe_bits.addr_block >> idxBits, s1_paddr(paddrBits-1, untagBits))
-  val s1_hit_way = Cat(meta.io.resp.map(r => r.coh.isValid() && r.tag === s1_tag).reverse)
+  val s1_hit_way = meta.io.resp.map(r => r.coh.isValid() && r.tag === s1_tag).toBits
   val s1_hit_state = ClientMetadata.onReset.fromBits(
     meta.io.resp.map(r => Mux(r.tag === s1_tag, r.coh.toBits, UInt(0)))
     .reduce (_|_))
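Unlike most hunks, the old DCache line already avoided a Vec; rewriting Cat(... .reverse) as .toBits is a consistency change that is bit-for-bit identical, because the Seq toBits added in Util.scala is defined as exactly that Cat. A small equivalence sketch with an illustrative name:

    import Chisel._

    object HitWaySketch {
      // The Seq toBits added in Util.scala below is Cat(x.map(_.toBits).reverse),
      // so these two formulations of the hit-way vector elaborate to the same bits.
      def hitWayCat(hits: Seq[Bool]): UInt  = Cat(hits.reverse)
      def hitWayBits(hits: Seq[Bool]): UInt = Cat(hits.map(_.toBits).reverse) // what hits.toBits expands to
    }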
@@ -563,8 +563,8 @@ class FPU(implicit p: Parameters) extends CoreModule()(p) {
   val waddr = Mux(divSqrt_wen, divSqrt_waddr, winfo(0)(4,0).toUInt)
   val wsrc = (winfo(0) >> 6)(log2Up(pipes.size) - 1,0)
   val wcp = winfo(0)(6+log2Up(pipes.size))
-  val wdata = Mux(divSqrt_wen, divSqrt_wdata, Vec(pipes.map(_.res.data))(wsrc))
-  val wexc = Vec(pipes.map(_.res.exc))(wsrc)
+  val wdata = Mux(divSqrt_wen, divSqrt_wdata, (pipes.map(_.res.data): Seq[UInt])(wsrc))
+  val wexc = (pipes.map(_.res.exc): Seq[UInt])(wsrc)
   when ((!wcp && wen(0)) || divSqrt_wen) {
     regfile(waddr) := wdata
     if (enableCommitLog) {
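The type ascription in (pipes.map(_.res.data): Seq[UInt])(wsrc) is what lets the UInt index reach the new Seq apply: without it, Scala 2 parses the trailing (wsrc) as the implicit CanBuildFrom parameter list of Seq#map rather than as an index (the PTW hunks below sidestep the same issue with an intermediate val). A minimal sketch, assuming the Util helpers are in scope, with illustrative names:

    import Chisel._
    import Util._   // assumed import: brings the SeqToAugmentedSeq implicit (added below) into scope

    object AscriptionSketch {
      // Scala 2 parses xs.map(f)(sel) as passing `sel` to map's implicit
      // CanBuildFrom parameter list, not as an index into the mapped Seq. The
      // ascription (or an intermediate val) closes off the map call, so (sel)
      // resolves to the UInt-indexed apply on Seq.
      def pick(results: Seq[UInt], sel: UInt): UInt =
        (results.map(r => r + UInt(1)): Seq[UInt])(sel)
    }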
@@ -884,7 +884,7 @@ class HellaCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   writeArb.io.out.ready := data.io.write.ready
   data.io.write.bits := writeArb.io.out.bits
   val wdata_encoded = (0 until rowWords).map(i => code.encode(writeArb.io.out.bits.data(coreDataBits*(i+1)-1,coreDataBits*i)))
-  data.io.write.bits.data := Cat(wdata_encoded.reverse)
+  data.io.write.bits.data := wdata_encoded.toBits

   // tag read for new requests
   metaReadArb.io.in(4).valid := io.cpu.req.valid
@@ -948,10 +948,10 @@ class HellaCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   }
   val s2_data_muxed = Mux1H(s2_tag_match_way, s2_data)
   val s2_data_decoded = (0 until rowWords).map(i => code.decode(s2_data_muxed(encDataBits*(i+1)-1,encDataBits*i)))
-  val s2_data_corrected = Vec(s2_data_decoded.map(_.corrected)).toBits
-  val s2_data_uncorrected = Vec(s2_data_decoded.map(_.uncorrected)).toBits
+  val s2_data_corrected = s2_data_decoded.map(_.corrected).toBits
+  val s2_data_uncorrected = s2_data_decoded.map(_.uncorrected).toBits
   val s2_word_idx = if(doNarrowRead) UInt(0) else s2_req.addr(log2Up(rowWords*coreDataBytes)-1,log2Up(wordBytes))
-  val s2_data_correctable = Vec(s2_data_decoded.map(_.correctable)).toBits()(s2_word_idx)
+  val s2_data_correctable = s2_data_decoded.map(_.correctable).toBits()(s2_word_idx)

   // store/amo hits
   s3_valid := (s2_valid_masked && s2_hit || s2_replay) && !s2_sc_fail && isWrite(s2_req.cmd)
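The correctable-bit line still needs no Vec because the Seq toBits() returns a UInt, and applying a UInt index to a UInt is Chisel's dynamic single-bit extract. Roughly, with illustrative names:

    import Chisel._

    object CorrectableSketch {
      // Pack one correctable flag per word into a UInt (flags(0) in bit 0), then
      // extract a single bit with a hardware index; no Vec is involved.
      def correctableAt(flags: Seq[Bool], wordIdx: UInt): Bool = {
        val packed = Cat(flags.map(_.toBits).reverse) // what flags.toBits expands to
        packed(wordIdx)                               // dynamic bit extract
      }
    }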
@@ -81,7 +81,8 @@ class PTW(n: Int)(implicit p: Parameters) extends CoreModule()(p) {
   val r_req_dest = Reg(Bits())
   val r_pte = Reg(new PTE)

-  val vpn_idx = Vec((0 until pgLevels).map(i => (r_req.addr >> (pgLevels-i-1)*pgLevelBits)(pgLevelBits-1,0)))(count)
+  val vpn_idxs = (0 until pgLevels).map(i => (r_req.addr >> (pgLevels-i-1)*pgLevelBits)(pgLevelBits-1,0))
+  val vpn_idx = vpn_idxs(count)

   val arb = Module(new RRArbiter(new PTWReq, n))
   arb.io.in <> io.requestor.map(_.req)
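Splitting the one-liner into vpn_idxs plus vpn_idxs(count) keeps the per-level extraction as a plain Scala Seq and hands the selection to the new Seq.apply(idx: UInt), which elaborates a binary Mux tree instead of a Vec read. A hand-traced expansion of that tree for pgLevels = 3 (illustrative only; the real code just writes vpn_idxs(count)):

    import Chisel._

    object VpnIdxSketch {
      // Tracing SeqToAugmentedSeq.apply on a three-element Seq: the top level
      // splits at half = 2, and the lower half recurses on the low bit.
      def selectOfThree(xs: Seq[UInt], count: UInt): UInt = {
        require(xs.size == 3)
        Mux(count >= UInt(2),
          xs(2),                            // upper half holds a single element
          Mux((count & UInt(1)) >= UInt(1), // low bit of count picks within the lower half
            xs(1),
            xs(0)))
      }
    }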
@@ -103,7 +104,7 @@ class PTW(n: Int)(implicit p: Parameters) extends CoreModule()(p) {
     val tags = Reg(Vec(size, UInt(width = paddrBits)))
     val data = Reg(Vec(size, UInt(width = ppnBits)))

-    val hits = Vec(tags.map(_ === pte_addr)).toBits & valid
+    val hits = tags.map(_ === pte_addr).toBits & valid
     val hit = hits.orR
     when (io.mem.resp.valid && pte.table() && !hit) {
       val r = Mux(valid.andR, plru.replace, PriorityEncoder(~valid))
@@ -136,7 +137,8 @@ class PTW(n: Int)(implicit p: Parameters) extends CoreModule()(p) {
   io.mem.invalidate_lr := Bool(false)

   val r_resp_ppn = io.mem.req.bits.addr >> pgIdxBits
-  val resp_ppn = Vec((0 until pgLevels-1).map(i => Cat(r_resp_ppn >> pgLevelBits*(pgLevels-i-1), r_req.addr(pgLevelBits*(pgLevels-i-1)-1,0))) :+ r_resp_ppn)(count)
+  val resp_ppns = (0 until pgLevels-1).map(i => Cat(r_resp_ppn >> pgLevelBits*(pgLevels-i-1), r_req.addr(pgLevelBits*(pgLevels-i-1)-1,0))) :+ r_resp_ppn
+  val resp_ppn = resp_ppns(count)
   val resp_val = state === s_done

   for (i <- 0 until io.requestor.size) {
@@ -105,7 +105,7 @@ class RocketTile(resetSignal: Bool = null)(implicit p: Parameters) extends Tile(
     respArb.io.in <> roccs.map(rocc => Queue(rocc.io.resp))

     if (p(RoccNCSRs) > 0) {
-      core.io.rocc.csr.rdata <> roccs.map(_.io.csr.rdata).reduce(_ ++ _)
+      core.io.rocc.csr.rdata <> roccs.flatMap(_.io.csr.rdata)
       for ((rocc, accelParams) <- roccs.zip(buildRocc)) {
         rocc.io.csr.waddr := core.io.rocc.csr.waddr
         rocc.io.csr.wdata := core.io.rocc.csr.wdata
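roccs.flatMap(_.io.csr.rdata) yields the same flattened sequence of CSR read-data ports as roccs.map(_.io.csr.rdata).reduce(_ ++ _), without building the intermediate per-accelerator concatenations. In plain Scala terms (illustrative values):

    object FlattenSketch {
      // flatMap is the single-pass equivalent of map followed by reduce(_ ++ _).
      val perAccel = Seq(Seq(1, 2), Seq(3), Seq(4, 5))
      assert(perAccel.flatMap(identity) == perAccel.map(identity).reduce(_ ++ _))
    }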
@@ -13,7 +13,6 @@ object Util {
   implicit def bigIntToUInt(x: BigInt): UInt = UInt(x)
   implicit def booleanToBool(x: Boolean): Bits = Bool(x)
   implicit def intSeqToUIntSeq(x: Seq[Int]): Seq[UInt] = x.map(UInt(_))
-  implicit def seqToVec[T <: Data](x: Seq[T]): Vec[T] = Vec(x)
   implicit def wcToUInt(c: WideCounter): UInt = c.value
   implicit def sextToConv(x: UInt) = new AnyRef {
     def sextTo(n: Int): UInt =
@@ -25,6 +24,20 @@ object Util {
     def toInt: Int = if (x) 1 else 0
   }

+  implicit class SeqToAugmentedSeq[T <: Data](val x: Seq[T]) extends AnyVal {
+    def apply(idx: UInt): T = {
+      if (x.size == 1) {
+        x.head
+      } else {
+        val half = 1 << (log2Ceil(x.size) - 1)
+        val newIdx = idx & (half - 1)
+        Mux(idx >= UInt(half), x.drop(half)(newIdx), x.take(half)(newIdx))
+      }
+    }
+
+    def toBits(): UInt = Cat(x.map(_.toBits).reverse)
+  }
+
+  def minUInt(values: Seq[UInt]): UInt =
+    values.reduce((a, b) => Mux(a < b, a, b))
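A short usage sketch for the helpers added above (hypothetical module and values; assumes the Util object's implicits are imported, e.g. via import Util._):

    import Chisel._
    import Util._

    class AugmentedSeqExample extends Module {
      val io = new Bundle {
        val sel  = UInt(INPUT, width = 2)
        val mask = UInt(OUTPUT, width = 4)
        val word = UInt(OUTPUT, width = 8)
      }
      val words: Seq[UInt] = (0 until 4).map(i => UInt(i * 3, width = 8))

      io.mask := words.map(_ === UInt(6)).toBits // Seq[Bool] -> UInt via Cat; element 0 in bit 0
      io.word := words(io.sel)                   // UInt index -> binary Mux tree, no Vec created
      // minUInt(words) would similarly reduce to the smallest element with a Mux chain.
    }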