[rocket] take physical memory attribute check off critical path

Cache the attributes in the TLB instead.
Andrew Waterman 2016-08-02 15:11:48 -07:00
parent 76f33d88a6
commit cc0f8962fb
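
In outline, the diff below stops querying addrMap.getProt/isCacheable on the translated physical address after every TLB lookup; instead it performs that physical-memory-attribute check once, on the refill (or passthrough) PPN, and caches the result per entry (the new xr_array and cash_array, plus PMA-masked sw/sx/sr bits). The sketch below is a minimal software model of that idea only, not the Chisel source; the names (PmaCacheSketch, pmaOf, Entry) and the address split in pmaOf are made up for illustration.

// Illustrative software model only -- not the rocket-chip Chisel source.
// Idea: do the physical-memory-attribute (PMA) lookup once, at refill time,
// and cache the resulting permission/cacheability bits in the TLB entry,
// so a hit never has to consult the address map.
object PmaCacheSketch {
  case class Pma(r: Boolean, w: Boolean, x: Boolean, cacheable: Boolean)

  // Hypothetical stand-in for addrMap.getProt / addrMap.isCacheable.
  def pmaOf(ppn: Long): Pma =
    if (ppn < 0x80000L) Pma(r = true, w = true, x = false, cacheable = false) // e.g. MMIO
    else                Pma(r = true, w = true, x = true,  cacheable = true)  // e.g. DRAM

  // One TLB entry: the PTE permissions ANDed with the PMA at refill time,
  // mirroring the patch's sw/sx/sr/xr/cash arrays.
  case class Entry(vpn: Long, ppn: Long, r: Boolean, w: Boolean, x: Boolean, c: Boolean)

  final class Tlb {
    private var entries = List.empty[Entry]

    // Refill: the only place the PMA check happens (off the access critical path).
    def refill(vpn: Long, ppn: Long, pteR: Boolean, pteW: Boolean, pteX: Boolean): Unit = {
      val pma = pmaOf(ppn)
      val e = Entry(vpn, ppn, pteR && pma.r, pteW && pma.w, pteX && pma.x, pma.cacheable)
      entries = e :: entries
    }

    // Lookup: permissions and cacheability come straight from the cached bits.
    def lookup(vpn: Long): Option[Entry] = entries.find(_.vpn == vpn)
  }

  def main(args: Array[String]): Unit = {
    val tlb = new Tlb
    tlb.refill(vpn = 0x10, ppn = 0x80123L, pteR = true, pteW = true, pteX = false)
    println(tlb.lookup(0x10)) // Some(Entry(16,524579,true,true,false,true))
  }
}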

@@ -43,7 +43,7 @@ class TLB(implicit val p: Parameters) extends Module with HasTLBParameters {
   }
 
   val valid = Reg(init = UInt(0, entries))
-  val ppns = Reg(Vec(entries, io.ptw.resp.bits.pte.ppn))
+  val ppns = Reg(Vec(entries, UInt(width = ppnBits)))
   val tags = Reg(Vec(entries, UInt(width = asIdBits + vpnBits)))
 
   val s_ready :: s_request :: s_wait :: s_wait_invalidate :: Nil = Enum(UInt(), 4)
@@ -52,8 +52,23 @@ class TLB(implicit val p: Parameters) extends Module with HasTLBParameters {
   val r_refill_waddr = Reg(UInt(width = log2Ceil(entries)))
   val r_req = Reg(new TLBReq)
 
+  val do_mprv = io.ptw.status.mprv && !io.req.bits.instruction
+  val priv = Mux(do_mprv, io.ptw.status.mpp, io.ptw.status.prv)
+  val priv_s = priv === PRV.S
+  val priv_uses_vm = priv <= PRV.S && !io.ptw.status.debug
+
+  // share a single physical memory attribute checker (unshare if critical path)
+  val passthrough_ppn = io.req.bits.vpn(ppnBits-1, 0)
+  val refill_ppn = io.ptw.resp.bits.pte.ppn(ppnBits-1, 0)
+  val do_refill = Bool(usingVM) && io.ptw.resp.valid
+  val mpu_ppn = Mux(do_refill, refill_ppn, passthrough_ppn)
+  val prot = addrMap.getProt(mpu_ppn << pgIdxBits)
+  val cacheable = addrMap.isCacheable(mpu_ppn << pgIdxBits)
+  require(addrMap.flatten.forall { case (n, r) => (r.start | r.size) % (1 << pgIdxBits) == 0 })
+
   val lookup_tag = Cat(io.ptw.ptbr.asid, io.req.bits.vpn(vpnBits-1,0))
-  val hitsVec = (0 until entries).map(i => valid(i) && tags(i) === lookup_tag)
+  val vm_enabled = Bool(usingVM) && io.ptw.status.vm(3) && priv_uses_vm && !io.req.bits.passthrough
+  val hitsVec = (0 until entries).map(i => valid(i) && vm_enabled && tags(i) === lookup_tag) :+ !vm_enabled
   val hits = hitsVec.asUInt
 
   // permission bit arrays
@@ -62,8 +77,10 @@ class TLB(implicit val p: Parameters) extends Module with HasTLBParameters {
   val sw_array = Reg(UInt(width = entries)) // write permission
   val sx_array = Reg(UInt(width = entries)) // execute permission
   val sr_array = Reg(UInt(width = entries)) // read permission
+  val xr_array = Reg(UInt(width = entries)) // read permission to executable page
+  val cash_array = Reg(UInt(width = entries)) // cacheable
   val dirty_array = Reg(UInt(width = entries)) // PTE dirty bit
-  when (io.ptw.resp.valid) {
+  when (do_refill) {
     val pte = io.ptw.resp.bits.pte
     ppns(r_refill_waddr) := pte.ppn
     tags(r_refill_waddr) := r_refill_tag
@@ -71,50 +88,42 @@ class TLB(implicit val p: Parameters) extends Module with HasTLBParameters {
     val mask = UIntToOH(r_refill_waddr)
     valid := valid | mask
     u_array := Mux(pte.u, u_array | mask, u_array & ~mask)
-    sr_array := Mux(pte.sr(), sr_array | mask, sr_array & ~mask)
-    sw_array := Mux(pte.sw(), sw_array | mask, sw_array & ~mask)
-    sx_array := Mux(pte.sx(), sx_array | mask, sx_array & ~mask)
+    sw_array := Mux(pte.sw() && prot.w, sw_array | mask, sw_array & ~mask)
+    sx_array := Mux(pte.sx() && prot.x, sx_array | mask, sx_array & ~mask)
+    sr_array := Mux(pte.sr() && prot.r, sr_array | mask, sr_array & ~mask)
+    xr_array := Mux(pte.sx() && prot.r, xr_array | mask, xr_array & ~mask)
+    cash_array := Mux(cacheable, cash_array | mask, cash_array & ~mask)
     dirty_array := Mux(pte.d, dirty_array | mask, dirty_array & ~mask)
   }
 
-  // high if there are any unused (invalid) entries in the TLB
   val plru = new PseudoLRU(entries)
   val repl_waddr = Mux(!valid.andR, PriorityEncoder(~valid), plru.replace)
 
-  val do_mprv = io.ptw.status.mprv && !io.req.bits.instruction
-  val priv = Mux(do_mprv, io.ptw.status.mpp, io.ptw.status.prv)
-  val priv_s = priv === PRV.S
-  val priv_uses_vm = priv <= PRV.S && !io.ptw.status.debug
   val priv_ok = Mux(priv_s, ~Mux(io.ptw.status.pum, u_array, UInt(0)), u_array)
-  val w_array = priv_ok & sw_array
-  val x_array = priv_ok & sx_array
-  val r_array = priv_ok & (sr_array | Mux(io.ptw.status.mxr, x_array, UInt(0)))
+  val w_array = Cat(prot.w, priv_ok & sw_array)
+  val x_array = Cat(prot.x, priv_ok & sx_array)
+  val r_array = Cat(prot.r, priv_ok & (sr_array | Mux(io.ptw.status.mxr, xr_array, UInt(0))))
+  val c_array = Cat(cacheable, cash_array)
 
-  val vm_enabled = Bool(usingVM) && io.ptw.status.vm(3) && priv_uses_vm && !io.req.bits.passthrough
   val bad_va =
     if (vpnBits == vpnBitsExtended) Bool(false)
     else io.req.bits.vpn(vpnBits) =/= io.req.bits.vpn(vpnBits-1)
   // it's only a store hit if the dirty bit is set
-  val tag_hits = hits & (dirty_array | ~Mux(io.req.bits.store, w_array, UInt(0)))
-  val tag_hit = tag_hits.orR
-  val tlb_hit = vm_enabled && tag_hit
-  val tlb_miss = vm_enabled && !tag_hit && !bad_va
+  val tlb_hits = hits(entries-1, 0) & (dirty_array | ~Mux(io.req.bits.store, w_array, UInt(0)))
+  val tlb_hit = tlb_hits.orR
+  val tlb_miss = vm_enabled && !bad_va && !tlb_hit
 
-  when (io.req.valid && tlb_hit) {
-    plru.access(OHToUInt(hits))
+  when (io.req.valid && !tlb_miss) {
+    plru.access(OHToUInt(hits(entries-1, 0)))
   }
 
-  val paddr = Cat(io.resp.ppn, UInt(0, pgIdxBits))
-  val addr_prot = addrMap.getProt(paddr)
-
   io.req.ready := state === s_ready
-  io.resp.xcpt_ld := bad_va || (!tlb_miss && !addr_prot.r) || (tlb_hit && !(r_array & hits).orR)
-  io.resp.xcpt_st := bad_va || (!tlb_miss && !addr_prot.w) || (tlb_hit && !(w_array & hits).orR)
-  io.resp.xcpt_if := bad_va || (!tlb_miss && !addr_prot.x) || (tlb_hit && !(x_array & hits).orR)
-  io.resp.cacheable := addrMap.isCacheable(paddr)
-  io.resp.miss := tlb_miss
-  io.resp.ppn := Mux(vm_enabled, Mux1H(hitsVec, ppns), io.req.bits.vpn(ppnBits-1,0))
+  io.resp.xcpt_ld := bad_va || (~r_array & hits).orR
+  io.resp.xcpt_st := bad_va || (~w_array & hits).orR
+  io.resp.xcpt_if := bad_va || (~x_array & hits).orR
+  io.resp.cacheable := (c_array & hits).orR
+  io.resp.miss := do_refill || tlb_miss
+  io.resp.ppn := Mux1H(hitsVec, ppns :+ passthrough_ppn)
 
   io.ptw.req.valid := state === s_request
   io.ptw.req.bits := io.ptw.status