For Rockets without VM, widen vaddrBits to paddrBits

This supports addressing a >39-bit physical address space.
Andrew Waterman 2017-10-07 17:33:36 -07:00
parent a0e5a20b60
commit 986cbfb6b1
3 changed files with 21 additions and 13 deletions
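
To make the widening concrete, here is a small plain-Scala sketch (not part of the patch) of what the change does for a hypothetical RV64 Rocket without VM and a 44-bit physical address space; the constants mirror HasTileParameters below, and paddrBits = 44 is an assumption chosen only for illustration.

object VaddrBitsExample extends App {
  val xLen = 64
  val paddrBits = 44        // assumed: a >39-bit physical address space
  val pgIdxBits = 12
  val pgLevels = 3          // Sv39-style walk on RV64
  val pgLevelBits = 9       // 10 - log2Ceil(xLen/32) for xLen = 64

  val oldVaddrBits = pgIdxBits + pgLevels * pgLevelBits // always 39 before this commit
  val newVaddrBits = paddrBits min xLen                 // 44 once usingVM == false

  println(s"before: vaddrBits = $oldVaddrBits") // 39 bits: too narrow to name 44-bit paddrs
  println(s"after:  vaddrBits = $newVaddrBits") // 44 bits: covers the full physical space
}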


@@ -84,9 +84,6 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
   val r_req = Reg(new PTWReq)
   val r_req_dest = Reg(Bits())
   val r_pte = Reg(new PTE)
-  val vpn_idxs = (0 until pgLevels).map(i => (r_req.addr >> (pgLevels-i-1)*pgLevelBits)(pgLevelBits-1,0))
-  val vpn_idx = vpn_idxs(count)
 
   val arb = Module(new RRArbiter(new PTWReq, n))
   arb.io.in <> io.requestor.map(_.req)
@@ -104,7 +101,11 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
     (res, (tmp.ppn >> ppnBits) =/= 0)
   }
   val traverse = pte.table() && !invalid_paddr && count < pgLevels-1
-  val pte_addr = Cat(r_pte.ppn, vpn_idx) << log2Ceil(xLen/8)
+  val pte_addr = if (!usingVM) 0.U else {
+    val vpn_idxs = (0 until pgLevels).map(i => (r_req.addr >> (pgLevels-i-1)*pgLevelBits)(pgLevelBits-1,0))
+    val vpn_idx = vpn_idxs(count)
+    Cat(r_pte.ppn, vpn_idx) << log2Ceil(xLen/8)
+  }
 
   when (arb.io.out.fire()) {
     r_req := arb.io.out.bits
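
For reference, the relocated vpn_idxs expression slices the virtual page number into one index per page-table level. A minimal plain-Scala analogue of that arithmetic (assumed Sv39 parameters: pgLevels = 3, pgLevelBits = 9; the VPN value is made up):

object VpnIdxSketch extends App {
  val pgLevels = 3                    // assumed Sv39
  val pgLevelBits = 9
  val vpnMask = (1L << pgLevels * pgLevelBits) - 1
  val vpn = 0x123456789L & vpnMask    // 27-bit VPN (r_req.addr holds the VPN)

  // One pgLevelBits-wide slice per level, most-significant level first,
  // mirroring vpn_idxs; the walker picks vpn_idxs(count) at each step.
  val vpnIdxs = (0 until pgLevels).map { i =>
    (vpn >> (pgLevels - i - 1) * pgLevelBits) & ((1 << pgLevelBits) - 1)
  }
  vpnIdxs.zipWithIndex.foreach { case (idx, lvl) => println(f"level $lvl: index 0x$idx%x") }
}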


@@ -97,7 +97,7 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
   val vm_enabled = Bool(usingVM) && io.ptw.ptbr.mode(io.ptw.ptbr.mode.getWidth-1) && priv_uses_vm && !io.req.bits.passthrough
 
   // share a single physical memory attribute checker (unshare if critical path)
-  val (vpn, pgOffset) = Split(io.req.bits.vaddr, pgIdxBits)
+  val vpn = io.req.bits.vaddr(vaddrBits-1, pgIdxBits)
   val refill_ppn = io.ptw.resp.bits.pte.ppn(ppnBits-1, 0)
   val do_refill = Bool(usingVM) && io.ptw.resp.valid
   val invalidate_refill = state.isOneOf(s_request /* don't care */, s_wait_invalidate)
@@ -121,8 +121,8 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
   val prot_x = fastCheck(_.executable) && pmp.io.x
   val prot_eff = fastCheck(Seq(RegionType.PUT_EFFECTS, RegionType.GET_EFFECTS) contains _.regionType)
 
-  val lookup_tag = Cat(io.ptw.ptbr.asid, vpn(vpnBits-1,0))
-  val hitsVec = (0 until totalEntries).map { i => vm_enabled && {
+  val lookup_tag = Cat(io.ptw.ptbr.asid, vpn)
+  val hitsVec = (0 until totalEntries).map { i => if (!usingVM) false.B else vm_enabled && {
     var tagMatch = valid(i)
     for (j <- 0 until pgLevels) {
       val base = vpnBits - (j + 1) * pgLevelBits
@@ -133,7 +133,7 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
   val hits = hitsVec.asUInt
   val level = Mux1H(hitsVec.init, entries.map(_.level))
   val partialPPN = Mux1H(hitsVec.init, entries.map(_.ppn))
-  val ppn = {
+  val ppn = if (!usingVM) vpn else {
     var ppn = Mux(vm_enabled, partialPPN, vpn)(pgLevelBits*pgLevels - 1, pgLevelBits*(pgLevels - 1))
     for (i <- 1 until pgLevels)
       ppn = Cat(ppn, (Mux(level < i, vpn, 0.U) | partialPPN)(vpnBits - i*pgLevelBits - 1, vpnBits - (i + 1)*pgLevelBits))
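
The ppn loop above reassembles a physical page number from a (possibly superpage) TLB entry. A plain-Scala sketch of the idea, assuming Sv39 parameters and that a superpage PTE leaves its low PPN slices zero, as the RISC-V spec requires; the entry and VPN values are made up:

object SuperpagePpnSketch extends App {
  val pgLevels = 3; val pgLevelBits = 9 // assumed Sv39
  def slice(x: Long, i: Int): Long =    // level-i slice, level 0 most significant
    (x >> (pgLevels - i - 1) * pgLevelBits) & ((1 << pgLevelBits) - 1)

  // Slices below the hit level pass through from the VPN (the superpage
  // maps them identically); the remaining slices come from the cached PPN.
  def ppn(entryPpn: Long, vpn: Long, level: Int): Long =
    (0 until pgLevels).foldLeft(0L) { (acc, i) =>
      (acc << pgLevelBits) | (if (i > level) slice(vpn, i) else slice(entryPpn, i))
    }

  println(f"4K page hit:  ppn = 0x${ppn(0x7654321L, 0x0abcdefL, level = 2)}%x")
  println(f"gigapage hit: ppn = 0x${ppn(0x7000000L, 0x0abcdefL, level = 0)}%x")
}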
@@ -187,7 +187,7 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
   val misaligned = (io.req.bits.vaddr & (UIntToOH(io.req.bits.size) - 1)).orR
   val bad_va = vm_enabled &&
     (if (vpnBits == vpnBitsExtended) Bool(false)
-     else vpn(vpnBits) =/= vpn(vpnBits-1))
+     else (io.req.bits.vaddr.asSInt < 0.S) =/= (vpn.asSInt < 0.S))
 
   val lrscAllowed = Mux(Bool(usingDataScratchpad), 0.U, c_array)
   val ae_array =
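
The rewritten bad_va test replaces an explicit bit index with two sign comparisons: the full extended virtual address and the truncated VPN must agree on sign, i.e. the address must be canonically sign-extended. A plain-Scala illustration, assuming Sv39-style widths (vaddrBits = 39 with one extended bit):

object BadVaSketch extends App {
  val vaddrBits = 39                   // assumed Sv39 widths
  val vaddrBitsExtended = 40
  def bit(x: Long, i: Int): Long = (x >> i) & 1

  // Canonical iff every bit above bit vaddrBits-1 repeats bit vaddrBits-1;
  // with one extended bit that reduces to the sign comparison above.
  def badVa(vaddr: Long): Boolean =
    bit(vaddr, vaddrBitsExtended - 1) != bit(vaddr, vaddrBits - 1)

  println(badVa(0x1234L << 12))              // false: zero-extended low address
  println(badVa(1L << (vaddrBits - 1)))      // true: bit 38 set, not sign-extended
  println(badVa(((1L << 40) - 1) & ~0xfffL)) // false: properly sign-extended
}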
@@ -230,7 +230,7 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
   io.resp.cacheable := (c_array & hits).orR
   io.resp.prefetchable := (prefetchable_array & hits).orR && edge.manager.managers.forall(m => !m.supportsAcquireB || m.supportsHint)
   io.resp.miss := do_refill || tlb_miss || multipleHits
-  io.resp.paddr := Cat(ppn, pgOffset)
+  io.resp.paddr := Cat(ppn, io.req.bits.vaddr(pgIdxBits-1, 0))
 
   io.ptw.req.valid := state === s_request
   io.ptw.req.bits <> io.ptw.status
@@ -256,7 +256,7 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
     }
     when (sfence) {
-      assert((io.req.bits.sfence.bits.addr >> pgIdxBits) === vpn(vpnBits-1,0))
+      assert((io.req.bits.sfence.bits.addr >> pgIdxBits) === vpn)
       valid := Mux(io.req.bits.sfence.bits.rs1, valid & ~hits(totalEntries-1, 0),
                Mux(io.req.bits.sfence.bits.rs2, valid & entries.map(_.g).asUInt, 0))
     }


@@ -42,7 +42,14 @@ trait HasTileParameters {
   def iLen: Int = 32
   def pgIdxBits: Int = 12
   def pgLevelBits: Int = 10 - log2Ceil(xLen / 32)
-  def vaddrBits: Int = pgIdxBits + pgLevels * pgLevelBits
+  def vaddrBits: Int =
+    if (usingVM) {
+      val v = pgIdxBits + pgLevels * pgLevelBits
+      require(v == xLen || xLen > v && v > paddrBits)
+      v
+    } else {
+      paddrBits min xLen
+    }
   def paddrBits: Int = p(SharedMemoryTLEdge).bundle.addressBits
   def vpnBits: Int = vaddrBits - pgIdxBits
   def ppnBits: Int = paddrBits - pgIdxBits
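
The new require pins down which configurations are legal: with VM, the virtual address space must either fill the register (as Sv32 does on RV32) or strictly contain the physical one. A quick plain-Scala check of the same predicate, with assumed RV64 Sv39 numbers and paddrBits = 32:

object VaddrRequireSketch extends App {
  // Assumed RV64 Sv39 configuration with a 32-bit physical address space.
  val xLen = 64; val pgIdxBits = 12; val pgLevels = 3; val pgLevelBits = 9
  val paddrBits = 32

  val v = pgIdxBits + pgLevels * pgLevelBits // 39
  // Same condition as the vaddrBits require above.
  require(v == xLen || xLen > v && v > paddrBits)
  println(s"vaddrBits = $v is legal for xLen=$xLen, paddrBits=$paddrBits")
}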
@@ -53,7 +60,7 @@ trait HasTileParameters {
   def maxPAddrBits: Int = xLen match { case 32 => 34; case 64 => 56 }
   def hartIdLen: Int = p(MaxHartIdBits)
-  def resetVectorLen: Int = paddrBits min vaddrBitsExtended
+  def resetVectorLen: Int = paddrBits
   def dcacheArbPorts = 1 + usingVM.toInt + usingDataScratchpad.toInt + tileParams.rocc.size
 }