More WIP on PMP
parent 2888779422
commit 86d84959cf

@@ -17,29 +17,80 @@ class PMPConfig extends Bundle {
  val r = Bool()
}

object PMP {
  def lgAlign = 2
}

class PMP(implicit p: Parameters) extends CoreBundle()(p) {
  import PMP._

  val cfg = new PMPConfig
  val addr = UInt(width = paddrBits - lgAlign)

  def enabled(prv: UInt) = cfg.p.orR && (cfg.m || prv <= PRV.S)
  def locked = cfg.p.andR
  def locked = cfg.p(1)
  def addrLocked(next: PMP) = locked || next.locked && next.cfg.a(1)

  private def lgAlign = 2
  private def mask = (0 until paddrBits - lgAlign).scanLeft(cfg.a(0))((m, i) => m && addr(i)).asUInt
  private lazy val mask = Cat((0 until paddrBits - lgAlign).scanLeft(cfg.a(0))((m, i) => m && addr(i)).asUInt, UInt((BigInt(1) << lgAlign) - 1, lgAlign))
  private lazy val comparand = addr << lgAlign

  def pow2AddressMatch(x: UInt, lgSize: UInt, lgMaxSize: Int) = {
    val m = mask
    def checkOne(a: UInt) = (((a >> lgAlign) ^ addr) & ~m) === 0
    var res = checkOne(x)
    for (i <- (1 << lgAlign) until (1 << lgMaxSize) by (1 << lgAlign))
      res = res || (lgSize > log2Ceil(i) && checkOne(x | i))
    res
  private def pow2Match(x: UInt, lgSize: UInt, lgMaxSize: Int) = {
    def eval(a: UInt, b: UInt, m: UInt) = ((a ^ b) & ~m) === 0
    if (lgMaxSize <= lgAlign) {
      eval(x, comparand, mask)
    } else {
      // break up the circuit; the MSB part will be CSE'd
      val lsbMask = mask | ~(((BigInt(1) << lgMaxSize) - 1).U << lgSize)
      val msbMatch = eval(x >> lgMaxSize, comparand >> lgMaxSize, mask >> lgMaxSize)
      val lsbMatch = eval(x(lgMaxSize-1, 0), comparand(lgMaxSize-1, 0), lsbMask(lgMaxSize-1, 0))
      msbMatch && lsbMatch
    }
  }

  def hit(prv: UInt, x: UInt, lgSize: UInt, lgMaxSize: Int) = {
    enabled(prv) && pow2AddressMatch(x, lgSize, lgMaxSize)
  private def boundMatch(x: UInt, lsbMask: UInt, lgMaxSize: Int) = {
    if (lgMaxSize <= lgAlign) {
      x < comparand
    } else {
      // break up the circuit; the MSB part will be CSE'd
      val msbsLess = (x >> lgMaxSize) < (comparand >> lgMaxSize)
      val msbsEqual = ((x >> lgMaxSize) ^ (comparand >> lgMaxSize)) === 0
      val lsbsLess = x(lgMaxSize-1, 0) < (comparand(lgMaxSize-1, 0) & lsbMask(lgMaxSize-1, 0))
      msbsLess || (msbsEqual && lsbsLess)
    }
  }

  private def lowerBoundMatch(x: UInt, lgSize: UInt, lgMaxSize: Int) =
    !boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U << lgSize, lgMaxSize)

  private def upperBoundMatch(x: UInt, lgMaxSize: Int) =
    boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U, lgMaxSize)

  private def rangeMatch(x: UInt, lgSize: UInt, lgMaxSize: Int, prev: PMP) =
    prev.lowerBoundMatch(x, lgSize, lgMaxSize) && upperBoundMatch(x, lgMaxSize)

  private def pow2Homogeneous(x: UInt, pgMask: UInt) = {
    (mask & ~pgMask & (pgMask >> 1.U)) =/= 0 || ((x ^ comparand) & pgMask) =/= 0
  }

  private def rangeHomogeneous(x: UInt, pgMask: UInt, lgMaxSize: Int, prev: PMP) = {
    val beginsAfterLower = !prev.boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U, lgMaxSize) // CSE with rangeMatch
    val beginsAfterUpper = !boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U, lgMaxSize) // CSE with rangeMatch
    val endsBeforeLower = (x & pgMask) < (prev.comparand & pgMask)
    val endsBeforeUpper = (x & pgMask) > (comparand & pgMask)
    endsBeforeLower || beginsAfterUpper || (beginsAfterLower && endsBeforeUpper)
  }

  def homogeneous(x: UInt, pgMask: UInt, lgMaxSize: Int, prev: PMP): Bool =
    !cfg.p(0) || Mux(cfg.a(1), rangeHomogeneous(x, pgMask, lgMaxSize, prev), pow2Homogeneous(x, pgMask))

  def aligned(x: UInt, lgSize: UInt, lgMaxSize: Int, prev: PMP): Bool = {
    val alignMask = ~(((BigInt(1) << lgMaxSize) - 1).U << lgSize)(lgMaxSize-1, 0)
    val rangeAligned = (prev.comparand(lgMaxSize-1, 0) & alignMask) === 0 && (comparand(lgMaxSize-1, 0) & alignMask) === 0
    val pow2Aligned = (alignMask & ~mask(lgMaxSize-1, 0)) === 0
    Mux(cfg.a(1), rangeAligned, pow2Aligned)
  }

  def hit(x: UInt, lgSize: UInt, lgMaxSize: Int, prev: PMP): Bool =
    cfg.p(0) && Mux(cfg.a(1), rangeMatch(x, lgSize, lgMaxSize, prev), pow2Match(x, lgSize, lgMaxSize))
}
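Note: the scanLeft in PMP.mask above decodes the NAPOT trailing-ones encoding of pmpaddr into a don't-care mask for the comparison in pow2Match. The following is a minimal software model in plain Scala (not Chisel) of that decode; the object name and the parameter values (paddrBits = 32) are illustrative, not taken from the diff.

// Plain-Scala software model (not Chisel) of the NAPOT mask decode used by
// PMP.mask above. The object name and the parameter values are illustrative.
object NapotMaskModel {
  val lgAlign = 2      // matches PMP.lgAlign in this WIP: pmpaddr = paddr >> 2
  val paddrBits = 32   // assumed physical address width for the example

  // Bit j of the run stays true while the trailing-ones run in pmpaddr continues,
  // mirroring the scanLeft in PMP.mask; those address bits become don't-cares.
  def napotMask(pmpaddr: BigInt, napot: Boolean): BigInt = {
    val run = (0 until paddrBits - lgAlign).scanLeft(napot)((m, i) => m && pmpaddr.testBit(i))
    val hi = run.zipWithIndex.collect { case (true, j) => BigInt(1) << j }
                .foldLeft(BigInt(0))(_ | _)
    (hi << lgAlign) | ((BigInt(1) << lgAlign) - 1)  // alignment bits are always don't-care
  }

  def main(args: Array[String]): Unit = {
    // pmpaddr = 0xff has 8 trailing ones: a 2^(8 + lgAlign + 1) = 2 KiB NAPOT region,
    // so the low 11 physical-address bits drop out of the comparison.
    println(napotMask(0xff, napot = true).toString(16))   // prints 7ff
    println(napotMask(0xff, napot = false).toString(16))  // prints 3 (only alignment bits)
  }
}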

class PMPChecker(lgMaxSize: Int)(implicit p: Parameters) extends CoreModule()(p)
@@ -52,15 +103,39 @@ class PMPChecker(lgMaxSize: Int)(implicit p: Parameters) extends CoreModule()(p)
    val r = Bool(OUTPUT)
    val w = Bool(OUTPUT)
    val x = Bool(OUTPUT)
    val pgLevel = UInt(INPUT, log2Ceil(pgLevels))
    val homogeneous = Bool(OUTPUT)
  }

  def hits = io.pmp.map(_.hit(io.prv, io.addr, io.size, lgMaxSize))
  val default = io.prv > PRV.S
  val (r, w, x, _) = ((default, default, default, 0.U) /: (io.pmp zip hits).reverse) { case ((r, w, x, pri), (pmp, hit)) =>
    MuxT(hit && pmp.cfg.p >= pri, (pmp.cfg.r, pmp.cfg.w, pmp.cfg.x, pmp.cfg.p), (r, w, x, pri))
  val pmp0 = Wire(init = 0.U.asTypeOf(new PMP))
  pmp0.cfg.r := default
  pmp0.cfg.w := default
  pmp0.cfg.x := default

  val pgMask = (0 until pgLevels).map { i =>
    val idxBits = pgIdxBits + (pgLevels - 1 - i) * pgLevelBits
    require(idxBits < paddrBits)
    val mask = (BigInt(1) << paddrBits) - (BigInt(1) << idxBits)
    Mux(io.pgLevel >= i, mask.U, 0.U)
  }.reduce(_|_)

  io.homogeneous := ((true.B, pmp0) /: io.pmp) { case ((h, prev), pmp) =>
    (h && pmp.homogeneous(io.addr, pgMask, lgMaxSize, prev), pmp)
  }._1

  val res = (pmp0 /: (io.pmp zip (pmp0 +: io.pmp)).reverse) { case (prev, (pmp, prevPMP)) =>
    val hit = pmp.hit(io.addr, io.size, lgMaxSize, prevPMP)
    val ignore = default && !pmp.cfg.m
    val aligned = pmp.aligned(io.addr, io.size, lgMaxSize, prevPMP)
    val cur = Wire(init = pmp)
    cur.cfg.r := (aligned && pmp.cfg.r) || ignore
    cur.cfg.w := (aligned && pmp.cfg.w) || ignore
    cur.cfg.x := (aligned && pmp.cfg.x) || ignore
    Mux(hit, cur, prev)
  }

  io.r := r
  io.w := w
  io.x := x
  io.r := res.cfg.r
  io.w := res.cfg.w
  io.x := res.cfg.x
}
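Note: the res fold in PMPChecker walks the PMP entries in reverse, so entry 0 is evaluated last and overrides everything evaluated before it; the lowest-numbered matching PMP therefore takes effect, and pmp0 (seeded with the default permissions) is used when nothing hits. A minimal plain-Scala sketch of that priority scheme follows; Perms and the sample entries are made up for illustration.

// Plain-Scala sketch (not Chisel) of the priority fold above: folding the
// entries in reverse makes the lowest-numbered hit win, with a default
// result when no entry hits. Perms and the sample data are illustrative.
object PmpPriorityModel {
  final case class Perms(r: Boolean, w: Boolean, x: Boolean)

  def resolve(entries: Seq[(Boolean, Perms)], default: Perms): Perms =
    entries.reverse.foldLeft(default) { case (prev, (hit, perms)) =>
      if (hit) perms else prev   // software stand-in for Mux(hit, cur, prev)
    }

  def main(args: Array[String]): Unit = {
    // Entries 1 and 3 both hit; entry 1 (lower index, higher priority) wins.
    val picked = resolve(
      Seq((false, Perms(true,  true,  true)),
          (true,  Perms(true,  false, false)),   // entry 1
          (false, Perms(false, false, false)),
          (true,  Perms(true,  true,  false))),  // entry 3
      default = Perms(false, false, false))
    println(picked)   // Perms(true,false,false)
  }
}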
@@ -73,14 +73,14 @@ class TLB(lgMaxSize: Int, entries: Int)(implicit edge: TLEdgeOut, p: Parameters)
  val refill_ppn = io.ptw.resp.bits.pte.ppn(ppnBits-1, 0)
  val do_refill = Bool(usingVM) && io.ptw.resp.valid
  val invalidate_refill = state.isOneOf(s_request /* don't care */, s_wait_invalidate)
  val mpu_ppn = Mux(do_refill, refill_ppn,
                Mux(vm_enabled, ppns.last, vpn(ppnBits-1, 0)))
  val mpu_physaddr = Cat(mpu_ppn, io.req.bits.vaddr(pgIdxBits-1, 0))
  val mpu_physaddr = Mux(do_refill, refill_ppn << pgIdxBits,
                     Cat(Mux(vm_enabled, ppns.last, vpn(ppnBits-1, 0)), io.req.bits.vaddr(pgIdxBits-1, 0)))
  val pmp = Module(new PMPChecker(lgMaxSize))
  pmp.io.addr := mpu_physaddr
  pmp.io.size := io.req.bits.size
  pmp.io.pmp := io.ptw.pmp
  pmp.io.prv := Mux(io.req.bits.passthrough /* PTW */, PRV.S, priv)
  pmp.io.prv := Mux(do_refill || io.req.bits.passthrough /* PTW */, PRV.S, priv)
  pmp.io.pgLevel := io.ptw.resp.bits.level
  val legal_address = edge.manager.findSafe(mpu_physaddr).reduce(_||_)
  def fastCheck(member: TLManagerParameters => Boolean) =
    legal_address && Mux1H(edge.manager.findFast(mpu_physaddr), edge.manager.managers.map(m => Bool(member(m))))
@@ -88,10 +88,12 @@ class TLB(lgMaxSize: Int, entries: Int)(implicit edge: TLEdgeOut, p: Parameters)
  val prot_w = fastCheck(_.supportsPutFull) && pmp.io.w
  val prot_x = fastCheck(_.executable) && pmp.io.x
  val cacheable = fastCheck(_.supportsAcquireB)
  val isSpecial = {
  val isSpecial = !pmp.io.homogeneous || {
    val homogeneous = Wire(init = false.B)
    for (i <- 0 until pgLevels) {
      when (io.ptw.resp.bits.level === i) { homogeneous := TLBPageLookup(edge.manager.managers, xLen, p(CacheBlockBytes), BigInt(1) << (pgIdxBits + ((pgLevels - 1 - i) * pgLevelBits)))(mpu_physaddr).homogeneous }
      when (io.ptw.resp.bits.level >= i) {
        homogeneous := TLBPageLookup(edge.manager.managers, xLen, p(CacheBlockBytes), BigInt(1) << (pgIdxBits + ((pgLevels - 1 - i) * pgLevelBits)))(mpu_physaddr).homogeneous
      }
    }
    !homogeneous
  }
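Note: the BigInt(1) << (pgIdxBits + (pgLevels - 1 - i) * pgLevelBits) term above is the number of bytes mapped at page-walk level i, which is what the TLBPageLookup homogeneity check is sized against. A worked example in plain Scala, assuming Sv39-style parameters (pgIdxBits = 12, pgLevelBits = 9, pgLevels = 3; these values are assumed here, not stated in the diff):

// Worked example (plain Scala) of the page-size term used in the homogeneity
// lookup above, with assumed Sv39-style parameters.
object PageSizeModel {
  val pgIdxBits = 12    // 4 KiB base pages
  val pgLevelBits = 9
  val pgLevels = 3      // Sv39

  def main(args: Array[String]): Unit =
    for (i <- 0 until pgLevels) {
      val bytes = BigInt(1) << (pgIdxBits + (pgLevels - 1 - i) * pgLevelBits)
      println(s"level $i -> $bytes bytes")
      // level 0 -> 1073741824 (1 GiB), level 1 -> 2097152 (2 MiB), level 2 -> 4096 (4 KiB)
    }
}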
@@ -114,7 +116,7 @@ class TLB(lgMaxSize: Int, entries: Int)(implicit edge: TLEdgeOut, p: Parameters)
    ppn = Cat(ppn, (Mux(level < i, vpn, 0.U) | partialPPN)(vpnBits - i*pgLevelBits - 1, vpnBits - (i + 1)*pgLevelBits))
    ppn
  }


  // permission bit arrays
  val u_array = Reg(UInt(width = totalEntries)) // user permission
  val g_array = Reg(UInt(width = totalEntries)) // global mapping
@@ -140,9 +142,9 @@ class TLB(lgMaxSize: Int, entries: Int)(implicit edge: TLEdgeOut, p: Parameters)
    xr_array := Mux(pte.sx() && (isSpecial || prot_r), xr_array | mask, xr_array & ~mask)
    cash_array := Mux(cacheable, cash_array | mask, cash_array & ~mask)
  }


  val plru = new PseudoLRU(normalEntries)
  val repl_waddr = Mux(!valid.andR, PriorityEncoder(~valid), plru.replace)
  val repl_waddr = Mux(!valid(normalEntries-1, 0).andR, PriorityEncoder(~valid(normalEntries-1, 0)), plru.replace)

  val priv_ok = Mux(priv_s, ~Mux(io.ptw.status.sum, UInt(0), u_array), u_array)
  val w_array = Cat(prot_w, priv_ok & ~(~prot_w << specialEntry) & sw_array)