
Put page homogeneity checker in PMP

Avoids redundancy between ITLB and DTLB
Andrew Waterman 2017-03-21 12:01:32 -07:00
parent 9e05200e51
commit d3bda9574c
3 changed files with 47 additions and 43 deletions
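In short (paraphrasing the diffs below rather than quoting them): the page-homogeneity check that each TLB previously recomputed from PMPChecker and TLBPageLookup is now folded over the PMP registers once, in the PTW, which reports a single homogeneous bit with every page-table response. A rough sketch of the new flow, using only names that appear in the diffs:

// Sketch only: condensed from the diffs in this commit, not the literal source.
class PMPHomogeneityChecker(pmps: Seq[PMP])(implicit p: Parameters) {
  // True iff every enabled PMP either fully contains or fully excludes the page at addr.
  def apply(addr: UInt, pgLevel: UInt): Bool =
    ((true.B, 0.U.asTypeOf(new PMP)) /: pmps) { case ((h, prev), pmp) =>
      (h && pmp.homogeneous(addr, pgLevel, prev), pmp)  // fold, threading the previous PMP for TOR ranges
    }._1
}

// PTW: evaluated once per walk and returned with the refill
//   io.requestor(i).resp.bits.homogeneous := pmpHomogeneous && pmaHomogeneous
// TLB: the per-level TLBPageLookup loop and pmp.io.homogeneous disappear
//   val isSpecial = !io.ptw.resp.bits.homogeneous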

PMP.scala

@@ -56,22 +56,23 @@ class PMP(implicit p: Parameters) extends PMPReg {
     }
   }
-  private def decomposedBoundMatch(xHi: UInt, xLo: UInt, lsbMask: UInt, lgMaxSize: Int) = {
-    val msbsLess = xHi < (comparand >> lgMaxSize)
-    val msbsEqual = (xHi ^ (comparand >> lgMaxSize)) === 0
-    val lsbsLess = xLo < (comparand(lgMaxSize-1, 0) & lsbMask(lgMaxSize-1, 0))
-    msbsLess || (msbsEqual && lsbsLess)
+  private def boundMatch(x: UInt, lsbMask: UInt, lgMaxSize: Int) = {
+    if (lgMaxSize <= lgAlign) {
+      x < comparand
+    } else {
+      // break up the circuit; the MSB part will be CSE'd
+      val msbsLess = (x >> lgMaxSize) < (comparand >> lgMaxSize)
+      val msbsEqual = ((x >> lgMaxSize) ^ (comparand >> lgMaxSize)) === 0
+      val lsbsLess = (x(lgMaxSize-1, 0) | lsbMask) < comparand(lgMaxSize-1, 0)
+      msbsLess || (msbsEqual && lsbsLess)
+    }
   }
-  private def boundMatch(x: UInt, lsbMask: UInt, lgMaxSize: Int) =
-    if (lgMaxSize <= lgAlign) x < comparand
-    else decomposedBoundMatch(x >> lgMaxSize, x(lgMaxSize-1, 0), lsbMask, lgMaxSize)
   private def lowerBoundMatch(x: UInt, lgSize: UInt, lgMaxSize: Int) =
-    !boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U << lgSize, lgMaxSize)
+    !boundMatch(x, ~(((BigInt(1) << lgMaxSize) - 1).U << lgSize)(lgMaxSize-1, 0), lgMaxSize)
   private def upperBoundMatch(x: UInt, lgMaxSize: Int) =
-    boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U, lgMaxSize)
+    boundMatch(x, 0.U, lgMaxSize)
   private def rangeMatch(x: UInt, lgSize: UInt, lgMaxSize: Int, prev: PMP) =
     prev.lowerBoundMatch(x, lgSize, lgMaxSize) && upperBoundMatch(x, lgMaxSize)
@@ -85,26 +86,25 @@ class PMP(implicit p: Parameters) extends PMPReg {
     f(pgIdxBits + (pgLevels - 1 - i) * pgLevelBits)
   }
-  private def rangeHomogeneous(x: UInt, pgLevel: UInt, lgMaxSize: Int, prev: PMP) = {
-    val beginsAfterLower = !prev.boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U, lgMaxSize) // CSE with rangeMatch
-    val beginsAfterUpper = !boundMatch(x, ((BigInt(1) << lgMaxSize) - 1).U, lgMaxSize) // CSE with rangeMatch
+  private def rangeHomogeneous(x: UInt, pgLevel: UInt, prev: PMP) = {
+    val beginsAfterLower = !(x < prev.comparand)
+    val beginsAfterUpper = !(x < comparand)
     val endsBeforeLower = pgLevelMap { idxBits => (x >> idxBits) < (prev.comparand << idxBits) } (pgLevel)
-    val endsBeforeUpper = pgLevelMap { idxBits => (x >> idxBits) > (comparand << idxBits) } (pgLevel)
+    val endsBeforeUpper = pgLevelMap { idxBits => (x >> idxBits) < (comparand << idxBits) } (pgLevel)
     endsBeforeLower || beginsAfterUpper || (beginsAfterLower && endsBeforeUpper)
   }
   // returns whether this PMP completely contains, or contains none of, a page
-  def homogeneous(x: UInt, pgLevel: UInt, lgMaxSize: Int, prev: PMP): Bool =
-    !cfg.p(0) || Mux(cfg.a(1), rangeHomogeneous(x, pgLevel, lgMaxSize, prev), pow2Homogeneous(x, pgLevel))
+  def homogeneous(x: UInt, pgLevel: UInt, prev: PMP): Bool =
+    !cfg.p(0) || Mux(cfg.a(1), rangeHomogeneous(x, pgLevel, prev), pow2Homogeneous(x, pgLevel))
   // returns whether this matching PMP fully contains the access
   def aligned(x: UInt, lgSize: UInt, lgMaxSize: Int, prev: PMP): Bool = if (lgMaxSize <= lgAlign) true.B else {
-    val lsbMask = ((BigInt(1) << lgMaxSize) - 1).U << lgSize
-    val alignMask = ~lsbMask(lgMaxSize-1, 0)
-    val lowerBoundOK = !prev.upperBoundMatch(x, lgMaxSize)
-    val upperBoundOK = decomposedBoundMatch(x >> lgMaxSize, x(lgMaxSize-1, 0) | alignMask, lsbMask, lgMaxSize)
-    val rangeAligned = lowerBoundOK && upperBoundOK
-    val pow2Aligned = (alignMask & ~mask(lgMaxSize-1, 0)) === 0
+    val lsbMask = ~(((BigInt(1) << lgMaxSize) - 1).U << lgSize)(lgMaxSize-1, 0)
+    val straddlesLowerBound = ((x >> lgMaxSize) ^ (prev.comparand >> lgMaxSize)) === 0 && (prev.comparand(lgMaxSize-1, 0) & ~x(lgMaxSize-1, 0)) =/= 0
+    val straddlesUpperBound = ((x >> lgMaxSize) ^ (comparand >> lgMaxSize)) === 0 && (comparand(lgMaxSize-1, 0) & (x(lgMaxSize-1, 0) | lsbMask)) =/= 0
+    val rangeAligned = !(straddlesLowerBound || straddlesUpperBound)
+    val pow2Aligned = (lsbMask & ~mask(lgMaxSize-1, 0)) === 0
     Mux(cfg.a(1), rangeAligned, pow2Aligned)
   }
@@ -113,6 +113,14 @@ class PMP(implicit p: Parameters) extends PMPReg {
     cfg.p(0) && Mux(cfg.a(1), rangeMatch(x, lgSize, lgMaxSize, prev), pow2Match(x, lgSize, lgMaxSize))
 }
 
+class PMPHomogeneityChecker(pmps: Seq[PMP])(implicit p: Parameters) {
+  def apply(addr: UInt, pgLevel: UInt): Bool = {
+    ((true.B, 0.U.asTypeOf(new PMP)) /: pmps) { case ((h, prev), pmp) =>
+      (h && pmp.homogeneous(addr, pgLevel, prev), pmp)
+    }._1
+  }
+}
+
 class PMPChecker(lgMaxSize: Int)(implicit p: Parameters) extends CoreModule()(p)
     with HasRocketCoreParameters {
   val io = new Bundle {
@@ -123,8 +131,6 @@ class PMPChecker(lgMaxSize: Int)(implicit p: Parameters) extends CoreModule()(p)
     val r = Bool(OUTPUT)
     val w = Bool(OUTPUT)
     val x = Bool(OUTPUT)
-    val pgLevel = UInt(INPUT, log2Ceil(pgLevels))
-    val homogeneous = Bool(OUTPUT)
   }
 
   val default = io.prv > PRV.S
@@ -133,10 +139,6 @@ class PMPChecker(lgMaxSize: Int)(implicit p: Parameters) extends CoreModule()(p)
   pmp0.cfg.w := default
   pmp0.cfg.x := default
 
-  io.homogeneous := ((true.B, pmp0) /: io.pmp) { case ((h, prev), pmp) =>
-    (h && pmp.homogeneous(io.addr, io.pgLevel, lgMaxSize, prev), pmp)
-  }._1
-
   val res = (pmp0 /: (io.pmp zip (pmp0 +: io.pmp)).reverse) { case (prev, (pmp, prevPMP)) =>
     val hit = pmp.hit(io.addr, io.size, lgMaxSize, prevPMP)
     val ignore = default && !pmp.cfg.m

PTW.scala

@@ -7,7 +7,9 @@ import Chisel._
 import Chisel.ImplicitConversions._
 import config._
 import tile._
+import coreplex.CacheBlockBytes
 import uncore.constants._
+import uncore.tilelink2._
 import util._
 import scala.collection.mutable.ListBuffer
@@ -19,6 +21,7 @@ class PTWReq(implicit p: Parameters) extends CoreBundle()(p) {
 class PTWResp(implicit p: Parameters) extends CoreBundle()(p) {
   val pte = new PTE
   val level = UInt(width = log2Ceil(pgLevels))
+  val homogeneous = Bool()
 }
 
 class TLBPTWIO(implicit p: Parameters) extends CoreBundle()(p)
@@ -60,7 +63,7 @@ class PTE(implicit p: Parameters) extends CoreBundle()(p) {
   def sx(dummy: Int = 0) = leaf() && x
 }
 
-class PTW(n: Int)(implicit p: Parameters) extends CoreModule()(p) {
+class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(p) {
   val io = new Bundle {
     val requestor = Vec(n, new TLBPTWIO).flip
     val mem = new HellaCacheIO
@@ -131,11 +134,18 @@ class PTW(n: Int)(implicit p: Parameters) extends CoreModule()(p) {
   io.mem.s1_kill := s1_kill
   io.mem.invalidate_lr := Bool(false)
 
+  val pmaPgLevelHomogeneous = (0 until pgLevels) map { i =>
+    TLBPageLookup(edge.manager.managers, xLen, p(CacheBlockBytes), BigInt(1) << (pgIdxBits + ((pgLevels - 1 - i) * pgLevelBits)))(pte_addr >> pgIdxBits << pgIdxBits).homogeneous
+  }
+  val pmaHomogeneous = pmaPgLevelHomogeneous(count)
+  val pmpHomogeneous = new PMPHomogeneityChecker(io.dpath.pmp).apply(pte_addr >> pgIdxBits << pgIdxBits, count)
+
   for (i <- 0 until io.requestor.size) {
     io.requestor(i).resp.valid := resp_valid(i)
     io.requestor(i).resp.bits.pte := r_pte
     io.requestor(i).resp.bits.level := count
     io.requestor(i).resp.bits.pte.ppn := pte_addr >> pgIdxBits
+    io.requestor(i).resp.bits.homogeneous := pmpHomogeneous && pmaHomogeneous
     io.requestor(i).ptbr := io.dpath.ptbr
     io.requestor(i).status := io.dpath.status
     io.requestor(i).pmp := io.dpath.pmp
@@ -195,6 +205,6 @@ trait CanHavePTW extends HasHellaCache {
 trait CanHavePTWModule extends HasHellaCacheModule {
   val outer: CanHavePTW
   val ptwPorts = ListBuffer(outer.dcache.module.io.ptw)
-  val ptwOpt = if (outer.usingPTW) { Some(Module(new PTW(outer.nPTWPorts)(outer.p))) } else None
+  val ptwOpt = if (outer.usingPTW) { Some(Module(new PTW(outer.nPTWPorts)(outer.dcache.node.edgesOut(0), outer.p))) } else None
   ptwOpt foreach { ptw => dcachePorts += ptw.io.mem }
 }

TLB.scala

@@ -73,14 +73,14 @@ class TLB(lgMaxSize: Int, entries: Int)(implicit edge: TLEdgeOut, p: Parameters)
   val refill_ppn = io.ptw.resp.bits.pte.ppn(ppnBits-1, 0)
   val do_refill = Bool(usingVM) && io.ptw.resp.valid
   val invalidate_refill = state.isOneOf(s_request /* don't care */, s_wait_invalidate)
-  val mpu_physaddr = Mux(do_refill, refill_ppn << pgIdxBits,
-    Cat(Mux(vm_enabled, ppns.last, vpn(ppnBits-1, 0)), io.req.bits.vaddr(pgIdxBits-1, 0)))
+  val mpu_ppn = Mux(do_refill, refill_ppn,
+    Mux(vm_enabled, ppns.last, vpn(ppnBits-1, 0)))
+  val mpu_physaddr = Cat(mpu_ppn, io.req.bits.vaddr(pgIdxBits-1, 0))
   val pmp = Module(new PMPChecker(lgMaxSize))
   pmp.io.addr := mpu_physaddr
   pmp.io.size := io.req.bits.size
   pmp.io.pmp := io.ptw.pmp
   pmp.io.prv := Mux(do_refill || io.req.bits.passthrough /* PTW */, PRV.S, priv)
-  pmp.io.pgLevel := io.ptw.resp.bits.level
   val legal_address = edge.manager.findSafe(mpu_physaddr).reduce(_||_)
   def fastCheck(member: TLManagerParameters => Boolean) =
     legal_address && Mux1H(edge.manager.findFast(mpu_physaddr), edge.manager.managers.map(m => Bool(member(m))))
@@ -88,15 +88,7 @@ class TLB(lgMaxSize: Int, entries: Int)(implicit edge: TLEdgeOut, p: Parameters)
   val prot_w = fastCheck(_.supportsPutFull) && pmp.io.w
   val prot_x = fastCheck(_.executable) && pmp.io.x
   val cacheable = fastCheck(_.supportsAcquireB)
-  val isSpecial = !pmp.io.homogeneous || {
-    val homogeneous = Wire(init = false.B)
-    for (i <- 0 until pgLevels) {
-      when (io.ptw.resp.bits.level >= i) {
-        homogeneous := TLBPageLookup(edge.manager.managers, xLen, p(CacheBlockBytes), BigInt(1) << (pgIdxBits + ((pgLevels - 1 - i) * pgLevelBits)))(mpu_physaddr).homogeneous
-      }
-    }
-    !homogeneous
-  }
+  val isSpecial = !io.ptw.resp.bits.homogeneous
 
   val lookup_tag = Cat(io.ptw.ptbr.asid, vpn(vpnBits-1,0))
   val hitsVec = (0 until totalEntries).map { i => vm_enabled && {