Pass correct access size information to PMP checker
commit f0796f0509
parent a6874c03f7
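The TLB previously reported a fixed access size to the PMP checker (pmp.io.size := 2), so permission checks did not reflect the actual footprint of each access. With this change the TLB is parameterized by lgMaxSize, the log2 of the largest access it must handle; the request bundle gains a size field carrying log2(bytes) of each access; and that value is forwarded to the PMP checker. A minimal usage sketch, assuming the Chisel compatibility API used in this code base and hypothetical s1_vaddr/s1_access_size signals describing the in-flight access:

    // Instantiate a data-side TLB whose PMP checker can validate accesses
    // up to coreDataBytes wide (e.g. 8 bytes on a 64-bit datapath).
    val tlb = Module(new TLB(log2Ceil(coreDataBytes), nTLBEntries))
    tlb.io.req.bits.vaddr := s1_vaddr        // hypothetical signal: virtual address of the access
    tlb.io.req.bits.size  := s1_access_size  // hypothetical signal: log2 of the access size in bytes
    // Inside the TLB this value reaches the PMP checker as pmp.io.size,
    // replacing the previous hard-coded size of 2 (a 4-byte access).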
@@ -103,7 +103,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
   when (!metaReadArb.io.in(2).ready) { io.cpu.req.ready := false }
 
   // address translation
-  val tlb = Module(new TLB(nTLBEntries))
+  val tlb = Module(new TLB(log2Ceil(coreDataBytes), nTLBEntries))
   io.ptw <> tlb.io.ptw
   tlb.io.req.valid := s1_valid_masked && (s1_readwrite || s1_sfence)
   tlb.io.req.bits.sfence.valid := s1_sfence
@@ -114,6 +114,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
   tlb.io.req.bits.vaddr := s1_req.addr
   tlb.io.req.bits.instruction := false
   tlb.io.req.bits.store := s1_write
+  tlb.io.req.bits.size := s1_req.typ
   when (!tlb.io.req.ready && !io.cpu.req.bits.phys) { io.cpu.req.ready := false }
   when (s1_valid && s1_readwrite && tlb.io.resp.miss) { s1_nack := true }
 
@@ -62,7 +62,7 @@ class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
   implicit val edge = outer.node.edgesOut(0)
   val icache = outer.icache.module
 
-  val tlb = Module(new TLB(nTLBEntries))
+  val tlb = Module(new TLB(log2Ceil(coreInstBytes*fetchWidth), nTLBEntries))
 
   val s1_pc_ = Reg(UInt(width=vaddrBitsExtended))
   val s1_pc = ~(~s1_pc_ | (coreInstBytes-1)) // discard PC LSBS (this propagates down the pipeline)
@@ -134,6 +134,7 @@ class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
   tlb.io.req.bits.instruction := Bool(true)
   tlb.io.req.bits.store := Bool(false)
   tlb.io.req.bits.sfence := io.cpu.sfence
+  tlb.io.req.bits.size := log2Ceil(coreInstBytes*fetchWidth)
 
   icache.io.req.valid := !stall && !s0_same_block
   icache.io.req.bits.addr := io.cpu.npc
@@ -697,7 +697,7 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule
   val s1_write = isWrite(s1_req.cmd)
   val s1_readwrite = s1_read || s1_write || isPrefetch(s1_req.cmd)
 
-  val dtlb = Module(new TLB(nTLBEntries))
+  val dtlb = Module(new TLB(log2Ceil(coreDataBytes), nTLBEntries))
   io.ptw <> dtlb.io.ptw
   dtlb.io.req.valid := s1_valid_masked && (s1_readwrite || s1_sfence)
   dtlb.io.req.bits.sfence.valid := s1_sfence
@@ -708,6 +708,7 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule
   dtlb.io.req.bits.vaddr := s1_req.addr
   dtlb.io.req.bits.instruction := Bool(false)
   dtlb.io.req.bits.store := s1_write
+  dtlb.io.req.bits.size := s1_req.typ
   when (!dtlb.io.req.ready && !io.cpu.req.bits.phys) { io.cpu.req.ready := Bool(false) }
 
   when (io.cpu.req.valid) {
@@ -22,12 +22,15 @@ class SFenceReq(implicit p: Parameters) extends CoreBundle()(p) {
   val asid = UInt(width = asIdBits max 1) // TODO zero-width
 }
 
-class TLBReq(implicit p: Parameters) extends CoreBundle()(p) {
+class TLBReq(lgMaxSize: Int)(implicit p: Parameters) extends CoreBundle()(p) {
   val vaddr = UInt(width = vaddrBitsExtended)
   val passthrough = Bool()
   val instruction = Bool()
   val store = Bool()
   val sfence = Valid(new SFenceReq)
+  val size = UInt(width = log2Ceil(lgMaxSize + 1))
+
+  override def cloneType = new TLBReq(lgMaxSize).asInstanceOf[this.type]
 }
 
 class TLBResp(implicit p: Parameters) extends CoreBundle()(p) {
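The width of the new size field follows from lgMaxSize. A worked example of the arithmetic, assuming a 64-bit datapath where coreDataBytes = 8 and using Chisel's log2Ceil:

    val lgMaxSize = log2Ceil(8)             // = 3: the largest access is 2^3 = 8 bytes
    val sizeWidth = log2Ceil(lgMaxSize + 1) // = 2: enough bits to encode sizes 0..3

So a 1-, 2-, 4-, or 8-byte access encodes its size as 0, 1, 2, or 3, and the frontend's constant log2Ceil(coreInstBytes*fetchWidth) fits the same encoding.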
@@ -40,9 +43,9 @@ class TLBResp(implicit p: Parameters) extends CoreBundle()(p) {
   val cacheable = Bool(OUTPUT)
 }
 
-class TLB(entries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(p) {
+class TLB(lgMaxSize: Int, entries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(p) {
   val io = new Bundle {
-    val req = Decoupled(new TLBReq).flip
+    val req = Decoupled(new TLBReq(lgMaxSize)).flip
     val resp = new TLBResp
     val ptw = new TLBPTWIO
   }
@@ -58,7 +61,7 @@ class TLB(entries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreMod
   val state = Reg(init=s_ready)
   val r_refill_tag = Reg(UInt(width = asIdBits + vpnBits))
   val r_refill_waddr = Reg(UInt(width = log2Ceil(normalEntries)))
-  val r_req = Reg(new TLBReq)
+  val r_req = Reg(new TLBReq(lgMaxSize))
 
   val do_mprv = io.ptw.status.mprv && !io.req.bits.instruction
   val priv = Mux(do_mprv, io.ptw.status.mpp, io.ptw.status.prv)
@@ -74,9 +77,9 @@ class TLB(entries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreMod
   val mpu_ppn = Mux(do_refill, refill_ppn,
                 Mux(vm_enabled, ppns.last, vpn(ppnBits-1, 0)))
   val mpu_physaddr = Cat(mpu_ppn, io.req.bits.vaddr(pgIdxBits-1, 0))
-  val pmp = Module(new PMPChecker(8))
+  val pmp = Module(new PMPChecker(lgMaxSize))
   pmp.io.addr := mpu_physaddr
-  pmp.io.size := 2
+  pmp.io.size := io.req.bits.size
   pmp.io.pmp := io.ptw.pmp
   pmp.io.prv := Mux(io.req.bits.passthrough /* PTW */, PRV.S, priv)
   val legal_address = edge.manager.findSafe(mpu_physaddr).reduce(_||_)
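Parameterizing PMPChecker with lgMaxSize matters because a PMP check has to consider the whole 2^size-byte footprint of an access, not just its base address. The sketch below only illustrates that containment idea under assumed names (pmpBase and pmpMask as hypothetical decoded region bounds); it is not the PMPChecker implementation:

    // A naturally aligned access of 2^size bytes at addr lies within the
    // region iff its first and last bytes both match the region's base/mask.
    val lastByte  = addr | ((UInt(1) << size) - UInt(1))
    val contained = ((addr & pmpMask) === pmpBase) && ((lastByte & pmpMask) === pmpBase)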