Pipeline D$ exception response into s2
parent 657f4d4e0c
commit 6de6f38894
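In short: the HellaCache's combinational exception port `xcpt` becomes a registered `s2_xcpt` that arrives with the stage-2 response. The caches register the TLB's exception response as the request moves from s1 to s2, gate `s2_valid` with it, and stop requiring the core to fold exceptions into `s1_valid_masked`; consumers (Rocket's writeback exception list, the PTW, SimpleHellaCacheIF) sample the bits one stage later. Below is a minimal sketch of the registering pattern, written in current chisel3 syntax rather than the compatibility API used in the diff; the names (`XcptLike`, `s1Advance`) are illustrative stand-ins, not the real HellaCache interface.

import chisel3._
import chisel3.util.RegEnable

// Stand-in for the pf/ae/ma sub-bundles of HellaCacheExceptions.
class XcptLike extends Bundle {
  val ld = Bool()
  val st = Bool()
}

class S2XcptSketch extends Module {
  val io = IO(new Bundle {
    val tlbReqValid = Input(Bool())        // s1: TLB was consulted
    val tlbRespMiss = Input(Bool())        // s1: TLB missed, no verdict yet
    val tlbResp     = Input(new XcptLike)  // s1: combinational exceptions
    val s1Advance   = Input(Bool())        // request moves from s1 to s2
    val s2Xcpt      = Output(new XcptLike) // registered, aligned with s2
  })

  // Only a resolved (non-miss) TLB access can raise an exception; register
  // the verdict so it lines up with the stage-2 response, mirroring
  //   io.cpu.s2_xcpt := RegEnable(Mux(tlb.io.req.valid && !tlb.io.resp.miss,
  //                                   tlb.io.resp, 0.U.asTypeOf(tlb.io.resp)),
  //                               s1_valid_not_nacked)
  val s1Xcpt = Mux(io.tlbReqValid && !io.tlbRespMiss,
                   io.tlbResp, 0.U.asTypeOf(new XcptLike))
  io.s2Xcpt := RegEnable(s1Xcpt, io.s1Advance)
}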
@@ -54,7 +54,7 @@ class HellaCacheArbiter(n: Int)(implicit p: Parameters) extends Module
     val resp = io.requestor(i).resp
     val tag_hit = io.mem.resp.bits.tag(log2Up(n)-1,0) === UInt(i)
     resp.valid := io.mem.resp.valid && tag_hit
-    io.requestor(i).xcpt := io.mem.xcpt
+    io.requestor(i).s2_xcpt := io.mem.s2_xcpt
     io.requestor(i).ordered := io.mem.ordered
     io.requestor(i).acquire := io.mem.acquire
     io.requestor(i).release := io.mem.release
@@ -61,7 +61,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
   val s1_probe = Reg(next=tl_out.b.fire(), init=Bool(false))
   val probe_bits = RegEnable(tl_out.b.bits, tl_out.b.fire()) // TODO has data now :(
   val s1_nack = Wire(init=Bool(false))
-  val s1_valid_masked = s1_valid && !io.cpu.s1_kill && !io.cpu.xcpt.asUInt.orR
+  val s1_valid_masked = s1_valid && !io.cpu.s1_kill
   val s1_valid_not_nacked = s1_valid && !s1_nack
   val s1_req = Reg(io.cpu.req.bits)
   when (metaReadArb.io.out.valid) {
@@ -103,7 +103,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
   // address translation
   val tlb = Module(new TLB(log2Ceil(coreDataBytes), nTLBEntries))
   io.ptw <> tlb.io.ptw
-  io.cpu.xcpt := tlb.io.resp
+  io.cpu.s2_xcpt := RegEnable(Mux(tlb.io.req.valid && !tlb.io.resp.miss, tlb.io.resp, 0.U.asTypeOf(tlb.io.resp)), s1_valid_not_nacked)
   tlb.io.req.valid := s1_valid && !io.cpu.s1_kill && (s1_readwrite || s1_sfence)
   tlb.io.req.bits.sfence.valid := s1_sfence
   tlb.io.req.bits.sfence.bits.rs1 := s1_req.typ(0)
@@ -142,7 +142,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
   val s1_data_way = Mux(inWriteback, releaseWay, s1_hit_way)
   val s1_data = Mux1H(s1_data_way, data.io.resp) // retime into s2 if critical

-  val s2_valid = Reg(next=s1_valid_masked && !s1_sfence, init=Bool(false))
+  val s2_valid = Reg(next=s1_valid_masked && !s1_sfence, init=Bool(false)) && !io.cpu.s2_xcpt.asUInt.orR
   val s2_probe = Reg(next=s1_probe, init=Bool(false))
   val releaseInFlight = s1_probe || s2_probe || release_state =/= s_ready
   val s2_valid_masked = s2_valid && Reg(next = !s1_nack)
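This hunk (and the matching NonBlockingDCache hunk below) is the other half of the change: stage-2 validity is now gated with the registered exception, so an access that faulted in the TLB performs no cache action in s2. A standalone sketch of that gate, again with stand-in names:

import chisel3._

class S2GateSketch extends Module {
  val io = IO(new Bundle {
    val s1ValidMasked = Input(Bool())  // s1 valid, after s1_kill masking
    val s2XcptAny     = Input(Bool())  // analogue of io.cpu.s2_xcpt.asUInt.orR
    val s2Valid       = Output(Bool())
  })
  // Mirror of: Reg(next=s1_valid_masked, init=Bool(false)) && !s2_xcpt.orR
  io.s2Valid := RegNext(io.s1ValidMasked, false.B) && !io.s2XcptAny
}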
@@ -202,10 +202,10 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {

   if (usingDataScratchpad) {
     require(!usingVM) // therefore, req.phys means this is a slave-port access
-    val s1_isSlavePortAccess = s1_req.phys
-    when (s1_isSlavePortAccess) {
-      assert(!s1_valid || s1_hit_state.isValid())
-      io.cpu.xcpt := 0.U.asTypeOf(io.cpu.xcpt)
+    val s2_isSlavePortAccess = s2_req.phys
+    when (s2_isSlavePortAccess) {
+      assert(!s2_valid || s2_hit_valid)
+      io.cpu.s2_xcpt := 0.U.asTypeOf(io.cpu.s2_xcpt)
     }
     assert(!(s2_valid_masked && s2_req.cmd.isOneOf(M_XLR, M_XSC)))
   }
@@ -130,7 +130,7 @@ class HellaCacheIO(implicit p: Parameters) extends CoreBundle()(p) {

  val resp = Valid(new HellaCacheResp).flip
  val replay_next = Bool(INPUT)
- val xcpt = (new HellaCacheExceptions).asInput
+ val s2_xcpt = (new HellaCacheExceptions).asInput
  val invalidate_lr = Bool(OUTPUT)
  val ordered = Bool(INPUT)
 }
@@ -677,12 +677,12 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule
   io.cpu.req.ready := Bool(true)
   val s1_valid = Reg(next=io.cpu.req.fire(), init=Bool(false))
   val s1_req = Reg(io.cpu.req.bits)
-  val s1_valid_masked = s1_valid && !io.cpu.s1_kill && !io.cpu.xcpt.asUInt.orR
+  val s1_valid_masked = s1_valid && !io.cpu.s1_kill
   val s1_replay = Reg(init=Bool(false))
   val s1_clk_en = Reg(Bool())
   val s1_sfence = s1_req.cmd === M_SFENCE

-  val s2_valid = Reg(next=s1_valid_masked && !s1_sfence, init=Bool(false))
+  val s2_valid = Reg(next=s1_valid_masked && !s1_sfence, init=Bool(false)) && !io.cpu.s2_xcpt.asUInt.orR
   val s2_req = Reg(io.cpu.req.bits)
   val s2_replay = Reg(next=s1_replay, init=Bool(false)) && s2_req.cmd =/= M_FLUSH_ALL
   val s2_recycle = Wire(Bool())
@@ -699,7 +699,7 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule

   val dtlb = Module(new TLB(log2Ceil(coreDataBytes), nTLBEntries))
   io.ptw <> dtlb.io.ptw
-  io.cpu.xcpt := dtlb.io.resp
+  io.cpu.s2_xcpt := RegEnable(Mux(dtlb.io.req.valid && !dtlb.io.resp.miss, dtlb.io.resp, 0.U.asTypeOf(dtlb.io.resp)), s1_clk_en)
   dtlb.io.req.valid := s1_valid && !io.cpu.s1_kill && (s1_readwrite || s1_sfence)
   dtlb.io.req.bits.sfence.valid := s1_sfence
   dtlb.io.req.bits.sfence.bits.rs1 := s1_req.typ(0)
@@ -78,7 +78,6 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
   val count = Reg(UInt(width = log2Up(pgLevels)))
   val s1_kill = Reg(next = Bool(false))
   val resp_valid = Reg(next = Vec.fill(io.requestor.size)(Bool(false)))
-  val ae = Reg(next = io.mem.xcpt.ae.ld)
   val resp_ae = Reg(Bool())

   val r_req = Reg(new PTWReq)
@@ -189,7 +188,7 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
       resp_valid(r_req_dest) := true
     }
   }
-  when (ae) {
+  when (io.mem.s2_xcpt.ae.ld) {
     resp_ae := true
     state := s_ready
     resp_valid(r_req_dest) := true
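With the exception now registered inside the cache, the PTW's locally delayed copy (`val ae = Reg(next = io.mem.xcpt.ae.ld)`, removed above) is redundant: `io.mem.s2_xcpt.ae.ld` already carries stage-2 timing, so the walker tests it directly. A toy sketch of the simplification, with hypothetical names rather than the real PTW interface:

import chisel3._

class AeSketch extends Module {
  val io = IO(new Bundle {
    val s2AeLd = Input(Bool())   // analogue of io.mem.s2_xcpt.ae.ld
    val respAe = Output(Bool())
  })
  // Before: val ae = RegNext(s1AeLd); when (ae) { ... }
  // After: the input already has s2 timing, so no extra register is needed.
  val resp = RegInit(false.B)
  when (io.s2AeLd) { resp := true.B }
  io.respAe := resp
}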
@@ -401,9 +401,7 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p)
   val (mem_new_xcpt, mem_new_cause) = checkExceptions(List(
     (mem_debug_breakpoint, UInt(CSR.debugTriggerCause)),
     (mem_breakpoint, UInt(Causes.breakpoint)),
-    (mem_npc_misaligned, UInt(Causes.misaligned_fetch)),
-    (mem_ctrl.mem && io.dmem.xcpt.ma.st, UInt(Causes.misaligned_store)),
-    (mem_ctrl.mem && io.dmem.xcpt.ma.ld, UInt(Causes.misaligned_load))))
+    (mem_npc_misaligned, UInt(Causes.misaligned_fetch))))

   val (mem_xcpt, mem_cause) = checkExceptions(List(
     (mem_reg_xcpt_interrupt || mem_reg_xcpt, mem_reg_cause),
@@ -439,10 +437,12 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p)

   val (wb_xcpt, wb_cause) = checkExceptions(List(
     (wb_reg_xcpt, wb_reg_cause),
-    (wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.pf.st, mem_pc_valid), UInt(Causes.store_page_fault)),
-    (wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.pf.ld, mem_pc_valid), UInt(Causes.load_page_fault)),
-    (wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.ae.st, mem_pc_valid), UInt(Causes.store_access)),
-    (wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.ae.ld, mem_pc_valid), UInt(Causes.load_access))
+    (wb_reg_valid && wb_ctrl.mem && io.dmem.s2_xcpt.ma.st, UInt(Causes.misaligned_store)),
+    (wb_reg_valid && wb_ctrl.mem && io.dmem.s2_xcpt.ma.ld, UInt(Causes.misaligned_load)),
+    (wb_reg_valid && wb_ctrl.mem && io.dmem.s2_xcpt.pf.st, UInt(Causes.store_page_fault)),
+    (wb_reg_valid && wb_ctrl.mem && io.dmem.s2_xcpt.pf.ld, UInt(Causes.load_page_fault)),
+    (wb_reg_valid && wb_ctrl.mem && io.dmem.s2_xcpt.ae.st, UInt(Causes.store_access)),
+    (wb_reg_valid && wb_ctrl.mem && io.dmem.s2_xcpt.ae.ld, UInt(Causes.load_access))
   ))

   val wb_wxd = wb_reg_valid && wb_ctrl.wxd
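The misaligned causes move out of the MEM-stage list (previous hunk) and into this writeback list, since all D$ exception bits now arrive together via `s2_xcpt`. Both lists go through `checkExceptions`; a sketch of that idiom, assuming the helper is a priority pick over (condition, cause) pairs, highest priority first:

import chisel3._
import chisel3.util.PriorityMux

// Sketch of the checkExceptions idiom: returns whether any condition
// fired, and the cause of the first (highest-priority) one that did.
object CheckExceptionsSketch {
  def apply(x: Seq[(Bool, UInt)]): (Bool, UInt) =
    (x.map(_._1).reduce(_ || _), PriorityMux(x))
}

Because selection is by list order, the order of the entries fixes which cause is reported when several exception bits are set for the same access.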
@@ -628,9 +628,6 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p)
   io.dmem.invalidate_lr := wb_xcpt
   io.dmem.s1_data := Mux(mem_ctrl.fp, io.fpu.store_data, mem_reg_rs2)
   io.dmem.s1_kill := killm_common || mem_breakpoint
-  when (mem_ctrl.mem && mem_xcpt && !io.dmem.s1_kill) {
-    assert(io.dmem.xcpt.asUInt.orR) // make sure s1_kill is exhaustive
-  }

   io.rocc.cmd.valid := wb_reg_valid && wb_ctrl.rocc && !replay_wb_common
   io.rocc.exception := wb_xcpt && csr.io.status.xs.orR
@@ -130,8 +130,6 @@ class SimpleHellaCacheIF(implicit p: Parameters) extends Module
   replayq.io.resp := io.cache.resp
   io.requestor.resp := io.cache.resp

-  assert(!Reg(next = io.cache.req.fire()) ||
-    !(io.cache.xcpt.ma.ld || io.cache.xcpt.ma.st ||
-      io.cache.xcpt.pf.ld || io.cache.xcpt.pf.st),
+  assert(!RegNext(RegNext(io.cache.req.fire())) || !io.cache.s2_xcpt.asUInt.orR,
     "SimpleHellaCacheIF exception")
 }
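Since `s2_xcpt` is reported two cycles after a request fires (s0 request, s1 translate, s2 report), the interface assertion gains a second register to line up with it; it still expresses that requests sent through SimpleHellaCacheIF must never fault. A self-contained sketch of that timing check, using stand-in signals:

import chisel3._

class DelayedXcptCheck extends Module {
  val io = IO(new Bundle {
    val reqFire = Input(Bool())    // request accepted this cycle (s0)
    val s2Xcpt  = Input(UInt(6.W)) // stand-in for the exception bundle bits
  })
  // Two registers move the check from request time to s2, matching the
  // updated assert in SimpleHellaCacheIF above.
  val inS2 = RegNext(RegNext(io.reqFire, false.B), false.B)
  assert(!inS2 || !io.s2Xcpt.orR, "SimpleHellaCacheIF exception")
}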