
[rocket] Automatically kill D$ access on address exceptions

Doing this internally to the cache eliminates a long control path
from the cache to the core and back to the cache.
Andrew Waterman 2016-08-02 15:24:19 -07:00
parent b54db0ba23
commit 5d4f6383f2
3 changed files with 9 additions and 15 deletions
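
The gist of the three diffs below: the D$ already reports address exceptions (misaligned access, TLB page fault) to the core in stage 1; this change makes it also mask its own stage-1 valid with those same bits, so an excepting access dies inside the cache and the core no longer has to route the exception back into s1_kill. A rough, self-contained sketch of that gating in current Chisel syntax follows — the module, bundle, and port names are invented for illustration; only the hunks below are the actual rocket-chip code.

import chisel3._

// Load/store flag pair, standing in for rocket-chip's per-cause exception bits.
class LdStFlags extends Bundle { val ld = Bool(); val st = Bool() }

// Simplified stand-in for the cache's exception port (io.cpu.xcpt in the diffs).
class AddrExceptions extends Bundle {
  val ma = new LdStFlags // misaligned address
  val pf = new LdStFlags // page fault from the TLB
}

class S1KillSketch extends Module {
  val io = IO(new Bundle {
    val req_valid       = Input(Bool())  // request accepted last cycle
    val s1_kill         = Input(Bool())  // core-driven kill (flushes, breakpoints)
    val s1_read         = Input(Bool())
    val s1_write        = Input(Bool())
    val misaligned      = Input(Bool())
    val tlb_xcpt_ld     = Input(Bool())
    val tlb_xcpt_st     = Input(Bool())
    val xcpt            = Output(new AddrExceptions)
    val s1_valid_masked = Output(Bool())
  })

  val s1_valid = RegNext(io.req_valid, false.B)

  // Exceptions are still reported to the core in stage 1, as before...
  io.xcpt.ma.ld := io.s1_read  && io.misaligned
  io.xcpt.ma.st := io.s1_write && io.misaligned
  io.xcpt.pf.ld := io.s1_read  && io.tlb_xcpt_ld
  io.xcpt.pf.st := io.s1_write && io.tlb_xcpt_st

  // ...but the cache now also folds them into its own stage-1 valid, so an
  // excepting access is dropped internally instead of waiting for a kill
  // signal to travel out to the core and back.
  io.s1_valid_masked := s1_valid && !io.s1_kill && !io.xcpt.asUInt.orR
}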


@@ -70,7 +70,7 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   val s1_probe = Reg(next=io.mem.probe.fire(), init=Bool(false))
   val probe_bits = RegEnable(io.mem.probe.bits, io.mem.probe.fire())
   val s1_nack = Wire(init=Bool(false))
-  val s1_valid_masked = s1_valid && !io.cpu.s1_kill
+  val s1_valid_masked = s1_valid && !io.cpu.s1_kill && !io.cpu.xcpt.asUInt.orR
   val s1_valid_not_nacked = s1_valid_masked && !s1_nack
   val s1_req = Reg(io.cpu.req.bits)
   when (metaReadArb.io.out.valid) {
@@ -166,10 +166,6 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   io.cpu.xcpt.ma.st := s1_write && misaligned
   io.cpu.xcpt.pf.ld := s1_read && tlb.io.resp.xcpt_ld
   io.cpu.xcpt.pf.st := s1_write && tlb.io.resp.xcpt_st
-  assert(!(Reg(next=
-    (io.cpu.xcpt.ma.ld || io.cpu.xcpt.ma.st || io.cpu.xcpt.pf.ld || io.cpu.xcpt.pf.st)) &&
-    s2_valid_masked),
-    "DCache exception occurred - cache response not killed.")
 
   // load reservations
   val s2_lr = Bool(usingAtomics) && s2_req.cmd === M_XLR


@@ -807,7 +807,7 @@ class HellaCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   io.cpu.req.ready := Bool(true)
   val s1_valid = Reg(next=io.cpu.req.fire(), init=Bool(false))
   val s1_req = Reg(io.cpu.req.bits)
-  val s1_valid_masked = s1_valid && !io.cpu.s1_kill
+  val s1_valid_masked = s1_valid && !io.cpu.s1_kill && !io.cpu.xcpt.asUInt.orR
   val s1_replay = Reg(init=Bool(false))
   val s1_clk_en = Reg(Bool())
@@ -872,11 +872,6 @@ class HellaCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   io.cpu.xcpt.pf.ld := s1_read && dtlb.io.resp.xcpt_ld
   io.cpu.xcpt.pf.st := s1_write && dtlb.io.resp.xcpt_st
-  assert (!(Reg(next=
-    (io.cpu.xcpt.ma.ld || io.cpu.xcpt.ma.st || io.cpu.xcpt.pf.ld || io.cpu.xcpt.pf.st)) &&
-    s2_valid_masked),
-    "DCache exception occurred - cache response not killed.")
 
   // tags
   def onReset = L1Metadata(UInt(0), ClientMetadata.onReset)
   val meta = Module(new MetadataArray(onReset _))


@@ -411,9 +411,9 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p) {
     }
   }
+  val mem_breakpoint = (mem_reg_load && bpu.io.xcpt_ld) || (mem_reg_store && bpu.io.xcpt_st)
   val (mem_new_xcpt, mem_new_cause) = checkExceptions(List(
-    (mem_reg_load && bpu.io.xcpt_ld, UInt(Causes.breakpoint)),
-    (mem_reg_store && bpu.io.xcpt_st, UInt(Causes.breakpoint)),
+    (mem_breakpoint, UInt(Causes.breakpoint)),
     (mem_npc_misaligned, UInt(Causes.misaligned_fetch)),
     (mem_ctrl.mem && io.dmem.xcpt.ma.st, UInt(Causes.misaligned_store)),
     (mem_ctrl.mem && io.dmem.xcpt.ma.ld, UInt(Causes.misaligned_load)),
@@ -617,9 +617,12 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p) {
   io.dmem.req.bits.typ := ex_ctrl.mem_type
   io.dmem.req.bits.phys := Bool(false)
   io.dmem.req.bits.addr := encodeVirtualAddress(ex_rs(0), alu.io.adder_out)
-  io.dmem.s1_kill := killm_common || mem_xcpt
-  io.dmem.s1_data := Mux(mem_ctrl.fp, io.fpu.store_data, mem_reg_rs2)
   io.dmem.invalidate_lr := wb_xcpt
+  io.dmem.s1_data := Mux(mem_ctrl.fp, io.fpu.store_data, mem_reg_rs2)
+  io.dmem.s1_kill := killm_common || mem_breakpoint
+  when (mem_xcpt && !io.dmem.s1_kill) {
+    assert(io.dmem.xcpt.asUInt.orR) // make sure s1_kill is exhaustive
+  }
 
   io.rocc.cmd.valid := wb_reg_valid && wb_ctrl.rocc && !replay_wb_common
   io.rocc.exception := wb_xcpt && csr.io.status.xs.orR
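
On the core side, this is why s1_kill now folds in only killm_common and mem_breakpoint: breakpoints are the one M-stage memory exception the cache cannot see, while every other cause (misaligned address, page fault) is visible on io.dmem.xcpt and killed inside the cache by the hunks above. The new when/assert documents that invariant. A minimal sketch of the same check, again with invented stand-in signals rather than the real Rocket pipeline state:

import chisel3._

class S1KillCoreSideSketch extends Module {
  val io = IO(new Bundle {
    val killm_common  = Input(Bool()) // flushes/replays unrelated to exceptions
    val mem_reg_load  = Input(Bool()) // M-stage instruction is a load
    val mem_reg_store = Input(Bool()) // M-stage instruction is a store
    val bp_xcpt_ld    = Input(Bool()) // breakpoint unit matched a load
    val bp_xcpt_st    = Input(Bool()) // breakpoint unit matched a store
    val mem_xcpt      = Input(Bool()) // any exception recognized at M
    val dmem_xcpt_any = Input(Bool()) // OR-reduction of the cache's exception port
    val dmem_s1_kill  = Output(Bool())
  })

  // Breakpoints are the only memory-related cause the cache cannot detect,
  // so they are all that remains of the exception term in s1_kill.
  val mem_breakpoint = (io.mem_reg_load && io.bp_xcpt_ld) ||
                       (io.mem_reg_store && io.bp_xcpt_st)
  io.dmem_s1_kill := io.killm_common || mem_breakpoint

  // If an exception is taken at M but the core did not kill the access itself,
  // it must be one the cache saw on its exception port and killed internally.
  when (io.mem_xcpt && !io.dmem_s1_kill) {
    assert(io.dmem_xcpt_any, "s1_kill is not exhaustive")
  }
}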