
fix D$ store-upgrade bug

Loads to the same address as stores that cause an upgrade could return the old value.
Andrew Waterman 2012-03-10 15:50:10 -08:00
parent 4f4b990a4f
commit 8ffdac9526
3 changed files with 6 additions and 4 deletions


@@ -7,7 +7,7 @@ object Constants
 {
   val HAVE_RVC = false
   val HAVE_FPU = true
-  val HAVE_VEC = true
+  val HAVE_VEC = false
   val BR_N = UFix(0, 4);
   val BR_EQ = UFix(1, 4);


@@ -292,6 +292,7 @@ class MSHR(id: Int) extends Component with FourStateCoherence {
 class MSHRFile extends Component {
   val io = new Bundle {
     val req = (new ioDecoupled) { new MSHRReq }.flip
+    val secondary_miss = Bool(OUTPUT)
     val mem_resp_idx = Bits(IDX_BITS, OUTPUT)
     val mem_resp_offset = Bits(log2up(REFILL_CYCLES), OUTPUT)
@@ -376,6 +377,7 @@ class MSHRFile extends Component {
   wb_req_arb.io.out <> io.wb_req
   io.req.ready := Mux(idx_match, tag_match && sec_rdy, pri_rdy) && sdq_rdy
+  io.secondary_miss := idx_match
   io.mem_resp_idx := mem_resp_mux.io.out.inner_req.idx
   io.mem_resp_offset := mem_resp_mux.io.out.inner_req.offset
   io.mem_resp_way_oh := mem_resp_mux.io.out.way_en
@@ -930,9 +932,9 @@ class HellaCacheUniproc extends HellaCache with FourStateCoherence {
   io.cpu.req_rdy := flusher.io.req.ready && !(r_cpu_req_val_ && r_req_flush) && !pending_fence
   io.cpu.resp_nack := r_cpu_req_val_ && !io.cpu.req_kill && nack
-  io.cpu.resp_val := (tag_hit && !nack && r_req_read) || mshr.io.cpu_resp_val
+  io.cpu.resp_val := (tag_hit && !mshr.io.secondary_miss && !nack && r_req_read) || mshr.io.cpu_resp_val
   io.cpu.resp_replay := mshr.io.cpu_resp_val
-  io.cpu.resp_miss := r_cpu_req_val_ && !tag_match && r_req_read
+  io.cpu.resp_miss := r_cpu_req_val_ && (!tag_match || mshr.io.secondary_miss) && r_req_read
   io.cpu.resp_tag := Mux(mshr.io.cpu_resp_val, mshr.io.cpu_resp_tag, r_cpu_req_tag)
   io.cpu.resp_type := loadgen.io.typ
   io.cpu.resp_data := loadgen.io.dout


@@ -26,7 +26,7 @@ class Top() extends Component {
   arbiter.io.requestor(1) <> icache_pf.io.mem
   arbiter.io.requestor(2) <> htif.io.mem
-  val hub = new CoherenceHubNull
+  val hub = new CoherenceHubBroadcast
   // connect tile to hub
   hub.io.tiles(0).xact_init <> Queue(arbiter.io.mem.xact_init)
   arbiter.io.mem.xact_abort <> Queue(hub.io.tiles(0).xact_abort)
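
For intuition, here is a minimal plain-Scala model of the resp_val change above. It is a sketch, not the Chisel RTL: Probe, respValBefore, and respValAfter are illustrative names I introduce here; only tag_hit, secondary_miss, nack, and r_req_read correspond to signals in the diff. The reading it models: a store that tag-hits a line without write permission allocates an MSHR for the upgrade, and until that completes, a load hitting the same index must be squashed and replayed rather than answered from the not-yet-updated data array.

// Hypothetical model of the response logic before and after this
// commit; names other than the mirrored signals are illustrative.
object SecondaryMissDemo {
  case class Probe(tagHit: Boolean,        // load's tag matches a valid line
                   secondaryMiss: Boolean, // MSHR file reports an index match
                   nack: Boolean,          // structural-hazard nack
                   isRead: Boolean)        // r_req_read

  // Before the fix: a tag hit answers immediately, even while an MSHR
  // is upgrading the same line for an earlier store, so the load can
  // observe the pre-store value.
  def respValBefore(p: Probe): Boolean =
    p.tagHit && !p.nack && p.isRead

  // After the fix: the hit is squashed while secondary_miss is high;
  // the load takes the miss path and is replayed once the upgrade
  // (and the pending store) have completed.
  def respValAfter(p: Probe): Boolean =
    p.tagHit && !p.secondaryMiss && !p.nack && p.isRead

  def main(args: Array[String]): Unit = {
    val loadDuringUpgrade = Probe(tagHit = true, secondaryMiss = true,
                                  nack = false, isRead = true)
    println(respValBefore(loadDuringUpgrade)) // true:  stale data returned
    println(respValAfter(loadDuringUpgrade))  // false: load replays later
  }
}

Note the design choice visible in the MSHRFile hunk: secondary_miss reuses the existing idx_match comparison, so any index match squashes the hit, which appears conservative (it also catches same-index, different-tag requests) but needs no extra matching hardware.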