
memory system bug fixes

Andrew Waterman 2012-07-26 00:03:55 -07:00
parent 3a2b305ddf
commit a5bea4364f
3 changed files with 27 additions and 21 deletions


@@ -107,6 +107,7 @@ class rocketHTIF(w: Int, ncores: Int, co: CoherencePolicyWithUncached) extends C
}
io.mem.xact_rep.ready := Bool(true)
when (io.mem.xact_abort.valid) { mem_nacked := Bool(true) }
io.mem.xact_abort.ready := Bool(true)
val state_rx :: state_pcr_req :: state_pcr_resp :: state_mem_req :: state_mem_wdata :: state_mem_wresp :: state_mem_rdata :: state_mem_finish :: state_tx :: Nil = Enum(9) { UFix() }
val state = Reg(resetVal = state_rx)
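This hunk ties xact_abort.ready high so the HTIF never back-pressures an abort, while the existing when() latches the nack into mem_nacked for a later retry. A minimal sketch of that always-ready sink pattern, written in modern Chisel rather than the 2012 Chisel 2 used here, with hypothetical module and port names:

import chisel3._
import chisel3.util._

// Sink for a channel that must never stall its sender: ready is tied high and
// the event is latched so a state machine can react later (e.g. retry a request).
class AbortSink extends Module {
  val io = IO(new Bundle {
    val abort  = Flipped(Decoupled(Bool()))
    val nacked = Output(Bool())
  })
  val nacked = RegInit(false.B)
  io.abort.ready := true.B
  when (io.abort.valid) { nacked := true.B }
  io.nacked := nacked
}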
@@ -119,7 +120,8 @@ class rocketHTIF(w: Int, ncores: Int, co: CoherencePolicyWithUncached) extends C
}
val mem_cnt = Reg(resetVal = UFix(0, log2Up(REFILL_CYCLES)))
when (state === state_mem_req && io.mem.xact_init.ready) {
val x_init = new queue(1)(new TransactionInit)
when (state === state_mem_req && x_init.io.enq.ready) {
state := Mux(cmd === cmd_writemem, state_mem_wdata, state_mem_rdata)
}
when (state === state_mem_wdata && io.mem.xact_init_data.ready) {
@@ -172,9 +174,10 @@ class rocketHTIF(w: Int, ncores: Int, co: CoherencePolicyWithUncached) extends C
}
mem_req_data = Cat(packet_ram(idx), mem_req_data)
}
io.mem.xact_init.valid := state === state_mem_req
io.mem.xact_init.bits.x_type := Mux(cmd === cmd_writemem, co.getTransactionInitTypeOnUncachedWrite, co.getTransactionInitTypeOnUncachedRead)
io.mem.xact_init.bits.address := addr.toUFix >> UFix(OFFSET_BITS-3)
x_init.io.enq.valid := state === state_mem_req
x_init.io.enq.bits.x_type := Mux(cmd === cmd_writemem, co.getTransactionInitTypeOnUncachedWrite, co.getTransactionInitTypeOnUncachedRead)
x_init.io.enq.bits.address := addr.toUFix >> UFix(OFFSET_BITS-3)
io.mem.xact_init <> x_init.io.deq
io.mem.xact_init_data.valid:= state === state_mem_wdata
io.mem.xact_init_data.bits.data := mem_req_data
io.mem.xact_finish.valid := (state === state_mem_finish) && mem_needs_ack
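Taken together, these hunks put a one-entry queue (x_init) between the HTIF state machine and io.mem.xact_init: the FSM now advances on the queue's enq.ready, and the queue's deq side is bulk-connected to the memory port, so the FSM no longer handshakes directly against the memory system's ready. A minimal sketch of the same buffering pattern, in modern Chisel syntax with made-up names and payload width:

import chisel3._
import chisel3.util._

// A 1-entry Queue decouples a request producer from its consumer: the producer
// handshakes against the queue's enq.ready instead of the downstream ready.
class ReqBuffer extends Module {
  val io = IO(new Bundle {
    val enq = Flipped(Decoupled(UInt(32.W)))   // driven by the FSM
    val deq = Decoupled(UInt(32.W))            // connected to the memory port
  })
  io.deq <> Queue(io.enq, 1)
}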


@@ -378,7 +378,7 @@ class DRAMSideLLC(sets: Int, ways: Int, outstanding: Int, tagLeaf: Mem[Bits], da
writeback.io.req(1).bits := s2.addr
writeback.io.data(1).valid := io.cpu.req_data.valid
writeback.io.data(1).bits := io.cpu.req_data.bits
data.io.req_data.valid := io.cpu.req_data.valid && !writeback.io.data(1).ready
data.io.req_data.valid := io.cpu.req_data.valid && writeback.io.req(1).ready
memCmdArb.io.in(0) <> mshr.io.mem.req_cmd
memCmdArb.io.in(1) <> writeback.io.mem.req_cmd
@@ -393,7 +393,7 @@ class DRAMSideLLC(sets: Int, ways: Int, outstanding: Int, tagLeaf: Mem[Bits], da
io.cpu.resp <> data.io.resp
io.cpu.req_cmd.ready := !stall_s1 && !replay_s1
io.cpu.req_data.ready := writeback.io.data(1).ready || data.io.req_data.ready
io.cpu.req_data.ready := writeback.io.data(1).ready || data.io.req_data.ready && writeback.io.req(1).ready
io.mem.req_cmd <> memCmdArb.io.out
io.mem.req_data <> writeback.io.mem.req_data
}
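Both LLC hunks repair the write-data handshake: request data is now routed to the data array based on writeback.io.req(1).ready rather than the inverse of writeback.io.data(1).ready, and io.cpu.req_data.ready is adjusted so it asserts exactly when one of the two paths will actually consume the beat. A generic sketch of steering one Decoupled stream into one of two sinks while keeping ready and valid consistent, in modern Chisel with hypothetical names and widths:

import chisel3._
import chisel3.util._

// One producer, two consumers: bits fan out, valid is qualified by the select,
// and the producer's ready mirrors whichever sink was selected, so each beat is
// consumed by exactly one side.
class Steer extends Module {
  val io = IO(new Bundle {
    val in  = Flipped(Decoupled(UInt(64.W)))
    val sel = Input(Bool())                    // false -> sink a, true -> sink b
    val a   = Decoupled(UInt(64.W))
    val b   = Decoupled(UInt(64.W))
  })
  io.a.bits   := io.in.bits
  io.b.bits   := io.in.bits
  io.a.valid  := io.in.valid && !io.sel
  io.b.valid  := io.in.valid && io.sel
  io.in.ready := Mux(io.sel, io.b.ready, io.a.ready)
}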


@@ -95,21 +95,25 @@ class XactTracker(ntiles: Int, id: Int, co: CoherencePolicy) extends Component {
}
def doMemReqWrite(req_cmd: FIFOIO[MemReqCmd], req_data: FIFOIO[MemData], lock: Bool, data: PipeIO[MemData], trigger: Bool, cmd_sent: Bool, pop_data: Bits, pop_dep: Bits, at_front_of_dep_queue: Bool, tile_id: UFix) {
req_cmd.valid := !cmd_sent && data.valid && at_front_of_dep_queue
req_cmd.bits.rw := Bool(true)
req_data.valid := data.valid && at_front_of_dep_queue
req_data.bits := data.bits
lock := data.valid && at_front_of_dep_queue
when(req_cmd.ready && req_cmd.valid) {
cmd_sent := Bool(true)
}
when(req_data.ready && at_front_of_dep_queue) {
pop_data := UFix(1) << tile_id
when (data.valid) {
mem_cnt := mem_cnt_next
when(mem_cnt_next === UFix(0)) {
pop_dep := UFix(1) << tile_id
trigger := Bool(false)
when (at_front_of_dep_queue) {
req_cmd.valid := !cmd_sent && req_data.ready
lock := Bool(true)
when (req_cmd.ready || cmd_sent) {
req_data.valid := data.valid
when(req_data.ready) {
pop_data := UFix(1) << tile_id
when (data.valid) {
mem_cnt := mem_cnt_next
when(mem_cnt === UFix(REFILL_CYCLES-1)) {
pop_dep := UFix(1) << tile_id
trigger := Bool(false)
}
}
}
}
}
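The rewritten doMemReqWrite nests the whole sequence under at_front_of_dep_queue and only presents write data once the command has been accepted (req_cmd.ready || cmd_sent); it also checks mem_cnt against REFILL_CYCLES-1 directly instead of waiting for the incremented counter to wrap to zero. A simplified sketch of the command-before-data ordering, in modern Chisel with hypothetical names, and without the dependency-queue or refill-count bookkeeping:

import chisel3._
import chisel3.util._

// Offer the command first; only once it has been accepted (this cycle or earlier)
// is the data channel allowed to present beats.
class CmdThenData extends Module {
  val io = IO(new Bundle {
    val start = Input(Bool())
    val cmd   = Decoupled(UInt(32.W))
    val data  = Decoupled(UInt(64.W))
  })
  val cmdSent = RegInit(false.B)
  val cmdFire = io.cmd.ready && io.cmd.valid
  io.cmd.valid := io.start && !cmdSent
  io.cmd.bits  := 0.U
  when (!io.start) { cmdSent := false.B }
    .elsewhen (cmdFire) { cmdSent := true.B }
  io.data.valid := io.start && (cmdSent || cmdFire)
  io.data.bits  := 0.U
}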
@@ -141,7 +145,7 @@ class XactTracker(ntiles: Int, id: Int, co: CoherencePolicy) extends Component {
val mem_cnt_next = mem_cnt + UFix(1)
val mem_cnt_max = ~UFix(0, width = log2Up(REFILL_CYCLES))
val p_req_initial_flags = Bits(width = ntiles)
p_req_initial_flags := ~(io.tile_incoherent | UFixToOH(io.alloc_req.bits.tile_id(log2Up(ntiles)-1,0))) //TODO: Broadcast only
p_req_initial_flags := (if (ntiles == 1) Bits(0) else ~(io.tile_incoherent | UFixToOH(io.alloc_req.bits.tile_id(log2Up(ntiles)-1,0)))) //TODO: Broadcast only
io.busy := state != s_idle
io.addr := addr_
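p_req_initial_flags is now special-cased at elaboration time: with a single tile there is nobody else to send probe requests to, so the flags are forced to zero instead of evaluating the one-hot/incoherent expression. A rough software analogue in plain Scala, with hypothetical names and a truncation mask added for clarity:

// Elaboration-time parameters can be handled with ordinary Scala control flow.
def initialProbeFlags(ntiles: Int, incoherent: BigInt, requesterId: Int): BigInt =
  if (ntiles == 1) 0
  else ~(incoherent | (BigInt(1) << requesterId)) & ((BigInt(1) << ntiles) - 1)

// e.g. initialProbeFlags(4, incoherent = 0, requesterId = 1) == 0xd  (tiles 0, 2, 3)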
@@ -376,16 +380,15 @@ class CoherenceHubBroadcast(ntiles: Int, co: CoherencePolicy) extends CoherenceH
// Create an arbiter for the one memory port
// We have to arbitrate between the different trackers' memory requests
// and once we have picked a request, get the right write data
val mem_req_cmd_arb = (new LockingArbiter(NGLOBAL_XACTS)) { new MemReqCmd() }
val mem_req_cmd_arb = (new Arbiter(NGLOBAL_XACTS)) { new MemReqCmd() }
val mem_req_data_arb = (new LockingArbiter(NGLOBAL_XACTS)) { new MemData() }
for( i <- 0 until NGLOBAL_XACTS ) {
mem_req_cmd_arb.io.in(i) <> trackerList(i).io.mem_req_cmd
mem_req_cmd_arb.io.lock(i) <> trackerList(i).io.mem_req_lock
mem_req_data_arb.io.in(i) <> trackerList(i).io.mem_req_data
mem_req_data_arb.io.lock(i) <> trackerList(i).io.mem_req_lock
}
io.mem.req_cmd <> mem_req_cmd_arb.io.out
io.mem.req_data <> mem_req_data_arb.io.out
io.mem.req_cmd <> Queue(mem_req_cmd_arb.io.out)
io.mem.req_data <> Queue(mem_req_data_arb.io.out)
// Handle probe replies, which may or may not have data
for( j <- 0 until ntiles ) {
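The final change swaps the LockingArbiter for a plain Arbiter on the command path (presumably because each MemReqCmd is a single beat, so only the multi-beat data path still needs the lock) and places a Queue on both arbiter outputs before they reach io.mem. A minimal sketch of the arbiter-plus-queue pattern, in modern Chisel with made-up widths and queue depth:

import chisel3._
import chisel3.util._

// Arbitrate n request streams onto one port and buffer the winner in a small
// Queue, decoupling the requesters from the memory port's ready.
class ReqArb(n: Int) extends Module {
  val io = IO(new Bundle {
    val in  = Flipped(Vec(n, Decoupled(UInt(32.W))))
    val out = Decoupled(UInt(32.W))
  })
  val arb = Module(new Arbiter(UInt(32.W), n))
  arb.io.in <> io.in
  io.out <> Queue(arb.io.out, 2)
}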