update to new chisel
parent f8b937d590
commit 725190d0ee
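This commit is a mechanical migration to newer Chisel syntax: conditional register updates move from the old "<==" connection operator to ":=", trailing otherwise/elsewhen clauses become ".otherwise"/".elsewhen", Mem4(...) becomes Mem(...), and Reg no longer takes a redundant width when a reset value is given. As a minimal sketch of the pattern in Chisel/Scala (hypothetical signal names, not taken from this commit), old-style code such as

  when (io.kill) {
    r_valid <== Bool(false);   // old Chisel conditional update
  }
  otherwise {
    r_valid <== io.valid;
  }

is rewritten in the new style as

  when (io.kill) {
    r_valid := Bool(false);    // new Chisel conditional update
  }
  .otherwise {
    r_valid := io.valid;
  }

The hunks below apply exactly this kind of rewrite across the Rocket control, cache, divider, datapath, PCR, TLB, FPU, and I-cache sources.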
@@ -339,17 +339,17 @@ class rocketCtrl extends Component

 when (!io.dpath.stalld) {
 when (io.dpath.killf) {
-id_reg_btb_hit <== Bool(false);
+id_reg_btb_hit := Bool(false);
-id_reg_xcpt_ma_inst <== Bool(false);
+id_reg_xcpt_ma_inst := Bool(false);
-id_reg_xcpt_itlb <== Bool(false);
+id_reg_xcpt_itlb := Bool(false);
 }
-otherwise{
+.otherwise{
-id_reg_btb_hit <== io.dpath.btb_hit;
+id_reg_btb_hit := io.dpath.btb_hit;
-id_reg_xcpt_ma_inst <== if_reg_xcpt_ma_inst;
+id_reg_xcpt_ma_inst := if_reg_xcpt_ma_inst;
-id_reg_xcpt_itlb <== io.xcpt_itlb;
+id_reg_xcpt_itlb := io.xcpt_itlb;
 }
-id_reg_icmiss <== !io.imem.resp_val;
+id_reg_icmiss := !io.imem.resp_val;
-id_reg_replay <== !take_pc && !io.imem.resp_val;
+id_reg_replay := !take_pc && !io.imem.resp_val;
 }

 // executing ERET when traps are enabled causes an illegal instruction exception (as per ISA sim)
@@ -358,54 +358,54 @@ class rocketCtrl extends Component
 (id_eret.toBool && io.dpath.status(SR_ET).toBool);

 when (reset.toBool || io.dpath.killd) {
-ex_reg_br_type <== BR_N;
+ex_reg_br_type := BR_N;
-ex_reg_btb_hit <== Bool(false);
+ex_reg_btb_hit := Bool(false);
-ex_reg_div_val <== Bool(false);
+ex_reg_div_val := Bool(false);
-ex_reg_mul_val <== Bool(false);
+ex_reg_mul_val := Bool(false);
-ex_reg_mem_val <== Bool(false);
+ex_reg_mem_val := Bool(false);
-ex_reg_wen <== Bool(false);
+ex_reg_wen := Bool(false);
-ex_reg_fp_wen <== Bool(false);
+ex_reg_fp_wen := Bool(false);
-ex_reg_eret <== Bool(false);
+ex_reg_eret := Bool(false);
-ex_reg_replay_next <== Bool(false);
+ex_reg_replay_next := Bool(false);
-ex_reg_inst_di <== Bool(false);
+ex_reg_inst_di := Bool(false);
-ex_reg_inst_ei <== Bool(false);
+ex_reg_inst_ei := Bool(false);
-ex_reg_flush_inst <== Bool(false);
+ex_reg_flush_inst := Bool(false);
-ex_reg_xcpt_ma_inst <== Bool(false);
+ex_reg_xcpt_ma_inst := Bool(false);
-ex_reg_xcpt_itlb <== Bool(false);
+ex_reg_xcpt_itlb := Bool(false);
-ex_reg_xcpt_illegal <== Bool(false);
+ex_reg_xcpt_illegal := Bool(false);
-ex_reg_xcpt_privileged <== Bool(false);
+ex_reg_xcpt_privileged := Bool(false);
-ex_reg_xcpt_syscall <== Bool(false);
+ex_reg_xcpt_syscall := Bool(false);
-ex_reg_fp_val <== Bool(false);
+ex_reg_fp_val := Bool(false);
-ex_reg_vec_val <== Bool(false);
+ex_reg_vec_val := Bool(false);
-ex_reg_replay <== Bool(false);
+ex_reg_replay := Bool(false);
-ex_reg_load_use <== Bool(false);
+ex_reg_load_use := Bool(false);
 }
-otherwise {
+.otherwise {
-ex_reg_br_type <== id_br_type;
+ex_reg_br_type := id_br_type;
-ex_reg_btb_hit <== id_reg_btb_hit;
+ex_reg_btb_hit := id_reg_btb_hit;
-ex_reg_div_val <== id_div_val.toBool && id_waddr != UFix(0);
+ex_reg_div_val := id_div_val.toBool && id_waddr != UFix(0);
-ex_reg_mul_val <== id_mul_val.toBool && id_waddr != UFix(0);
+ex_reg_mul_val := id_mul_val.toBool && id_waddr != UFix(0);
-ex_reg_mem_val <== id_mem_val.toBool;
+ex_reg_mem_val := id_mem_val.toBool;
-ex_reg_wen <== id_wen.toBool && id_waddr != UFix(0);
+ex_reg_wen := id_wen.toBool && id_waddr != UFix(0);
-ex_reg_fp_wen <== fpdec.io.wen;
+ex_reg_fp_wen := fpdec.io.wen;
-ex_reg_eret <== id_eret.toBool;
+ex_reg_eret := id_eret.toBool;
-ex_reg_replay_next <== id_replay_next.toBool;
+ex_reg_replay_next := id_replay_next.toBool;
-ex_reg_inst_di <== (id_irq === I_DI);
+ex_reg_inst_di := (id_irq === I_DI);
-ex_reg_inst_ei <== (id_irq === I_EI);
+ex_reg_inst_ei := (id_irq === I_EI);
-ex_reg_flush_inst <== (id_sync === SYNC_I);
+ex_reg_flush_inst := (id_sync === SYNC_I);
-ex_reg_xcpt_ma_inst <== id_reg_xcpt_ma_inst;
+ex_reg_xcpt_ma_inst := id_reg_xcpt_ma_inst;
-ex_reg_xcpt_itlb <== id_reg_xcpt_itlb;
+ex_reg_xcpt_itlb := id_reg_xcpt_itlb;
-ex_reg_xcpt_illegal <== illegal_inst;
+ex_reg_xcpt_illegal := illegal_inst;
-ex_reg_xcpt_privileged <== (id_privileged & ~io.dpath.status(SR_S)).toBool;
+ex_reg_xcpt_privileged := (id_privileged & ~io.dpath.status(SR_S)).toBool;
-ex_reg_xcpt_syscall <== id_syscall.toBool;
+ex_reg_xcpt_syscall := id_syscall.toBool;
-ex_reg_fp_val <== fpdec.io.valid;
+ex_reg_fp_val := fpdec.io.valid;
-ex_reg_vec_val <== id_vec_val.toBool
+ex_reg_vec_val := id_vec_val.toBool
-ex_reg_replay <== id_reg_replay || ex_reg_replay_next;
+ex_reg_replay := id_reg_replay || ex_reg_replay_next;
-ex_reg_load_use <== id_load_use;
+ex_reg_load_use := id_load_use;
 }
-ex_reg_ext_mem_val <== io.ext_mem.req_val
+ex_reg_ext_mem_val := io.ext_mem.req_val
-ex_reg_mem_cmd <== Mux(io.ext_mem.req_val, io.ext_mem.req_cmd, id_mem_cmd).toUFix
+ex_reg_mem_cmd := Mux(io.ext_mem.req_val, io.ext_mem.req_cmd, id_mem_cmd).toUFix
-ex_reg_mem_type <== Mux(io.ext_mem.req_val, io.ext_mem.req_type, id_mem_type).toUFix
+ex_reg_mem_type := Mux(io.ext_mem.req_val, io.ext_mem.req_type, id_mem_type).toUFix

 val beq = io.dpath.br_eq;
 val bne = ~io.dpath.br_eq;
@@ -431,60 +431,60 @@ class rocketCtrl extends Component
 val mem_reg_mem_type = Reg(){UFix(width = 3)};

 when (reset.toBool || io.dpath.killx) {
-mem_reg_div_mul_val <== Bool(false);
+mem_reg_div_mul_val := Bool(false);
-mem_reg_wen <== Bool(false);
+mem_reg_wen := Bool(false);
-mem_reg_fp_wen <== Bool(false);
+mem_reg_fp_wen := Bool(false);
-mem_reg_eret <== Bool(false);
+mem_reg_eret := Bool(false);
-mem_reg_mem_val <== Bool(false);
+mem_reg_mem_val := Bool(false);
-mem_reg_inst_di <== Bool(false);
+mem_reg_inst_di := Bool(false);
-mem_reg_inst_ei <== Bool(false);
+mem_reg_inst_ei := Bool(false);
-mem_reg_flush_inst <== Bool(false);
+mem_reg_flush_inst := Bool(false);
-mem_reg_xcpt_ma_inst <== Bool(false);
+mem_reg_xcpt_ma_inst := Bool(false);
-mem_reg_xcpt_itlb <== Bool(false);
+mem_reg_xcpt_itlb := Bool(false);
-mem_reg_xcpt_illegal <== Bool(false);
+mem_reg_xcpt_illegal := Bool(false);
-mem_reg_xcpt_privileged <== Bool(false);
+mem_reg_xcpt_privileged := Bool(false);
-mem_reg_xcpt_fpu <== Bool(false);
+mem_reg_xcpt_fpu := Bool(false);
-mem_reg_xcpt_vec <== Bool(false);
+mem_reg_xcpt_vec := Bool(false);
-mem_reg_xcpt_syscall <== Bool(false);
+mem_reg_xcpt_syscall := Bool(false);
 }
-otherwise {
+.otherwise {
-mem_reg_div_mul_val <== ex_reg_div_val || ex_reg_mul_val;
+mem_reg_div_mul_val := ex_reg_div_val || ex_reg_mul_val;
-mem_reg_wen <== ex_reg_wen;
+mem_reg_wen := ex_reg_wen;
-mem_reg_fp_wen <== ex_reg_fp_wen;
+mem_reg_fp_wen := ex_reg_fp_wen;
-mem_reg_eret <== ex_reg_eret;
+mem_reg_eret := ex_reg_eret;
-mem_reg_mem_val <== ex_reg_mem_val;
+mem_reg_mem_val := ex_reg_mem_val;
-mem_reg_inst_di <== ex_reg_inst_di;
+mem_reg_inst_di := ex_reg_inst_di;
-mem_reg_inst_ei <== ex_reg_inst_ei;
+mem_reg_inst_ei := ex_reg_inst_ei;
-mem_reg_flush_inst <== ex_reg_flush_inst;
+mem_reg_flush_inst := ex_reg_flush_inst;
-mem_reg_xcpt_ma_inst <== ex_reg_xcpt_ma_inst;
+mem_reg_xcpt_ma_inst := ex_reg_xcpt_ma_inst;
-mem_reg_xcpt_itlb <== ex_reg_xcpt_itlb;
+mem_reg_xcpt_itlb := ex_reg_xcpt_itlb;
-mem_reg_xcpt_illegal <== ex_reg_xcpt_illegal;
+mem_reg_xcpt_illegal := ex_reg_xcpt_illegal;
-mem_reg_xcpt_privileged <== ex_reg_xcpt_privileged;
+mem_reg_xcpt_privileged := ex_reg_xcpt_privileged;
-mem_reg_xcpt_fpu <== ex_reg_fp_val && !io.dpath.status(SR_EF).toBool;
+mem_reg_xcpt_fpu := ex_reg_fp_val && !io.dpath.status(SR_EF).toBool;
-mem_reg_xcpt_vec <== ex_reg_vec_val && !io.dpath.status(SR_EV).toBool;
+mem_reg_xcpt_vec := ex_reg_vec_val && !io.dpath.status(SR_EV).toBool;
-mem_reg_xcpt_syscall <== ex_reg_xcpt_syscall;
+mem_reg_xcpt_syscall := ex_reg_xcpt_syscall;
 }
-mem_reg_ext_mem_val <== ex_reg_ext_mem_val;
+mem_reg_ext_mem_val := ex_reg_ext_mem_val;
-mem_reg_mem_cmd <== ex_reg_mem_cmd;
+mem_reg_mem_cmd := ex_reg_mem_cmd;
-mem_reg_mem_type <== ex_reg_mem_type;
+mem_reg_mem_type := ex_reg_mem_type;

 when (io.dpath.killm) {
-wb_reg_wen <== Bool(false);
+wb_reg_wen := Bool(false);
-wb_reg_fp_wen <== Bool(false);
+wb_reg_fp_wen := Bool(false);
-wb_reg_eret <== Bool(false);
+wb_reg_eret := Bool(false);
-wb_reg_inst_di <== Bool(false);
+wb_reg_inst_di := Bool(false);
-wb_reg_inst_ei <== Bool(false);
+wb_reg_inst_ei := Bool(false);
-wb_reg_flush_inst <== Bool(false);
+wb_reg_flush_inst := Bool(false);
-wb_reg_div_mul_val <== Bool(false);
+wb_reg_div_mul_val := Bool(false);
 }
-otherwise {
+.otherwise {
-wb_reg_wen <== mem_reg_wen;
+wb_reg_wen := mem_reg_wen;
-wb_reg_fp_wen <== mem_reg_fp_wen;
+wb_reg_fp_wen := mem_reg_fp_wen;
-wb_reg_eret <== mem_reg_eret;
+wb_reg_eret := mem_reg_eret;
-wb_reg_inst_di <== mem_reg_inst_di;
+wb_reg_inst_di := mem_reg_inst_di;
-wb_reg_inst_ei <== mem_reg_inst_ei;
+wb_reg_inst_ei := mem_reg_inst_ei;
-wb_reg_flush_inst <== mem_reg_flush_inst;
+wb_reg_flush_inst := mem_reg_flush_inst;
-wb_reg_div_mul_val <== mem_reg_div_mul_val;
+wb_reg_div_mul_val := mem_reg_div_mul_val;
 }

 val sboard = new rocketCtrlSboard();
@@ -591,12 +591,12 @@ class rocketCtrl extends Component
 ex_reg_mul_val && !io.dpath.mul_rdy
 val kill_ex = take_pc_wb || replay_ex

-mem_reg_replay <== replay_ex && !take_pc_wb;
+mem_reg_replay := replay_ex && !take_pc_wb;
-mem_reg_kill <== kill_ex;
+mem_reg_kill := kill_ex;

-wb_reg_replay <== replay_mem && !take_pc_wb;
+wb_reg_replay := replay_mem && !take_pc_wb;
-wb_reg_exception <== mem_exception && !take_pc_wb;
+wb_reg_exception := mem_exception && !take_pc_wb;
-wb_reg_cause <== mem_cause;
+wb_reg_cause := mem_cause;

 val wb_badvaddr_wen = wb_reg_exception && ((wb_reg_cause === UFix(10)) || (wb_reg_cause === UFix(11)))

@@ -1,5 +1,5 @@
 package Top
-{
 import Chisel._
 import Node._;
 import Constants._;
@@ -23,38 +23,14 @@ class ioCtrlSboard extends Bundle()
 class rocketCtrlSboard extends Component
 {
 override val io = new ioCtrlSboard();
-val reg_busy = Reg(width = 32, resetVal = Bits(0, 32));
+val reg_busy = Reg(resetVal = Bits(0, 32));

 val set_mask = io.set.toUFix << io.seta;
 val clr_mask = ~(io.clr.toUFix << io.clra);
-reg_busy <== (reg_busy | set_mask) & clr_mask
+reg_busy := (reg_busy | set_mask) & clr_mask

 io.stalla := reg_busy(io.raddra).toBool;
 io.stallb := reg_busy(io.raddrb).toBool;
 io.stallc := reg_busy(io.raddrc).toBool;
 io.stalld := reg_busy(io.raddrd).toBool;
 }

-class ioCtrlCnt extends Bundle()
-{
-val enq = Bool(INPUT);
-val deq = Bool(INPUT);
-val empty = Bool(OUTPUT);
-val full = Bool(OUTPUT);
-}
-
-class rocketCtrlCnt(n_bits: Int, limit: Int) extends Component
-{
-override val io = new ioCtrlCnt();
-val counter = Reg(width = n_bits, resetVal = UFix(0, n_bits));
-when (io.enq && !io.deq) {
-counter <== counter + UFix(1, n_bits);
-}
-when (!io.enq && io.deq) {
-counter <== counter - UFix(1, n_bits);
-}
-io.empty := counter === UFix(0, n_bits);
-io.full := counter === UFix(limit, n_bits);
-}
-
-}
@@ -137,23 +137,23 @@ class rocketDCacheDM_flush(lines: Int) extends Component {

 when (io.cpu.req_val && io.cpu.req_rdy && (io.cpu.req_cmd === M_FLA))
 {
-r_cpu_req_tag <== io.cpu.req_tag;
+r_cpu_req_tag := io.cpu.req_tag;
-flushing <== Bool(true);
+flushing := Bool(true);
-flush_waiting <== Bool(true);
+flush_waiting := Bool(true);
 }

 when (dcache.io.cpu.req_rdy && (flush_count === ~Bits(0, indexbits))) {
-flushing <== Bool(false);
+flushing := Bool(false);
 }
 when (dcache.io.cpu.resp_val && (dcache.io.cpu.resp_tag === r_cpu_req_tag) && (flush_resp_count === ~Bits(0, indexbits))) {
-flush_waiting <== Bool(false);
+flush_waiting := Bool(false);
 }

 when (flushing && dcache.io.cpu.req_rdy) {
-flush_count <== flush_count + UFix(1,1);
+flush_count := flush_count + UFix(1,1);
 }
 when (flush_waiting && dcache.io.cpu.resp_val && (dcache.io.cpu.resp_tag === r_cpu_req_tag)) {
-flush_resp_count <== flush_resp_count + UFix(1,1);
+flush_resp_count := flush_resp_count + UFix(1,1);
 }

 dcache.io.cpu.req_val := (io.cpu.req_val && (io.cpu.req_cmd != M_FLA) && !flush_waiting) || flushing;
@@ -220,33 +220,33 @@ class rocketDCacheDM(lines: Int) extends Component {
 val r_req_amo = r_cpu_req_cmd(3).toBool;

 when (io.cpu.req_val && io.cpu.req_rdy) {
-r_cpu_req_idx <== io.cpu.req_idx;
+r_cpu_req_idx := io.cpu.req_idx;
-r_cpu_req_cmd <== io.cpu.req_cmd;
+r_cpu_req_cmd := io.cpu.req_cmd;
-r_cpu_req_type <== io.cpu.req_type;
+r_cpu_req_type := io.cpu.req_type;
-r_cpu_req_tag <== io.cpu.req_tag;
+r_cpu_req_tag := io.cpu.req_tag;
 }

 when ((state === s_ready) && r_cpu_req_val && !io.cpu.req_kill) {
-r_cpu_req_ppn <== io.cpu.req_ppn;
+r_cpu_req_ppn := io.cpu.req_ppn;
 }
 when (io.cpu.req_rdy) {
-r_cpu_req_val <== io.cpu.req_val;
+r_cpu_req_val := io.cpu.req_val;
 }
 otherwise {
-r_cpu_req_val <== Bool(false);
+r_cpu_req_val := Bool(false);
 }
 when (((state === s_resolve_miss) && (r_req_load || r_req_amo)) || (state === s_replay_load)) {
-r_cpu_resp_val <== Bool(true);
+r_cpu_resp_val := Bool(true);
 }
 otherwise {
-r_cpu_resp_val <== Bool(false);
+r_cpu_resp_val := Bool(false);
 }

 // refill counter
 val rr_count = Reg(resetVal = UFix(0,2));
 val rr_count_next = rr_count + UFix(1);
 when (((state === s_refill) && io.mem.resp_val) || ((state === s_writeback) && io.mem.req_rdy)) {
-rr_count <== rr_count_next;
+rr_count := rr_count_next;
 }

 // tag array
@@ -257,7 +257,7 @@ class rocketDCacheDM(lines: Int) extends Component {
 ((state === s_refill) && io.mem.resp_val && (rr_count === UFix(3,2))) ||
 ((state === s_resolve_miss) && r_req_flush);

-val tag_array = Mem4(lines, r_cpu_req_ppn);
+val tag_array = Mem(lines, r_cpu_req_ppn);
 tag_array.setReadLatency(1);
 tag_array.setTarget('inst);
 val tag_rdata = tag_array.rw(tag_addr, r_cpu_req_ppn, tag_we);
@@ -265,10 +265,10 @@ class rocketDCacheDM(lines: Int) extends Component {
 // valid bit array
 val vb_array = Reg(resetVal = Bits(0, lines));
 when (tag_we && !r_req_flush) {
-vb_array <== vb_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(1,1));
+vb_array := vb_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(1,1));
 }
 when (tag_we && r_req_flush) {
-vb_array <== vb_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(0,1));
+vb_array := vb_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(0,1));
 }
 val vb_rdata = vb_array(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix).toBool;
 val tag_valid = r_cpu_req_val && vb_rdata;
@@ -294,33 +294,33 @@ class rocketDCacheDM(lines: Int) extends Component {

 // pending store data
 when (io.cpu.req_val && io.cpu.req_rdy && req_store) {
-p_store_idx <== io.cpu.req_idx;
+p_store_idx := io.cpu.req_idx;
-p_store_data <== io.cpu.req_data;
+p_store_data := io.cpu.req_data;
-p_store_type <== io.cpu.req_type;
+p_store_type := io.cpu.req_type;
 }
 when (store_hit && !drain_store) {
-p_store_valid <== Bool(true);
+p_store_valid := Bool(true);
 }
 when (drain_store) {
-p_store_valid <== Bool(false);
+p_store_valid := Bool(false);
 }

 // AMO operand
 when (io.cpu.req_val && io.cpu.req_rdy && req_amo) {
-r_amo_data <== io.cpu.req_data;
+r_amo_data := io.cpu.req_data;
 }

 // dirty bit array
 val db_array = Reg(resetVal = Bits(0, lines));
 val tag_dirty = db_array(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix).toBool;
 when ((r_cpu_req_val && !io.cpu.req_kill && tag_hit && r_req_store) || resolve_store) {
-db_array <== db_array.bitSet(p_store_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(1,1));
+db_array := db_array.bitSet(p_store_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(1,1));
 }
 when (state === s_write_amo) {
-db_array <== db_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(1,1));
+db_array := db_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(1,1));
 }
 when (tag_we) {
-db_array <== db_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(0,1));
+db_array := db_array.bitSet(r_cpu_req_idx(PGIDX_BITS-1,offsetbits).toUFix, UFix(0,1));
 }

 // generate write mask and data signals for stores and amos
@@ -374,7 +374,7 @@ class rocketDCacheDM(lines: Int) extends Component {
 Mux((state === s_write_amo), amo_store_wmask,
 store_wmask));

-val data_array = Mem4(lines*4, data_wdata);
+val data_array = Mem(lines*4, data_wdata);
 data_array.setReadLatency(1);
 data_array.setTarget('inst);
 val data_array_rdata = data_array.rw(data_addr, data_wdata, data_we, data_wmask);
@@ -424,62 +424,62 @@ class rocketDCacheDM(lines: Int) extends Component {
 // control state machine
 switch (state) {
 is (s_reset) {
-state <== s_ready;
+state := s_ready;
 }
 is (s_ready) {
 when (io.cpu.req_kill) {
-state <== s_ready;
+state := s_ready;
 }
 when (ldst_conflict) {
-state <== s_replay_load;
+state := s_replay_load;
 }
 when (!r_cpu_req_val || (tag_hit && !(r_req_flush || r_req_amo))) {
-state <== s_ready;
+state := s_ready;
 }
 when (tag_hit && r_req_amo) {
-state <== s_write_amo;
+state := s_write_amo;
 }
 when (tag_valid & tag_dirty) {
-state <== s_start_writeback;
+state := s_start_writeback;
 }
 when (r_req_flush) {
-state <== s_resolve_miss;
+state := s_resolve_miss;
 }
 otherwise {
-state <== s_req_refill;
+state := s_req_refill;
 }
 }
 is (s_replay_load) {
-state <== s_ready;
+state := s_ready;
 }
 is (s_write_amo) {
-state <== s_ready;
+state := s_ready;
 }
 is (s_start_writeback) {
-state <== s_writeback;
+state := s_writeback;
 }
 is (s_writeback) {
 when (io.mem.req_rdy && (rr_count === UFix(3,2))) {
 when (r_req_flush) {
-state <== s_resolve_miss;
+state := s_resolve_miss;
 }
 otherwise {
-state <== s_req_refill;
+state := s_req_refill;
 }
 }
 }
 is (s_req_refill)
 {
-when (io.mem.req_rdy) { state <== s_refill; }
+when (io.mem.req_rdy) { state := s_refill; }
 }
 is (s_refill) {
-when (io.mem.resp_val && (rr_count === UFix(3,2))) { state <== s_resolve_miss; }
+when (io.mem.resp_val && (rr_count === UFix(3,2))) { state := s_resolve_miss; }
 }
 is (s_resolve_miss) {
 when (r_req_amo) {
-state <== s_write_amo;
+state := s_write_amo;
 }
-state <== s_ready;
+state := s_ready;
 }
 }
 }
@@ -505,12 +505,12 @@ class rocketDCacheAmoALU extends Component {
 val adder_out = adder_lhs + adder_rhs;
 val alu_out = Wire() { UFix() };
 switch (io.cmd) {
-// is (M_XA_ADD) { alu_out <== adder_out; }
+// is (M_XA_ADD) { alu_out := adder_out; }
-is (M_XA_SWAP) { alu_out <== io.rhs; }
+is (M_XA_SWAP) { alu_out := io.rhs; }
-is (M_XA_AND) { alu_out <== io.lhs & io.rhs; }
+is (M_XA_AND) { alu_out := io.lhs & io.rhs; }
-is (M_XA_OR) { alu_out <== io.lhs | io.rhs; }
+is (M_XA_OR) { alu_out := io.lhs | io.rhs; }
 }
-alu_out <== adder_out;
+alu_out := adder_out;
 io.result := alu_out;
 }

@@ -21,20 +21,6 @@ class ioDivider(width: Int) extends Bundle {
 val result_rdy = Bool(INPUT);
 }

-// class ioDivider extends Bundle {
-// // requests
-// val req_val = Bool(INPUT);
-// val req_rdy = Bool(OUTPUT);
-// val req_fn = UFix(3, INPUT);
-// val req_tag = UFix(5, INPUT);
-// val req_rs1 = Bits(64, INPUT);
-// val req_rs2 = Bits(64, INPUT);
-// // responses
-// val resp_val = Bool(OUTPUT);
-// val resp_data = Bits(64, OUTPUT);
-// val resp_tag = UFix(5, OUTPUT);
-// }
-
 class rocketDivider(width : Int) extends Component {
 val io = new ioDivider(width);

@@ -55,28 +41,36 @@ class rocketDivider(width : Int) extends Component {

 val tc = (io.div_fn === DIV_D) || (io.div_fn === DIV_R);

-when (io.div_kill && Reg(state === s_ready)) { // can only kill on first cycle
-state <== s_ready;
+val do_kill = io.div_kill && Reg(io.div_rdy) // kill on 1st cycle only
+switch (state) {
+is (s_ready) {
+when (io.div_val) {
+state := Mux(tc, s_neg_inputs, s_busy)
+}
+}
+is (s_neg_inputs) {
+state := Mux(do_kill, s_ready, s_busy)
+}
+is (s_busy) {
+when (do_kill) {
+state := s_ready
+}
+.elsewhen (count === UFix(width)) {
+state := Mux(neg_quo || neg_rem, s_neg_outputs, s_done)
+}
+}
+is (s_neg_outputs) {
+state := s_done
+}
+is (s_done) {
+when (io.result_rdy) {
+state := s_ready
+}
+}
 }

 // state machine
-switch (state) {
-is (s_ready) {
-when (!io.div_val) { state <== s_ready; }
-when (tc) { state <== s_neg_inputs };
-otherwise { state <== s_busy; }
-}
-is (s_neg_inputs) { state <== s_busy; }
-is (s_busy) {
-when (count != UFix(width)) { state <== s_busy; }
-when (!(neg_quo || neg_rem)) { state <== s_done; }
-otherwise { state <== s_neg_outputs; }
-}
-is (s_neg_outputs) { state <== s_done; }
-is (s_done) {
-when (io.result_rdy) { state <== s_ready; }
-}
-}

 val lhs_sign = tc && Mux(io.dw === DW_64, io.in0(width-1), io.in0(width/2-1)).toBool
 val lhs_hi = Mux(io.dw === DW_64, io.in0(width-1,width/2), Fill(width/2, lhs_sign))
@@ -87,45 +81,45 @@ class rocketDivider(width : Int) extends Component {
 val rhs_in = Cat(rhs_hi, io.in1(width/2-1,0))

 when ((state === s_ready) && io.div_val) {
-count <== UFix(0, log2up(width+1));
+count := UFix(0, log2up(width+1));
-half <== (io.dw === DW_32);
+half := (io.dw === DW_32);
-neg_quo <== Bool(false);
+neg_quo := Bool(false);
-neg_rem <== Bool(false);
+neg_rem := Bool(false);
-rem <== (io.div_fn === DIV_R) || (io.div_fn === DIV_RU);
+rem := (io.div_fn === DIV_R) || (io.div_fn === DIV_RU);
-reg_tag <== io.div_tag;
+reg_tag := io.div_tag;
-divby0 <== Bool(true);
+divby0 := Bool(true);
-divisor <== rhs_in.toUFix;
+divisor := rhs_in.toUFix;
-remainder <== Cat(UFix(0,width+1), lhs_in).toUFix;
+remainder := Cat(UFix(0,width+1), lhs_in).toUFix;
 }

 when (state === s_neg_inputs) {
-neg_rem <== remainder(width-1).toBool;
+neg_rem := remainder(width-1).toBool;
-neg_quo <== (remainder(width-1) != divisor(width-1));
+neg_quo := (remainder(width-1) != divisor(width-1));
 when (remainder(width-1).toBool) {
-remainder <== Cat(remainder(2*width, width), -remainder(width-1,0)).toUFix;
+remainder := Cat(remainder(2*width, width), -remainder(width-1,0)).toUFix;
 }
 when (divisor(width-1).toBool) {
-divisor <== subtractor(width-1,0);
+divisor := subtractor(width-1,0);
 }
 }
 when (state === s_neg_outputs) {
 when (neg_rem && neg_quo && !divby0) {
-remainder <== Cat(-remainder(2*width, width+1), remainder(width), -remainder(width-1,0)).toUFix;
+remainder := Cat(-remainder(2*width, width+1), remainder(width), -remainder(width-1,0)).toUFix;
 }
-when (neg_quo && !divby0) {
+.elsewhen (neg_quo && !divby0) {
-remainder <== Cat(remainder(2*width, width), -remainder(width-1,0)).toUFix;
+remainder := Cat(remainder(2*width, width), -remainder(width-1,0)).toUFix;
 }
-when (neg_rem) {
+.elsewhen (neg_rem) {
-remainder <== Cat(-remainder(2*width, width+1), remainder(width,0)).toUFix;
+remainder := Cat(-remainder(2*width, width+1), remainder(width,0)).toUFix;
 }

 when (divisor(width-1).toBool) {
-divisor <== subtractor(width-1,0);
+divisor := subtractor(width-1,0);
 }
 }
 when (state === s_busy) {
-count <== count + UFix(1);
+count := count + UFix(1);
-divby0 <== divby0 && !subtractor(width).toBool;
+divby0 := divby0 && !subtractor(width).toBool;
-remainder <== Mux(subtractor(width).toBool,
+remainder := Mux(subtractor(width).toBool,
 Cat(remainder(2*width-1, width), remainder(width-1,0), ~subtractor(width)),
 Cat(subtractor(width-1, 0), remainder(width-1,0), ~subtractor(width))).toUFix;
 }

@@ -152,7 +152,7 @@ class rocketDpath extends Component
 if_pc_plus4))))))); // PC_4

 when (!io.ctrl.stallf) {
-if_reg_pc <== if_next_pc.toUFix;
+if_reg_pc := if_next_pc.toUFix;
 }

 io.ctrl.xcpt_ma_inst := if_next_pc(1,0) != Bits(0)
@@ -171,14 +171,14 @@ class rocketDpath extends Component

 // instruction decode stage
 when (!io.ctrl.stalld) {
-id_reg_pc <== if_reg_pc;
+id_reg_pc := if_reg_pc;
 when(io.ctrl.killf) {
-id_reg_inst <== NOP;
+id_reg_inst := NOP;
-id_reg_valid <== Bool(false);
+id_reg_valid := Bool(false);
 }
-otherwise {
+.otherwise {
-id_reg_inst <== io.imem.resp_data;
+id_reg_inst := io.imem.resp_data;
-id_reg_valid <== Bool(true);
+id_reg_valid := Bool(true);
 }
 }

@@ -235,34 +235,34 @@ class rocketDpath extends Component
 io.ctrl.inst := id_reg_inst;

 // execute stage
-ex_reg_pc <== id_reg_pc;
+ex_reg_pc := id_reg_pc;
-ex_reg_inst <== id_reg_inst
+ex_reg_inst := id_reg_inst
-ex_reg_raddr1 <== id_raddr1
+ex_reg_raddr1 := id_raddr1
-ex_reg_raddr2 <== id_raddr2;
+ex_reg_raddr2 := id_raddr2;
-ex_reg_op2 <== id_op2;
+ex_reg_op2 := id_op2;
-ex_reg_rs2 <== id_rs2;
+ex_reg_rs2 := id_rs2;
-ex_reg_rs1 <== id_rs1;
+ex_reg_rs1 := id_rs1;
-ex_reg_waddr <== id_waddr;
+ex_reg_waddr := id_waddr;
-ex_reg_ctrl_fn_dw <== io.ctrl.fn_dw.toUFix;
+ex_reg_ctrl_fn_dw := io.ctrl.fn_dw.toUFix;
-ex_reg_ctrl_fn_alu <== io.ctrl.fn_alu;
+ex_reg_ctrl_fn_alu := io.ctrl.fn_alu;
-ex_reg_ctrl_mul_fn <== io.ctrl.mul_fn;
+ex_reg_ctrl_mul_fn := io.ctrl.mul_fn;
-ex_reg_ctrl_div_fn <== io.ctrl.div_fn;
+ex_reg_ctrl_div_fn := io.ctrl.div_fn;
-ex_reg_ctrl_sel_wb <== io.ctrl.sel_wb;
+ex_reg_ctrl_sel_wb := io.ctrl.sel_wb;
-ex_reg_ctrl_ren_pcr <== io.ctrl.ren_pcr;
+ex_reg_ctrl_ren_pcr := io.ctrl.ren_pcr;

 when(io.ctrl.killd) {
-ex_reg_valid <== Bool(false);
+ex_reg_valid := Bool(false);
-ex_reg_ctrl_div_val <== Bool(false);
+ex_reg_ctrl_div_val := Bool(false);
-ex_reg_ctrl_mul_val <== Bool(false);
+ex_reg_ctrl_mul_val := Bool(false);
-ex_reg_ctrl_wen_pcr <== Bool(false);
+ex_reg_ctrl_wen_pcr := Bool(false);
-ex_reg_ctrl_eret <== Bool(false);
+ex_reg_ctrl_eret := Bool(false);
 }
-otherwise {
+.otherwise {
-ex_reg_valid <== id_reg_valid;
+ex_reg_valid := id_reg_valid;
-ex_reg_ctrl_div_val <== io.ctrl.div_val;
+ex_reg_ctrl_div_val := io.ctrl.div_val;
-ex_reg_ctrl_mul_val <== io.ctrl.mul_val;
+ex_reg_ctrl_mul_val := io.ctrl.mul_val;
-ex_reg_ctrl_wen_pcr <== io.ctrl.wen_pcr;
+ex_reg_ctrl_wen_pcr := io.ctrl.wen_pcr;
-ex_reg_ctrl_eret <== io.ctrl.id_eret;
+ex_reg_ctrl_eret := io.ctrl.id_eret;
 }

 alu.io.dw := ex_reg_ctrl_fn_dw;
@@ -330,10 +330,10 @@ class rocketDpath extends Component

 // time stamp counter
 val tsc_reg = Reg(resetVal = UFix(0,64));
-tsc_reg <== tsc_reg + UFix(1);
+tsc_reg := tsc_reg + UFix(1);
 // instructions retired counter
 val irt_reg = Reg(resetVal = UFix(0,64));
-when (wb_reg_valid) { irt_reg <== irt_reg + UFix(1); }
+when (wb_reg_valid) { irt_reg := irt_reg + UFix(1); }

 // writeback select mux
 ex_wdata :=
@@ -344,21 +344,21 @@ class rocketDpath extends Component
 ex_alu_out)))).toBits; // WB_ALU

 // memory stage
-mem_reg_pc <== ex_reg_pc;
+mem_reg_pc := ex_reg_pc;
-mem_reg_inst <== ex_reg_inst
+mem_reg_inst := ex_reg_inst
-mem_reg_rs2 <== ex_reg_rs2
+mem_reg_rs2 := ex_reg_rs2
-mem_reg_waddr <== ex_reg_waddr;
+mem_reg_waddr := ex_reg_waddr;
-mem_reg_wdata <== ex_wdata;
+mem_reg_wdata := ex_wdata;
-mem_reg_raddr1 <== ex_reg_raddr1
+mem_reg_raddr1 := ex_reg_raddr1
-mem_reg_raddr2 <== ex_reg_raddr2;
+mem_reg_raddr2 := ex_reg_raddr2;

 when (io.ctrl.killx) {
-mem_reg_valid <== Bool(false);
+mem_reg_valid := Bool(false);
-mem_reg_ctrl_wen_pcr <== Bool(false);
+mem_reg_ctrl_wen_pcr := Bool(false);
 }
-otherwise {
+.otherwise {
-mem_reg_valid <== ex_reg_valid;
+mem_reg_valid := ex_reg_valid;
-mem_reg_ctrl_wen_pcr <== ex_reg_ctrl_wen_pcr;
+mem_reg_ctrl_wen_pcr := ex_reg_ctrl_wen_pcr;
 }

 // for load/use hazard detection (load byte/halfword)
@@ -376,9 +376,9 @@ class rocketDpath extends Component
 val dmem_resp_waddr = io.dmem.resp_tag.toUFix >> UFix(2)
 val dmem_resp_ext_tag = io.dmem.resp_tag.toUFix >> UFix(1)
 dmem_resp_replay := io.dmem.resp_replay && dmem_resp_xpu;
-r_dmem_resp_replay <== dmem_resp_replay
+r_dmem_resp_replay := dmem_resp_replay
-r_dmem_resp_waddr <== dmem_resp_waddr
+r_dmem_resp_waddr := dmem_resp_waddr
-r_dmem_fp_replay <== io.dmem.resp_replay && dmem_resp_fpu;
+r_dmem_fp_replay := io.dmem.resp_replay && dmem_resp_fpu;

 val mem_ll_waddr = Mux(dmem_resp_replay, dmem_resp_waddr,
 Mux(div_result_val, div_result_tag,
@@ -389,22 +389,22 @@ class rocketDpath extends Component
 mem_reg_wdata))
 val mem_ll_wb = dmem_resp_replay || div_result_val || mul_result_val

-wb_reg_pc <== mem_reg_pc;
+wb_reg_pc := mem_reg_pc;
-wb_reg_inst <== mem_reg_inst
+wb_reg_inst := mem_reg_inst
-wb_reg_ll_wb <== mem_ll_wb
+wb_reg_ll_wb := mem_ll_wb
-wb_reg_rs2 <== mem_reg_rs2
+wb_reg_rs2 := mem_reg_rs2
-wb_reg_waddr <== mem_ll_waddr
+wb_reg_waddr := mem_ll_waddr
-wb_reg_wdata <== mem_ll_wdata
+wb_reg_wdata := mem_ll_wdata
-wb_reg_raddr1 <== mem_reg_raddr1
+wb_reg_raddr1 := mem_reg_raddr1
-wb_reg_raddr2 <== mem_reg_raddr2;
+wb_reg_raddr2 := mem_reg_raddr2;

 when (io.ctrl.killm) {
-wb_reg_valid <== Bool(false);
+wb_reg_valid := Bool(false);
-wb_reg_ctrl_wen_pcr <== Bool(false);
+wb_reg_ctrl_wen_pcr := Bool(false);
 }
-otherwise {
+.otherwise {
-wb_reg_valid <== mem_reg_valid;
+wb_reg_valid := mem_reg_valid;
-wb_reg_ctrl_wen_pcr <== mem_reg_ctrl_wen_pcr;
+wb_reg_ctrl_wen_pcr := mem_reg_ctrl_wen_pcr;
 }

 // vector datapath
@@ -40,13 +40,10 @@ class rocketDpathBTB(entries: Int) extends Component
 val my_clr = io.clr && my_hit || io.invalidate
 val my_wen = io.wen && (my_hit || !hit && UFix(i) === repl_way)

-when (my_clr) {
-valid <== Bool(false)
-}
+valid := !my_clr && (valid || my_wen)
 when (my_wen) {
-valid <== Bool(true)
-tag <== io.correct_pc
-target <== io.correct_target
+tag := io.correct_pc
+target := io.correct_target
 }

 hit_reduction = hit_reduction || my_hit
@@ -66,14 +63,14 @@ class ioDpathPCR extends Bundle()
 val r = new ioReadPort();
 val w = new ioWritePort();

 val status = Bits(17, OUTPUT);
 val ptbr = UFix(PADDR_BITS, OUTPUT);
 val evec = UFix(VADDR_BITS, OUTPUT);
 val exception = Bool(INPUT);
 val cause = UFix(5, INPUT);
 val badvaddr_wen = Bool(INPUT);
 val pc = UFix(VADDR_BITS+1, INPUT);
 val eret = Bool(INPUT);
 val ei = Bool(INPUT);
 val di = Bool(INPUT);
 val ptbr_wen = Bool(OUTPUT);
@@ -120,12 +117,12 @@ class rocketDpathPCR extends Component
 val reg_status = Cat(reg_status_sx, reg_status_ux, reg_status_s, reg_status_ps, reg_status_ec, reg_status_ev, reg_status_ef, reg_status_et);
 val rdata = Wire() { Bits() };

-io.ptbr_wen := reg_status_vm.toBool && !io.exception && io.w.en && (io.w.addr === PCR_PTBR);
+io.ptbr_wen := reg_status_vm.toBool && io.w.en && (io.w.addr === PCR_PTBR);
 io.status := Cat(reg_status_vm, reg_status_im, reg_status);
 io.evec := reg_ebase;
 io.ptbr := reg_ptbr;
 io.host.to := Mux(io.host.from_wen, Bits(0), reg_tohost);
 io.debug.error_mode := reg_error_mode;
 io.r.data := rdata;

 io.vecbank := reg_vecbank
@@ -139,100 +136,99 @@ class rocketDpathPCR extends Component
 io.console_val := console_wen;

 when (io.host.from_wen) {
-reg_tohost <== Bits(0);
-reg_fromhost <== io.host.from;
+reg_tohost := Bits(0);
+reg_fromhost := io.host.from;
 }
-otherwise {
-when (!io.exception && io.w.en && (io.w.addr === PCR_TOHOST)) {
-reg_tohost <== io.w.data;
-reg_fromhost <== Bits(0);
-}
+.elsewhen (io.w.en && (io.w.addr === PCR_TOHOST)) {
+reg_tohost := io.w.data;
+reg_fromhost := Bits(0);
 }

 val badvaddr_sign = Mux(io.w.data(VADDR_BITS-1), ~io.w.data(63,VADDR_BITS) === UFix(0), io.w.data(63,VADDR_BITS) != UFix(0))
 when (io.badvaddr_wen) {
-reg_badvaddr <== Cat(badvaddr_sign, io.w.data(VADDR_BITS-1,0)).toUFix;
+reg_badvaddr := Cat(badvaddr_sign, io.w.data(VADDR_BITS-1,0)).toUFix;
-}
-
-when (io.exception && !reg_status_et) {
-reg_error_mode <== Bool(true);
-}
-
-when (io.exception && reg_status_et) {
-reg_status_s <== Bool(true);
-reg_status_ps <== reg_status_s;
-reg_status_et <== Bool(false);
-reg_epc <== io.pc;
-reg_cause <== io.cause;
-}
-
-when (!io.exception && io.di) {
-reg_status_et <== Bool(false);
-}
-
-when (!io.exception && io.ei) {
-reg_status_et <== Bool(true);
-}
-
-when (!io.exception && io.eret) {
-reg_status_s <== reg_status_ps;
-reg_status_et <== Bool(true);
 }

-when (!io.exception && !io.eret && io.w.en) {
-when (io.w.addr === PCR_STATUS) {
-reg_status_vm <== io.w.data(SR_VM).toBool;
-reg_status_im <== io.w.data(15,8);
-reg_status_sx <== io.w.data(SR_SX).toBool;
-reg_status_ux <== io.w.data(SR_UX).toBool;
-reg_status_s <== io.w.data(SR_S).toBool;
-reg_status_ps <== io.w.data(SR_PS).toBool;
-reg_status_ev <== Bool(HAVE_VEC) && io.w.data(SR_EV).toBool;
-reg_status_ef <== Bool(HAVE_FPU) && io.w.data(SR_EF).toBool;
-reg_status_ec <== Bool(HAVE_RVC) && io.w.data(SR_EC).toBool;
-reg_status_et <== io.w.data(SR_ET).toBool;
-}
-when (io.w.addr === PCR_EPC) { reg_epc <== io.w.data(VADDR_BITS,0).toUFix; }
-when (io.w.addr === PCR_BADVADDR) { reg_badvaddr <== io.w.data(VADDR_BITS,0).toUFix; }
-when (io.w.addr === PCR_EVEC) { reg_ebase <== io.w.data(VADDR_BITS-1,0).toUFix; }
-when (io.w.addr === PCR_COUNT) { reg_count <== io.w.data(31,0).toUFix; }
-when (io.w.addr === PCR_COMPARE) { reg_compare <== io.w.data(31,0).toUFix; r_irq_timer <== Bool(false); }
-when (io.w.addr === PCR_CAUSE) { reg_cause <== io.w.data(4,0); }
-when (io.w.addr === PCR_FROMHOST) { reg_fromhost <== io.w.data; }
-when (io.w.addr === PCR_SEND_IPI) { r_irq_ipi <== Bool(true); }
-when (io.w.addr === PCR_CLR_IPI) { r_irq_ipi <== Bool(false); }
-when (io.w.addr === PCR_K0) { reg_k0 <== io.w.data; }
-when (io.w.addr === PCR_K1) { reg_k1 <== io.w.data; }
-when (io.w.addr === PCR_PTBR) { reg_ptbr <== Cat(io.w.data(PADDR_BITS-1, PGIDX_BITS), Bits(0, PGIDX_BITS)).toUFix; }
-when (io.w.addr === PCR_VECBANK) { reg_vecbank <== io.w.data(7,0) }
+when (io.exception) {
+when (!reg_status_et) {
+reg_error_mode := Bool(true)
+}
+.otherwise {
+reg_status_s := Bool(true);
+reg_status_ps := reg_status_s;
+reg_status_et := Bool(false);
+reg_epc := io.pc;
+reg_cause := io.cause;
+}
 }

-otherwise {
-reg_count <== reg_count + UFix(1);
+when (io.di) {
+reg_status_et := Bool(false);
+}
+
+when (io.ei) {
+reg_status_et := Bool(true);
+}
+
+when (io.eret) {
+reg_status_s := reg_status_ps;
+reg_status_et := Bool(true);
 }

 when (reg_count === reg_compare) {
-r_irq_timer <== Bool(true);
+r_irq_timer := Bool(true);
 }
+reg_count := reg_count + UFix(1);

 io.irq_timer := r_irq_timer;
 io.irq_ipi := r_irq_ipi;

-when (!io.r.en) { rdata <== Bits(0,64); }
-switch (io.r.addr) {
-is (PCR_STATUS) { rdata <== Cat(Bits(0,47), reg_status_vm, reg_status_im, reg_status); }
-is (PCR_EPC) { rdata <== Cat(Fill(64-VADDR_BITS-1, reg_epc(VADDR_BITS)), reg_epc); }
-is (PCR_BADVADDR) { rdata <== Cat(Fill(64-VADDR_BITS-1, reg_badvaddr(VADDR_BITS)), reg_badvaddr); }
-is (PCR_EVEC) { rdata <== Cat(Fill(64-VADDR_BITS, reg_ebase(VADDR_BITS-1)), reg_ebase); }
-is (PCR_COUNT) { rdata <== Cat(Fill(32, reg_count(31)), reg_count); }
-is (PCR_COMPARE) { rdata <== Cat(Fill(32, reg_compare(31)), reg_compare); }
-is (PCR_CAUSE) { rdata <== Cat(Bits(0,59), reg_cause); }
-is (PCR_COREID) { rdata <== Bits(COREID,64); }
-is (PCR_FROMHOST) { rdata <== reg_fromhost; }
-is (PCR_TOHOST) { rdata <== reg_tohost; }
-is (PCR_K0) { rdata <== reg_k0; }
-is (PCR_K1) { rdata <== reg_k1; }
-is (PCR_PTBR) { rdata <== Cat(Bits(0,64-PADDR_BITS), reg_ptbr); }
-is (PCR_VECBANK) { rdata <== Cat(Bits(0, 56), reg_vecbank) }
-otherwise { rdata <== Bits(0,64); }
+when (io.w.en) {
+when (io.w.addr === PCR_STATUS) {
+reg_status_vm := io.w.data(SR_VM).toBool;
+reg_status_im := io.w.data(15,8);
+reg_status_sx := io.w.data(SR_SX).toBool;
+reg_status_ux := io.w.data(SR_UX).toBool;
+reg_status_s := io.w.data(SR_S).toBool;
+reg_status_ps := io.w.data(SR_PS).toBool;
+reg_status_ev := Bool(HAVE_VEC) && io.w.data(SR_EV).toBool;
+reg_status_ef := Bool(HAVE_FPU) && io.w.data(SR_EF).toBool;
+reg_status_ec := Bool(HAVE_RVC) && io.w.data(SR_EC).toBool;
+reg_status_et := io.w.data(SR_ET).toBool;
+}
+when (io.w.addr === PCR_EPC) { reg_epc := io.w.data(VADDR_BITS,0).toUFix; }
+when (io.w.addr === PCR_BADVADDR) { reg_badvaddr := io.w.data(VADDR_BITS,0).toUFix; }
+when (io.w.addr === PCR_EVEC) { reg_ebase := io.w.data(VADDR_BITS-1,0).toUFix; }
+when (io.w.addr === PCR_COUNT) { reg_count := io.w.data(31,0).toUFix; }
+when (io.w.addr === PCR_COMPARE) { reg_compare := io.w.data(31,0).toUFix; r_irq_timer := Bool(false); }
+when (io.w.addr === PCR_CAUSE) { reg_cause := io.w.data(4,0); }
+when (io.w.addr === PCR_FROMHOST) { reg_fromhost := io.w.data; }
+when (io.w.addr === PCR_SEND_IPI) { r_irq_ipi := Bool(true); }
+when (io.w.addr === PCR_CLR_IPI) { r_irq_ipi := Bool(false); }
+when (io.w.addr === PCR_K0) { reg_k0 := io.w.data; }
+when (io.w.addr === PCR_K1) { reg_k1 := io.w.data; }
+when (io.w.addr === PCR_PTBR) { reg_ptbr := Cat(io.w.data(PADDR_BITS-1, PGIDX_BITS), Bits(0, PGIDX_BITS)).toUFix; }
+when (io.w.addr === PCR_VECBANK) { reg_vecbank := io.w.data(7,0) }
+}
+
+rdata := Bits(0, 64)
+when (io.r.en) {
+switch (io.r.addr) {
+is (PCR_STATUS) { rdata := Cat(Bits(0,47), reg_status_vm, reg_status_im, reg_status); }
+is (PCR_EPC) { rdata := Cat(Fill(64-VADDR_BITS-1, reg_epc(VADDR_BITS)), reg_epc); }
+is (PCR_BADVADDR) { rdata := Cat(Fill(64-VADDR_BITS-1, reg_badvaddr(VADDR_BITS)), reg_badvaddr); }
+is (PCR_EVEC) { rdata := Cat(Fill(64-VADDR_BITS, reg_ebase(VADDR_BITS-1)), reg_ebase); }
+is (PCR_COUNT) { rdata := Cat(Fill(32, reg_count(31)), reg_count); }
+is (PCR_COMPARE) { rdata := Cat(Fill(32, reg_compare(31)), reg_compare); }
+is (PCR_CAUSE) { rdata := Cat(Bits(0,59), reg_cause); }
+is (PCR_COREID) { rdata := Bits(COREID,64); }
+is (PCR_FROMHOST) { rdata := reg_fromhost; }
+is (PCR_TOHOST) { rdata := reg_tohost; }
+is (PCR_K0) { rdata := reg_k0; }
+is (PCR_K1) { rdata := reg_k1; }
+is (PCR_PTBR) { rdata := Cat(Bits(0,64-PADDR_BITS), reg_ptbr); }
+is (PCR_VECBANK) { rdata := Cat(Bits(0, 56), reg_vecbank) }
+}
 }
 }

@@ -261,7 +257,7 @@ class rocketDpathRegfile extends Component
 {
 override val io = new ioRegfile();

-val regfile = Mem4(32, io.w0.data);
+val regfile = Mem(32, io.w0.data);
 regfile.setReadLatency(0);
 regfile.setTarget('inst);
 regfile.write(io.w0.addr, io.w0.data, io.w0.en);
@@ -150,8 +150,8 @@ class rocketDpathVec extends Component

 when (io.valid && wb_vec_wen.toBool && wb_vec_fn.toBool)
 {
-reg_hwvl <== hwvl_vcfg
+reg_hwvl := hwvl_vcfg
-reg_appvl0 <== !(appvl.orR())
+reg_appvl0 := !(appvl.orR())
 }

 io.wen := io.valid && wb_vec_wen.toBool
@@ -52,13 +52,13 @@ class rocketDTLB(entries: Int) extends Component
 val repl_count = Reg(resetVal = UFix(0,addr_bits));

 when (io.cpu.req_val && io.cpu.req_rdy) {
-r_cpu_req_vpn <== io.cpu.req_vpn;
+r_cpu_req_vpn := io.cpu.req_vpn;
-r_cpu_req_cmd <== io.cpu.req_cmd;
+r_cpu_req_cmd := io.cpu.req_cmd;
-r_cpu_req_asid <== io.cpu.req_asid;
+r_cpu_req_asid := io.cpu.req_asid;
-r_cpu_req_val <== Bool(true);
+r_cpu_req_val := Bool(true);
 }
-otherwise {
+.otherwise {
-r_cpu_req_val <== Bool(false);
+r_cpu_req_val := Bool(false);
 }

 val req_load = (r_cpu_req_cmd === M_XRD);
@@ -96,19 +96,19 @@ class rocketDTLB(entries: Int) extends Component
 val sr_array = Reg(resetVal = Bits(0, entries)); // supervisor read permission
 val sw_array = Reg(resetVal = Bits(0, entries)); // supervisor write permission
 when (io.ptw.resp_val) {
-ur_array <== ur_array.bitSet(r_refill_waddr, ptw_perm_ur);
+ur_array := ur_array.bitSet(r_refill_waddr, ptw_perm_ur);
-uw_array <== uw_array.bitSet(r_refill_waddr, ptw_perm_uw);
+uw_array := uw_array.bitSet(r_refill_waddr, ptw_perm_uw);
-sr_array <== sr_array.bitSet(r_refill_waddr, ptw_perm_sr);
+sr_array := sr_array.bitSet(r_refill_waddr, ptw_perm_sr);
-sw_array <== sw_array.bitSet(r_refill_waddr, ptw_perm_sw);
+sw_array := sw_array.bitSet(r_refill_waddr, ptw_perm_sw);
 }

 // when the page table lookup reports an error, set all permission
 // bits to 0 so the next access will cause an exception
 when (io.ptw.resp_err) {
-ur_array <== ur_array.bitSet(r_refill_waddr, Bool(false));
+ur_array := ur_array.bitSet(r_refill_waddr, Bool(false));
-uw_array <== uw_array.bitSet(r_refill_waddr, Bool(false));
+uw_array := uw_array.bitSet(r_refill_waddr, Bool(false));
-sr_array <== sr_array.bitSet(r_refill_waddr, Bool(false));
+sr_array := sr_array.bitSet(r_refill_waddr, Bool(false));
-sw_array <== sw_array.bitSet(r_refill_waddr, Bool(false));
+sw_array := sw_array.bitSet(r_refill_waddr, Bool(false));
 }

 // high if there are any unused (invalid) entries in the TLB
@@ -128,10 +128,10 @@ class rocketDTLB(entries: Int) extends Component
 // currently replace TLB entries in LIFO order
 // TODO: implement LRU replacement policy
 when (tlb_miss) {
-r_refill_tag <== lookup_tag;
+r_refill_tag := lookup_tag;
-r_refill_waddr <== repl_waddr;
+r_refill_waddr := repl_waddr;
 when (!invalid_entry) {
-repl_count <== repl_count + UFix(1);
+repl_count := repl_count + UFix(1);
 }
 }

@@ -166,17 +166,17 @@ class rocketDTLB(entries: Int) extends Component
 switch (state) {
 is (s_ready) {
 when (tlb_miss) {
-state <== s_request;
+state := s_request;
 }
 }
 is (s_request) {
 when (io.ptw.req_rdy) {
-state <== s_wait;
+state := s_wait;
 }
 }
 is (s_wait) {
 when (io.ptw.resp_val || io.ptw.resp_err) {
-state <== s_ready;
+state := s_ready;
 }
 }
 }
|
@ -126,7 +126,7 @@ class rocketFPU extends Component
|
|||||||
|
|
||||||
val ex_reg_inst = Reg() { Bits() }
|
val ex_reg_inst = Reg() { Bits() }
|
||||||
when (io.req_valid) {
|
when (io.req_valid) {
|
||||||
ex_reg_inst <== io.req_inst
|
ex_reg_inst := io.req_inst
|
||||||
}
|
}
|
||||||
|
|
||||||
// load response
|
// load response
|
||||||
@ -135,12 +135,12 @@ class rocketFPU extends Component
|
|||||||
val load_wb_data = Reg() { Bits() }
|
val load_wb_data = Reg() { Bits() }
|
||||||
val load_wb_tag = Reg() { UFix() }
|
val load_wb_tag = Reg() { UFix() }
|
||||||
when (dmem_resp_val_fpu) {
|
when (dmem_resp_val_fpu) {
|
||||||
load_wb_data <== io.dmem.resp_data
|
load_wb_data := io.dmem.resp_data
|
||||||
load_wb_tag <== io.dmem.resp_tag.toUFix >> UFix(1)
|
load_wb_tag := io.dmem.resp_tag.toUFix >> UFix(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
// regfile
|
// regfile
|
||||||
val regfile = Mem4(32, load_wb_data);
|
val regfile = Mem(32, load_wb_data);
|
||||||
regfile.setReadLatency(0);
|
regfile.setReadLatency(0);
|
||||||
regfile.setTarget('inst);
|
regfile.setTarget('inst);
|
||||||
regfile.write(load_wb_tag, load_wb_data, load_wb);
|
regfile.write(load_wb_tag, load_wb_data, load_wb);
|
||||||
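Aside from the := conversion, this hunk is part of the commit-wide rename of Mem4 to Mem, with the setReadLatency/setTarget configuration kept as-is. A minimal sketch of the same load-writeback register-file pattern in the Chisel style used here; the read call on the last line is an assumption for illustration and does not appear in the patch:

  val regfile = Mem(32, load_wb_data)                // 32-entry array, element type taken from load_wb_data
  regfile.setReadLatency(0)                          // combinational read port
  regfile.setTarget('inst)                           // emit as an instantiated memory macro
  regfile.write(load_wb_tag, load_wb_data, load_wb)  // write only when load_wb is asserted
  // val rdata = regfile.read(raddr)                 // assumed read-port call, illustrative only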
|
@ -68,14 +68,14 @@ class rocketICache(sets: Int, assoc: Int) extends Component {
|
|||||||
val tag_hit = Wire() { Bool() }
|
val tag_hit = Wire() { Bool() }
|
||||||
|
|
||||||
when (io.cpu.req_val && rdy) {
|
when (io.cpu.req_val && rdy) {
|
||||||
r_cpu_req_val <== Bool(true)
|
r_cpu_req_val := Bool(true)
|
||||||
r_cpu_req_idx <== io.cpu.req_idx
|
r_cpu_req_idx := io.cpu.req_idx
|
||||||
}
|
}
|
||||||
otherwise {
|
.otherwise {
|
||||||
r_cpu_req_val <== Bool(false)
|
r_cpu_req_val := Bool(false)
|
||||||
}
|
}
|
||||||
when (state === s_ready && r_cpu_req_val && !io.cpu.itlb_miss) {
|
when (state === s_ready && r_cpu_req_val && !io.cpu.itlb_miss) {
|
||||||
r_cpu_req_ppn <== io.cpu.req_ppn
|
r_cpu_req_ppn := io.cpu.req_ppn
|
||||||
}
|
}
|
||||||
|
|
||||||
val r_cpu_hit_addr = Cat(io.cpu.req_ppn, r_cpu_req_idx)
|
val r_cpu_hit_addr = Cat(io.cpu.req_ppn, r_cpu_req_idx)
|
||||||
@ -86,7 +86,7 @@ class rocketICache(sets: Int, assoc: Int) extends Component {
// refill counter
val refill_count = Reg(resetVal = UFix(0, rf_cnt_bits));
when (io.mem.resp_val) {
-refill_count <== refill_count + UFix(1);
+refill_count := refill_count + UFix(1);
}

val repl_way = LFSR16(state === s_ready && r_cpu_req_val && !io.cpu.itlb_miss && !tag_hit)(log2up(assoc)-1,0)
@ -104,7 +104,7 @@ class rocketICache(sets: Int, assoc: Int) extends Component {
|
|||||||
for (i <- 0 until assoc)
|
for (i <- 0 until assoc)
|
||||||
{
|
{
|
||||||
val repl_me = (repl_way === UFix(i))
|
val repl_me = (repl_way === UFix(i))
|
||||||
val tag_array = Mem4(lines, r_cpu_miss_tag);
|
val tag_array = Mem(lines, r_cpu_miss_tag);
|
||||||
tag_array.setReadLatency(1);
|
tag_array.setReadLatency(1);
|
||||||
tag_array.setTarget('inst);
|
tag_array.setTarget('inst);
|
||||||
val tag_rdata = tag_array.rw(tag_addr, r_cpu_miss_tag, tag_we && repl_me);
|
val tag_rdata = tag_array.rw(tag_addr, r_cpu_miss_tag, tag_we && repl_me);
|
||||||
@ -112,17 +112,17 @@ class rocketICache(sets: Int, assoc: Int) extends Component {
|
|||||||
// valid bit array
|
// valid bit array
|
||||||
val vb_array = Reg(resetVal = Bits(0, lines));
|
val vb_array = Reg(resetVal = Bits(0, lines));
|
||||||
when (io.cpu.invalidate) {
|
when (io.cpu.invalidate) {
|
||||||
vb_array <== Bits(0,lines);
|
vb_array := Bits(0,lines);
|
||||||
}
|
}
|
||||||
when (tag_we && repl_me) {
|
.elsewhen (tag_we && repl_me) {
|
||||||
vb_array <== vb_array.bitSet(r_cpu_req_idx(indexmsb,indexlsb).toUFix, UFix(1,1));
|
vb_array := vb_array.bitSet(r_cpu_req_idx(indexmsb,indexlsb).toUFix, UFix(1,1));
|
||||||
}
|
}
|
||||||
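Turning the second when into .elsewhen here is a behavioural fix, not just syntax: with the new := connect the textually last assignment wins, so an invalidate and a refill in the same cycle would otherwise leave the refilled line marked valid. Chaining them keeps invalidate as the higher-priority update. A reduced sketch of the pattern, with generic names not taken from the patch:

  val vb = Reg(resetVal = Bits(0, lines))
  when (invalidate) {
    vb := Bits(0, lines)                       // clear every valid bit, highest priority
  }
  .elsewhen (set_valid) {
    vb := vb.bitSet(set_idx, UFix(1,1))        // otherwise mark the refilled line valid
  }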
|
|
||||||
val valid = vb_array(r_cpu_req_idx(indexmsb,indexlsb)).toBool;
|
val valid = vb_array(r_cpu_req_idx(indexmsb,indexlsb)).toBool;
|
||||||
val hit = valid && (tag_rdata === r_cpu_hit_addr(tagmsb,taglsb))
|
val hit = valid && (tag_rdata === r_cpu_hit_addr(tagmsb,taglsb))
|
||||||
|
|
||||||
// data array
|
// data array
|
||||||
val data_array = Mem4(lines*REFILL_CYCLES, io.mem.resp_data);
|
val data_array = Mem(lines*REFILL_CYCLES, io.mem.resp_data);
|
||||||
data_array.setReadLatency(1);
|
data_array.setReadLatency(1);
|
||||||
data_array.setTarget('inst);
|
data_array.setTarget('inst);
|
||||||
val data_out = data_array.rw(data_addr, io.mem.resp_data, io.mem.resp_val && repl_me)
|
val data_out = data_array.rw(data_addr, io.mem.resp_data, io.mem.resp_val && repl_me)
|
||||||
@ -144,30 +144,30 @@ class rocketICache(sets: Int, assoc: Int) extends Component {
|
|||||||
// control state machine
|
// control state machine
|
||||||
switch (state) {
|
switch (state) {
|
||||||
is (s_reset) {
|
is (s_reset) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
is (s_ready) {
|
is (s_ready) {
|
||||||
when (io.cpu.itlb_miss) {
|
when (io.cpu.itlb_miss) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
when (r_cpu_req_val && !tag_hit) {
|
.elsewhen (r_cpu_req_val && !tag_hit) {
|
||||||
state <== s_request;
|
state := s_request;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_request)
|
is (s_request)
|
||||||
{
|
{
|
||||||
when (io.mem.req_rdy) {
|
when (io.mem.req_rdy) {
|
||||||
state <== s_refill_wait;
|
state := s_refill_wait;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_refill_wait) {
|
is (s_refill_wait) {
|
||||||
when (io.mem.resp_val) {
|
when (io.mem.resp_val) {
|
||||||
state <== s_refill;
|
state := s_refill;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_refill) {
|
is (s_refill) {
|
||||||
when (io.mem.resp_val && (~refill_count === UFix(0))) {
|
when (io.mem.resp_val && (~refill_count === UFix(0))) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -30,7 +30,7 @@ class rocketIPrefetcher extends Component() {
|
|||||||
|
|
||||||
val demand_miss = io.icache.req_val & io.icache.req_rdy;
|
val demand_miss = io.icache.req_val & io.icache.req_rdy;
|
||||||
val prefetch_addr = Reg() { UFix(width = io.icache.req_addr.width) };
|
val prefetch_addr = Reg() { UFix(width = io.icache.req_addr.width) };
|
||||||
when (demand_miss) { prefetch_addr <== io.icache.req_addr + UFix(1); }
|
when (demand_miss) { prefetch_addr := io.icache.req_addr + UFix(1); }
|
||||||
|
|
||||||
val addr_match = (prefetch_addr === io.icache.req_addr);
|
val addr_match = (prefetch_addr === io.icache.req_addr);
|
||||||
val hit = (state != s_invalid) & (state != s_req_wait) & addr_match;
|
val hit = (state != s_invalid) & (state != s_req_wait) & addr_match;
|
||||||
@ -44,14 +44,14 @@ class rocketIPrefetcher extends Component() {
|
|||||||
io.mem.req_addr := Mux(io.mem.req_tag(0).toBool, prefetch_addr, io.icache.req_addr);
|
io.mem.req_addr := Mux(io.mem.req_tag(0).toBool, prefetch_addr, io.icache.req_addr);
|
||||||
|
|
||||||
val fill_cnt = Reg(resetVal = UFix(0, ceil(log(REFILL_CYCLES)/log(2)).toInt));
|
val fill_cnt = Reg(resetVal = UFix(0, ceil(log(REFILL_CYCLES)/log(2)).toInt));
|
||||||
when (ip_mem_resp_val.toBool) { fill_cnt <== fill_cnt + UFix(1); }
|
when (ip_mem_resp_val.toBool) { fill_cnt := fill_cnt + UFix(1); }
|
||||||
val fill_done = (~fill_cnt === UFix(0)) & ip_mem_resp_val;
|
val fill_done = (~fill_cnt === UFix(0)) & ip_mem_resp_val;
|
||||||
|
|
||||||
val forward = Reg(resetVal = Bool(false));
|
val forward = Reg(resetVal = Bool(false));
|
||||||
val forward_cnt = Reg(resetVal = UFix(0, ceil(log(REFILL_CYCLES)/log(2)).toInt));
|
val forward_cnt = Reg(resetVal = UFix(0, ceil(log(REFILL_CYCLES)/log(2)).toInt));
|
||||||
when (forward & pdq.io.deq.valid) { forward_cnt <== forward_cnt + UFix(1); }
|
when (forward & pdq.io.deq.valid) { forward_cnt := forward_cnt + UFix(1); }
|
||||||
val forward_done = (~forward_cnt === UFix(0)) & pdq.io.deq.valid;
|
val forward_done = (~forward_cnt === UFix(0)) & pdq.io.deq.valid;
|
||||||
forward <== (demand_miss & hit | forward & ~forward_done);
|
forward := (demand_miss & hit | forward & ~forward_done);
|
||||||
|
|
||||||
io.icache.resp_val := (io.mem.resp_val && !io.mem.resp_tag(0).toBool) || (forward && pdq.io.deq.valid);
|
io.icache.resp_val := (io.mem.resp_val && !io.mem.resp_tag(0).toBool) || (forward && pdq.io.deq.valid);
|
||||||
io.icache.resp_data := Mux(forward, pdq.io.deq.bits, io.mem.resp_data);
|
io.icache.resp_data := Mux(forward, pdq.io.deq.bits, io.mem.resp_data);
|
||||||
@ -63,25 +63,25 @@ class rocketIPrefetcher extends Component() {
|
|||||||
|
|
||||||
switch (state) {
|
switch (state) {
|
||||||
is (s_invalid) {
|
is (s_invalid) {
|
||||||
when (demand_miss) { state <== s_req_wait; }
|
when (demand_miss) { state := s_req_wait; }
|
||||||
}
|
}
|
||||||
is (s_valid) {
|
is (s_valid) {
|
||||||
when (demand_miss | (forward & forward_done)) { state <== s_req_wait; }
|
when (demand_miss | (forward & forward_done)) { state := s_req_wait; }
|
||||||
}
|
}
|
||||||
is (s_refilling) {
|
is (s_refilling) {
|
||||||
when (demand_miss & ~addr_match & fill_done.toBool) { state <== s_req_wait; }
|
when (demand_miss & ~addr_match & fill_done.toBool) { state := s_req_wait; }
|
||||||
when (demand_miss & ~addr_match) { state <== s_bad_resp_wait; }
|
.elsewhen (demand_miss & ~addr_match) { state := s_bad_resp_wait; }
|
||||||
when (fill_done.toBool) { state <== s_valid; }
|
.elsewhen (fill_done.toBool) { state := s_valid; }
|
||||||
}
|
}
|
||||||
is (s_req_wait) {
|
is (s_req_wait) {
|
||||||
when (ip_mem_req_rdy) { state <== s_resp_wait; }
|
when (ip_mem_req_rdy) { state := s_resp_wait; }
|
||||||
}
|
}
|
||||||
is (s_resp_wait) {
|
is (s_resp_wait) {
|
||||||
when (demand_miss & ~addr_match) { state <== s_bad_resp_wait; }
|
when (demand_miss & ~addr_match) { state := s_bad_resp_wait; }
|
||||||
when (ip_mem_resp_val.toBool) { state <== s_refilling; }
|
.elsewhen (ip_mem_resp_val.toBool) { state := s_refilling; }
|
||||||
}
|
}
|
||||||
is (s_bad_resp_wait) {
|
is (s_bad_resp_wait) {
|
||||||
when (fill_done.toBool & ip_mem_resp_val.toBool) { state <== s_req_wait; }
|
when (fill_done.toBool & ip_mem_resp_val.toBool) { state := s_req_wait; }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -23,30 +23,26 @@ class rocketCAM(entries: Int, tag_bits: Int) extends Component {
val io = new ioCAM(entries, addr_bits, tag_bits);
val cam_tags = Mem(entries, io.write, io.write_addr, io.write_tag);

-val l_hit = Wire() { Bool() };
-val l_hit_addr = Wire() { UFix() };

val vb_array = Reg(resetVal = Bits(0, entries));
when (io.clear) {
-vb_array <== Bits(0, entries);
+vb_array := Bits(0, entries);
}
-when (io.write) {
+.elsewhen (io.write) {
-vb_array <== vb_array.bitSet(io.write_addr, Bool(true));
+vb_array := vb_array.bitSet(io.write_addr, Bool(true));
-}
-
-for (i <- 0 to entries-1) {
-when (vb_array(UFix(i)).toBool && (cam_tags(UFix(i)) === io.tag)) {
-l_hit <== Bool(true);
-l_hit_addr <== UFix(i,addr_bits);
-}
}

-l_hit <== Bool(false);
+var l_hit = Bool(false)
-l_hit_addr <== UFix(0, addr_bits);
+val mux = (new Mux1H(entries)) { Bits(width = addr_bits) }
+for (i <- 0 to entries-1) {
+val my_hit = vb_array(UFix(i)).toBool && (cam_tags(UFix(i)) === io.tag)
+l_hit = l_hit || my_hit
+mux.io.in(i) := Bits(i)
+mux.io.sel(i) := my_hit
+}

io.valid_bits := vb_array;
io.hit := l_hit;
-io.hit_addr := l_hit_addr;
+io.hit_addr := mux.io.out.toUFix;
}
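The CAM lookup had relied on <== with trailing default assignments, which the new := semantics would evaluate the other way around, so the hit logic is rebuilt: the hit flag is OR-accumulated in a Scala var and the hit index is produced by a one-hot Mux1H instead of a conditionally driven wire. A condensed sketch of the new structure; valid and tags stand in for vb_array and cam_tags, and a Mux1H component with in/sel/out ports is assumed as in the hunk above:

  var hit = Bool(false)
  val mux = (new Mux1H(entries)) { Bits(width = addr_bits) }
  for (i <- 0 until entries) {
    val my_hit = valid(i) && (tags(i) === io.tag)  // per-entry compare
    hit = hit || my_hit                            // elaborates to a single OR tree
    mux.io.in(i) := Bits(i)                        // encoded index candidate
    mux.io.sel(i) := my_hit                        // one-hot select
  }
  io.hit := hit
  io.hit_addr := mux.io.out.toUFix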
// interface between TLB and PTW
|
// interface between TLB and PTW
|
||||||
@ -104,12 +100,12 @@ class rocketITLB(entries: Int) extends Component
|
|||||||
val repl_count = Reg(resetVal = UFix(0, addr_bits));
|
val repl_count = Reg(resetVal = UFix(0, addr_bits));
|
||||||
|
|
||||||
when (io.cpu.req_val && io.cpu.req_rdy) {
|
when (io.cpu.req_val && io.cpu.req_rdy) {
|
||||||
r_cpu_req_vpn <== io.cpu.req_vpn;
|
r_cpu_req_vpn := io.cpu.req_vpn;
|
||||||
r_cpu_req_asid <== io.cpu.req_asid;
|
r_cpu_req_asid := io.cpu.req_asid;
|
||||||
r_cpu_req_val <== Bool(true);
|
r_cpu_req_val := Bool(true);
|
||||||
}
|
}
|
||||||
otherwise {
|
.otherwise {
|
||||||
r_cpu_req_val <== Bool(false);
|
r_cpu_req_val := Bool(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
val bad_va = r_cpu_req_vpn(VPN_BITS) != r_cpu_req_vpn(VPN_BITS-1);
|
val bad_va = r_cpu_req_vpn(VPN_BITS) != r_cpu_req_vpn(VPN_BITS-1);
|
||||||
@ -139,15 +135,15 @@ class rocketITLB(entries: Int) extends Component
|
|||||||
val ux_array = Reg(resetVal = Bits(0, entries)); // user execute permission
|
val ux_array = Reg(resetVal = Bits(0, entries)); // user execute permission
|
||||||
val sx_array = Reg(resetVal = Bits(0, entries)); // supervisor execute permission
|
val sx_array = Reg(resetVal = Bits(0, entries)); // supervisor execute permission
|
||||||
when (io.ptw.resp_val) {
|
when (io.ptw.resp_val) {
|
||||||
ux_array <== ux_array.bitSet(r_refill_waddr, ptw_perm_ux);
|
ux_array := ux_array.bitSet(r_refill_waddr, ptw_perm_ux);
|
||||||
sx_array <== sx_array.bitSet(r_refill_waddr, ptw_perm_sx);
|
sx_array := sx_array.bitSet(r_refill_waddr, ptw_perm_sx);
|
||||||
}
|
}
|
||||||
|
|
||||||
// when the page table lookup reports an error, set both execute permission
|
// when the page table lookup reports an error, set both execute permission
|
||||||
// bits to 0 so the next access will cause an exceptions
|
// bits to 0 so the next access will cause an exceptions
|
||||||
when (io.ptw.resp_err) {
|
when (io.ptw.resp_err) {
|
||||||
ux_array <== ux_array.bitSet(r_refill_waddr, Bool(false));
|
ux_array := ux_array.bitSet(r_refill_waddr, Bool(false));
|
||||||
sx_array <== sx_array.bitSet(r_refill_waddr, Bool(false));
|
sx_array := sx_array.bitSet(r_refill_waddr, Bool(false));
|
||||||
}
|
}
|
||||||
|
|
||||||
// high if there are any unused entries in the ITLB
|
// high if there are any unused entries in the ITLB
|
||||||
@ -165,10 +161,10 @@ class rocketITLB(entries: Int) extends Component
|
|||||||
val tlb_miss = status_vm && lookup_miss;
|
val tlb_miss = status_vm && lookup_miss;
|
||||||
|
|
||||||
when (tlb_miss) {
|
when (tlb_miss) {
|
||||||
r_refill_tag <== lookup_tag;
|
r_refill_tag := lookup_tag;
|
||||||
r_refill_waddr <== repl_waddr;
|
r_refill_waddr := repl_waddr;
|
||||||
when (!invalid_entry) {
|
when (!invalid_entry) {
|
||||||
repl_count <== repl_count + UFix(1);
|
repl_count := repl_count + UFix(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -190,17 +186,17 @@ class rocketITLB(entries: Int) extends Component
|
|||||||
switch (state) {
|
switch (state) {
|
||||||
is (s_ready) {
|
is (s_ready) {
|
||||||
when (tlb_miss) {
|
when (tlb_miss) {
|
||||||
state <== s_request;
|
state := s_request;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_request) {
|
is (s_request) {
|
||||||
when (io.ptw.req_rdy) {
|
when (io.ptw.req_rdy) {
|
||||||
state <== s_wait;
|
state := s_wait;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_wait) {
|
is (s_wait) {
|
||||||
when (io.ptw.resp_val || io.ptw.resp_err) {
|
when (io.ptw.resp_val || io.ptw.resp_err) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -51,19 +51,21 @@ class rocketMultiplier extends Component {
val rhs_sign = (io.mul_fn === MUL_HS) && rhs_msb
val rhs_hi = Mux(io.dw === DW_64, io.in1(63,32), Fill(32, rhs_sign))
val rhs_in = Cat(rhs_sign, rhs_sign, rhs_hi, io.in1(31,0))

+val do_kill = io.mul_kill && r_cnt === UFix(0) // can only kill on 1st cycle
+
when (io.mul_val && io.mul_rdy) {
-r_val <== Bool(true)
+r_val := Bool(true)
-r_cnt <== UFix(0, log2up(cycles+1))
+r_cnt := UFix(0, log2up(cycles+1))
-r_dw <== io.dw
+r_dw := io.dw
-r_fn <== io.mul_fn
+r_fn := io.mul_fn
-r_tag <== io.mul_tag
+r_tag := io.mul_tag
-r_lhs <== lhs_in
+r_lhs := lhs_in
-r_prod <== rhs_in
+r_prod := rhs_in
-r_lsb <== Bool(false)
+r_lsb := Bool(false)
}
-when (io.result_val && io.result_rdy || io.mul_kill && r_cnt === UFix(0)) { // can only kill on first cycle
+.elsewhen (io.result_val && io.result_rdy || do_kill) { // can only kill on first cycle
-r_val <== Bool(false)
+r_val := Bool(false)
}

val lhs_sext = Cat(r_lhs(width-2), r_lhs(width-2), r_lhs).toUFix
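Factoring the kill condition into do_kill and chaining the second block as .elsewhen preserves the old priority: if a new operation is accepted in the same cycle that the previous one completes or is killed, r_val must stay set, and under last-assignment-wins the completion block would otherwise win by coming later. A stripped-down sketch of the valid-bit handshake, with illustrative request/response names:

  val r_val = Reg(resetVal = Bool(false))
  val do_kill = kill && (r_cnt === UFix(0))     // a multiply can only be killed on its first cycle
  when (req_fire) {
    r_val := Bool(true)                         // new operation accepted
  }
  .elsewhen (resp_fire || do_kill) {
    r_val := Bool(false)                        // completion or kill, only when nothing new issues
  }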
@ -86,9 +88,9 @@ class rocketMultiplier extends Component {
|
|||||||
}
|
}
|
||||||
|
|
||||||
when (r_val && (r_cnt != UFix(cycles))) {
|
when (r_val && (r_cnt != UFix(cycles))) {
|
||||||
r_lsb <== lsb
|
r_lsb := lsb
|
||||||
r_prod <== prod
|
r_prod := prod
|
||||||
r_cnt <== r_cnt + UFix(1)
|
r_cnt := r_cnt + UFix(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
val mul_output64 = Mux(r_fn === MUL_LO, r_prod(63,0), r_prod(127,64))
|
val mul_output64 = Mux(r_fn === MUL_LO, r_prod(63,0), r_prod(127,64))
|
||||||
|
@ -197,35 +197,46 @@ class MSHR(id: Int) extends Component {
|
|||||||
val next_dirty = dirty || io.req_sec_val && io.req_sec_rdy && !req_load
|
val next_dirty = dirty || io.req_sec_val && io.req_sec_rdy && !req_load
|
||||||
val sec_rdy = io.idx_match && !refilled && (dirty || !requested || req_load)
|
val sec_rdy = io.idx_match && !refilled && (dirty || !requested || req_load)
|
||||||
|
|
||||||
val rpq = (new queue(NRPQ)) { new RPQEntry() }
|
// XXX why doesn't this work?
|
||||||
|
// val rpq = (new queue(NRPQ)) { new RPQEntry() }
|
||||||
|
val rpq_enq_bits = Cat(io.req_offset, io.req_cmd, io.req_type, io.req_sdq_id, io.req_tag)
|
||||||
|
val rpq = (new queue(NRPQ)) { Bits(width = rpq_enq_bits.getWidth) }
|
||||||
rpq.io.enq.valid := (io.req_pri_val && io.req_pri_rdy || io.req_sec_val && sec_rdy) && req_use_rpq
|
rpq.io.enq.valid := (io.req_pri_val && io.req_pri_rdy || io.req_sec_val && sec_rdy) && req_use_rpq
|
||||||
rpq.io.enq.bits.offset := io.req_offset
|
rpq.io.enq.bits := rpq_enq_bits
|
||||||
rpq.io.enq.bits.cmd := io.req_cmd
|
|
||||||
rpq.io.enq.bits.typ := io.req_type
|
|
||||||
rpq.io.enq.bits.sdq_id := io.req_sdq_id
|
|
||||||
rpq.io.enq.bits.tag := io.req_tag
|
|
||||||
rpq.io.deq.ready := io.replay.ready && refilled
|
rpq.io.deq.ready := io.replay.ready && refilled
|
||||||
|
|
||||||
|
var rpq_deq_bits = rpq.io.deq.bits
|
||||||
|
io.replay.bits.tag := rpq_deq_bits
|
||||||
|
rpq_deq_bits = rpq_deq_bits >> UFix(io.req_tag.width)
|
||||||
|
io.replay.bits.sdq_id := rpq_deq_bits.toUFix
|
||||||
|
rpq_deq_bits = rpq_deq_bits >> UFix(io.req_sdq_id.width)
|
||||||
|
io.replay.bits.typ := rpq_deq_bits
|
||||||
|
rpq_deq_bits = rpq_deq_bits >> UFix(io.req_type.width)
|
||||||
|
io.replay.bits.cmd := rpq_deq_bits
|
||||||
|
rpq_deq_bits = rpq_deq_bits >> UFix(io.req_cmd.width)
|
||||||
|
io.replay.bits.offset := rpq_deq_bits
|
||||||
|
rpq_deq_bits = rpq_deq_bits >> UFix(io.req_offset.width)
|
||||||
|
|
||||||
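The XXX comment records why the replay queue is no longer a queue of RPQEntry bundles: the queue generator apparently could not take a Bundle payload at this point, so the fields are flattened with Cat on enqueue and peeled back off with shifts on dequeue, lowest field first. A schematic of the pack/unpack pairing; the field widths are assumptions standing in for io.req_*.width, and the patch itself relies on width truncation on assignment rather than explicit bit extracts:

  // pack: offset | cmd | typ | sdq_id | tag   (tag lands in the low bits)
  val enq_bits = Cat(offset, cmd, typ, sdq_id, tag)
  // unpack: consume from the low end in the same order the fields were appended
  var d = deq_bits
  val tag_f    = d(tag_w-1, 0);    d = d >> UFix(tag_w)
  val sdq_id_f = d(sdq_w-1, 0);    d = d >> UFix(sdq_w)
  val typ_f    = d(typ_w-1, 0);    d = d >> UFix(typ_w)
  val cmd_f    = d(cmd_w-1, 0);    d = d >> UFix(cmd_w)
  val offset_f = d(off_w-1, 0)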
when (io.req_pri_val && io.req_pri_rdy) {
|
when (io.req_pri_val && io.req_pri_rdy) {
|
||||||
valid <== Bool(true)
|
valid := Bool(true)
|
||||||
dirty <== !req_load
|
dirty := !req_load
|
||||||
requested <== Bool(false)
|
requested := Bool(false)
|
||||||
refilled <== Bool(false)
|
refilled := Bool(false)
|
||||||
ppn <== io.req_ppn
|
ppn := io.req_ppn
|
||||||
idx_ <== io.req_idx
|
idx_ := io.req_idx
|
||||||
way_oh_ <== io.req_way_oh
|
way_oh_ := io.req_way_oh
|
||||||
}
|
}
|
||||||
when (io.mem_req.valid && io.mem_req.ready) {
|
.otherwise {
|
||||||
requested <== Bool(true)
|
when (io.mem_req.valid && io.mem_req.ready) {
|
||||||
}
|
requested := Bool(true)
|
||||||
when (io.mem_resp_val) {
|
}
|
||||||
refilled <== Bool(true)
|
when (io.mem_resp_val) {
|
||||||
}
|
refilled := Bool(true)
|
||||||
when (io.meta_req.valid && io.meta_req.ready) {
|
}
|
||||||
valid <== Bool(false)
|
when (io.meta_req.valid && io.meta_req.ready) {
|
||||||
}
|
valid := Bool(false)
|
||||||
otherwise {
|
}
|
||||||
dirty <== next_dirty
|
dirty := next_dirty
|
||||||
}
|
}
|
||||||
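Wrapping the requested/refilled/valid/dirty updates in the .otherwise of the allocation when is another priority fix: an allocation in the same cycle as a memory grant, refill or meta-data write must not have its freshly initialised state overwritten, so those updates now apply only when no primary allocation is happening. A reduced sketch of the shape, with the handshake conditions abbreviated:

  when (alloc) {
    valid := Bool(true)                 // allocation (re)initialises the MSHR state
    dirty := !req_load
    requested := Bool(false)
    refilled := Bool(false)
  }
  .otherwise {
    when (mem_req_fire)  { requested := Bool(true) }
    when (mem_resp_fire) { refilled  := Bool(true) }
    when (meta_req_fire) { valid     := Bool(false) }
    dirty := next_dirty                 // tracks secondary stores while the miss is outstanding
  }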
|
|
||||||
io.idx_match := valid && (idx_ === io.req_idx)
|
io.idx_match := valid && (idx_ === io.req_idx)
|
||||||
@ -251,11 +262,6 @@ class MSHR(id: Int) extends Component {

io.replay.valid := rpq.io.deq.valid && refilled
io.replay.bits.idx := idx_
-io.replay.bits.tag := rpq.io.deq.bits.tag
-io.replay.bits.offset := rpq.io.deq.bits.offset
-io.replay.bits.cmd := rpq.io.deq.bits.cmd
-io.replay.bits.typ := rpq.io.deq.bits.typ
-io.replay.bits.sdq_id := rpq.io.deq.bits.sdq_id
io.replay.bits.way_oh := way_oh_
}
@ -366,10 +372,10 @@ class ReplayUnit extends Component {
|
|||||||
|
|
||||||
val replay_val = Reg(resetVal = Bool(false))
|
val replay_val = Reg(resetVal = Bool(false))
|
||||||
val replay_retry = replay_val && !io.data_req.ready
|
val replay_retry = replay_val && !io.data_req.ready
|
||||||
replay_val <== io.replay.valid || replay_retry
|
replay_val := io.replay.valid || replay_retry
|
||||||
|
|
||||||
val rp = Reg { new Replay() }
|
val rp = Reg { new Replay() }
|
||||||
when (io.replay.valid && io.replay.ready) { rp <== io.replay.bits }
|
when (io.replay.valid && io.replay.ready) { rp := io.replay.bits }
|
||||||
|
|
||||||
val rp_amo = rp.cmd(3).toBool
|
val rp_amo = rp.cmd(3).toBool
|
||||||
val rp_store = (rp.cmd === M_XWR)
|
val rp_store = (rp.cmd === M_XWR)
|
||||||
@ -383,13 +389,13 @@ class ReplayUnit extends Component {
|
|||||||
val sdq_wen = io.sdq_enq.valid && io.sdq_enq.ready
|
val sdq_wen = io.sdq_enq.valid && io.sdq_enq.ready
|
||||||
val sdq_addr = Mux(sdq_ren_retry, rp.sdq_id, Mux(sdq_ren_new, io.replay.bits.sdq_id, sdq_alloc_id))
|
val sdq_addr = Mux(sdq_ren_retry, rp.sdq_id, Mux(sdq_ren_new, io.replay.bits.sdq_id, sdq_alloc_id))
|
||||||
|
|
||||||
val sdq = Mem4(NSDQ, io.sdq_enq.bits)
|
val sdq = Mem(NSDQ, io.sdq_enq.bits)
|
||||||
sdq.setReadLatency(1);
|
sdq.setReadLatency(1);
|
||||||
sdq.setTarget('inst)
|
sdq.setTarget('inst)
|
||||||
val sdq_dout = sdq.rw(sdq_addr, io.sdq_enq.bits, sdq_wen, cs = sdq_ren || sdq_wen)
|
val sdq_dout = sdq.rw(sdq_addr, io.sdq_enq.bits, sdq_wen, cs = sdq_ren || sdq_wen)
|
||||||
|
|
||||||
val sdq_free = replay_val && !replay_retry && rp_write
|
val sdq_free = replay_val && !replay_retry && rp_write
|
||||||
sdq_val <== sdq_val & ~(sdq_free.toUFix << rp.sdq_id) | (sdq_wen.toUFix << sdq_alloc_id)
|
sdq_val := sdq_val & ~(sdq_free.toUFix << rp.sdq_id) | (sdq_wen.toUFix << sdq_alloc_id)
|
||||||
|
|
||||||
io.sdq_enq.ready := (~sdq_val != UFix(0)) && !sdq_ren
|
io.sdq_enq.ready := (~sdq_val != UFix(0)) && !sdq_ren
|
||||||
io.sdq_id := sdq_alloc_id
|
io.sdq_id := sdq_alloc_id
|
||||||
@ -433,9 +439,9 @@ class WritebackUnit extends Component {
wbq.io.enq.bits := io.data_resp
wbq.io.deq.ready := io.mem_req.ready && !refill_val && (cnt === UFix(REFILL_CYCLES))

-when (io.req.valid && io.req.ready) { valid <== Bool(true); cnt <== UFix(0); addr <== io.req.bits }
-when (io.data_req.valid && io.data_req.ready) { cnt <== cnt + UFix(1) }
-when ((cnt === UFix(REFILL_CYCLES)) && !wbq.io.deq.valid) { valid <== Bool(false) }
+when (io.data_req.valid && io.data_req.ready) { cnt := cnt + UFix(1) }
+when ((cnt === UFix(REFILL_CYCLES)) && !wbq.io.deq.valid) { valid := Bool(false) }
+when (io.req.valid && io.req.ready) { valid := Bool(true); cnt := UFix(0); addr := io.req.bits }

io.req.ready := !valid
io.data_req.valid := valid && (cnt < UFix(REFILL_CYCLES))
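The three whens are reordered rather than rewritten: under last-assignment-wins, the case written last has priority, so accepting a new writeback request in the same cycle that the previous one finishes must come after the clear and the counter increment. A stripped-down sketch of the idiom:

  when (beat_fire) { cnt := cnt + UFix(1) }                   // advance the beat counter
  when (last_beat) { valid := Bool(false) }                   // previous writeback done
  when (req_fire)  { valid := Bool(true); cnt := UFix(0) }    // written last, so a new request wins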
@ -474,22 +480,22 @@ class FlushUnit(lines: Int) extends Component {
|
|||||||
switch (state) {
|
switch (state) {
|
||||||
is(s_reset) {
|
is(s_reset) {
|
||||||
when (io.meta_req.ready) {
|
when (io.meta_req.ready) {
|
||||||
state <== Mux(~way_cnt === UFix(0) && ~idx_cnt === UFix(0), s_ready, s_reset);
|
state := Mux(~way_cnt === UFix(0) && ~idx_cnt === UFix(0), s_ready, s_reset);
|
||||||
when (~way_cnt === UFix(0)) { idx_cnt <== next_idx_cnt };
|
when (~way_cnt === UFix(0)) { idx_cnt := next_idx_cnt };
|
||||||
way_cnt <== next_way_cnt;
|
way_cnt := next_way_cnt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is(s_ready) { when (io.req.valid) { state <== s_meta_read; tag <== io.req.bits } }
|
is(s_ready) { when (io.req.valid) { state := s_meta_read; tag := io.req.bits } }
|
||||||
is(s_meta_read) { when (io.meta_req.ready) { state <== s_meta_wait } }
|
is(s_meta_read) { when (io.meta_req.ready) { state := s_meta_wait } }
|
||||||
is(s_meta_wait) { state <== Mux(io.meta_resp.valid && io.meta_resp.dirty && !io.wb_req.ready, s_meta_read, s_meta_write) }
|
is(s_meta_wait) { state := Mux(io.meta_resp.valid && io.meta_resp.dirty && !io.wb_req.ready, s_meta_read, s_meta_write) }
|
||||||
is(s_meta_write) {
|
is(s_meta_write) {
|
||||||
when (io.meta_req.ready) {
|
when (io.meta_req.ready) {
|
||||||
state <== Mux(~way_cnt === UFix(0) && ~idx_cnt === UFix(0), s_done, s_meta_read);
|
state := Mux(~way_cnt === UFix(0) && ~idx_cnt === UFix(0), s_done, s_meta_read);
|
||||||
when (~way_cnt === UFix(0)) { idx_cnt <== next_idx_cnt };
|
when (~way_cnt === UFix(0)) { idx_cnt := next_idx_cnt };
|
||||||
way_cnt <== next_way_cnt;
|
way_cnt := next_way_cnt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is(s_done) { when (io.resp.ready) { state <== s_ready } }
|
is(s_done) { when (io.resp.ready) { state := s_ready } }
|
||||||
}
|
}
|
||||||
|
|
||||||
io.req.ready := state === s_ready
|
io.req.ready := state === s_ready
|
||||||
@ -515,7 +521,7 @@ class MetaDataArray(lines: Int) extends Component {
val state_req = (new ioDecoupled) { new MetaArrayReq() }
}

-val vd_array = Mem4(lines, Bits(width = 2))
+val vd_array = Mem(lines, Bits(width = 2))
vd_array.setReadLatency(1);
val vd_wdata2 = Cat(io.state_req.bits.data.valid, io.state_req.bits.data.dirty)
vd_array.write(io.state_req.bits.idx, vd_wdata2, io.state_req.valid && io.state_req.bits.rw)
@ -526,7 +532,7 @@ class MetaDataArray(lines: Int) extends Component {
|
|||||||
// this could be eliminated if the read port were combinational.
|
// this could be eliminated if the read port were combinational.
|
||||||
val vd_conflict = io.state_req.valid && (io.req.bits.idx === io.state_req.bits.idx)
|
val vd_conflict = io.state_req.valid && (io.req.bits.idx === io.state_req.bits.idx)
|
||||||
|
|
||||||
val tag_array = Mem4(lines, io.resp.tag)
|
val tag_array = Mem(lines, io.resp.tag)
|
||||||
tag_array.setReadLatency(1);
|
tag_array.setReadLatency(1);
|
||||||
tag_array.setTarget('inst)
|
tag_array.setTarget('inst)
|
||||||
val tag_rdata = tag_array.rw(io.req.bits.idx, io.req.bits.data.tag, io.req.valid && io.req.bits.rw, cs = io.req.valid)
|
val tag_rdata = tag_array.rw(io.req.bits.idx, io.req.bits.data.tag, io.req.valid && io.req.bits.rw, cs = io.req.valid)
|
||||||
@ -547,7 +553,7 @@ class MetaDataArrayArray(lines: Int) extends Component {
|
|||||||
|
|
||||||
val way_en_ = Reg { Bits(width=NWAYS) }
|
val way_en_ = Reg { Bits(width=NWAYS) }
|
||||||
when (io.req.valid && io.req.ready) {
|
when (io.req.valid && io.req.ready) {
|
||||||
way_en_ <== io.req.bits.way_en
|
way_en_ := io.req.bits.way_en
|
||||||
}
|
}
|
||||||
|
|
||||||
var tag_ready = Bool(true)
|
var tag_ready = Bool(true)
|
||||||
@ -576,7 +582,7 @@ class DataArray(lines: Int) extends Component {
|
|||||||
|
|
||||||
val wmask = FillInterleaved(8, io.req.bits.wmask)
|
val wmask = FillInterleaved(8, io.req.bits.wmask)
|
||||||
|
|
||||||
val array = Mem4(lines*REFILL_CYCLES, io.resp)
|
val array = Mem(lines*REFILL_CYCLES, io.resp)
|
||||||
array.setReadLatency(1);
|
array.setReadLatency(1);
|
||||||
array.setTarget('inst)
|
array.setTarget('inst)
|
||||||
val addr = Cat(io.req.bits.idx, io.req.bits.offset)
|
val addr = Cat(io.req.bits.idx, io.req.bits.offset)
|
||||||
@ -594,7 +600,7 @@ class DataArrayArray(lines: Int) extends Component {
|
|||||||
|
|
||||||
val way_en_ = Reg { Bits(width=NWAYS) }
|
val way_en_ = Reg { Bits(width=NWAYS) }
|
||||||
when (io.req.valid && io.req.ready) {
|
when (io.req.valid && io.req.ready) {
|
||||||
way_en_ <== io.req.bits.way_en
|
way_en_ := io.req.bits.way_en
|
||||||
}
|
}
|
||||||
|
|
||||||
//val data_ready_arr = Vec(NWAYS){ Bool() }
|
//val data_ready_arr = Vec(NWAYS){ Bool() }
|
||||||
@ -694,26 +700,26 @@ class HellaCacheDM extends Component {
|
|||||||
val replayer = new ReplayUnit()
|
val replayer = new ReplayUnit()
|
||||||
val replay_amo_val = replayer.io.data_req.valid && replayer.io.data_req.bits.cmd(3).toBool
|
val replay_amo_val = replayer.io.data_req.valid && replayer.io.data_req.bits.cmd(3).toBool
|
||||||
|
|
||||||
when (replay_amo_val) {
|
|
||||||
r_cpu_req_idx <== Cat(replayer.io.data_req.bits.idx, replayer.io.data_req.bits.offset)
|
|
||||||
r_cpu_req_cmd <== replayer.io.data_req.bits.cmd
|
|
||||||
r_cpu_req_type <== replayer.io.data_req.bits.typ
|
|
||||||
r_cpu_req_data <== replayer.io.data_req.bits.data
|
|
||||||
}
|
|
||||||
when (io.cpu.req_val) {
|
when (io.cpu.req_val) {
|
||||||
r_cpu_req_idx <== io.cpu.req_idx
|
r_cpu_req_idx := io.cpu.req_idx
|
||||||
r_cpu_req_cmd <== io.cpu.req_cmd
|
r_cpu_req_cmd := io.cpu.req_cmd
|
||||||
r_cpu_req_type <== io.cpu.req_type
|
r_cpu_req_type := io.cpu.req_type
|
||||||
r_cpu_req_tag <== io.cpu.req_tag
|
r_cpu_req_tag := io.cpu.req_tag
|
||||||
when (req_write) {
|
when (req_write) {
|
||||||
r_cpu_req_data <== io.cpu.req_data
|
r_cpu_req_data := io.cpu.req_data
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
when (replay_amo_val) {
|
||||||
|
r_cpu_req_idx := Cat(replayer.io.data_req.bits.idx, replayer.io.data_req.bits.offset)
|
||||||
|
r_cpu_req_cmd := replayer.io.data_req.bits.cmd
|
||||||
|
r_cpu_req_type := replayer.io.data_req.bits.typ
|
||||||
|
r_cpu_req_data := replayer.io.data_req.bits.data
|
||||||
|
}
|
||||||
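The when (replay_amo_val) capture block is moved below the when (io.cpu.req_val) block, not deleted: with := the later block wins when both fire, so placing the replay capture last presumably keeps the original priority in which a replayed AMO overrides a new CPU request into the r_cpu_req_* registers. The general shape, with abbreviated names:

  when (cpu_req_fire) { r_idx := cpu_idx;    r_cmd := cpu_cmd }
  when (replay_fire)  { r_idx := replay_idx; r_cmd := replay_cmd }  // last connect wins: replay has priority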
|
|
||||||
// refill counter
|
// refill counter
|
||||||
val rr_count = Reg(resetVal = UFix(0, log2up(REFILL_CYCLES)))
|
val rr_count = Reg(resetVal = UFix(0, log2up(REFILL_CYCLES)))
|
||||||
val rr_count_next = rr_count + UFix(1)
|
val rr_count_next = rr_count + UFix(1)
|
||||||
when (io.mem.resp_val) { rr_count <== rr_count_next }
|
when (io.mem.resp_val) { rr_count := rr_count_next }
|
||||||
|
|
||||||
val misaligned =
|
val misaligned =
|
||||||
(((r_cpu_req_type === MT_H) || (r_cpu_req_type === MT_HU)) && (r_cpu_req_idx(0) != Bits(0))) ||
|
(((r_cpu_req_type === MT_H) || (r_cpu_req_type === MT_HU)) && (r_cpu_req_idx(0) != Bits(0))) ||
|
||||||
@ -788,7 +794,7 @@ class HellaCacheDM extends Component {
|
|||||||
val drain_store = drain_store_val && data_arb.io.in(2).ready
|
val drain_store = drain_store_val && data_arb.io.in(2).ready
|
||||||
val p_store_rdy = !p_store_valid || drain_store
|
val p_store_rdy = !p_store_valid || drain_store
|
||||||
val p_amo = Reg(tag_hit && r_req_amo && p_store_rdy && !p_store_match || r_replay_amo, resetVal = Bool(false))
|
val p_amo = Reg(tag_hit && r_req_amo && p_store_rdy && !p_store_match || r_replay_amo, resetVal = Bool(false))
|
||||||
p_store_valid <== !p_store_rdy || (tag_hit && r_req_store) || p_amo
|
p_store_valid := !p_store_rdy || (tag_hit && r_req_store) || p_amo
|
||||||
|
|
||||||
// writeback
|
// writeback
|
||||||
val wb_rdy = wb_arb.io.in(1).ready && !p_store_idx_match
|
val wb_rdy = wb_arb.io.in(1).ready && !p_store_idx_match
|
||||||
@ -811,14 +817,14 @@ class HellaCacheDM extends Component {
|
|||||||
val amoalu = new AMOALU
|
val amoalu = new AMOALU
|
||||||
storegen.io.typ := r_cpu_req_type
|
storegen.io.typ := r_cpu_req_type
|
||||||
storegen.io.din := r_cpu_req_data
|
storegen.io.din := r_cpu_req_data
|
||||||
when (p_amo) {
|
|
||||||
p_store_data <== amoalu.io.out
|
|
||||||
}
|
|
||||||
when (tag_hit && r_req_write && p_store_rdy || r_replay_amo) {
|
when (tag_hit && r_req_write && p_store_rdy || r_replay_amo) {
|
||||||
p_store_idx <== r_cpu_req_idx
|
p_store_idx := r_cpu_req_idx
|
||||||
p_store_type <== r_cpu_req_type
|
p_store_type := r_cpu_req_type
|
||||||
p_store_cmd <== r_cpu_req_cmd
|
p_store_cmd := r_cpu_req_cmd
|
||||||
p_store_data <== storegen.io.dout
|
p_store_data := storegen.io.dout
|
||||||
|
}
|
||||||
|
when (p_amo) {
|
||||||
|
p_store_data := amoalu.io.out
|
||||||
}
|
}
|
||||||
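The same reordering is applied to the pending-store registers: the when (p_amo) assignment is moved after the block that captures storegen.io.dout, so on an AMO the ALU result is what actually ends up in p_store_data. Sketch:

  when (store_capture) { p_store_data := storegen_dout }  // ordinary store data
  when (p_amo)         { p_store_data := amoalu_out }     // AMO result overrides, being written later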
|
|
||||||
// miss handling
|
// miss handling
|
||||||
@ -852,7 +858,7 @@ class HellaCacheDM extends Component {
|
|||||||
data_arb.io.in(1).bits.rw := replay.cmd === M_XWR
|
data_arb.io.in(1).bits.rw := replay.cmd === M_XWR
|
||||||
data_arb.io.in(1).valid := replay_val
|
data_arb.io.in(1).valid := replay_val
|
||||||
replayer.io.data_req.ready := replay_rdy && !stall_replay
|
replayer.io.data_req.ready := replay_rdy && !stall_replay
|
||||||
r_replay_amo <== replay_amo_val && replay_rdy && !stall_replay
|
r_replay_amo := replay_amo_val && replay_rdy && !stall_replay
|
||||||
|
|
||||||
// store write mask generation.
|
// store write mask generation.
|
||||||
// assumes store replays are higher-priority than pending stores.
|
// assumes store replays are higher-priority than pending stores.
|
||||||
@ -881,13 +887,13 @@ class HellaCacheDM extends Component {
|
|||||||
amoalu.io.lhs := loadgen.io.r_dout.toUFix
|
amoalu.io.lhs := loadgen.io.r_dout.toUFix
|
||||||
amoalu.io.rhs := p_store_data.toUFix
|
amoalu.io.rhs := p_store_data.toUFix
|
||||||
|
|
||||||
early_nack <== early_tag_nack || early_load_nack || r_cpu_req_val && r_req_amo || replay_amo_val || r_replay_amo
|
early_nack := early_tag_nack || early_load_nack || r_cpu_req_val && r_req_amo || replay_amo_val || r_replay_amo
|
||||||
|
|
||||||
// reset and flush unit
|
// reset and flush unit
|
||||||
val flusher = new FlushUnit(lines)
|
val flusher = new FlushUnit(lines)
|
||||||
val flushed = Reg(resetVal = Bool(true))
|
val flushed = Reg(resetVal = Bool(true))
|
||||||
val flush_rdy = mshr.io.fence_rdy && wb_rdy && !p_store_valid
|
val flush_rdy = mshr.io.fence_rdy && wb_rdy && !p_store_valid
|
||||||
flushed <== flushed && !r_cpu_req_val || r_cpu_req_val && r_req_flush && flush_rdy && flusher.io.req.ready
|
flushed := flushed && !r_cpu_req_val || r_cpu_req_val && r_req_flush && flush_rdy && flusher.io.req.ready
|
||||||
flusher.io.req.valid := r_cpu_req_val && r_req_flush && flush_rdy && !flushed
|
flusher.io.req.valid := r_cpu_req_val && r_req_flush && flush_rdy && !flushed
|
||||||
flusher.io.wb_req <> wb_arb.io.in(0)
|
flusher.io.wb_req <> wb_arb.io.in(0)
|
||||||
flusher.io.meta_req.bits.inner_req <> meta_arb.io.in(0).bits
|
flusher.io.meta_req.bits.inner_req <> meta_arb.io.in(0).bits
|
||||||
@ -899,7 +905,7 @@ class HellaCacheDM extends Component {
|
|||||||
// we usually nack rather than reporting that the cache is not ready.
|
// we usually nack rather than reporting that the cache is not ready.
|
||||||
// fences and flushes are the exceptions.
|
// fences and flushes are the exceptions.
|
||||||
val pending_fence = Reg(resetVal = Bool(false))
|
val pending_fence = Reg(resetVal = Bool(false))
|
||||||
pending_fence <== (r_cpu_req_val && r_req_fence || pending_fence) && !flush_rdy
|
pending_fence := (r_cpu_req_val && r_req_fence || pending_fence) && !flush_rdy
|
||||||
val nack_hit = p_store_match || r_req_write && !p_store_rdy
|
val nack_hit = p_store_match || r_req_write && !p_store_rdy
|
||||||
val nack_miss = dirty && !wb_rdy || !mshr.io.req_rdy || r_req_write && !replayer.io.sdq_enq.ready
|
val nack_miss = dirty && !wb_rdy || !mshr.io.req_rdy || r_req_write && !replayer.io.sdq_enq.ready
|
||||||
val nack_flush = !flush_rdy && (r_req_fence || r_req_flush) ||
|
val nack_flush = !flush_rdy && (r_req_fence || r_req_flush) ||
|
||||||
@ -974,26 +980,26 @@ class HellaCacheAssoc extends Component {
|
|||||||
val replayer = new ReplayUnit()
|
val replayer = new ReplayUnit()
|
||||||
val replay_amo_val = replayer.io.data_req.valid && replayer.io.data_req.bits.cmd(3).toBool
|
val replay_amo_val = replayer.io.data_req.valid && replayer.io.data_req.bits.cmd(3).toBool
|
||||||
|
|
||||||
when (replay_amo_val) {
|
|
||||||
r_cpu_req_idx <== Cat(replayer.io.data_req.bits.idx, replayer.io.data_req.bits.offset)
|
|
||||||
r_cpu_req_cmd <== replayer.io.data_req.bits.cmd
|
|
||||||
r_cpu_req_type <== replayer.io.data_req.bits.typ
|
|
||||||
r_cpu_req_data <== replayer.io.data_req.bits.data
|
|
||||||
}
|
|
||||||
when (io.cpu.req_val) {
|
when (io.cpu.req_val) {
|
||||||
r_cpu_req_idx <== io.cpu.req_idx
|
r_cpu_req_idx := io.cpu.req_idx
|
||||||
r_cpu_req_cmd <== io.cpu.req_cmd
|
r_cpu_req_cmd := io.cpu.req_cmd
|
||||||
r_cpu_req_type <== io.cpu.req_type
|
r_cpu_req_type := io.cpu.req_type
|
||||||
r_cpu_req_tag <== io.cpu.req_tag
|
r_cpu_req_tag := io.cpu.req_tag
|
||||||
when (req_write) {
|
when (req_write) {
|
||||||
r_cpu_req_data <== io.cpu.req_data
|
r_cpu_req_data := io.cpu.req_data
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
when (replay_amo_val) {
|
||||||
|
r_cpu_req_idx := Cat(replayer.io.data_req.bits.idx, replayer.io.data_req.bits.offset)
|
||||||
|
r_cpu_req_cmd := replayer.io.data_req.bits.cmd
|
||||||
|
r_cpu_req_type := replayer.io.data_req.bits.typ
|
||||||
|
r_cpu_req_data := replayer.io.data_req.bits.data
|
||||||
|
}
|
||||||
|
|
||||||
// refill counter
|
// refill counter
|
||||||
val rr_count = Reg(resetVal = UFix(0, log2up(REFILL_CYCLES)))
|
val rr_count = Reg(resetVal = UFix(0, log2up(REFILL_CYCLES)))
|
||||||
val rr_count_next = rr_count + UFix(1)
|
val rr_count_next = rr_count + UFix(1)
|
||||||
when (io.mem.resp_val) { rr_count <== rr_count_next }
|
when (io.mem.resp_val) { rr_count := rr_count_next }
|
||||||
|
|
||||||
val misaligned =
|
val misaligned =
|
||||||
(((r_cpu_req_type === MT_H) || (r_cpu_req_type === MT_HU)) && (r_cpu_req_idx(0) != Bits(0))) ||
|
(((r_cpu_req_type === MT_H) || (r_cpu_req_type === MT_HU)) && (r_cpu_req_idx(0) != Bits(0))) ||
|
||||||
@ -1081,7 +1087,7 @@ class HellaCacheAssoc extends Component {
|
|||||||
val drain_store = drain_store_val && data_arb.io.in(2).ready
|
val drain_store = drain_store_val && data_arb.io.in(2).ready
|
||||||
val p_store_rdy = !p_store_valid || drain_store
|
val p_store_rdy = !p_store_valid || drain_store
|
||||||
val p_amo = Reg(tag_hit && r_req_amo && p_store_rdy && !p_store_match || r_replay_amo, resetVal = Bool(false))
|
val p_amo = Reg(tag_hit && r_req_amo && p_store_rdy && !p_store_match || r_replay_amo, resetVal = Bool(false))
|
||||||
p_store_valid <== !p_store_rdy || (tag_hit && r_req_store) || p_amo
|
p_store_valid := !p_store_rdy || (tag_hit && r_req_store) || p_amo
|
||||||
|
|
||||||
// writeback
|
// writeback
|
||||||
val wb_rdy = wb_arb.io.in(1).ready && !p_store_idx_match
|
val wb_rdy = wb_arb.io.in(1).ready && !p_store_idx_match
|
||||||
@ -1105,15 +1111,15 @@ class HellaCacheAssoc extends Component {
|
|||||||
val amoalu = new AMOALU
|
val amoalu = new AMOALU
|
||||||
storegen.io.typ := r_cpu_req_type
|
storegen.io.typ := r_cpu_req_type
|
||||||
storegen.io.din := r_cpu_req_data
|
storegen.io.din := r_cpu_req_data
|
||||||
when (p_amo) {
|
|
||||||
p_store_data <== amoalu.io.out
|
|
||||||
}
|
|
||||||
when (tag_hit && r_req_write && p_store_rdy || r_replay_amo) {
|
when (tag_hit && r_req_write && p_store_rdy || r_replay_amo) {
|
||||||
p_store_idx <== r_cpu_req_idx
|
p_store_idx := r_cpu_req_idx
|
||||||
p_store_type <== r_cpu_req_type
|
p_store_type := r_cpu_req_type
|
||||||
p_store_cmd <== r_cpu_req_cmd
|
p_store_cmd := r_cpu_req_cmd
|
||||||
p_store_way_oh <== Mux(r_replay_amo, replayer.io.way_oh, hit_way_oh)
|
p_store_way_oh := Mux(r_replay_amo, replayer.io.way_oh, hit_way_oh)
|
||||||
p_store_data <== storegen.io.dout
|
p_store_data := storegen.io.dout
|
||||||
|
}
|
||||||
|
when (p_amo) {
|
||||||
|
p_store_data := amoalu.io.out
|
||||||
}
|
}
|
||||||
|
|
||||||
// miss handling
|
// miss handling
|
||||||
@ -1149,7 +1155,7 @@ class HellaCacheAssoc extends Component {
|
|||||||
data_arb.io.in(1).valid := replay_val
|
data_arb.io.in(1).valid := replay_val
|
||||||
data_arb.io.in(1).bits.way_en := replayer.io.way_oh
|
data_arb.io.in(1).bits.way_en := replayer.io.way_oh
|
||||||
replayer.io.data_req.ready := replay_rdy && !stall_replay
|
replayer.io.data_req.ready := replay_rdy && !stall_replay
|
||||||
r_replay_amo <== replay_amo_val && replay_rdy && !stall_replay
|
r_replay_amo := replay_amo_val && replay_rdy && !stall_replay
|
||||||
|
|
||||||
// store write mask generation.
|
// store write mask generation.
|
||||||
// assumes store replays are higher-priority than pending stores.
|
// assumes store replays are higher-priority than pending stores.
|
||||||
@ -1178,13 +1184,13 @@ class HellaCacheAssoc extends Component {
|
|||||||
amoalu.io.lhs := loadgen.io.r_dout.toUFix
|
amoalu.io.lhs := loadgen.io.r_dout.toUFix
|
||||||
amoalu.io.rhs := p_store_data.toUFix
|
amoalu.io.rhs := p_store_data.toUFix
|
||||||
|
|
||||||
early_nack <== early_tag_nack || early_load_nack || r_cpu_req_val && r_req_amo || replay_amo_val || r_replay_amo
|
early_nack := early_tag_nack || early_load_nack || r_cpu_req_val && r_req_amo || replay_amo_val || r_replay_amo
|
||||||
|
|
||||||
// reset and flush unit
|
// reset and flush unit
|
||||||
val flusher = new FlushUnit(lines)
|
val flusher = new FlushUnit(lines)
|
||||||
val flushed = Reg(resetVal = Bool(true))
|
val flushed = Reg(resetVal = Bool(true))
|
||||||
val flush_rdy = mshr.io.fence_rdy && wb_rdy && !p_store_valid
|
val flush_rdy = mshr.io.fence_rdy && wb_rdy && !p_store_valid
|
||||||
flushed <== flushed && !r_cpu_req_val || r_cpu_req_val && r_req_flush && flush_rdy && flusher.io.req.ready
|
flushed := flushed && !r_cpu_req_val || r_cpu_req_val && r_req_flush && flush_rdy && flusher.io.req.ready
|
||||||
flusher.io.req.valid := r_cpu_req_val && r_req_flush && flush_rdy && !flushed
|
flusher.io.req.valid := r_cpu_req_val && r_req_flush && flush_rdy && !flushed
|
||||||
flusher.io.wb_req <> wb_arb.io.in(0)
|
flusher.io.wb_req <> wb_arb.io.in(0)
|
||||||
flusher.io.meta_req <> meta_arb.io.in(0)
|
flusher.io.meta_req <> meta_arb.io.in(0)
|
||||||
@ -1194,7 +1200,7 @@ class HellaCacheAssoc extends Component {
|
|||||||
// we usually nack rather than reporting that the cache is not ready.
|
// we usually nack rather than reporting that the cache is not ready.
|
||||||
// fences and flushes are the exceptions.
|
// fences and flushes are the exceptions.
|
||||||
val pending_fence = Reg(resetVal = Bool(false))
|
val pending_fence = Reg(resetVal = Bool(false))
|
||||||
pending_fence <== (r_cpu_req_val && r_req_fence || pending_fence) && !flush_rdy
|
pending_fence := (r_cpu_req_val && r_req_fence || pending_fence) && !flush_rdy
|
||||||
val nack_hit = p_store_match || r_req_write && !p_store_rdy
|
val nack_hit = p_store_match || r_req_write && !p_store_rdy
|
||||||
val nack_miss = dirty && !wb_rdy || !mshr.io.req_rdy || r_req_write && !replayer.io.sdq_enq.ready
|
val nack_miss = dirty && !wb_rdy || !mshr.io.req_rdy || r_req_write && !replayer.io.sdq_enq.ready
|
||||||
val nack_flush = !flush_rdy && (r_req_fence || r_req_flush) ||
|
val nack_flush = !flush_rdy && (r_req_fence || r_req_flush) ||
|
||||||
|
@ -77,22 +77,22 @@ class rocketPTW extends Component
val req_itlb_val = io.itlb.req_val;
val req_dtlb_val = io.dtlb.req_val && !io.itlb.req_val;

-when ((state === s_ready) && req_itlb_val) {
+when ((state === s_ready) && req_dtlb_val) {
-r_req_vpn <== io.itlb.req_vpn;
+r_req_vpn := io.dtlb.req_vpn;
-r_req_dest <== Bool(false);
+r_req_dest := Bool(true);
-req_addr <== Cat(io.ptbr(PADDR_BITS-1,PGIDX_BITS), io.itlb.req_vpn(VPN_BITS-1,VPN_BITS-10), Bits(0,3)).toUFix;
+req_addr := Cat(io.ptbr(PADDR_BITS-1,PGIDX_BITS), io.dtlb.req_vpn(VPN_BITS-1,VPN_BITS-10), Bits(0,3)).toUFix;
}

-when ((state === s_ready) && req_dtlb_val) {
+when ((state === s_ready) && req_itlb_val) {
-r_req_vpn <== io.dtlb.req_vpn;
+r_req_vpn := io.itlb.req_vpn;
-r_req_dest <== Bool(true);
+r_req_dest := Bool(false);
-req_addr <== Cat(io.ptbr(PADDR_BITS-1,PGIDX_BITS), io.dtlb.req_vpn(VPN_BITS-1,VPN_BITS-10), Bits(0,3)).toUFix;
+req_addr := Cat(io.ptbr(PADDR_BITS-1,PGIDX_BITS), io.itlb.req_vpn(VPN_BITS-1,VPN_BITS-10), Bits(0,3)).toUFix;
}

when (io.dmem.resp_val) {
-req_addr <== Cat(io.dmem.resp_data(PADDR_BITS-1, PGIDX_BITS), vpn_idx, Bits(0,3)).toUFix;
+req_addr := Cat(io.dmem.resp_data(PADDR_BITS-1, PGIDX_BITS), vpn_idx, Bits(0,3)).toUFix;
-r_resp_perm <== io.dmem.resp_data(9,4);
+r_resp_perm := io.dmem.resp_data(9,4);
-r_resp_ppn <== io.dmem.resp_data(PADDR_BITS-1, PGIDX_BITS);
+r_resp_ppn := io.dmem.resp_data(PADDR_BITS-1, PGIDX_BITS);
}

io.dmem.req_val :=
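Swapping the two capture blocks is safe because req_dtlb_val is already qualified with !io.itlb.req_val, so the two conditions are mutually exclusive; the reorder just makes the ITLB block the textually later (and therefore winning) one, consistent with the rest of this commit where the higher-priority requester is written last. A sketch of the request select, with abbreviated signal names:

  val req_itlb_val = itlb_req
  val req_dtlb_val = dtlb_req && !itlb_req                    // DTLB only serviced when the ITLB is idle
  when (ready && req_dtlb_val) { r_req_dest := Bool(true);  r_req_vpn := dtlb_vpn }
  when (ready && req_itlb_val) { r_req_dest := Bool(false); r_req_vpn := itlb_vpn } // ITLB written last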
@ -133,83 +133,83 @@ class rocketPTW extends Component
|
|||||||
switch (state) {
|
switch (state) {
|
||||||
is (s_ready) {
|
is (s_ready) {
|
||||||
when (req_val) {
|
when (req_val) {
|
||||||
state <== s_l1_req;
|
state := s_l1_req;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// level 1
|
// level 1
|
||||||
is (s_l1_req) {
|
is (s_l1_req) {
|
||||||
when (io.dmem.req_rdy) {
|
when (io.dmem.req_rdy) {
|
||||||
state <== s_l1_wait;
|
state := s_l1_wait;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_l1_wait) {
|
is (s_l1_wait) {
|
||||||
when (io.dmem.resp_nack) {
|
when (io.dmem.resp_nack) {
|
||||||
state <== s_l1_req
|
state := s_l1_req
|
||||||
}
|
}
|
||||||
when (io.dmem.resp_val) {
|
when (io.dmem.resp_val) {
|
||||||
when (resp_ptd) { // page table descriptor
|
when (resp_ptd) { // page table descriptor
|
||||||
state <== s_l2_req;
|
state := s_l2_req;
|
||||||
}
|
}
|
||||||
when (resp_pte) { // page table entry
|
.elsewhen (resp_pte) { // page table entry
|
||||||
state <== s_l1_fake;
|
state := s_l1_fake;
|
||||||
}
|
}
|
||||||
otherwise {
|
.otherwise {
|
||||||
state <== s_error;
|
state := s_error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_l1_fake) {
|
is (s_l1_fake) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
// level 2
|
// level 2
|
||||||
is (s_l2_req) {
|
is (s_l2_req) {
|
||||||
when (io.dmem.req_rdy) {
|
when (io.dmem.req_rdy) {
|
||||||
state <== s_l2_wait;
|
state := s_l2_wait;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_l2_wait) {
|
is (s_l2_wait) {
|
||||||
when (io.dmem.resp_nack) {
|
when (io.dmem.resp_nack) {
|
||||||
state <== s_l2_req
|
state := s_l2_req
|
||||||
}
|
}
|
||||||
when (io.dmem.resp_val) {
|
when (io.dmem.resp_val) {
|
||||||
when (resp_ptd) { // page table descriptor
|
when (resp_ptd) { // page table descriptor
|
||||||
state <== s_l3_req;
|
state := s_l3_req;
|
||||||
}
|
}
|
||||||
when (resp_pte) { // page table entry
|
.elsewhen (resp_pte) { // page table entry
|
||||||
state <== s_l2_fake;
|
state := s_l2_fake;
|
||||||
}
|
}
|
||||||
otherwise {
|
.otherwise {
|
||||||
state <== s_error;
|
state := s_error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_l2_fake) {
|
is (s_l2_fake) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
// level 3
|
// level 3
|
||||||
is (s_l3_req) {
|
is (s_l3_req) {
|
||||||
when (io.dmem.req_rdy) {
|
when (io.dmem.req_rdy) {
|
||||||
state <== s_l3_wait;
|
state := s_l3_wait;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_l3_wait) {
|
is (s_l3_wait) {
|
||||||
when (io.dmem.resp_nack) {
|
when (io.dmem.resp_nack) {
|
||||||
state <== s_l3_req
|
state := s_l3_req
|
||||||
}
|
}
|
||||||
when (io.dmem.resp_val) {
|
when (io.dmem.resp_val) {
|
||||||
when (resp_pte) { // page table entry
|
when (resp_pte) { // page table entry
|
||||||
state <== s_done;
|
state := s_done;
|
||||||
}
|
}
|
||||||
otherwise {
|
.otherwise {
|
||||||
state <== s_error;
|
state := s_error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
is (s_done) {
|
is (s_done) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
is (s_error) {
|
is (s_error) {
|
||||||
state <== s_ready;
|
state := s_ready;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -24,22 +24,22 @@ class queue[T <: Data](entries: Int, flushable: Boolean = false)(data: => T) ext
val do_enq = io.enq.ready && io.enq.valid
val do_deq = io.deq.ready && io.deq.valid

-if (flushable) {
-when (io.flush) {
-deq_ptr <== UFix(0)
-enq_ptr <== UFix(0)
-maybe_full <== Bool(false)
-}
-}
when (do_deq) {
-deq_ptr <== deq_ptr + UFix(1)
+deq_ptr := deq_ptr + UFix(1)
}
when (do_enq) {
-enq_ptr <== enq_ptr + UFix(1)
+enq_ptr := enq_ptr + UFix(1)
}
when (do_enq != do_deq) {
-maybe_full <== do_enq
+maybe_full := do_enq
}
+if (flushable) {
+when (io.flush) {
+deq_ptr := UFix(0)
+enq_ptr := UFix(0)
+maybe_full := Bool(false)
+}
+}

-Mem(entries, do_enq, enq_ptr, io.enq.bits).read(deq_ptr) <> io.deq.bits
+io.deq.bits <> Mem(entries, do_enq, enq_ptr, io.enq.bits).read(deq_ptr)
}
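Moving the flushable block after the pointer updates makes flush the highest-priority action under last-assignment-wins: a flush in the same cycle as an enqueue or dequeue now resets the pointers instead of being overwritten by the increments. Sketch:

  when (do_deq) { deq_ptr := deq_ptr + UFix(1) }
  when (do_enq) { enq_ptr := enq_ptr + UFix(1) }
  if (flushable) {
    when (io.flush) {                  // written last, so flush beats enq/deq in the same cycle
      deq_ptr := UFix(0)
      enq_ptr := UFix(0)
      maybe_full := Bool(false)
    }
  }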
@ -66,7 +66,7 @@ object LFSR16
{
val width = 16
val lfsr = Reg(resetVal = UFix(1, width))
-when (increment) { lfsr <== Cat(lfsr(0)^lfsr(2)^lfsr(3)^lfsr(5), lfsr(width-1,1)).toUFix }
+when (increment) { lfsr := Cat(lfsr(0)^lfsr(2)^lfsr(3)^lfsr(5), lfsr(width-1,1)).toUFix }
lfsr
}
}
@ -176,13 +176,13 @@ class priorityDecoder(width: Int) extends Component
val io = new ioPriorityEncoder(in_width, width);
val l_out = Wire() { Bits() };

-for (i <- 0 to width-1) {
+l_out := Bits(0, width);
+for (i <- width-1 to 0 by -1) {
when (io.in === UFix(i, in_width)) {
-l_out <== Bits(1,1) << UFix(i);
+l_out := Bits(1,1) << UFix(i);
}
}

-l_out <== Bits(0, width);
io.out := l_out;
}
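With <== the default could sit after the loop and still lose to the conditional updates; with := the default has to be assigned first or it would override every match, which is why l_out := Bits(0, width) moves above the loop here and in priorityEncoder below, and why the loop now counts down so the lowest index is written last. Sketch of the idiom:

  val out = Wire() { Bits() }
  out := Bits(0, width)                                       // default assigned first
  for (i <- width-1 to 0 by -1) {
    when (sel === UFix(i)) { out := Bits(1,1) << UFix(i) }    // later (lower-i) whens take priority
  }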
@ -198,13 +198,13 @@ class priorityEncoder(width: Int) extends Component
val io = new ioPriorityDecoder(width, out_width);
val l_out = Wire() { UFix() };

-for (i <- 0 to width-1) {
+l_out := UFix(0, out_width);
+for (i <- width-1 to 1 by -1) {
when (io.in(i).toBool) {
-l_out <== UFix(i, out_width);
+l_out := UFix(i, out_width);
}
}

-l_out <== UFix(0, out_width);
io.out := l_out;
}