1
0

Merge pull request #622 from ucb-bar/priv-1.10

Various priv-1.10 changes
This commit is contained in:
Wesley W. Terpstra 2017-03-27 19:28:30 -07:00 committed by GitHub
commit ed38787c36
16 changed files with 198 additions and 100 deletions

16
README_TRAVIS.md Normal file
View File

@ -0,0 +1,16 @@
# Travis Notes for Administrators
Administrators: Are PRs taking > 1 hr to run through Travis? If you look at the Travis logs is it building `riscv-tools`?
This is because someone committed a PR to `master` which bumped `riscv-tools` and the master cache needs to be updated.
This is the procedure to follow to get
things fast again. We don't generally build on merges to master, just PRs.
1. Wait for your PR that you want to merge to go green. This will take a long time.
2. On Travis, click `More Options -> Caches` on the upper right.
3. Click `Delete all Repository Caches`.
4. Click `More Options -> Settings`.
5. On the `General Settings` section, switch the `Build Branch Updates` toggle to `ON`.
6. Perform your PR's merge to master. This will cause the master cache to build `riscv-tools`.
7. Once the merge commit goes green on Travis, switch the `Build Branch Updates` toggle to `OFF`.

@ -1 +1 @@
Subproject commit 3e6ef13ff5cda2e65efbbf5d306cc101582ad0e5 Subproject commit 8e4ddc62db448b613ae327792e72defca4d115d4

@ -1 +1 @@
Subproject commit 4f430b184ed07890cd30ad144ded6d7cb07dcdf0 Subproject commit 89d487023c1e59ff574872e2f51ee479cda380ab

View File

@ -21,9 +21,7 @@ class MStatus extends Bundle {
val dprv = UInt(width = PRV.SZ) // effective privilege for data accesses val dprv = UInt(width = PRV.SZ) // effective privilege for data accesses
val prv = UInt(width = PRV.SZ) // not truly part of mstatus, but convenient val prv = UInt(width = PRV.SZ) // not truly part of mstatus, but convenient
val sd = Bool() val sd = Bool()
val zero2 = UInt(width = 27) val zero2 = UInt(width = 31)
val sxl = UInt(width = 2)
val uxl = UInt(width = 2)
val sd_rv32 = Bool() val sd_rv32 = Bool()
val zero1 = UInt(width = 8) val zero1 = UInt(width = 8)
val tsr = Bool() val tsr = Bool()
@ -67,7 +65,12 @@ class DCSR extends Bundle {
val prv = UInt(width = PRV.SZ) val prv = UInt(width = PRV.SZ)
} }
class MIP extends Bundle { class MIP(implicit p: Parameters) extends CoreBundle()(p)
with HasRocketCoreParameters {
val lip = Vec(coreParams.nLocalInterrupts, Bool())
val zero2 = Bool()
val debug = Bool() // keep in sync with CSR.debugIntCause
val zero1 = Bool()
val rocc = Bool() val rocc = Bool()
val meip = Bool() val meip = Bool()
val heip = Bool() val heip = Bool()
@ -121,10 +124,11 @@ object CSR
def R = UInt(5,SZ) def R = UInt(5,SZ)
val ADDRSZ = 12 val ADDRSZ = 12
def debugIntCause = new MIP().getWidth def debugIntCause = 14 // keep in sync with MIP.debug
def debugTriggerCause = { def debugTriggerCause = {
require(debugIntCause >= Causes.all.max) val res = debugIntCause
debugIntCause require(!(Causes.all contains res))
res
} }
val firstCtr = CSRs.cycle val firstCtr = CSRs.cycle
@ -208,14 +212,24 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
val reg_dcsr = Reg(init=reset_dcsr) val reg_dcsr = Reg(init=reset_dcsr)
val (supported_interrupts, delegable_interrupts) = { val (supported_interrupts, delegable_interrupts) = {
val sup = Wire(init=new MIP().fromBits(0)) val sup = Wire(new MIP)
sup.usip := false
sup.ssip := Bool(usingVM) sup.ssip := Bool(usingVM)
sup.hsip := false
sup.msip := true sup.msip := true
sup.utip := false
sup.stip := Bool(usingVM) sup.stip := Bool(usingVM)
sup.htip := false
sup.mtip := true sup.mtip := true
sup.meip := true sup.ueip := false
sup.seip := Bool(usingVM) sup.seip := Bool(usingVM)
sup.heip := false
sup.meip := true
sup.rocc := usingRoCC sup.rocc := usingRoCC
sup.zero1 := false
sup.debug := false
sup.zero2 := false
sup.lip foreach { _ := true }
val del = Wire(init=sup) val del = Wire(init=sup)
del.msip := false del.msip := false
@ -226,10 +240,10 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
} }
val delegable_exceptions = UInt(Seq( val delegable_exceptions = UInt(Seq(
Causes.misaligned_fetch, Causes.misaligned_fetch,
Causes.fault_fetch, Causes.fetch_page_fault,
Causes.breakpoint, Causes.breakpoint,
Causes.fault_load, Causes.load_page_fault,
Causes.fault_store, Causes.store_page_fault,
Causes.user_ecall).map(1 << _).sum) Causes.user_ecall).map(1 << _).sum)
val reg_debug = Reg(init=Bool(false)) val reg_debug = Reg(init=Bool(false))
@ -277,6 +291,8 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
val hpm_mask = reg_mcounteren & Mux((!usingVM).B || reg_mstatus.prv === PRV.S, delegable_counters.U, reg_scounteren) val hpm_mask = reg_mcounteren & Mux((!usingVM).B || reg_mstatus.prv === PRV.S, delegable_counters.U, reg_scounteren)
val mip = Wire(init=reg_mip) val mip = Wire(init=reg_mip)
// seip is the OR of reg_mip.seip and the actual line from the PLIC
io.interrupts.seip.foreach { mip.seip := reg_mip.seip || RegNext(_) }
mip.rocc := io.rocc_interrupt mip.rocc := io.rocc_interrupt
val read_mip = mip.asUInt & supported_interrupts val read_mip = mip.asUInt & supported_interrupts
@ -363,7 +379,6 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
val read_sip = read_mip & reg_mideleg val read_sip = read_mip & reg_mideleg
val read_sstatus = Wire(init = 0.U.asTypeOf(new MStatus)) val read_sstatus = Wire(init = 0.U.asTypeOf(new MStatus))
read_sstatus.sd := io.status.sd read_sstatus.sd := io.status.sd
read_sstatus.uxl := io.status.uxl
read_sstatus.sd_rv32 := io.status.sd_rv32 read_sstatus.sd_rv32 := io.status.sd_rv32
read_sstatus.mxr := io.status.mxr read_sstatus.mxr := io.status.mxr
read_sstatus.sum := io.status.sum read_sstatus.sum := io.status.sum
@ -417,8 +432,7 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
} }
val decoded_addr = read_mapping map { case (k, v) => k -> (io.rw.addr === k) } val decoded_addr = read_mapping map { case (k, v) => k -> (io.rw.addr === k) }
val wdata = (Mux(io.rw.cmd.isOneOf(CSR.S, CSR.C), io.rw.rdata, UInt(0)) | io.rw.wdata) & val wdata = readModifyWriteCSR(io.rw.cmd, io.rw.rdata, io.rw.wdata)
~Mux(io.rw.cmd === CSR.C, io.rw.wdata, UInt(0))
val system_insn = io.rw.cmd === CSR.I val system_insn = io.rw.cmd === CSR.I
val opcode = UInt(1) << io.rw.addr(2,0) val opcode = UInt(1) << io.rw.addr(2,0)
@ -465,8 +479,6 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
io.status.sd := io.status.fs.andR || io.status.xs.andR io.status.sd := io.status.fs.andR || io.status.xs.andR
io.status.debug := reg_debug io.status.debug := reg_debug
io.status.isa := reg_misa io.status.isa := reg_misa
io.status.uxl := (if (usingUser) log2Ceil(xLen) - 4 else 0)
io.status.sxl := (if (usingVM) log2Ceil(xLen) - 4 else 0)
io.status.dprv := Reg(next = Mux(reg_mstatus.mprv && !reg_debug, reg_mstatus.mpp, reg_mstatus.prv)) io.status.dprv := Reg(next = Mux(reg_mstatus.mprv && !reg_debug, reg_mstatus.mpp, reg_mstatus.prv))
if (xLen == 32) if (xLen == 32)
io.status.sd_rv32 := io.status.sd io.status.sd_rv32 := io.status.sd
@ -488,7 +500,9 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
val write_badaddr = cause isOneOf (Causes.breakpoint, val write_badaddr = cause isOneOf (Causes.breakpoint,
Causes.misaligned_load, Causes.misaligned_store, Causes.misaligned_fetch, Causes.misaligned_load, Causes.misaligned_store, Causes.misaligned_fetch,
Causes.fault_load, Causes.fault_store, Causes.fault_fetch) Causes.load_access, Causes.store_access, Causes.fetch_access,
Causes.load_page_fault, Causes.store_page_fault, Causes.fetch_page_fault
)
when (trapToDebug) { when (trapToDebug) {
reg_debug := true reg_debug := true
@ -570,15 +584,20 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
if (usingRoCC) reg_mstatus.xs := Fill(2, new_mstatus.xs.orR) if (usingRoCC) reg_mstatus.xs := Fill(2, new_mstatus.xs.orR)
} }
when (decoded_addr(CSRs.misa)) { when (decoded_addr(CSRs.misa)) {
val mask = UInt(isaStringToMask(isaMaskString)) val mask = UInt(isaStringToMask(isaMaskString), xLen)
val f = wdata('f' - 'a') val f = wdata('f' - 'a')
reg_misa := ~(~wdata | (!f << ('d' - 'a'))) & mask | reg_misa & ~mask reg_misa := ~(~wdata | (!f << ('d' - 'a'))) & mask | reg_misa & ~mask
} }
when (decoded_addr(CSRs.mip)) { when (decoded_addr(CSRs.mip)) {
val new_mip = new MIP().fromBits(wdata) // MIP should be modified based on the value in reg_mip, not the value
// in read_mip, since read_mip.seip is the OR of reg_mip.seip and
// io.interrupts.seip. We don't want the value on the PLIC line to
// inadvertently be OR'd into read_mip.seip.
val new_mip = readModifyWriteCSR(io.rw.cmd, reg_mip.asUInt, io.rw.wdata).asTypeOf(new MIP)
if (usingVM) { if (usingVM) {
reg_mip.ssip := new_mip.ssip reg_mip.ssip := new_mip.ssip
reg_mip.stip := new_mip.stip reg_mip.stip := new_mip.stip
reg_mip.seip := new_mip.seip
} }
} }
when (decoded_addr(CSRs.mie)) { reg_mie := wdata & supported_interrupts } when (decoded_addr(CSRs.mie)) { reg_mie := wdata & supported_interrupts }
@ -678,7 +697,10 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
} }
} }
reg_mip <> io.interrupts reg_mip.lip := (io.interrupts.lip: Seq[Bool])
reg_mip.mtip := io.interrupts.mtip
reg_mip.msip := io.interrupts.msip
reg_mip.meip := io.interrupts.meip
reg_dcsr.debugint := io.interrupts.debug reg_dcsr.debugint := io.interrupts.debug
if (!usingVM) { if (!usingVM) {
@ -721,6 +743,9 @@ class CSRFile(perfEventSets: EventSets = new EventSets(Seq()))(implicit p: Param
} }
} }
def readModifyWriteCSR(cmd: UInt, rdata: UInt, wdata: UInt) =
(Mux(cmd.isOneOf(CSR.S, CSR.C), rdata, UInt(0)) | wdata) & ~Mux(cmd === CSR.C, wdata, UInt(0))
def legalizePrivilege(priv: UInt): UInt = def legalizePrivilege(priv: UInt): UInt =
if (usingVM) Mux(priv === PRV.H, PRV.U, priv) if (usingVM) Mux(priv === PRV.H, PRV.U, priv)
else if (usingUser) Fill(2, priv(0)) else if (usingUser) Fill(2, priv(0))

View File

@ -87,8 +87,9 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
io.cpu.req.ready := (release_state === s_ready) && !cached_grant_wait && !s1_nack io.cpu.req.ready := (release_state === s_ready) && !cached_grant_wait && !s1_nack
// I/O MSHRs // I/O MSHRs
val uncachedInFlight = Reg(init = Vec.fill(cacheParams.nMMIOs)(false.B)) val mmioOffset = if (outer.scratch().isDefined) 0 else 1
val uncachedReqs = Reg(Vec(cacheParams.nMMIOs, new HellaCacheReq)) val uncachedInFlight = Seq.fill(maxUncachedInFlight) { RegInit(Bool(false)) }
val uncachedReqs = Seq.fill(maxUncachedInFlight) { Reg(new HellaCacheReq) }
// hit initiation path // hit initiation path
dataArb.io.in(3).valid := io.cpu.req.valid && isRead(io.cpu.req.bits.cmd) dataArb.io.in(3).valid := io.cpu.req.valid && isRead(io.cpu.req.bits.cmd)
@ -184,8 +185,10 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
val no_xcpt = Bool(usingDataScratchpad) && s1_req.phys /* slave port */ && s1_hit_state.isValid() val no_xcpt = Bool(usingDataScratchpad) && s1_req.phys /* slave port */ && s1_hit_state.isValid()
io.cpu.xcpt.ma.ld := !no_xcpt && s1_read && s1_storegen.misaligned io.cpu.xcpt.ma.ld := !no_xcpt && s1_read && s1_storegen.misaligned
io.cpu.xcpt.ma.st := !no_xcpt && s1_write && s1_storegen.misaligned io.cpu.xcpt.ma.st := !no_xcpt && s1_write && s1_storegen.misaligned
io.cpu.xcpt.pf.ld := !no_xcpt && s1_read && tlb.io.resp.xcpt_ld io.cpu.xcpt.pf.ld := !no_xcpt && s1_read && tlb.io.resp.pf.ld
io.cpu.xcpt.pf.st := !no_xcpt && s1_write && tlb.io.resp.xcpt_st io.cpu.xcpt.pf.st := !no_xcpt && s1_write && tlb.io.resp.pf.st
io.cpu.xcpt.ae.ld := !no_xcpt && s1_read && tlb.io.resp.ae.ld
io.cpu.xcpt.ae.st := !no_xcpt && s1_write && tlb.io.resp.ae.st
// load reservations // load reservations
val s2_lr = Bool(usingAtomics) && s2_req.cmd === M_XLR val s2_lr = Bool(usingAtomics) && s2_req.cmd === M_XLR
@ -251,13 +254,13 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
metaWriteArb.io.in(0).bits.data.tag := s2_req.addr(paddrBits-1, untagBits) metaWriteArb.io.in(0).bits.data.tag := s2_req.addr(paddrBits-1, untagBits)
// Prepare a TileLink request message that initiates a transaction // Prepare a TileLink request message that initiates a transaction
val a_source = PriorityEncoder(~uncachedInFlight.asUInt) // skip the MSHR val a_source = PriorityEncoder(~uncachedInFlight.asUInt << mmioOffset) // skip the MSHR
val acquire_address = s2_req_block_addr val acquire_address = s2_req_block_addr
val access_address = s2_req.addr val access_address = s2_req.addr
val a_size = s2_req.typ(MT_SZ-2, 0) val a_size = s2_req.typ(MT_SZ-2, 0)
val a_data = Fill(beatWords, pstore1_storegen.data) val a_data = Fill(beatWords, pstore1_storegen.data)
val acquire = if (edge.manager.anySupportAcquireB) { val acquire = if (edge.manager.anySupportAcquireB) {
edge.Acquire(UInt(cacheParams.nMMIOs), acquire_address, lgCacheBlockBytes, s2_grow_param)._2 // Cacheability checked by tlb edge.Acquire(UInt(0), acquire_address, lgCacheBlockBytes, s2_grow_param)._2 // Cacheability checked by tlb
} else { } else {
Wire(new TLBundleA(edge.bundle)) Wire(new TLBundleA(edge.bundle))
} }
@ -285,10 +288,15 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
tl_out.a.bits := Mux(!s2_uncached, acquire, Mux(!s2_write, get, Mux(!pstore1_amo, put, atomics))) tl_out.a.bits := Mux(!s2_uncached, acquire, Mux(!s2_write, get, Mux(!pstore1_amo, put, atomics)))
// Set pending bits for outstanding TileLink transaction // Set pending bits for outstanding TileLink transaction
val a_sel = UIntToOH(a_source, maxUncachedInFlight+mmioOffset) >> mmioOffset
when (tl_out.a.fire()) { when (tl_out.a.fire()) {
when (s2_uncached) { when (s2_uncached) {
uncachedInFlight(a_source) := true (a_sel.toBools zip (uncachedInFlight zip uncachedReqs)) foreach { case (s, (f, r)) =>
uncachedReqs(a_source) := s2_req when (s) {
f := Bool(true)
r := s2_req
}
}
}.otherwise { }.otherwise {
cached_grant_wait := true cached_grant_wait := true
} }
@ -307,10 +315,14 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
assert(cached_grant_wait, "A GrantData was unexpected by the dcache.") assert(cached_grant_wait, "A GrantData was unexpected by the dcache.")
when(d_last) { cached_grant_wait := false } when(d_last) { cached_grant_wait := false }
} .elsewhen (grantIsUncached) { } .elsewhen (grantIsUncached) {
assert(d_last, "DCache MMIO responses must be single-beat") val d_sel = UIntToOH(tl_out.d.bits.source, maxUncachedInFlight+mmioOffset) >> mmioOffset
assert(uncachedInFlight(tl_out.d.bits.source), "An AccessAck was unexpected by the dcache.") // TODO must handle Ack coming back on same cycle! val req = Mux1H(d_sel, uncachedReqs)
uncachedInFlight(tl_out.d.bits.source) := false (d_sel.toBools zip uncachedInFlight) foreach { case (s, f) =>
val req = uncachedReqs(tl_out.d.bits.source) when (s && d_last) {
assert(f, "An AccessAck was unexpected by the dcache.") // TODO must handle Ack coming back on same cycle!
f := false
}
}
when (grantIsUncachedData) { when (grantIsUncachedData) {
s2_data := tl_out.d.bits.data s2_data := tl_out.d.bits.data
s2_req.cmd := M_XRD s2_req.cmd := M_XRD
@ -381,7 +393,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
val voluntaryReleaseMessage = if (edge.manager.anySupportAcquireB) { val voluntaryReleaseMessage = if (edge.manager.anySupportAcquireB) {
edge.Release( edge.Release(
fromSource = UInt(cacheParams.nMMIOs - 1), fromSource = UInt(0),
toAddress = probe_bits.address, toAddress = probe_bits.address,
lgSize = lgCacheBlockBytes, lgSize = lgCacheBlockBytes,
shrinkPermissions = s2_shrink_param, shrinkPermissions = s2_shrink_param,

View File

@ -22,7 +22,8 @@ class FrontendResp(implicit p: Parameters) extends CoreBundle()(p) {
val pc = UInt(width = vaddrBitsExtended) // ID stage PC val pc = UInt(width = vaddrBitsExtended) // ID stage PC
val data = UInt(width = fetchWidth * coreInstBits) val data = UInt(width = fetchWidth * coreInstBits)
val mask = Bits(width = fetchWidth) val mask = Bits(width = fetchWidth)
val xcpt_if = Bool() val pf = Bool()
val ae = Bool()
val replay = Bool() val replay = Bool()
} }
@ -72,9 +73,12 @@ class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
val s2_pc = Reg(init=io.resetVector) val s2_pc = Reg(init=io.resetVector)
val s2_btb_resp_valid = Reg(init=Bool(false)) val s2_btb_resp_valid = Reg(init=Bool(false))
val s2_btb_resp_bits = Reg(new BTBResp) val s2_btb_resp_bits = Reg(new BTBResp)
val s2_maybe_xcpt_if = Reg(init=Bool(false)) val s2_maybe_pf = Reg(init=Bool(false))
val s2_maybe_ae = Reg(init=Bool(false))
val s2_tlb_miss = Reg(Bool()) val s2_tlb_miss = Reg(Bool())
val s2_xcpt_if = s2_maybe_xcpt_if && !s2_tlb_miss val s2_pf = s2_maybe_pf && !s2_tlb_miss
val s2_ae = s2_maybe_ae && !s2_tlb_miss
val s2_xcpt = s2_pf || s2_ae
val s2_speculative = Reg(init=Bool(false)) val s2_speculative = Reg(init=Bool(false))
val s2_cacheable = Reg(init=Bool(false)) val s2_cacheable = Reg(init=Bool(false))
@ -101,7 +105,8 @@ class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
s2_pc := s1_pc s2_pc := s1_pc
s2_speculative := s1_speculative s2_speculative := s1_speculative
s2_cacheable := tlb.io.resp.cacheable s2_cacheable := tlb.io.resp.cacheable
s2_maybe_xcpt_if := tlb.io.resp.xcpt_if s2_maybe_pf := tlb.io.resp.pf.inst
s2_maybe_ae := tlb.io.resp.ae.inst
s2_tlb_miss := tlb.io.resp.miss s2_tlb_miss := tlb.io.resp.miss
} }
} }
@ -144,18 +149,19 @@ class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
icache.io.invalidate := io.cpu.flush_icache icache.io.invalidate := io.cpu.flush_icache
icache.io.s1_paddr := tlb.io.resp.paddr icache.io.s1_paddr := tlb.io.resp.paddr
icache.io.s1_kill := io.cpu.req.valid || tlb.io.resp.miss || icmiss icache.io.s1_kill := io.cpu.req.valid || tlb.io.resp.miss || icmiss
icache.io.s2_kill := s2_speculative && !s2_cacheable || s2_xcpt_if icache.io.s2_kill := s2_speculative && !s2_cacheable || s2_xcpt
icache.io.resp.ready := !stall && !s1_same_block icache.io.resp.ready := !stall && !s1_same_block
io.cpu.resp.valid := s2_valid && (icache.io.resp.valid || icache.io.s2_kill || s2_xcpt_if) io.cpu.resp.valid := s2_valid && (icache.io.resp.valid || icache.io.s2_kill || s2_xcpt)
io.cpu.resp.bits.pc := s2_pc io.cpu.resp.bits.pc := s2_pc
io.cpu.npc := Mux(io.cpu.req.valid, io.cpu.req.bits.pc, npc) io.cpu.npc := Mux(io.cpu.req.valid, io.cpu.req.bits.pc, npc)
require(fetchWidth * coreInstBytes <= rowBytes && isPow2(fetchWidth)) require(fetchWidth * coreInstBytes <= rowBytes && isPow2(fetchWidth))
io.cpu.resp.bits.data := icache.io.resp.bits.datablock >> (s2_pc.extract(log2Ceil(rowBytes)-1,log2Ceil(fetchWidth*coreInstBytes)) << log2Ceil(fetchWidth*coreInstBits)) io.cpu.resp.bits.data := icache.io.resp.bits.datablock >> (s2_pc.extract(log2Ceil(rowBytes)-1,log2Ceil(fetchWidth*coreInstBytes)) << log2Ceil(fetchWidth*coreInstBits))
io.cpu.resp.bits.mask := UInt((1 << fetchWidth)-1) << s2_pc.extract(log2Ceil(fetchWidth)+log2Ceil(coreInstBytes)-1, log2Ceil(coreInstBytes)) io.cpu.resp.bits.mask := UInt((1 << fetchWidth)-1) << s2_pc.extract(log2Ceil(fetchWidth)+log2Ceil(coreInstBytes)-1, log2Ceil(coreInstBytes))
io.cpu.resp.bits.xcpt_if := s2_xcpt_if io.cpu.resp.bits.pf := s2_pf
io.cpu.resp.bits.replay := icache.io.s2_kill && !icache.io.resp.valid && !s2_xcpt_if io.cpu.resp.bits.ae := s2_ae
io.cpu.resp.bits.replay := icache.io.s2_kill && !icache.io.resp.valid && !s2_xcpt
io.cpu.resp.bits.btb.valid := s2_btb_resp_valid io.cpu.resp.bits.btb.valid := s2_btb_resp_valid
io.cpu.resp.bits.btb.bits := s2_btb_resp_bits io.cpu.resp.bits.btb.bits := s2_btb_resp_bits

View File

@ -60,6 +60,7 @@ trait HasL1HellaCacheParameters extends HasL1CacheParameters with HasCoreParamet
def encRowBits = encDataBits*rowWords def encRowBits = encDataBits*rowWords
def lrscCycles = 32 // ISA requires 16-insn LRSC sequences to succeed def lrscCycles = 32 // ISA requires 16-insn LRSC sequences to succeed
def nIOMSHRs = cacheParams.nMMIOs def nIOMSHRs = cacheParams.nMMIOs
def maxUncachedInFlight = cacheParams.nMMIOs
def dataScratchpadSize = cacheParams.dataScratchpadBytes def dataScratchpadSize = cacheParams.dataScratchpadBytes
require(rowBits >= coreDataBits, s"rowBits($rowBits) < coreDataBits($coreDataBits)") require(rowBits >= coreDataBits, s"rowBits($rowBits) < coreDataBits($coreDataBits)")
@ -112,6 +113,7 @@ class AlignmentExceptions extends Bundle {
class HellaCacheExceptions extends Bundle { class HellaCacheExceptions extends Bundle {
val ma = new AlignmentExceptions val ma = new AlignmentExceptions
val pf = new AlignmentExceptions val pf = new AlignmentExceptions
val ae = new AlignmentExceptions
} }
// interface between D$ and processor/DTLB // interface between D$ and processor/DTLB

View File

@ -11,6 +11,8 @@ import util._
class Instruction(implicit val p: Parameters) extends ParameterizedBundle with HasCoreParameters { class Instruction(implicit val p: Parameters) extends ParameterizedBundle with HasCoreParameters {
val pf0 = Bool() // page fault on first half of instruction val pf0 = Bool() // page fault on first half of instruction
val pf1 = Bool() // page fault on second half of instruction val pf1 = Bool() // page fault on second half of instruction
val ae0 = Bool() // access exception on first half of instruction
val ae1 = Bool() // access exception on second half of instruction
val replay = Bool() val replay = Bool()
val btb_hit = Bool() val btb_hit = Bool()
val rvc = Bool() val rvc = Bool()
@ -78,7 +80,8 @@ class IBuf(implicit p: Parameters) extends CoreModule {
val valid = (UIntToOH(nValid) - 1)(fetchWidth-1, 0) val valid = (UIntToOH(nValid) - 1)(fetchWidth-1, 0)
val bufMask = UIntToOH(nBufValid) - 1 val bufMask = UIntToOH(nBufValid) - 1
val xcpt_if = valid & (Mux(buf.xcpt_if, bufMask, UInt(0)) | Mux(io.imem.bits.xcpt_if, ~bufMask, UInt(0))) val pf = valid & (Mux(buf.pf, bufMask, UInt(0)) | Mux(io.imem.bits.pf, ~bufMask, UInt(0)))
val ae = valid & (Mux(buf.ae, bufMask, UInt(0)) | Mux(io.imem.bits.ae, ~bufMask, UInt(0)))
val ic_replay = valid & (Mux(buf.replay, bufMask, UInt(0)) | Mux(io.imem.bits.replay, ~bufMask, UInt(0))) val ic_replay = valid & (Mux(buf.replay, bufMask, UInt(0)) | Mux(io.imem.bits.replay, ~bufMask, UInt(0)))
val ibufBTBHitMask = Mux(ibufBTBHit, UIntToOH(ibufBTBResp.bridx), UInt(0)) val ibufBTBHitMask = Mux(ibufBTBHit, UIntToOH(ibufBTBResp.bridx), UInt(0))
assert(!io.imem.bits.btb.valid || io.imem.bits.btb.bits.bridx >= pcWordBits) assert(!io.imem.bits.btb.valid || io.imem.bits.btb.bits.bridx >= pcWordBits)
@ -97,9 +100,11 @@ class IBuf(implicit p: Parameters) extends CoreModule {
if (usingCompressed) { if (usingCompressed) {
val replay = ic_replay(j) || (!exp.io.rvc && (btbHitMask(j) || ic_replay(j+1))) val replay = ic_replay(j) || (!exp.io.rvc && (btbHitMask(j) || ic_replay(j+1)))
io.inst(i).valid := valid(j) && (exp.io.rvc || valid(j+1) || xcpt_if(j+1) || replay) io.inst(i).valid := valid(j) && (exp.io.rvc || valid(j+1) || pf(j+1) || ae(j+1) || replay)
io.inst(i).bits.pf0 := xcpt_if(j) io.inst(i).bits.pf0 := pf(j)
io.inst(i).bits.pf1 := !exp.io.rvc && xcpt_if(j+1) io.inst(i).bits.pf1 := !exp.io.rvc && pf(j+1)
io.inst(i).bits.ae0 := ae(j)
io.inst(i).bits.ae1 := !exp.io.rvc && ae(j+1)
io.inst(i).bits.replay := replay io.inst(i).bits.replay := replay
io.inst(i).bits.btb_hit := btbHitMask(j) || (!exp.io.rvc && btbHitMask(j+1)) io.inst(i).bits.btb_hit := btbHitMask(j) || (!exp.io.rvc && btbHitMask(j+1))
io.inst(i).bits.rvc := exp.io.rvc io.inst(i).bits.rvc := exp.io.rvc
@ -110,8 +115,10 @@ class IBuf(implicit p: Parameters) extends CoreModule {
} else { } else {
when (io.inst(i).ready) { nReady := i+1 } when (io.inst(i).ready) { nReady := i+1 }
io.inst(i).valid := valid(i) io.inst(i).valid := valid(i)
io.inst(i).bits.pf0 := xcpt_if(i) io.inst(i).bits.pf0 := pf(i)
io.inst(i).bits.pf1 := false io.inst(i).bits.pf1 := false
io.inst(i).bits.ae0 := ae(i)
io.inst(i).bits.ae1 := false
io.inst(i).bits.replay := ic_replay(i) io.inst(i).bits.replay := ic_replay(i)
io.inst(i).bits.rvc := false io.inst(i).bits.rvc := false
io.inst(i).bits.btb_hit := btbHitMask(i) io.inst(i).bits.btb_hit := btbHitMask(i)

View File

@ -216,31 +216,37 @@ object Instructions {
} }
object Causes { object Causes {
val misaligned_fetch = 0x0 val misaligned_fetch = 0x0
val fault_fetch = 0x1 val fetch_access = 0x1
val illegal_instruction = 0x2 val illegal_instruction = 0x2
val breakpoint = 0x3 val breakpoint = 0x3
val misaligned_load = 0x4 val misaligned_load = 0x4
val fault_load = 0x5 val load_access = 0x5
val misaligned_store = 0x6 val misaligned_store = 0x6
val fault_store = 0x7 val store_access = 0x7
val user_ecall = 0x8 val user_ecall = 0x8
val supervisor_ecall = 0x9 val supervisor_ecall = 0x9
val hypervisor_ecall = 0xa val hypervisor_ecall = 0xa
val machine_ecall = 0xb val machine_ecall = 0xb
val fetch_page_fault = 0xc
val load_page_fault = 0xd
val store_page_fault = 0xf
val all = { val all = {
val res = collection.mutable.ArrayBuffer[Int]() val res = collection.mutable.ArrayBuffer[Int]()
res += misaligned_fetch res += misaligned_fetch
res += fault_fetch res += fetch_access
res += illegal_instruction res += illegal_instruction
res += breakpoint res += breakpoint
res += misaligned_load res += misaligned_load
res += fault_load res += load_access
res += misaligned_store res += misaligned_store
res += fault_store res += store_access
res += user_ecall res += user_ecall
res += supervisor_ecall res += supervisor_ecall
res += hypervisor_ecall res += hypervisor_ecall
res += machine_ecall res += machine_ecall
res += fetch_page_fault
res += load_page_fault
res += store_page_fault
res.toArray res.toArray
} }
} }

View File

@ -745,8 +745,10 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule
val misaligned = new StoreGen(s1_req.typ, s1_req.addr, UInt(0), wordBytes).misaligned val misaligned = new StoreGen(s1_req.typ, s1_req.addr, UInt(0), wordBytes).misaligned
io.cpu.xcpt.ma.ld := s1_read && misaligned io.cpu.xcpt.ma.ld := s1_read && misaligned
io.cpu.xcpt.ma.st := s1_write && misaligned io.cpu.xcpt.ma.st := s1_write && misaligned
io.cpu.xcpt.pf.ld := s1_read && dtlb.io.resp.xcpt_ld io.cpu.xcpt.pf.ld := s1_read && dtlb.io.resp.pf.ld
io.cpu.xcpt.pf.st := s1_write && dtlb.io.resp.xcpt_st io.cpu.xcpt.pf.st := s1_write && dtlb.io.resp.pf.st
io.cpu.xcpt.ae.ld := s1_read && dtlb.io.resp.ae.ld
io.cpu.xcpt.ae.st := s1_write && dtlb.io.resp.ae.st
// tags // tags
def onReset = L1Metadata(UInt(0), ClientMetadata.onReset) def onReset = L1Metadata(UInt(0), ClientMetadata.onReset)

View File

@ -19,6 +19,7 @@ class PTWReq(implicit p: Parameters) extends CoreBundle()(p) {
} }
class PTWResp(implicit p: Parameters) extends CoreBundle()(p) { class PTWResp(implicit p: Parameters) extends CoreBundle()(p) {
val ae = Bool()
val pte = new PTE val pte = new PTE
val level = UInt(width = log2Ceil(pgLevels)) val level = UInt(width = log2Ceil(pgLevels))
val homogeneous = Bool() val homogeneous = Bool()
@ -77,7 +78,7 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
val count = Reg(UInt(width = log2Up(pgLevels))) val count = Reg(UInt(width = log2Up(pgLevels)))
val s1_kill = Reg(next = Bool(false)) val s1_kill = Reg(next = Bool(false))
val resp_valid = Reg(next = Vec.fill(io.requestor.size)(Bool(false))) val resp_valid = Reg(next = Vec.fill(io.requestor.size)(Bool(false)))
val exception = Reg(next = io.mem.xcpt.pf.ld) val ae = Reg(next = io.mem.xcpt.ae.ld)
val r_req = Reg(new PTWReq) val r_req = Reg(new PTWReq)
val r_req_dest = Reg(Bits()) val r_req_dest = Reg(Bits())
@ -90,14 +91,15 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
arb.io.in <> io.requestor.map(_.req) arb.io.in <> io.requestor.map(_.req)
arb.io.out.ready := state === s_ready arb.io.out.ready := state === s_ready
val pte = { val (pte, invalid_paddr) = {
val tmp = new PTE().fromBits(io.mem.resp.bits.data) val tmp = new PTE().fromBits(io.mem.resp.bits.data)
val res = Wire(init = new PTE().fromBits(io.mem.resp.bits.data)) val res = Wire(init = new PTE().fromBits(io.mem.resp.bits.data))
res.ppn := tmp.ppn(ppnBits-1, 0) res.ppn := tmp.ppn(ppnBits-1, 0)
when ((tmp.ppn >> ppnBits) =/= 0) { res.v := false } (res, (tmp.ppn >> ppnBits) =/= 0)
res
} }
val traverse = pte.table() && !invalid_paddr && count < pgLevels-1
val pte_addr = Cat(r_pte.ppn, vpn_idx) << log2Ceil(xLen/8) val pte_addr = Cat(r_pte.ppn, vpn_idx) << log2Ceil(xLen/8)
val resp_ae = Reg(next = ae || invalid_paddr)
when (arb.io.out.fire()) { when (arb.io.out.fire()) {
r_req := arb.io.out.bits r_req := arb.io.out.bits
@ -114,7 +116,7 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
val hits = tags.map(_ === pte_addr).asUInt & valid val hits = tags.map(_ === pte_addr).asUInt & valid
val hit = hits.orR val hit = hits.orR
when (io.mem.resp.valid && pte.table() && !hit) { when (io.mem.resp.valid && traverse && !hit) {
val r = Mux(valid.andR, plru.replace, PriorityEncoder(~valid)) val r = Mux(valid.andR, plru.replace, PriorityEncoder(~valid))
valid := valid | UIntToOH(r) valid := valid | UIntToOH(r)
tags(r) := pte_addr tags(r) := pte_addr
@ -142,6 +144,7 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
for (i <- 0 until io.requestor.size) { for (i <- 0 until io.requestor.size) {
io.requestor(i).resp.valid := resp_valid(i) io.requestor(i).resp.valid := resp_valid(i)
io.requestor(i).resp.bits.ae := resp_ae
io.requestor(i).resp.bits.pte := r_pte io.requestor(i).resp.bits.pte := r_pte
io.requestor(i).resp.bits.level := count io.requestor(i).resp.bits.level := count
io.requestor(i).resp.bits.pte.ppn := pte_addr >> pgIdxBits io.requestor(i).resp.bits.pte.ppn := pte_addr >> pgIdxBits
@ -177,16 +180,17 @@ class PTW(n: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(
} }
when (io.mem.resp.valid) { when (io.mem.resp.valid) {
r_pte := pte r_pte := pte
when (pte.table() && count < pgLevels-1) { when (traverse) {
state := s_req state := s_req
count := count + 1 count := count + 1
}.otherwise { }.otherwise {
resp_ae := invalid_paddr
state := s_ready state := s_ready
resp_valid(r_req_dest) := true resp_valid(r_req_dest) := true
} }
} }
when (exception) { when (ae) {
r_pte.v := false resp_ae := true
state := s_ready state := s_ready
resp_valid(r_req_dest) := true resp_valid(r_req_dest) := true
} }

View File

@ -17,6 +17,7 @@ case class RocketCoreParams(
useDebug: Boolean = true, useDebug: Boolean = true,
useAtomics: Boolean = true, useAtomics: Boolean = true,
useCompressed: Boolean = true, useCompressed: Boolean = true,
nLocalInterrupts: Int = 0,
nBreakpoints: Int = 1, nBreakpoints: Int = 1,
nPMPs: Int = 8, nPMPs: Int = 8,
nPerfCounters: Int = 0, nPerfCounters: Int = 0,
@ -225,12 +226,14 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p)
bpu.io.pc := ibuf.io.pc bpu.io.pc := ibuf.io.pc
bpu.io.ea := mem_reg_wdata bpu.io.ea := mem_reg_wdata
val id_xcpt_if = ibuf.io.inst(0).bits.pf0 || ibuf.io.inst(0).bits.pf1 val id_xcpt_pf = ibuf.io.inst(0).bits.pf0 || ibuf.io.inst(0).bits.pf1
val id_xcpt_ae = ibuf.io.inst(0).bits.ae0 || ibuf.io.inst(0).bits.ae1
val (id_xcpt, id_cause) = checkExceptions(List( val (id_xcpt, id_cause) = checkExceptions(List(
(csr.io.interrupt, csr.io.interrupt_cause), (csr.io.interrupt, csr.io.interrupt_cause),
(bpu.io.debug_if, UInt(CSR.debugTriggerCause)), (bpu.io.debug_if, UInt(CSR.debugTriggerCause)),
(bpu.io.xcpt_if, UInt(Causes.breakpoint)), (bpu.io.xcpt_if, UInt(Causes.breakpoint)),
(id_xcpt_if, UInt(Causes.fault_fetch)), (id_xcpt_pf, UInt(Causes.fetch_page_fault)),
(id_xcpt_ae, UInt(Causes.fetch_access)),
(id_illegal_insn, UInt(Causes.illegal_instruction)))) (id_illegal_insn, UInt(Causes.illegal_instruction))))
val dcache_bypass_data = val dcache_bypass_data =
@ -423,8 +426,10 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p)
val (wb_xcpt, wb_cause) = checkExceptions(List( val (wb_xcpt, wb_cause) = checkExceptions(List(
(wb_reg_xcpt, wb_reg_cause), (wb_reg_xcpt, wb_reg_cause),
(wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.pf.st, mem_pc_valid), UInt(Causes.fault_store)), (wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.pf.st, mem_pc_valid), UInt(Causes.store_page_fault)),
(wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.pf.ld, mem_pc_valid), UInt(Causes.fault_load)) (wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.pf.ld, mem_pc_valid), UInt(Causes.load_page_fault)),
(wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.ae.st, mem_pc_valid), UInt(Causes.store_access)),
(wb_reg_valid && wb_ctrl.mem && RegEnable(io.dmem.xcpt.ae.ld, mem_pc_valid), UInt(Causes.load_access))
)) ))
val wb_wxd = wb_reg_valid && wb_ctrl.wxd val wb_wxd = wb_reg_valid && wb_ctrl.wxd

View File

@ -33,20 +33,25 @@ class TLBReq(lgMaxSize: Int)(implicit p: Parameters) extends CoreBundle()(p) {
override def cloneType = new TLBReq(lgMaxSize).asInstanceOf[this.type] override def cloneType = new TLBReq(lgMaxSize).asInstanceOf[this.type]
} }
class TLBExceptions extends Bundle {
val ld = Bool()
val st = Bool()
val inst = Bool()
}
class TLBResp(implicit p: Parameters) extends CoreBundle()(p) { class TLBResp(implicit p: Parameters) extends CoreBundle()(p) {
// lookup responses // lookup responses
val miss = Bool(OUTPUT) val miss = Bool()
val paddr = UInt(OUTPUT, paddrBits) val paddr = UInt(width = paddrBits)
val xcpt_ld = Bool(OUTPUT) val pf = new TLBExceptions
val xcpt_st = Bool(OUTPUT) val ae = new TLBExceptions
val xcpt_if = Bool(OUTPUT) val cacheable = Bool()
val cacheable = Bool(OUTPUT)
} }
class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(p) { class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(p) {
val io = new Bundle { val io = new Bundle {
val req = Decoupled(new TLBReq(lgMaxSize)).flip val req = Decoupled(new TLBReq(lgMaxSize)).flip
val resp = new TLBResp val resp = new TLBResp().asOutput
val ptw = new TLBPTWIO val ptw = new TLBPTWIO
} }
@ -59,8 +64,10 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
val sw = Bool() val sw = Bool()
val sx = Bool() val sx = Bool()
val sr = Bool() val sr = Bool()
val xr = Bool() val pw = Bool()
val cacheable = Bool() val px = Bool()
val pr = Bool()
val c = Bool()
} }
val totalEntries = nEntries + 1 val totalEntries = nEntries + 1
@ -92,7 +99,7 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
val pmp = Module(new PMPChecker(lgMaxSize)) val pmp = Module(new PMPChecker(lgMaxSize))
pmp.io.addr := mpu_physaddr pmp.io.addr := mpu_physaddr
pmp.io.size := io.req.bits.size pmp.io.size := io.req.bits.size
pmp.io.pmp := io.ptw.pmp pmp.io.pmp := (io.ptw.pmp: Seq[PMP])
pmp.io.prv := Mux(Bool(usingVM) && (do_refill || io.req.bits.passthrough /* PTW */), PRV.S, priv) pmp.io.prv := Mux(Bool(usingVM) && (do_refill || io.req.bits.passthrough /* PTW */), PRV.S, priv)
val legal_address = edge.manager.findSafe(mpu_physaddr).reduce(_||_) val legal_address = edge.manager.findSafe(mpu_physaddr).reduce(_||_)
def fastCheck(member: TLManagerParameters => Boolean) = def fastCheck(member: TLManagerParameters => Boolean) =
@ -101,7 +108,7 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
val prot_w = fastCheck(_.supportsPutFull) && pmp.io.w val prot_w = fastCheck(_.supportsPutFull) && pmp.io.w
val prot_x = fastCheck(_.executable) && pmp.io.x val prot_x = fastCheck(_.executable) && pmp.io.x
val cacheable = fastCheck(_.supportsAcquireB) val cacheable = fastCheck(_.supportsAcquireB)
val isSpecial = !io.ptw.resp.bits.homogeneous val isSpecial = !(io.ptw.resp.bits.homogeneous || io.ptw.resp.bits.ae)
val lookup_tag = Cat(io.ptw.ptbr.asid, vpn(vpnBits-1,0)) val lookup_tag = Cat(io.ptw.ptbr.asid, vpn(vpnBits-1,0))
val hitsVec = (0 until totalEntries).map { i => vm_enabled && { val hitsVec = (0 until totalEntries).map { i => vm_enabled && {
@ -123,13 +130,6 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
} }
// permission bit arrays // permission bit arrays
val u_array = Reg(Vec(totalEntries, Bool())) // user permission
val g_array = Reg(Vec(totalEntries, Bool())) // global mapping
val sw_array = Reg(Vec(totalEntries, Bool())) // write permission
val sx_array = Reg(Vec(totalEntries, Bool())) // execute permission
val sr_array = Reg(Vec(totalEntries, Bool())) // read permission
val xr_array = Reg(Vec(totalEntries, Bool())) // read permission to executable page
val cash_array = Reg(Vec(normalEntries, Bool())) // cacheable
when (do_refill && !invalidate_refill) { when (do_refill && !invalidate_refill) {
val waddr = Mux(isSpecial, specialEntry.U, r_refill_waddr) val waddr = Mux(isSpecial, specialEntry.U, r_refill_waddr)
val pte = io.ptw.resp.bits.pte val pte = io.ptw.resp.bits.pte
@ -137,13 +137,15 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
newEntry.ppn := pte.ppn newEntry.ppn := pte.ppn
newEntry.tag := r_refill_tag newEntry.tag := r_refill_tag
newEntry.level := io.ptw.resp.bits.level newEntry.level := io.ptw.resp.bits.level
newEntry.c := cacheable
newEntry.u := pte.u newEntry.u := pte.u
newEntry.g := pte.g newEntry.g := pte.g
newEntry.sw := pte.sw() && (isSpecial || prot_w) newEntry.sr := pte.sr()
newEntry.sx := pte.sx() && (isSpecial || prot_x) newEntry.sw := pte.sw()
newEntry.sr := pte.sr() && (isSpecial || prot_r) newEntry.sx := pte.sx()
newEntry.xr := pte.sx() && (isSpecial || prot_r) newEntry.pr := prot_r && !io.ptw.resp.bits.ae
newEntry.cacheable := isSpecial || cacheable newEntry.pw := prot_w && !io.ptw.resp.bits.ae
newEntry.px := prot_x && !io.ptw.resp.bits.ae
valid := valid | UIntToOH(waddr) valid := valid | UIntToOH(waddr)
reg_entries(waddr) := newEntry.asUInt reg_entries(waddr) := newEntry.asUInt
@ -153,10 +155,13 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
val repl_waddr = Mux(!valid(normalEntries-1, 0).andR, PriorityEncoder(~valid(normalEntries-1, 0)), plru.replace) val repl_waddr = Mux(!valid(normalEntries-1, 0).andR, PriorityEncoder(~valid(normalEntries-1, 0)), plru.replace)
val priv_ok = Mux(priv_s, ~Mux(io.ptw.status.sum, UInt(0), entries.map(_.u).asUInt), entries.map(_.u).asUInt) val priv_ok = Mux(priv_s, ~Mux(io.ptw.status.sum, UInt(0), entries.map(_.u).asUInt), entries.map(_.u).asUInt)
val w_array = Cat(prot_w, priv_ok & ~(~prot_w << specialEntry) & entries.map(_.sw).asUInt) val r_array = Cat(true.B, priv_ok & (entries.map(_.sr).asUInt | Mux(io.ptw.status.mxr, entries.map(_.sx).asUInt, UInt(0))))
val x_array = Cat(prot_x, priv_ok & ~(~prot_x << specialEntry) & entries.map(_.sx).asUInt) val w_array = Cat(true.B, priv_ok & entries.map(_.sw).asUInt)
val r_array = Cat(prot_r, priv_ok & ~(~prot_r << specialEntry) & (entries.map(_.sr).asUInt | Mux(io.ptw.status.mxr, entries.map(_.xr).asUInt, UInt(0)))) val x_array = Cat(true.B, priv_ok & entries.map(_.sx).asUInt)
val c_array = Cat(cacheable, ~(~cacheable << specialEntry) & entries.map(_.cacheable).asUInt) val pr_array = Cat(Fill(2, prot_r), entries.init.map(_.pr).asUInt)
val pw_array = Cat(Fill(2, prot_w), entries.init.map(_.pw).asUInt)
val px_array = Cat(Fill(2, prot_x), entries.init.map(_.px).asUInt)
val c_array = Cat(Fill(2, cacheable), entries.init.map(_.c).asUInt)
val bad_va = val bad_va =
if (vpnBits == vpnBitsExtended) Bool(false) if (vpnBits == vpnBitsExtended) Bool(false)
@ -176,9 +181,12 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
val multipleHits = PopCountAtLeast(hits(totalEntries-1, 0), 2) val multipleHits = PopCountAtLeast(hits(totalEntries-1, 0), 2)
io.req.ready := state === s_ready io.req.ready := state === s_ready
io.resp.xcpt_ld := bad_va || (~r_array & hits).orR io.resp.pf.ld := bad_va || (~r_array & hits).orR
io.resp.xcpt_st := bad_va || (~w_array & hits).orR io.resp.pf.st := bad_va || (~w_array & hits).orR
io.resp.xcpt_if := bad_va || (~x_array & hits).orR io.resp.pf.inst := bad_va || (~x_array & hits).orR
io.resp.ae.ld := (~pr_array & hits).orR
io.resp.ae.st := (~pw_array & hits).orR
io.resp.ae.inst := (~px_array & hits).orR
io.resp.cacheable := (c_array & hits).orR io.resp.cacheable := (c_array & hits).orR
io.resp.miss := do_refill || tlb_miss || multipleHits io.resp.miss := do_refill || tlb_miss || multipleHits
io.resp.paddr := Cat(ppn, pgOffset) io.resp.paddr := Cat(ppn, pgOffset)
@ -208,7 +216,7 @@ class TLB(lgMaxSize: Int, nEntries: Int)(implicit edge: TLEdgeOut, p: Parameters
when (sfence) { when (sfence) {
valid := Mux(io.req.bits.sfence.bits.rs1, valid & ~hits(totalEntries-1, 0), valid := Mux(io.req.bits.sfence.bits.rs1, valid & ~hits(totalEntries-1, 0),
Mux(io.req.bits.sfence.bits.rs2, valid & g_array.asUInt, 0)) Mux(io.req.bits.sfence.bits.rs2, valid & entries.map(_.g).asUInt, 0))
} }
when (multipleHits) { when (multipleHits) {
valid := 0 valid := 0

View File

@ -23,6 +23,7 @@ trait CoreParams {
val decodeWidth: Int val decodeWidth: Int
val retireWidth: Int val retireWidth: Int
val instBits: Int val instBits: Int
val nLocalInterrupts: Int
} }
trait HasCoreParameters extends HasTileParameters { trait HasCoreParameters extends HasTileParameters {

View File

@ -7,6 +7,7 @@ import config.Parameters
import util._ import util._
class TileInterrupts(implicit p: Parameters) extends CoreBundle()(p) { class TileInterrupts(implicit p: Parameters) extends CoreBundle()(p) {
val lip = Vec(coreParams.nLocalInterrupts, Bool())
val debug = Bool() val debug = Bool()
val mtip = Bool() val mtip = Bool()
val msip = Bool() val msip = Bool()

View File

@ -44,6 +44,7 @@ class TLMonitor(args: TLMonitorArgs) extends TLMonitorBase(args)
when (bundle.opcode === TLMessages.Acquire) { when (bundle.opcode === TLMessages.Acquire) {
assert (edge.manager.supportsAcquireBSafe(edge.address(bundle), bundle.size), "'A' channel carries Acquire type unsupported by manager" + extra) assert (edge.manager.supportsAcquireBSafe(edge.address(bundle), bundle.size), "'A' channel carries Acquire type unsupported by manager" + extra)
assert (edge.client.supportsProbe(edge.source(bundle), bundle.size), "'A' channel carries Acquire from a client which does not support Probe" + extra)
assert (source_ok, "'A' channel Acquire carries invalid source ID" + extra) assert (source_ok, "'A' channel Acquire carries invalid source ID" + extra)
assert (bundle.size >= UInt(log2Ceil(edge.manager.beatBytes)), "'A' channel Acquire smaller than a beat" + extra) assert (bundle.size >= UInt(log2Ceil(edge.manager.beatBytes)), "'A' channel Acquire smaller than a beat" + extra)
assert (is_aligned, "'A' channel Acquire address not aligned to size" + extra) assert (is_aligned, "'A' channel Acquire address not aligned to size" + extra)
@ -189,6 +190,7 @@ class TLMonitor(args: TLMonitorArgs) extends TLMonitorBase(args)
when (bundle.opcode === TLMessages.Release) { when (bundle.opcode === TLMessages.Release) {
assert (edge.manager.supportsAcquireBSafe(edge.address(bundle), bundle.size), "'C' channel carries Release type unsupported by manager" + extra) assert (edge.manager.supportsAcquireBSafe(edge.address(bundle), bundle.size), "'C' channel carries Release type unsupported by manager" + extra)
assert (edge.client.supportsProbe(edge.source(bundle), bundle.size), "'C' channel carries Release from a client which does not support Probe" + extra)
assert (source_ok, "'C' channel Release carries invalid source ID" + extra) assert (source_ok, "'C' channel Release carries invalid source ID" + extra)
assert (bundle.size >= UInt(log2Ceil(edge.manager.beatBytes)), "'C' channel Release smaller than a beat" + extra) assert (bundle.size >= UInt(log2Ceil(edge.manager.beatBytes)), "'C' channel Release smaller than a beat" + extra)
assert (is_aligned, "'C' channel Release address not aligned to size" + extra) assert (is_aligned, "'C' channel Release address not aligned to size" + extra)
@ -198,6 +200,7 @@ class TLMonitor(args: TLMonitorArgs) extends TLMonitorBase(args)
when (bundle.opcode === TLMessages.ReleaseData) { when (bundle.opcode === TLMessages.ReleaseData) {
assert (edge.manager.supportsAcquireBSafe(edge.address(bundle), bundle.size), "'C' channel carries ReleaseData type unsupported by manager" + extra) assert (edge.manager.supportsAcquireBSafe(edge.address(bundle), bundle.size), "'C' channel carries ReleaseData type unsupported by manager" + extra)
assert (edge.client.supportsProbe(edge.source(bundle), bundle.size), "'C' channel carries Release from a client which does not support Probe" + extra)
assert (source_ok, "'C' channel ReleaseData carries invalid source ID" + extra) assert (source_ok, "'C' channel ReleaseData carries invalid source ID" + extra)
assert (bundle.size >= UInt(log2Ceil(edge.manager.beatBytes)), "'C' channel ReleaseData smaller than a beat" + extra) assert (bundle.size >= UInt(log2Ceil(edge.manager.beatBytes)), "'C' channel ReleaseData smaller than a beat" + extra)
assert (is_aligned, "'C' channel ReleaseData address not aligned to size" + extra) assert (is_aligned, "'C' channel ReleaseData address not aligned to size" + extra)