From 4ebca73d59dc78559e82d49b72982ff01daea04f Mon Sep 17 00:00:00 2001
From: Andrew Waterman
Date: Thu, 9 Nov 2017 17:25:10 -0800
Subject: [PATCH] Provide option to support AMOs only on I/O, not DTIM/D$

---
 src/main/scala/rocket/DCache.scala              | 6 +++---
 src/main/scala/rocket/RocketCore.scala          | 1 +
 src/main/scala/rocket/ScratchpadSlavePort.scala | 2 +-
 src/main/scala/rocket/TLB.scala                 | 6 +++---
 src/main/scala/tile/Core.scala                  | 3 +++
 5 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/src/main/scala/rocket/DCache.scala b/src/main/scala/rocket/DCache.scala
index dfa58812..1f28ffc4 100644
--- a/src/main/scala/rocket/DCache.scala
+++ b/src/main/scala/rocket/DCache.scala
@@ -76,7 +76,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
   val eccBits = eccBytes * 8
   require(isPow2(eccBytes) && eccBytes <= wordBytes)
   require(eccBytes == 1 || !dECC.isInstanceOf[IdentityCode])
-  val usingRMW = eccBytes > 1 || usingAtomics
+  val usingRMW = eccBytes > 1 || usingAtomicsInCache
 
   // tags
   val replacer = cacheParams.replacement
@@ -688,11 +688,11 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
     if (usingRMW) {
       val amoalu = Module(new AMOALU(xLen))
       amoalu.io.mask := pstore1_mask
-      amoalu.io.cmd := (if (usingAtomics) pstore1_cmd else M_XWR)
+      amoalu.io.cmd := (if (usingAtomicsInCache) pstore1_cmd else M_XWR)
       amoalu.io.lhs := s2_data_word
       amoalu.io.rhs := pstore1_data
       pstore1_storegen_data := amoalu.io.out
-    } else {
+    } else if (!usingAtomics) {
       assert(!(s1_valid_masked && s1_read && s1_write), "unsupported D$ operation")
     }
     when (s2_correct) { pstore1_storegen_data := s2_data_word_corrected }
diff --git a/src/main/scala/rocket/RocketCore.scala b/src/main/scala/rocket/RocketCore.scala
index e210cbb1..a7f3d175 100644
--- a/src/main/scala/rocket/RocketCore.scala
+++ b/src/main/scala/rocket/RocketCore.scala
@@ -18,6 +18,7 @@ case class RocketCoreParams(
   useUser: Boolean = false,
   useDebug: Boolean = true,
   useAtomics: Boolean = true,
+  useAtomicsOnlyForIO: Boolean = false,
   useCompressed: Boolean = true,
   nLocalInterrupts: Int = 0,
   nBreakpoints: Int = 1,
diff --git a/src/main/scala/rocket/ScratchpadSlavePort.scala b/src/main/scala/rocket/ScratchpadSlavePort.scala
index cd897fa0..3a94e0d7 100644
--- a/src/main/scala/rocket/ScratchpadSlavePort.scala
+++ b/src/main/scala/rocket/ScratchpadSlavePort.scala
@@ -100,7 +100,7 @@ trait CanHaveScratchpad extends HasHellaCache with HasICacheFrontend {
   val cacheBlockBytes = p(CacheBlockBytes)
 
   val scratch = tileParams.dcache.flatMap { d => d.scratch.map(s =>
-    LazyModule(new ScratchpadSlavePort(AddressSet(s, d.dataScratchpadBytes-1), xBytes, tileParams.core.useAtomics)))
+    LazyModule(new ScratchpadSlavePort(AddressSet(s, d.dataScratchpadBytes-1), xBytes, tileParams.core.useAtomics && !tileParams.core.useAtomicsOnlyForIO)))
   }
 
   val intOutputNode = tileParams.core.tileControlAddr.map(dummy => IntIdentityNode())
diff --git a/src/main/scala/rocket/TLB.scala b/src/main/scala/rocket/TLB.scala
index dc41295a..ed2cc23f 100644
--- a/src/main/scala/rocket/TLB.scala
+++ b/src/main/scala/rocket/TLB.scala
@@ -117,8 +117,8 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
   val homogeneous = TLBPageLookup(edge.manager.managers, xLen, p(CacheBlockBytes), BigInt(1) << pgIdxBits)(mpu_physaddr).homogeneous
   val prot_r = fastCheck(_.supportsGet) && pmp.io.r
   val prot_w = fastCheck(_.supportsPutFull) && pmp.io.w
-  val prot_al = fastCheck(_.supportsLogical) || cacheable
-  val prot_aa = fastCheck(_.supportsArithmetic) || cacheable
+  val prot_al = fastCheck(_.supportsLogical) || (cacheable && usingAtomicsInCache)
+  val prot_aa = fastCheck(_.supportsArithmetic) || (cacheable && usingAtomicsInCache)
   val prot_x = fastCheck(_.executable) && pmp.io.x
   val prot_eff = fastCheck(Seq(RegionType.PUT_EFFECTS, RegionType.GET_EFFECTS) contains _.regionType)
 
@@ -190,7 +190,7 @@ class TLB(instruction: Boolean, lgMaxSize: Int, nEntries: Int)(implicit edge: TL
       (if (vpnBits == vpnBitsExtended) Bool(false)
        else (io.req.bits.vaddr.asSInt < 0.S) =/= (vpn.asSInt < 0.S))
 
-    val lrscAllowed = Mux(Bool(usingDataScratchpad), 0.U, c_array)
+    val lrscAllowed = Mux(Bool(usingDataScratchpad || usingAtomicsOnlyForIO), 0.U, c_array)
     val ae_array =
       Mux(misaligned, eff_array, 0.U) |
       Mux(Bool(usingAtomics) && io.req.bits.cmd.isOneOf(M_XLR, M_XSC), ~lrscAllowed, 0.U)
diff --git a/src/main/scala/tile/Core.scala b/src/main/scala/tile/Core.scala
index 49839a71..b58d31c2 100644
--- a/src/main/scala/tile/Core.scala
+++ b/src/main/scala/tile/Core.scala
@@ -17,6 +17,7 @@ trait CoreParams {
   val useUser: Boolean
   val useDebug: Boolean
   val useAtomics: Boolean
+  val useAtomicsOnlyForIO: Boolean
   val useCompressed: Boolean
   val mulDiv: Option[MulDivParams]
   val fpu: Option[FPUParams]
@@ -47,6 +48,8 @@ trait HasCoreParameters extends HasTileParameters {
   val usingMulDiv = coreParams.mulDiv.nonEmpty
   val usingFPU = coreParams.fpu.nonEmpty
   val usingAtomics = coreParams.useAtomics
+  val usingAtomicsOnlyForIO = coreParams.useAtomicsOnlyForIO
+  val usingAtomicsInCache = usingAtomics && !usingAtomicsOnlyForIO
   val usingCompressed = coreParams.useCompressed
   val retireWidth = coreParams.retireWidth
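
Usage sketch (illustrative, not part of this patch): a downstream design would normally opt in to the new flag through its Config stack. The fragment below assumes the RocketTilesKey/Config override pattern rocket-chip used around this time; the class name WithAtomicsOnlyForIO and the import paths are assumptions for illustration, not something this patch adds.

// Illustrative sketch only; package paths vary by rocket-chip version
// (RocketTilesKey was in the coreplex package in this era, later subsystem).
import freechips.rocketchip.config.Config
import freechips.rocketchip.coreplex.RocketTilesKey

// Hypothetical config fragment: keep useAtomics = true (the "A" extension is
// still advertised) but restrict AMOs to I/O regions. The derived
// usingAtomicsInCache then evaluates to false, which disables the D$/DTIM RMW
// datapath and the TLB's cacheable AMO permissions, as in the hunks above.
class WithAtomicsOnlyForIO extends Config((site, here, up) => {
  case RocketTilesKey => up(RocketTilesKey, site) map { r =>
    r.copy(core = r.core.copy(useAtomicsOnlyForIO = true))
  }
})

With this option set, the lrscAllowed change above also means LR/SC to cacheable memory raises an access exception, so only AMOs to I/O regions that support them remain usable.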