Heterogeneous Tiles (#550)

Fundamental new features:

* Added tile package: This package is intended to hold components reusable across different types of tile. It will be the future location of TL2-RoCC accelerators and of new diplomatic versions of intra-tile interfaces.
* Adopted the [ModuleName]Params convention: The code base was inconsistent about what to name the case classes that provide parameters to modules. We settled on [ModuleName]Params to distinguish them from config.Parameters and config.Config. So far the convention has been applied mostly to case classes defined within rocket and tile.
* Defined RocketTileParams: A nested case class containing the case classes for all the components of a tile (L1 caches and core), allowing all such parameters to vary per tile.
* Defined RocketCoreParams: All the parameters that can be varied per-core.
* Defined L1CacheParams: A trait defining the parameters common to L1 caches, made concrete in different derived case classes.
* Defined RocketTilesKey: A sequence of RocketTileParams, one for every tile to be created.
* Provided HeterogeneousDualCoreConfig: An example of making a heterogeneous chip with two cores, one big and one little; a sketch of how such a config composes appears after this list.
* Changes to legacy code: ReplacementPolicy moved to package util. L1Metadata moved to package tile. The legacy L2 cache agent was removed because it can no longer share the metadata array implementation with the L1. Legacy GroundTests are on life support.
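
As an illustration of how these pieces compose, the Scala sketch below assembles a two-core heterogeneous configuration from the WithNBigCores and WithNSmallCores fragments introduced in this change. Treat it as a hedged sketch: the class name, the choice of BaseCoreplexConfig as the base, and the coreplex import path are assumptions for illustration, not the literal definition of HeterogeneousDualCoreConfig.

    import config.Config
    import coreplex.{BaseCoreplexConfig, WithNBigCores, WithNSmallCores}

    // Hypothetical sketch: each fragment prepends one RocketTileParams entry to
    // the RocketTilesKey sequence, so the coreplex elaborates two differently
    // parameterized RocketTiles.
    class ExampleHeterogeneousDualCoreConfig extends Config(
      new WithNSmallCores(1) ++  // little: no VM, no FPU, direct-mapped L1s, blocking D$
      new WithNBigCores(1) ++    // big: fast mul/div and a 2-MSHR non-blocking D$
      new BaseCoreplexConfig)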

Additional changes that got rolled in along the way:

* rocket: fixed the critical path through the BTB when I$ index bits > pgIdxBits
* coreplex: tiles connected via :=* (see the excerpt after this list)
* groundtest: updated to use TileParams
* tilelink: cache cork requirements are relaxed to allow more cacheless masters
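
The new connection pattern for tile master ports appears in the HasRocketTiles changes further down. The excerpt below covers only the synchronous-crossing case, with c, pWithExtra, l1tol2, and cbus supplied by the surrounding trait; it is shown here just to highlight the :=* / :*= usage.

    // Excerpt adapted from HasRocketTiles (synchronous case): the tile exposes a
    // single masterNode, and the star operators let the tile side decide how many
    // TileLink edges are instantiated.
    val tile   = LazyModule(new RocketTile(c)(pWithExtra))
    val buffer = LazyModule(new TLBuffer)
    buffer.node :=* tile.masterNode   // edge count driven by the tile's master node
    l1tol2.node :=* buffer.node
    tile.slaveNode :*= cbus.node      // edge count driven by the tile's slave node
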
Henry Cook 2017-02-09 13:59:09 -08:00 committed by GitHub
parent f9acd4988c
commit e8c8d2af71
57 changed files with 1084 additions and 1933 deletions


@ -1,9 +1,35 @@
git:
submodules: false
language: scala
sudo: required
services:
- docker
# run on new infrastructure
sudo: false
cache:
apt: true
directories:
$HOME/.ivy2
regression/install
emulator/verilator
# packages needed to build riscv-tools
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.8
- g++-4.8
- gperf
- autoconf
- automake
- autotools-dev
- libmpc-dev
- libmpfr-dev
- libgmp-dev
- gawk
- build-essential
- bison
- flex
- texinfo
env:
matrix:
@ -21,8 +47,15 @@ branches:
- boom
- /^hurricane.*$/
install:
- make tools verilator -C regression SUITE=none
before_install:
- docker pull hcook/docker-riscv:chisel
- export CXX=g++-4.8 CC=gcc-4.8
script:
- docker run -v $(pwd):/opt/riscv/test hcook/docker-riscv:chisel /bin/sh -c "make emulator-ndebug -C regression SUITE=$SUITE; make emulator-regression-tests -C regression SUITE=$SUITE"
- make emulator-ndebug -C regression SUITE=$SUITE
- travis_wait make emulator-regression-tests -C regression SUITE=$SUITE
before_cache:
- ls -t regression/install | tail -n+2 | sed s@^@regression/install/@ | xargs rm -rf


@ -5,9 +5,9 @@ package coreplex
import Chisel._
import config._
import diplomacy._
import rocket.{TileInterrupts, XLen}
import tile.XLen
import tile.TileInterrupts
import uncore.tilelink2._
import uncore.util._
import util._
/** Widths of various points in the SoC */
@ -15,6 +15,9 @@ case class TLBusConfig(beatBytes: Int)
case object CBusConfig extends Field[TLBusConfig]
case object L1toL2Config extends Field[TLBusConfig]
// These parameters apply to all caches, for now
case object CacheBlockBytes extends Field[Int]
/** L2 Broadcast Hub configuration */
case class BroadcastConfig(
nTrackers: Int = 4,
@ -42,10 +45,11 @@ case object BootROMFile extends Field[String]
trait HasCoreplexParameters {
implicit val p: Parameters
lazy val tilesParams = p(RocketTilesKey)
lazy val cbusConfig = p(CBusConfig)
lazy val l1tol2Config = p(L1toL2Config)
lazy val nTiles = p(uncore.devices.NTiles)
lazy val hasSupervisor = p(rocket.UseVM)
lazy val nTiles = tilesParams.size
lazy val hasSupervisor = tilesParams.exists(_.core.useVM) // TODO ask andrew about this
lazy val l2Config = p(BankedL2Config)
}


@ -1,56 +0,0 @@
// See LICENSE.SiFive for license details.
package coreplex
import Chisel._
import config._
import diplomacy._
import rocket.{TileInterrupts, XLen}
import uncore.tilelink2._
import util.GenericParameterizedBundle
abstract class BareTile(implicit p: Parameters) extends LazyModule
abstract class BareTileBundle[+L <: BareTile](_outer: L) extends GenericParameterizedBundle(_outer) {
val outer = _outer
implicit val p = outer.p
}
abstract class BareTileModule[+L <: BareTile, +B <: BareTileBundle[L]](_outer: L, _io: () => B) extends LazyModuleImp(_outer) {
val outer = _outer
val io = _io ()
}
// Uses a tile-internal crossbar to provide a single TileLink master port
trait TileNetwork {
implicit val p: Parameters
val module: TileNetworkModule
val l1backend = LazyModule(new TLXbar)
val masterNodes = List(TLOutputNode())
masterNodes.head := l1backend.node
}
trait TileNetworkBundle {
val outer: TileNetwork
val master = outer.masterNodes.head.bundleOut
}
trait TileNetworkModule {
val outer: TileNetwork
val io: TileNetworkBundle
}
abstract class BaseTile(implicit p: Parameters) extends BareTile
with TileNetwork {
override lazy val module = new BaseTileModule(this, () => new BaseTileBundle(this))
}
class BaseTileBundle[+L <: BaseTile](_outer: L) extends BareTileBundle(_outer)
with TileNetworkBundle {
val hartid = UInt(INPUT, p(XLen))
val interrupts = new TileInterrupts()(p).asInput
val resetVector = UInt(INPUT, p(XLen))
}
class BaseTileModule[+L <: BaseTile, +B <: BaseTileBundle[L]](_outer: L, _io: () => B) extends BareTileModule(_outer, _io)
with TileNetworkModule


@ -7,6 +7,7 @@ import Chisel._
import config._
import diplomacy._
import rocket._
import tile._
import uncore.converters._
import uncore.devices._
import uncore.tilelink2._
@ -14,71 +15,42 @@ import uncore.util._
import util._
class BaseCoreplexConfig extends Config ((site, here, up) => {
//Memory Parameters
case PAddrBits => 32
case PgLevels => if (site(XLen) == 64) 3 /* Sv39 */ else 2 /* Sv32 */
case ASIdBits => 7
case XLen => 64 // Applies to all cores
case BuildCore => (p: Parameters) => new Rocket()(p)
case RocketCrossing => Synchronous
//Params used by all caches
case CacheName("L1I") => CacheConfig(
nSets = 64,
nWays = 4,
rowBits = site(L1toL2Config).beatBytes*8,
nTLBEntries = 8,
cacheIdBits = 0,
splitMetadata = false)
case CacheName("L1D") => CacheConfig(
nSets = 64,
nWays = 4,
rowBits = site(L1toL2Config).beatBytes*8,
nTLBEntries = 8,
cacheIdBits = 0,
splitMetadata = false)
case ECCCode => None
case Replacer => () => new RandomReplacement(site(site(CacheName)).nWays)
//L1InstCache
case BtbKey => BtbParameters()
//L1DataCache
case DCacheKey => DCacheConfig(nMSHRs = 2)
case DataScratchpadSize => 0
//Tile Constants
case BuildRoCC => Nil
//Rocket Core Constants
case CoreInstBits => if (site(UseCompressed)) 16 else 32
case FetchWidth => if (site(UseCompressed)) 2 else 1
case RetireWidth => 1
case UseVM => true
case UseUser => false
case UseDebug => true
case NBreakpoints => 1
case NPerfCounters => 0
case NPerfEvents => 0
case FastLoadWord => true
case FastLoadByte => false
case FastJAL => false
case XLen => 64
case FPUKey => Some(FPUConfig())
case MulDivKey => Some(MulDivConfig(mulUnroll = 8, mulEarlyOut = (site(XLen) > 32), divEarlyOut = true))
case UseAtomics => true
case UseCompressed => true
case RocketTilesKey => Nil
case DMKey => new DefaultDebugModuleConfig(site(NTiles), site(XLen))
case NCustomMRWCSRs => 0
case MtvecInit => Some(BigInt(0))
case MtvecWritable => true
//Uncore Paramters
case NTiles => site(RocketTilesKey).size
case CBusConfig => TLBusConfig(beatBytes = site(XLen)/8)
case L1toL2Config => TLBusConfig(beatBytes = site(XLen)/8) // increase for more PCIe bandwidth
case BootROMFile => "./bootrom/bootrom.img"
case NTiles => site(RocketConfigs).size
case RocketConfigs => List(RocketConfig(site(XLen)))
case BuildCore => (c: RocketConfig, p: Parameters) => new Rocket(c)(p)
case BroadcastConfig => BroadcastConfig()
case BankedL2Config => BankedL2Config()
case CacheBlockBytes => 64
})
class WithNCores(n: Int) extends Config((site, here, up) => {
case RocketConfigs => List.fill(n){ RocketConfig(site(XLen)) }
class WithNBigCores(n: Int) extends Config((site, here, up) => {
case RocketTilesKey => {
val big = RocketTileParams(
core = RocketCoreParams(mulDiv = Some(MulDivParams(mulUnroll = 8, mulEarlyOut = true, divEarlyOut = true))),
dcache = Some(DCacheParams(rowBits = site(L1toL2Config).beatBytes*8, nMSHRs = 2)),
icache = Some(ICacheParams(rowBits = site(L1toL2Config).beatBytes*8)))
List.fill(n)(big) ++ up(RocketTilesKey, site)
}
})
class WithNSmallCores(n: Int) extends Config((site, here, up) => {
case RocketTilesKey => {
val small = RocketTileParams(
core = RocketCoreParams(useVM = false, fpu = None),
btb = None,
dcache = Some(DCacheParams(rowBits = site(L1toL2Config).beatBytes*8, nSets = 64, nWays = 1, nTLBEntries = 4, nMSHRs = 0)),
icache = Some(ICacheParams(rowBits = site(L1toL2Config).beatBytes*8, nSets = 64, nWays = 1, nTLBEntries = 4)))
List.fill(n)(small) ++ up(RocketTilesKey, site)
}
})
class WithNBanksPerMemChannel(n: Int) extends Config((site, here, up) => {
@ -89,43 +61,50 @@ class WithNTrackersPerBank(n: Int) extends Config((site, here, up) => {
case BroadcastConfig => up(BroadcastConfig, site).copy(nTrackers = n)
})
// This is the number of sets **per way**
// This is the number of icache sets for all Rocket tiles
class WithL1ICacheSets(sets: Int) extends Config((site, here, up) => {
case CacheName("L1I") => up(CacheName("L1I"), site).copy(nSets = sets)
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(icache = r.icache.map(_.copy(nSets = sets))) }
})
// This is the number of sets **per way**
// This is the number of icache sets for all Rocket tiles
class WithL1DCacheSets(sets: Int) extends Config((site, here, up) => {
case CacheName("L1D") => up(CacheName("L1D"), site).copy(nSets = sets)
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(dcache = r.dcache.map(_.copy(nSets = sets))) }
})
class WithL1ICacheWays(ways: Int) extends Config((site, here, up) => {
case CacheName("L1I") => up(CacheName("L1I"), site).copy(nWays = ways)
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(icache = r.icache.map(_.copy(nWays = ways)))
}
})
class WithL1DCacheWays(ways: Int) extends Config((site, here, up) => {
case CacheName("L1D") => up(CacheName("L1D"), site).copy(nWays = ways)
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(dcache = r.dcache.map(_.copy(nWays = ways)))
}
})
class WithCacheBlockBytes(linesize: Int) extends Config((site, here, up) => {
case CacheBlockBytes => linesize
})
class WithDataScratchpad(n: Int) extends Config((site, here, up) => {
case DataScratchpadSize => n
case CacheName("L1D") => up(CacheName("L1D"), site).copy(nSets = n / site(CacheBlockBytes))
/** Warning: applies only to the most recently added tile.
* TODO: For now, there can only be a single scratchpad in the design
* because its address is hardcoded.
*/
class WithDataScratchpad(size: Int) extends Config((site, here, up) => {
case RocketTilesKey => {
val prev = up(RocketTilesKey, site)
prev.head.copy(
dcache = prev.head.dcache.map(_.copy(nSets = size / site(CacheBlockBytes))),
dataScratchpadBytes = size) +: prev.tail
}
})
// TODO: re-add L2
class WithL2Cache extends Config((site, here, up) => {
case CacheName("L2") => CacheConfig(
nSets = 1024,
nWays = 1,
rowBits = site(L1toL2Config).beatBytes*8,
nTLBEntries = 0,
cacheIdBits = 1,
splitMetadata = false)
})
class WithL2Cache extends Config(Parameters.empty) // TODO: re-add L2
class WithL2Capacity(size_kb: Int) extends Config(Parameters.empty) // TODO: re-add L2
class WithNL2Ways(n: Int) extends Config(Parameters.empty) // TODO: re-add L2
class WithBufferlessBroadcastHub extends Config((site, here, up) => {
case BroadcastConfig => up(BroadcastConfig, site).copy(bufferless = true)
@ -151,44 +130,39 @@ class WithStatelessBridge extends Config((site, here, up) => {
ww.node :*= cork.node
(cork.node, ww.node)
})
case DCacheKey => up(DCacheKey, site).copy(nMSHRs = 0)
})
class WithL2Capacity(size_kb: Int) extends Config(Parameters.empty) // TODO
class WithNL2Ways(n: Int) extends Config((site, here, up) => {
case CacheName("L2") => up(CacheName("L2"), site).copy(nWays = n)
})
class WithRV32 extends Config((site, here, up) => {
case XLen => 32
case FPUKey => Some(FPUConfig(divSqrt = false))
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(core = r.core.copy(
mulDiv = Some(MulDivParams(mulUnroll = 8)),
fpu = r.core.fpu.map(_.copy(divSqrt = false))))
}
})
class WithBlockingL1 extends Config((site, here, up) => {
case DCacheKey => up(DCacheKey, site).copy(nMSHRs = 0)
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(dcache = r.dcache.map(_.copy(nMSHRs = 0)))
}
})
class WithSmallCores extends Config((site, here, up) => {
case MulDivKey => Some(MulDivConfig())
case FPUKey => None
case UseVM => false
case BtbKey => BtbParameters(nEntries = 0)
case CacheName("L1D") => up(CacheName("L1D"), site).copy(nSets = 64, nWays = 1, nTLBEntries = 4)
case CacheName("L1I") => up(CacheName("L1I"), site).copy(nSets = 64, nWays = 1, nTLBEntries = 4)
case DCacheKey => up(DCacheKey, site).copy(nMSHRs = 0)
class WithNBreakpoints(hwbp: Int) extends Config ((site, here, up) => {
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(core = r.core.copy(nBreakpoints = hwbp))
}
})
class WithRoccExample extends Config((site, here, up) => {
case BuildRoCC => Seq(
RoccParameters(
RoCCParams(
opcodes = OpcodeSet.custom0,
generator = (p: Parameters) => Module(new AccumulatorExample()(p))),
RoccParameters(
RoCCParams(
opcodes = OpcodeSet.custom1,
generator = (p: Parameters) => Module(new TranslatorExample()(p)),
nPTWPorts = 1),
RoccParameters(
RoCCParams(
opcodes = OpcodeSet.custom2,
generator = (p: Parameters) => Module(new CharacterCountExample()(p))))
@ -196,23 +170,34 @@ class WithRoccExample extends Config((site, here, up) => {
})
class WithDefaultBtb extends Config((site, here, up) => {
case BtbKey => BtbParameters()
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(btb = Some(BTBParams()))
}
})
class WithFastMulDiv extends Config((site, here, up) => {
case MulDivKey => Some(MulDivConfig(mulUnroll = 8, mulEarlyOut = (site(XLen) > 32), divEarlyOut = true))
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(core = r.core.copy(mulDiv = Some(
MulDivParams(mulUnroll = 8, mulEarlyOut = (site(XLen) > 32), divEarlyOut = true)
)))}
})
class WithoutMulDiv extends Config((site, here, up) => {
case MulDivKey => None
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(core = r.core.copy(mulDiv = None))
}
})
class WithoutFPU extends Config((site, here, up) => {
case FPUKey => None
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(core = r.core.copy(fpu = None))
}
})
class WithFPUWithoutDivSqrt extends Config((site, here, up) => {
case FPUKey => Some(FPUConfig(divSqrt = false))
case RocketTilesKey => up(RocketTilesKey, site) map { r =>
r.copy(core = r.core.copy(fpu = r.core.fpu.map(_.copy(divSqrt = false))))
}
})
class WithBootROMFile(bootROMFile: String) extends Config((site, here, up) => {


@ -6,14 +6,9 @@ import Chisel._
import config._
import junctions._
import diplomacy._
import uncore.tilelink._
import tile._
import uncore.tilelink2._
import uncore.coherence._
import uncore.agents._
import uncore.devices._
import uncore.util._
import uncore.converters._
import rocket._
import util._
trait CoreplexRISCVPlatform extends CoreplexNetwork {


@ -6,6 +6,7 @@ import Chisel._
import config._
import diplomacy._
import rocket._
import tile._
import uncore.tilelink2._
sealed trait ClockCrossing
@ -13,18 +14,14 @@ case object Synchronous extends ClockCrossing
case object Rational extends ClockCrossing
case class Asynchronous(depth: Int, sync: Int = 2) extends ClockCrossing
case object RocketConfigs extends Field[Seq[RocketConfig]]
case object RocketTilesKey extends Field[Seq[RocketTileParams]]
case object RocketCrossing extends Field[ClockCrossing]
trait HasRocketTiles extends CoreplexRISCVPlatform {
val module: HasRocketTilesModule
private val crossing = p(RocketCrossing)
private val configs = p(RocketConfigs)
private val pWithExtra = p.alterPartial {
case SharedMemoryTLEdge => l1tol2.node.edgesIn(0)
case PAddrBits => l1tol2.node.edgesIn(0).bundle.addressBits
}
private val configs = p(RocketTilesKey)
private val rocketTileIntNodes = configs.map { _ => IntInternalOutputNode() }
rocketTileIntNodes.foreach { _ := plic.intnode }
@ -37,11 +34,20 @@ trait HasRocketTiles extends CoreplexRISCVPlatform {
}
val rocketWires: Seq[HasRocketTilesBundle => Unit] = configs.zipWithIndex.map { case (c, i) =>
val pWithExtra = p.alterPartial {
case TileKey => c
case BuildRoCC => c.rocc
case SharedMemoryTLEdge => l1tol2.node.edgesIn(0)
case PAddrBits => l1tol2.node.edgesIn(0).bundle.addressBits
}
crossing match {
case Synchronous => {
val tile = LazyModule(new RocketTile(c)(pWithExtra))
tile.masterNodes.foreach { l1tol2.node := TLBuffer()(_) }
tile.slaveNode.foreach { _ := cbus.node }
val buffer = LazyModule(new TLBuffer)
buffer.node :=* tile.masterNode
l1tol2.node :=* buffer.node
tile.slaveNode :*= cbus.node
(io: HasRocketTilesBundle) => {
// leave clock as default (simpler for hierarchical PnR)
tile.module.io.hartid := UInt(i)
@ -51,8 +57,12 @@ trait HasRocketTiles extends CoreplexRISCVPlatform {
}
case Asynchronous(depth, sync) => {
val wrapper = LazyModule(new AsyncRocketTile(c)(pWithExtra))
wrapper.masterNodes.foreach { l1tol2.node := TLAsyncCrossingSink(depth, sync)(_) }
wrapper.slaveNode.foreach { _ := TLAsyncCrossingSource(sync)(cbus.node) }
val sink = LazyModule(new TLAsyncCrossingSink(depth, sync))
val source = LazyModule(new TLAsyncCrossingSource(sync))
sink.node :=* wrapper.masterNode
l1tol2.node :=* sink.node
wrapper.slaveNode :*= source.node
source.node :*= cbus.node
(io: HasRocketTilesBundle) => {
wrapper.module.clock := io.tcrs(i).clock
wrapper.module.reset := io.tcrs(i).reset
@ -63,8 +73,12 @@ trait HasRocketTiles extends CoreplexRISCVPlatform {
}
case Rational => {
val wrapper = LazyModule(new RationalRocketTile(c)(pWithExtra))
wrapper.masterNodes.foreach { l1tol2.node := TLRationalCrossingSink()(_) }
wrapper.slaveNode.foreach { _ := TLRationalCrossingSource()(cbus.node) }
val sink = LazyModule(new TLRationalCrossingSink)
val source = LazyModule(new TLRationalCrossingSource)
sink.node :=* wrapper.masterNode
l1tol2.node :=* sink.node
wrapper.slaveNode :*= source.node
source.node :*= cbus.node
(io: HasRocketTilesBundle) => {
wrapper.module.clock := io.tcrs(i).clock
wrapper.module.reset := io.tcrs(i).reset
@ -79,7 +93,7 @@ trait HasRocketTiles extends CoreplexRISCVPlatform {
trait HasRocketTilesBundle extends CoreplexRISCVPlatformBundle {
val outer: HasRocketTiles
val tcrs = Vec(p(RocketConfigs).size, new Bundle {
val tcrs = Vec(p(RocketTilesKey).size, new Bundle {
val clock = Clock(INPUT)
val reset = Bool(INPUT)
})


@ -4,6 +4,7 @@
package groundtest
import Chisel._
import coreplex.CacheBlockBytes
import uncore.tilelink._
import uncore.constants._
import uncore.util._
@ -12,8 +13,9 @@ import config._
class CacheFillTest(implicit p: Parameters) extends GroundTest()(p)
with HasTileLinkParameters {
val l2Config = p(CacheName("L2"))
val capacityKb = l2Config.nSets * l2Config.nWays * l2Config.rowBits / (1024*8)
//val l2Config = p(CacheName("L2"))
//val capacityKb = l2Config.nSets * l2Config.nWays * l2Config.rowBits / (1024*8)
val capacityKb = 1024 // TODO
val nblocks = capacityKb * 1024 / p(CacheBlockBytes)
val s_start :: s_prefetch :: s_retrieve :: s_finished :: Nil = Enum(Bits(), 4)
val state = Reg(init = s_start)


@ -11,6 +11,7 @@ import uncore.coherence._
import uncore.agents._
import uncore.util._
import uncore.devices.NTiles
import tile.TileKey
import junctions._
import config._
import coreplex._
@ -18,10 +19,10 @@ import rocketchip._
/** Actual testing target Configs */
class GroundTestConfig extends Config(new WithGroundTest ++ new BaseConfig)
class GroundTestConfig extends Config(new WithGroundTestTiles ++ new BaseConfig)
class ComparatorConfig extends Config(
new WithComparator ++ new GroundTestConfig)
new WithComparator(1) ++ new GroundTestConfig)
class ComparatorL2Config extends Config(
new WithAtomics ++ new WithPrefetches ++
new WithL2Cache ++ new ComparatorConfig)
@ -30,31 +31,32 @@ class ComparatorBufferlessConfig extends Config(
class ComparatorStatelessConfig extends Config(
new WithStatelessBridge ++ new ComparatorConfig)
class MemtestConfig extends Config(new WithMemtest ++ new GroundTestConfig)
class MemtestConfig extends Config(new WithMemtest(1) ++ new GroundTestConfig)
class MemtestL2Config extends Config(
new WithL2Cache ++ new MemtestConfig)
class MemtestBufferlessConfig extends Config(
new WithBufferlessBroadcastHub ++ new MemtestConfig)
class MemtestStatelessConfig extends Config(
new WithNGenerators(0, 1) ++ new WithStatelessBridge ++ new MemtestConfig)
new WithStatelessBridge ++ new MemtestConfig)
// Test ALL the things
class FancyMemtestConfig extends Config(
new WithNGenerators(1, 2) ++ new WithNCores(2) ++ new WithMemtest ++
new WithMemtest(2) ++
new WithNMemoryChannels(2) ++ new WithNBanksPerMemChannel(4) ++
new WithL2Cache ++ new GroundTestConfig)
class CacheFillTestConfig extends Config(
new WithNL2Ways(4) ++ new WithL2Capacity(4) ++ new WithCacheFillTest ++ new WithL2Cache ++ new GroundTestConfig)
new WithNL2Ways(4) ++ new WithL2Capacity(4) ++
new WithCacheFillTest(1) ++ new WithL2Cache ++ new GroundTestConfig)
class BroadcastRegressionTestConfig extends Config(
new WithBroadcastRegressionTest ++ new GroundTestConfig)
new WithBroadcastRegressionTest(1) ++ new GroundTestConfig)
class BufferlessRegressionTestConfig extends Config(
new WithBufferlessBroadcastHub ++ new BroadcastRegressionTestConfig)
class CacheRegressionTestConfig extends Config(
new WithCacheRegressionTest ++ new WithL2Cache ++ new GroundTestConfig)
new WithCacheRegressionTest(1) ++ new WithL2Cache ++ new GroundTestConfig)
class TraceGenConfig extends Config(
new WithNCores(2) ++ new WithTraceGen ++ new GroundTestConfig)
new WithTraceGen(2) ++ new GroundTestConfig)
class TraceGenBufferlessConfig extends Config(
new WithBufferlessBroadcastHub ++ new TraceGenConfig)
class TraceGenL2Config extends Config(
@ -72,15 +74,15 @@ class Edge32BitMemtestConfig extends Config(
new WithEdgeDataBits(32) ++ new MemtestConfig)
/* Composable Configs to set individual parameters */
class WithGroundTest extends Config((site, here, up) => {
case FPUKey => None
case UseAtomics => false
case UseCompressed => false
class WithGroundTestTiles extends Config((site, here, up) => {
case TileKey => site(GroundTestKey).head
case NTiles => site(GroundTestKey).size
})
class WithComparator extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(uncached = 2)
class WithComparator(n: Int) extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(n) {
GroundTestTileParams(uncached = 2, dcache = None)
}
case BuildGroundTest =>
(p: Parameters) => Module(new ComparatorCore()(p))
@ -88,23 +90,21 @@ class WithComparator extends Config((site, here, up) => {
targets = Seq(site(ExtMem).base, testRamAddr),
width = 8,
operations = 1000,
atomics = site(UseAtomics),
atomics = false,
prefetches = false)
case FPUConfig => None
case UseAtomics => false
})
class WithAtomics extends Config((site, here, up) => {
case UseAtomics => true
case ComparatorKey => up(ComparatorKey, site).copy(atomics = true)
})
class WithPrefetches extends Config((site, here, up) => {
case ComparatorKey => up(ComparatorKey, site).copy(prefetches = true)
})
class WithMemtest extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(1, 1)
class WithMemtest(n: Int) extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(n) {
GroundTestTileParams(uncached = 1)
}
case GeneratorKey => TrafficGeneratorParameters(
maxRequests = 128,
@ -113,23 +113,17 @@ class WithMemtest extends Config((site, here, up) => {
(p: Parameters) => Module(new GeneratorTest()(p))
})
class WithNGenerators(nUncached: Int, nCached: Int) extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(nUncached, nCached)
}
})
class WithCacheFillTest extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(uncached = 1)
class WithCacheFillTest(n: Int) extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(n) {
GroundTestTileParams(uncached = 1, dcache = None)
}
case BuildGroundTest =>
(p: Parameters) => Module(new CacheFillTest()(p))
})
class WithBroadcastRegressionTest extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(1, 1, maxXacts = 3)
class WithBroadcastRegressionTest(n: Int) extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(n) {
GroundTestTileParams(uncached = 1, maxXacts = 3)
}
case BuildGroundTest =>
(p: Parameters) => Module(new RegressionTest()(p))
@ -137,9 +131,9 @@ class WithBroadcastRegressionTest extends Config((site, here, up) => {
(p: Parameters) => RegressionTests.broadcastRegressions(p)
})
class WithCacheRegressionTest extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(1, 1, maxXacts = 5)
class WithCacheRegressionTest(n: Int) extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(n) {
GroundTestTileParams(uncached = 1, maxXacts = 5)
}
case BuildGroundTest =>
(p: Parameters) => Module(new RegressionTest()(p))
@ -147,9 +141,9 @@ class WithCacheRegressionTest extends Config((site, here, up) => {
(p: Parameters) => RegressionTests.cacheRegressions(p)
})
class WithTraceGen extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(site(NTiles)) {
GroundTestTileSettings(uncached = 0, cached = 1)
class WithTraceGen(n: Int) extends Config((site, here, up) => {
case GroundTestKey => Seq.fill(n) {
GroundTestTileParams(dcache = Some(DCacheParams(nSets = 16, nWays = 1)))
}
case BuildGroundTest =>
(p: Parameters) => Module(new GroundTestTraceGenerator()(p))
@ -160,11 +154,9 @@ class WithTraceGen extends Config((site, here, up) => {
val nSets = 2
val nWays = 1
val blockOffset = site(CacheBlockOffsetBits)
val nBeats = site(TLKey("L1toL2")).dataBeats
val nBeats = site(CacheBlockBytes)/site(L1toL2Config).beatBytes
List.tabulate(4 * nWays) { i =>
Seq.tabulate(nBeats) { j => BigInt((j * 8) + ((i * nSets) << blockOffset)) }
}.flatten
}
case UseAtomics => true
case CacheName("L1D") => up(CacheName("L1D"), site).copy(nSets = 16, nWays = 1)
})


@ -7,6 +7,7 @@ import config._
import diplomacy._
import coreplex._
import rocket._
import tile._
import uncore.agents._
import uncore.coherence._
import uncore.devices._
@ -31,9 +32,7 @@ class GroundTestCoreplex(implicit p: Parameters) extends BaseCoreplex {
nManagers = site(BankedL2Config).nBanks + 1,
nCachingClients = 1,
nCachelessClients = 1,
maxClientXacts = ((site(DCacheKey).nMSHRs + 1) +:
site(GroundTestKey).map(_.maxXacts))
.reduce(max(_, _)),
maxClientXacts = site(GroundTestKey).map(_.maxXacts).reduce(max(_, _)),
maxClientsPerPort = site(GroundTestKey).map(_.uncached).sum,
maxManagerXacts = 8,
dataBeats = (8 * site(CacheBlockBytes)) / site(XLen),
@ -41,10 +40,7 @@ class GroundTestCoreplex(implicit p: Parameters) extends BaseCoreplex {
}}))
}
tiles.foreach { lm =>
l1tol2.node := lm.cachedOut
l1tol2.node := lm.uncachedOut
}
tiles.foreach { l1tol2.node :=* _.masterNode }
val cbusRAM = LazyModule(new TLRAM(AddressSet(testRamAddr, 0xffff), false, cbus_beatBytes))
cbusRAM.node := TLFragmenter(cbus_beatBytes, cbus_lineBytes)(cbus.node)
@ -52,8 +48,7 @@ class GroundTestCoreplex(implicit p: Parameters) extends BaseCoreplex {
override lazy val module = new GroundTestCoreplexModule(this, () => new GroundTestCoreplexBundle(this))
}
class GroundTestCoreplexBundle[+L <: GroundTestCoreplex](_outer: L) extends BaseCoreplexBundle(_outer)
{
class GroundTestCoreplexBundle[+L <: GroundTestCoreplex](_outer: L) extends BaseCoreplexBundle(_outer) {
val success = Bool(OUTPUT)
}


@ -0,0 +1,57 @@
// See LICENSE.SiFive for license details.
// See LICENSE.Berkeley for license details.
package groundtest
import Chisel._
import config._
import rocket._
import tile._
import util.ParameterizedBundle
class DummyPTW(n: Int)(implicit p: Parameters) extends CoreModule()(p) {
val io = new Bundle {
val requestors = Vec(n, new TLBPTWIO).flip
}
val req_arb = Module(new RRArbiter(new PTWReq, n))
req_arb.io.in <> io.requestors.map(_.req)
req_arb.io.out.ready := Bool(true)
def vpn_to_ppn(vpn: UInt): UInt = vpn(ppnBits - 1, 0)
class QueueChannel extends ParameterizedBundle()(p) {
val ppn = UInt(width = ppnBits)
val chosen = UInt(width = log2Up(n))
}
val s1_ppn = vpn_to_ppn(req_arb.io.out.bits.addr)
val s2_ppn = RegEnable(s1_ppn, req_arb.io.out.valid)
val s2_chosen = RegEnable(req_arb.io.chosen, req_arb.io.out.valid)
val s2_valid = Reg(next = req_arb.io.out.valid)
val s2_resp = Wire(new PTWResp)
s2_resp.pte.ppn := s2_ppn
s2_resp.pte.reserved_for_software := UInt(0)
s2_resp.pte.d := Bool(true)
s2_resp.pte.a := Bool(false)
s2_resp.pte.g := Bool(false)
s2_resp.pte.u := Bool(true)
s2_resp.pte.r := Bool(true)
s2_resp.pte.w := Bool(true)
s2_resp.pte.x := Bool(false)
s2_resp.pte.v := Bool(true)
io.requestors.zipWithIndex.foreach { case (requestor, i) =>
requestor.resp.valid := s2_valid && s2_chosen === UInt(i)
requestor.resp.bits := s2_resp
requestor.status.vm := UInt("b01000")
requestor.status.prv := UInt(PRV.S)
requestor.status.debug := Bool(false)
requestor.status.mprv := Bool(true)
requestor.status.mpp := UInt(0)
requestor.ptbr.asid := UInt(0)
requestor.ptbr.ppn := UInt(0)
requestor.invalidate := Bool(false)
}
}


@ -103,14 +103,13 @@ class IOGetAfterPutBlockRegression(implicit p: Parameters) extends Regression()(
* acknowledge both of them when the first one finished.
* This caused the state to go funky since the next time around it would
* start the put in the middle */
class PutBlockMergeRegression(implicit p: Parameters)
class PutBlockMergeRegression(nSets: Int)(implicit p: Parameters)
extends Regression()(p) with HasTileLinkParameters {
val s_idle :: s_put :: s_wait :: s_done :: Nil = Enum(Bits(), 4)
val state = Reg(init = s_idle)
disableCache()
val nSets = p(CacheName("L2")).nSets
val addr_blocks = Vec(Seq(0, 0, nSets).map(num => UInt(num + memStartBlock)))
val nSteps = addr_blocks.size
val (acq_beat, acq_done) = Counter(io.mem.acquire.fire(), tlDataBeats)
@ -391,12 +390,9 @@ class PrefetchHitRegression(implicit p: Parameters) extends Regression()(p) {
* set. This assumes that there is only a single cache bank. If we want to
* test multibank configurations, we'll have to think of some other way to
* determine which banks are conflicting */
class WritebackRegression(implicit p: Parameters) extends Regression()(p) {
class WritebackRegression(nSets: Int, nWays: Int)(implicit p: Parameters) extends Regression()(p) {
disableCache()
val nSets = p(CacheName("L2")).nSets
val nWays = p(CacheName("L2")).nWays
val addr_blocks = Vec.tabulate(nWays + 1) { i => UInt(memStartBlock + i * nSets) }
val data = Vec.tabulate(nWays + 1) { i => UInt((i + 1) * 1423) }
@ -443,11 +439,9 @@ class WritebackRegression(implicit p: Parameters) extends Regression()(p) {
io.errored := data_mismatch
}
class ReleaseRegression(implicit p: Parameters) extends Regression()(p) {
class ReleaseRegression(nSets: Int, nWays: Int)(implicit p: Parameters) extends Regression()(p) {
disableMem()
val nSets = p(CacheName("L1D")).nSets
val nWays = p(CacheName("L1D")).nWays
val blockOffset = p(CacheBlockOffsetBits)
val startBlock = memStartBlock + 10
@ -530,12 +524,9 @@ class PutBeforePutBlockRegression(implicit p: Parameters) extends Regression()(p
* Make sure that multiple gets to the same line and beat are merged
* correctly, even if it is a cache miss.
*/
class MergedGetRegression(implicit p: Parameters) extends Regression()(p) {
class MergedGetRegression(nSets: Int, nWays: Int)(implicit p: Parameters) extends Regression()(p) {
disableCache()
val nSets = p(CacheName("L2")).nSets
val nWays = p(CacheName("L2")).nWays
val (s_idle :: s_put :: s_get :: s_done :: Nil) = Enum(Bits(), 4)
val state = Reg(init = s_idle)
@ -697,23 +688,27 @@ class PutAfterReleaseRegression(implicit p: Parameters) extends Regression()(p)
}
object RegressionTests {
val l1sets = 16 // TODO
val l1ways = 1 // TODO
val l2sets = 32 // TODO
val l2ways = 2 // TODO
def cacheRegressions(implicit p: Parameters) = Seq(
Module(new PutBlockMergeRegression),
Module(new PutBlockMergeRegression(l2sets)),
Module(new NoAllocPutHitRegression),
Module(new RepeatedNoAllocPutRegression),
Module(new WriteMaskedPutBlockRegression),
Module(new PrefetchHitRegression),
Module(new WritebackRegression),
Module(new WritebackRegression(l2sets, l2ways)),
Module(new PutBeforePutBlockRegression),
Module(new MixedAllocPutRegression),
Module(new ReleaseRegression),
Module(new MergedGetRegression),
Module(new ReleaseRegression(l1sets, l1ways)),
Module(new MergedGetRegression(l2sets, l2ways)),
Module(new MergedPutRegression))
def broadcastRegressions(implicit p: Parameters) = Seq(
Module(new IOGetAfterPutBlockRegression),
Module(new WriteMaskedPutBlockRegression),
Module(new PutBeforePutBlockRegression),
Module(new ReleaseRegression),
Module(new ReleaseRegression(l1sets, l1ways)),
Module(new PutAfterReleaseRegression))
}


@ -4,23 +4,33 @@
package groundtest
import Chisel._
import coreplex.BareTile
import config._
import coreplex._
import rocket._
import tile._
import uncore.tilelink._
import uncore.util.CacheName
import uncore.tilelink2._
import rocketchip.ExtMem
import diplomacy._
import scala.util.Random
import scala.collection.mutable.ListBuffer
import util.ParameterizedBundle
import config._
import scala.collection.mutable.ListBuffer
case object BuildGroundTest extends Field[Parameters => GroundTest]
case class GroundTestTileSettings(
uncached: Int = 0, cached: Int = 0, ptw: Int = 0, maxXacts: Int = 1)
case object GroundTestKey extends Field[Seq[GroundTestTileSettings]]
case class GroundTestTileParams(
uncached: Int = 0,
ptw: Int = 0,
maxXacts: Int = 1,
dcache: Option[DCacheParams] = Some(DCacheParams())) extends TileParams {
val icache = None
val btb = None
val rocc = Nil
val core = rocket.RocketCoreParams() //TODO remove this
val cached = if(dcache.isDefined) 1 else 0
val dataScratchpadBytes = 0
}
case object GroundTestKey extends Field[Seq[GroundTestTileParams]]
trait HasGroundTestConstants {
val timeoutCodeBits = 4
@ -29,61 +39,14 @@ trait HasGroundTestConstants {
trait HasGroundTestParameters {
implicit val p: Parameters
val tileSettings = p(GroundTestKey)(p(TileId))
val nUncached = tileSettings.uncached
val nCached = tileSettings.cached
val nPTW = tileSettings.ptw
val tileParams = p(GroundTestKey)(p(TileId))
val nUncached = tileParams.uncached
val nCached = tileParams.cached
val nPTW = tileParams.ptw
val memStart = p(ExtMem).base
val memStartBlock = memStart >> p(CacheBlockOffsetBits)
}
class DummyPTW(n: Int)(implicit p: Parameters) extends CoreModule()(p) {
val io = new Bundle {
val requestors = Vec(n, new TLBPTWIO).flip
}
val req_arb = Module(new RRArbiter(new PTWReq, n))
req_arb.io.in <> io.requestors.map(_.req)
req_arb.io.out.ready := Bool(true)
def vpn_to_ppn(vpn: UInt): UInt = vpn(ppnBits - 1, 0)
class QueueChannel extends ParameterizedBundle()(p) {
val ppn = UInt(width = ppnBits)
val chosen = UInt(width = log2Up(n))
}
val s1_ppn = vpn_to_ppn(req_arb.io.out.bits.addr)
val s2_ppn = RegEnable(s1_ppn, req_arb.io.out.valid)
val s2_chosen = RegEnable(req_arb.io.chosen, req_arb.io.out.valid)
val s2_valid = Reg(next = req_arb.io.out.valid)
val s2_resp = Wire(new PTWResp)
s2_resp.pte.ppn := s2_ppn
s2_resp.pte.reserved_for_software := UInt(0)
s2_resp.pte.d := Bool(true)
s2_resp.pte.a := Bool(false)
s2_resp.pte.g := Bool(false)
s2_resp.pte.u := Bool(true)
s2_resp.pte.r := Bool(true)
s2_resp.pte.w := Bool(true)
s2_resp.pte.x := Bool(false)
s2_resp.pte.v := Bool(true)
io.requestors.zipWithIndex.foreach { case (requestor, i) =>
requestor.resp.valid := s2_valid && s2_chosen === UInt(i)
requestor.resp.bits := s2_resp
requestor.status.vm := UInt("b01000")
requestor.status.prv := UInt(PRV.S)
requestor.status.debug := Bool(false)
requestor.status.mprv := Bool(true)
requestor.status.mpp := UInt(0)
requestor.ptbr.asid := UInt(0)
requestor.ptbr.ppn := UInt(0)
requestor.invalidate := Bool(false)
}
}
class GroundTestStatus extends Bundle with HasGroundTestConstants {
val finished = Bool(OUTPUT)
val timeout = Valid(UInt(width = timeoutCodeBits))
@ -103,38 +66,33 @@ abstract class GroundTest(implicit val p: Parameters) extends Module
val io = new GroundTestIO
}
class GroundTestTile(implicit p: Parameters) extends LazyModule with HasGroundTestParameters {
val dcacheParams = p.alterPartial {
case CacheName => CacheName("L1D")
}
class GroundTestTile(implicit p: Parameters) extends LazyModule
with HasGroundTestParameters {
val slave = None
val dcache = HellaCache(p(DCacheKey))(dcacheParams)
val dcacheOpt = tileParams.dcache.map { dc => HellaCache(dc.nMSHRs == 0) }
val ucLegacy = LazyModule(new TLLegacy)
val cachedOut = TLOutputNode()
val uncachedOut = TLOutputNode()
cachedOut := dcache.node
uncachedOut := TLHintHandler()(ucLegacy.node)
val masterNodes = List(cachedOut, uncachedOut)
val masterNode = TLOutputNode()
dcacheOpt.foreach { masterNode := _.node }
masterNode := TLHintHandler()(ucLegacy.node)
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val cached = cachedOut.bundleOut
val uncached = uncachedOut.bundleOut
val out = masterNode.bundleOut
val success = Bool(OUTPUT)
}
val test = p(BuildGroundTest)(dcacheParams)
val test = p(BuildGroundTest)(p)
val ptwPorts = ListBuffer.empty ++= test.io.ptw
val uncachedArbPorts = ListBuffer.empty ++= test.io.mem
if (nCached > 0) {
val dcacheArb = Module(new HellaCacheArbiter(nCached)(dcacheParams))
dcacheOpt foreach { dcache =>
val dcacheArb = Module(new HellaCacheArbiter(nCached))
dcacheArb.io.requestor.zip(test.io.cache).foreach {
case (requestor, cache) =>
val dcacheIF = Module(new SimpleHellaCacheIF()(dcacheParams))
val dcacheIF = Module(new SimpleHellaCacheIF())
dcacheIF.io.requestor <> cache
requestor <> dcacheIF.io.cache
}
@ -147,7 +105,7 @@ class GroundTestTile(implicit p: Parameters) extends LazyModule with HasGroundTe
}
if (ptwPorts.size > 0) {
val ptw = Module(new DummyPTW(ptwPorts.size)(dcacheParams))
val ptw = Module(new DummyPTW(ptwPorts.size))
ptw.io.requestors <> ptwPorts
}


@ -24,6 +24,7 @@ import uncore.tilelink._
import uncore.constants._
import uncore.devices.NTiles
import rocket._
import tile._
import util.{Timer, DynamicTimer}
import scala.util.Random
import config._
@ -179,7 +180,7 @@ class TagMan(val logNumTags : Int) extends Module {
// ===============
class TraceGenerator(id: Int)
(implicit p: Parameters) extends L1HellaCacheModule()(p)
(implicit val p: Parameters) extends Module
with HasTraceGenParams
with HasGroundTestParameters {
val io = new Bundle {


@ -129,7 +129,7 @@ class UncachedTileLinkGenerator(id: Int)
}
class HellaCacheGenerator(id: Int)
(implicit p: Parameters) extends L1HellaCacheModule()(p) with HasTrafficGeneratorParameters {
(implicit val p: Parameters) extends Module with HasTrafficGeneratorParameters {
val io = new Bundle {
val mem = new HellaCacheIO
val status = new GroundTestStatus


@ -5,6 +5,7 @@ package rocket
import Chisel._
import config._
import tile._
import Instructions._
object ALU


@ -4,23 +4,22 @@
package rocket
import Chisel._
import config._
import util._
import Chisel.ImplicitConversions._
import uncore.util.PseudoLRU
import config._
import tile.HasCoreParameters
import util._
case object BtbKey extends Field[BtbParameters]
case class BtbParameters(
case class BTBParams(
nEntries: Int = 40,
nRAS: Int = 2,
updatesOutOfOrder: Boolean = false)
abstract trait HasBtbParameters extends HasCoreParameters {
val matchBits = pgIdxBits
val entries = p(BtbKey).nEntries
val nRAS = p(BtbKey).nRAS
val updatesOutOfOrder = p(BtbKey).updatesOutOfOrder
trait HasBtbParameters extends HasCoreParameters {
val btbParams = tileParams.btb.getOrElse(BTBParams(nEntries = 0))
val matchBits = pgIdxBits max log2Ceil(p(coreplex.CacheBlockBytes) * tileParams.icache.get.nSets)
val entries = btbParams.nEntries
val nRAS = btbParams.nRAS
val updatesOutOfOrder = btbParams.updatesOutOfOrder
val nPages = ((1 max(log2Up(entries)))+1)/2*2 // control logic assumes 2 divides pages
val opaqueBits = log2Up(entries)
val nBHT = 1 << log2Up(entries*2)


@ -3,9 +3,10 @@
package rocket
import Chisel._
import util._
import Chisel.ImplicitConversions._
import config._
import tile._
import util._
class BPControl(implicit p: Parameters) extends CoreBundle()(p) {
val ttype = UInt(width = 4)


@ -6,6 +6,7 @@ package rocket
import Chisel._
import Instructions._
import config._
import tile._
import uncore.devices._
import util._
import Chisel.ImplicitConversions._
@ -60,14 +61,6 @@ class DCSR extends Bundle {
val prv = UInt(width = PRV.SZ)
}
class TileInterrupts(implicit p: Parameters) extends CoreBundle()(p) {
val debug = Bool()
val mtip = Bool()
val msip = Bool()
val meip = Bool()
val seip = usingVM.option(Bool())
}
class MIP extends Bundle {
val rocc = Bool()
val meip = Bool()
@ -127,7 +120,8 @@ object CSR
val nCtr = firstHPM + nHPM
}
class CSRFileIO(implicit p: Parameters) extends CoreBundle {
class CSRFileIO(implicit p: Parameters) extends CoreBundle
with HasRocketCoreParameters {
val interrupts = new TileInterrupts().asInput
val hartid = UInt(INPUT, xLen)
val rw = new Bundle {
@ -163,7 +157,7 @@ class CSRFileIO(implicit p: Parameters) extends CoreBundle {
}
class CSRFile(implicit p: Parameters) extends CoreModule()(p)
{
with HasRocketCoreParameters {
val io = new CSRFileIO
val reset_mstatus = Wire(init=new MStatus().fromBits(0))
@ -227,7 +221,7 @@ class CSRFile(implicit p: Parameters) extends CoreModule()(p)
val reg_mbadaddr = Reg(UInt(width = vaddrBitsExtended))
val reg_mscratch = Reg(Bits(width = xLen))
val mtvecWidth = paddrBits min xLen
val reg_mtvec = p(MtvecInit) match {
val reg_mtvec = mtvecInit match {
case Some(addr) => Reg(init=UInt(addr, mtvecWidth))
case None => Reg(UInt(width = mtvecWidth))
}
@ -547,7 +541,7 @@ class CSRFile(implicit p: Parameters) extends CoreModule()(p)
when (decoded_addr(CSRs.mie)) { reg_mie := wdata & supported_interrupts }
when (decoded_addr(CSRs.mepc)) { reg_mepc := formEPC(wdata) }
when (decoded_addr(CSRs.mscratch)) { reg_mscratch := wdata }
if (p(MtvecWritable))
if (mtvecWritable)
when (decoded_addr(CSRs.mtvec)) { reg_mtvec := wdata >> 2 << 2 }
when (decoded_addr(CSRs.mcause)) { reg_mcause := wdata & UInt((BigInt(1) << (xLen-1)) + 31) /* only implement 5 LSBs and MSB */ }
when (decoded_addr(CSRs.mbadaddr)) { reg_mbadaddr := wdata(vaddrBitsExtended-1,0) }


@ -1,93 +0,0 @@
// See LICENSE.SiFive for license details.
package rocket
import Chisel._
import config._
import uncore.tilelink2.TLEdgeOut
import uncore.util.{CacheName, CacheBlockBytes}
import util._
case object BuildCore extends Field[(RocketConfig, Parameters) => CoreModule with HasCoreIO]
case object SharedMemoryTLEdge extends Field[TLEdgeOut]
trait HasCoreParameters {
implicit val p: Parameters
val xLen = p(XLen)
val fLen = xLen // TODO relax this
val usingVM = p(UseVM)
val usingUser = p(UseUser) || usingVM
val usingDebug = p(UseDebug)
val usingMulDiv = p(MulDivKey).nonEmpty
val usingFPU = p(FPUKey).nonEmpty
val usingAtomics = p(UseAtomics)
val usingCompressed = p(UseCompressed)
val usingRoCC = !p(BuildRoCC).isEmpty
val fastLoadWord = p(FastLoadWord)
val fastLoadByte = p(FastLoadByte)
val fastJAL = p(FastJAL)
val nBreakpoints = p(NBreakpoints)
val nPerfCounters = p(NPerfCounters)
val nPerfEvents = p(NPerfEvents)
val usingDataScratchpad = p(DataScratchpadSize) > 0
val retireWidth = p(RetireWidth)
val fetchWidth = p(FetchWidth)
val coreInstBits = p(CoreInstBits)
val coreInstBytes = coreInstBits/8
val coreDataBits = xLen
val coreDataBytes = coreDataBits/8
val dcacheArbPorts = 1 + usingVM.toInt + usingDataScratchpad.toInt + p(BuildRoCC).size
val coreDCacheReqTagBits = 6
val dcacheReqTagBits = coreDCacheReqTagBits + log2Ceil(dcacheArbPorts)
def pgIdxBits = 12
def pgLevelBits = 10 - log2Ceil(xLen / 32)
def vaddrBits = pgIdxBits + pgLevels * pgLevelBits
val paddrBits = p(PAddrBits)
def ppnBits = paddrBits - pgIdxBits
def vpnBits = vaddrBits - pgIdxBits
val pgLevels = p(PgLevels)
val asIdBits = p(ASIdBits)
val vpnBitsExtended = vpnBits + (vaddrBits < xLen).toInt
val vaddrBitsExtended = vpnBitsExtended + pgIdxBits
val coreMaxAddrBits = paddrBits max vaddrBitsExtended
val nCustomMrwCsrs = p(NCustomMRWCSRs)
// fetchWidth doubled, but coreInstBytes halved, for RVC
val decodeWidth = fetchWidth / (if (usingCompressed) 2 else 1)
// Print out log of committed instructions and their writeback values.
// Requires post-processing due to out-of-order writebacks.
val enableCommitLog = false
val maxPAddrBits = xLen match {
case 32 => 34
case 64 => 50
}
require(paddrBits <= maxPAddrBits)
require(!fastLoadByte || fastLoadWord)
}
abstract class CoreModule(implicit val p: Parameters) extends Module
with HasCoreParameters
abstract class CoreBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
with HasCoreParameters
trait HasCoreIO {
implicit val p: Parameters
val io = new Bundle {
val interrupts = new TileInterrupts().asInput
val hartid = UInt(INPUT, p(XLen))
val imem = new FrontendIO()(p.alterPartial({case CacheName => CacheName("L1I") }))
val dmem = new HellaCacheIO()(p.alterPartial({ case CacheName => CacheName("L1D") }))
val ptw = new DatapathPTWIO().flip
val fpu = new FPUCoreIO().flip
val rocc = new RoCCCoreIO().flip
}
}


@ -4,13 +4,13 @@ package rocket
import Chisel._
import Chisel.ImplicitConversions._
import config._
import diplomacy._
import uncore.constants._
import uncore.tilelink2._
import uncore.util._
import util._
import TLMessages._
import config._
class DCacheDataReq(implicit p: Parameters) extends L1HellaCacheBundle()(p) {
val addr = Bits(width = untagBits)
@ -38,22 +38,19 @@ class DCacheDataArray(implicit p: Parameters) extends L1HellaCacheModule()(p) {
}
}
class DCache(cfg: DCacheConfig, val scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) extends HellaCache(cfg)(p) {
class DCache(val scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) extends HellaCache()(p) {
override lazy val module = new DCacheModule(this)
}
class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
val maxUncachedInFlight = cfg.nMMIOs
require(rowBits == encRowBits) // no ECC
val grantackq = Module(new Queue(tl_out.e.bits,1)) // TODO don't need this in scratchpad mode
// tags
val replacer = p(Replacer)()
val replacer = cacheParams.replacement
def onReset = L1Metadata(UInt(0), ClientMetadata.onReset)
val metaReadArb = Module(new Arbiter(new MetaReadReq, 3))
val metaReadArb = Module(new Arbiter(new L1MetaReadReq, 3))
val metaWriteArb = Module(new Arbiter(new L1MetaWriteReq, 3))
// data
@ -104,7 +101,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
when (!metaReadArb.io.in(2).ready) { io.cpu.req.ready := false }
// address translation
val tlb = Module(new TLB)
val tlb = Module(new TLB(nTLBEntries))
io.ptw <> tlb.io.ptw
tlb.io.req.valid := s1_valid_masked && s1_readwrite
tlb.io.req.bits.passthrough := s1_req.phys
@ -126,7 +123,7 @@ class DCacheModule(outer: DCache) extends HellaCacheModule(outer) {
val hitState = Mux(inScratchpad, ClientMetadata.maximum, ClientMetadata.onReset)
(inScratchpad, hitState, L1Metadata(UInt(0), ClientMetadata.onReset))
} else {
val meta = Module(new MetadataArray(onReset _))
val meta = Module(new L1MetadataArray(onReset _))
meta.io.read <> metaReadArb.io.out
meta.io.write <> metaWriteArb.io.out
val s1_meta = meta.io.resp


@ -4,13 +4,13 @@
package rocket
import Chisel._
import Chisel.ImplicitConversions._
import config._
import coreplex._
import diplomacy._
import uncore.tilelink2._
import uncore.util.CacheName
import tile._
import util._
import Chisel.ImplicitConversions._
class FrontendReq(implicit p: Parameters) extends CoreBundle()(p) {
val pc = UInt(width = vaddrBitsExtended)
@ -54,12 +54,12 @@ class FrontendBundle(outer: Frontend) extends CoreBundle()(outer.p) {
class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
with HasCoreParameters
with HasL1CacheParameters {
with HasL1ICacheParameters {
val io = new FrontendBundle(outer)
implicit val edge = outer.node.edgesOut(0)
val icache = outer.icache.module
val tlb = Module(new TLB)
val tlb = Module(new TLB(nTLBEntries))
val s1_pc_ = Reg(UInt(width=vaddrBitsExtended))
val s1_pc = ~(~s1_pc_ | (coreInstBytes-1)) // discard PC LSBS (this propagates down the pipeline)
@ -106,7 +106,7 @@ class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
s2_valid := Bool(false)
}
if (p(BtbKey).nEntries > 0) {
if (usingBTB) {
val btb = Module(new BTB)
btb.io.req.valid := false
btb.io.req.bits.addr := s1_pc_
@ -153,21 +153,18 @@ class FrontendModule(outer: Frontend) extends LazyModuleImp(outer)
}
/** Mix-ins for constructing tiles that have an ICache-based pipeline frontend */
trait HasICacheFrontend extends CanHavePTW with TileNetwork {
trait HasICacheFrontend extends CanHavePTW with HasTileLinkMasterPort {
val module: HasICacheFrontendModule
val frontend = LazyModule(new Frontend()(p.alterPartial({
case CacheName => CacheName("L1I")
})))
l1backend.node := frontend.node
val frontend = LazyModule(new Frontend)
masterNode := frontend.node
nPTWPorts += 1
}
trait HasICacheFrontendBundle extends TileNetworkBundle {
trait HasICacheFrontendBundle extends HasTileLinkMasterPortBundle {
val outer: HasICacheFrontend
}
trait HasICacheFrontendModule extends CanHavePTWModule with TileNetworkModule {
trait HasICacheFrontendModule extends CanHavePTWModule with HasTileLinkMasterPortModule {
val outer: HasICacheFrontend
//val io: HasICacheFrontendBundle
ptwPorts += outer.frontend.module.io.ptw
}


@ -7,43 +7,58 @@ import Chisel._
import config.{Parameters, Field}
import coreplex._
import diplomacy._
import tile._
import uncore.constants._
import uncore.tilelink2._
import uncore.util._
import util.ParameterizedBundle
import uncore.util.Code
import util.{ParameterizedBundle, RandomReplacement}
import scala.collection.mutable.ListBuffer
case class DCacheConfig(
case class DCacheParams(
nSets: Int = 64,
nWays: Int = 4,
rowBits: Int = 64,
nTLBEntries: Int = 8,
splitMetadata: Boolean = false,
ecc: Option[Code] = None,
nMSHRs: Int = 1,
nSDQ: Int = 17,
nRPQ: Int = 16,
nMMIOs: Int = 1)
nMMIOs: Int = 1) extends L1CacheParams {
def replacement = new RandomReplacement(nWays)
}
case object DCacheKey extends Field[DCacheConfig]
trait HasL1HellaCacheParameters extends HasL1CacheParameters
with HasCoreParameters {
val cacheParams = tileParams.dcache.get
val cfg = cacheParams
trait HasL1HellaCacheParameters extends HasL1CacheParameters {
val wordBits = xLen // really, xLen max
val wordBytes = wordBits/8
val wordOffBits = log2Up(wordBytes)
val beatBytes = cacheBlockBytes / cacheDataBeats
val beatWords = beatBytes / wordBytes
val beatOffBits = log2Up(beatBytes)
val idxMSB = untagBits-1
val idxLSB = blockOffBits
val offsetmsb = idxLSB-1
val offsetlsb = wordOffBits
val rowWords = rowBits/wordBits
val doNarrowRead = coreDataBits * nWays % rowBits == 0
val encDataBits = code.width(coreDataBits)
val encRowBits = encDataBits*rowWords
val nIOMSHRs = 1
val lrscCycles = 32 // ISA requires 16-insn LRSC sequences to succeed
def wordBits = xLen // really, xLen max
def wordBytes = wordBits/8
def wordOffBits = log2Up(wordBytes)
def beatBytes = cacheBlockBytes / cacheDataBeats
def beatWords = beatBytes / wordBytes
def beatOffBits = log2Up(beatBytes)
def idxMSB = untagBits-1
def idxLSB = blockOffBits
def offsetmsb = idxLSB-1
def offsetlsb = wordOffBits
def rowWords = rowBits/wordBits
def doNarrowRead = coreDataBits * nWays % rowBits == 0
def encDataBits = code.width(coreDataBits)
def encRowBits = encDataBits*rowWords
def lrscCycles = 32 // ISA requires 16-insn LRSC sequences to succeed
def nIOMSHRs = cacheParams.nMMIOs
def maxUncachedInFlight = cacheParams.nMMIOs
def dataScratchpadSize = tileParams.dataScratchpadBytes
require(isPow2(nSets))
require(rowBits >= coreDataBits)
require(rowBits == cacheDataBits) // TODO should rowBits even be seperably specifiable?
require(xLen <= cacheDataBits) // would need offset addr for puts if data width < xlen
require(!usingVM || untagBits <= pgIdxBits)
require(isPow2(nSets), s"nSets($nSets) must be pow2")
require(rowBits >= coreDataBits, s"rowBits($rowBits) < coreDataBits($coreDataBits)")
// TODO should rowBits even be seperably specifiable?
require(rowBits == cacheDataBits, s"rowBits($rowBits) != cacheDataBits($cacheDataBits)")
// would need offset addr for puts if data width < xlen
require(xLen <= cacheDataBits, s"xLen($xLen) > cacheDataBits($cacheDataBits)")
require(!usingVM || untagBits <= pgIdxBits, s"untagBits($untagBits) > pgIdxBits($pgIdxBits)")
}
abstract class L1HellaCacheModule(implicit val p: Parameters) extends Module
@ -52,25 +67,7 @@ abstract class L1HellaCacheModule(implicit val p: Parameters) extends Module
abstract class L1HellaCacheBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
with HasL1HellaCacheParameters
class L1Metadata(implicit p: Parameters) extends Metadata()(p) with HasL1HellaCacheParameters {
val coh = new ClientMetadata
}
object L1Metadata {
def apply(tag: Bits, coh: ClientMetadata)(implicit p: Parameters) = {
val meta = Wire(new L1Metadata)
meta.tag := tag
meta.coh := coh
meta
}
}
class L1MetaReadReq(implicit p: Parameters) extends MetaReadReq {
val tag = Bits(width = tagBits)
override def cloneType = new L1MetaReadReq()(p).asInstanceOf[this.type] //TODO remove
}
class L1MetaWriteReq(implicit p: Parameters) extends
MetaWriteReq[L1Metadata](new L1Metadata)
/** Bundle definitions for HellaCache interfaces */
trait HasCoreMemOp extends HasCoreParameters {
val addr = UInt(width = coreMaxAddrBits)
@ -83,6 +80,10 @@ trait HasCoreData extends HasCoreParameters {
val data = Bits(width = coreDataBits)
}
class HellaCacheReqInternal(implicit p: Parameters) extends CoreBundle()(p) with HasCoreMemOp {
val phys = Bool()
}
class HellaCacheReq(implicit p: Parameters) extends HellaCacheReqInternal()(p) with HasCoreData
class HellaCacheResp(implicit p: Parameters) extends CoreBundle()(p)
@ -104,7 +105,6 @@ class HellaCacheExceptions extends Bundle {
val pf = new AlignmentExceptions
}
// interface between D$ and processor/DTLB
class HellaCacheIO(implicit p: Parameters) extends CoreBundle()(p) {
val req = Decoupled(new HellaCacheReq)
@ -119,10 +119,13 @@ class HellaCacheIO(implicit p: Parameters) extends CoreBundle()(p) {
val ordered = Bool(INPUT)
}
abstract class HellaCache(val cfg: DCacheConfig)(implicit p: Parameters) extends LazyModule {
/** Base classes for Diplomatic TL2 HellaCaches */
abstract class HellaCache(implicit p: Parameters) extends LazyModule {
private val cfg = p(TileKey).dcache.get
val node = TLClientNode(TLClientParameters(
sourceId = IdRange(0, cfg.nMSHRs + cfg.nMMIOs),
supportsProbe = TransferSizes(p(CacheBlockBytes))))
supportsProbe = TransferSizes(1, p(CacheBlockBytes))))
val module: HellaCacheModule
}
@ -135,38 +138,101 @@ class HellaCacheBundle(outer: HellaCache) extends Bundle {
class HellaCacheModule(outer: HellaCache) extends LazyModuleImp(outer)
with HasL1HellaCacheParameters {
implicit val cfg = outer.cfg
implicit val edge = outer.node.edgesOut(0)
val io = new HellaCacheBundle(outer)
val tl_out = io.mem(0)
}
object HellaCache {
def apply(cfg: DCacheConfig, scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) = {
if (cfg.nMSHRs == 0) LazyModule(new DCache(cfg, scratch))
else LazyModule(new NonBlockingDCache(cfg))
def apply(blocking: Boolean, scratch: () => Option[AddressSet] = () => None)(implicit p: Parameters) = {
if (blocking) LazyModule(new DCache(scratch))
else LazyModule(new NonBlockingDCache)
}
}
/** Mix-ins for constructing tiles that have a HellaCache */
trait HasHellaCache extends TileNetwork {
trait HasHellaCache extends HasTileLinkMasterPort {
val module: HasHellaCacheModule
implicit val p: Parameters
def findScratchpadFromICache: Option[AddressSet]
var nDCachePorts = 0
val dcacheParams = p.alterPartial({ case CacheName => CacheName("L1D") })
val dcache = HellaCache(p(DCacheKey), findScratchpadFromICache _)(dcacheParams)
l1backend.node := dcache.node
val dcache = HellaCache(tileParams.dcache.get.nMSHRs == 0, findScratchpadFromICache _)
masterNode := dcache.node
}
trait HasHellaCacheBundle extends TileNetworkBundle {
trait HasHellaCacheBundle extends HasTileLinkMasterPortBundle {
val outer: HasHellaCache
}
trait HasHellaCacheModule extends TileNetworkModule {
trait HasHellaCacheModule extends HasTileLinkMasterPortModule {
val outer: HasHellaCache
//val io: HasHellaCacheBundle
val dcachePorts = ListBuffer[HellaCacheIO]()
val dcacheArb = Module(new HellaCacheArbiter(outer.nDCachePorts)(outer.dcacheParams))
val dcacheArb = Module(new HellaCacheArbiter(outer.nDCachePorts)(outer.p))
outer.dcache.module.io.cpu <> dcacheArb.io.mem
}
/** Metadata array used for all HellaCaches */
class L1Metadata(implicit p: Parameters) extends L1HellaCacheBundle()(p) {
val coh = new ClientMetadata
val tag = UInt(width = tagBits)
}
object L1Metadata {
def apply(tag: Bits, coh: ClientMetadata)(implicit p: Parameters) = {
val meta = Wire(new L1Metadata)
meta.tag := tag
meta.coh := coh
meta
}
}
class L1MetaReadReq(implicit p: Parameters) extends L1HellaCacheBundle()(p) {
val idx = UInt(width = idxBits)
val way_en = UInt(width = nWays)
val tag = UInt(width = tagBits)
}
class L1MetaWriteReq(implicit p: Parameters) extends L1MetaReadReq()(p) {
val data = new L1Metadata
}
class L1MetadataArray[T <: L1Metadata](onReset: () => T)(implicit p: Parameters) extends L1HellaCacheModule()(p) {
val rstVal = onReset()
val io = new Bundle {
val read = Decoupled(new L1MetaReadReq).flip
val write = Decoupled(new L1MetaWriteReq).flip
val resp = Vec(nWays, rstVal.cloneType).asOutput
}
val rst_cnt = Reg(init=UInt(0, log2Up(nSets+1)))
val rst = rst_cnt < UInt(nSets)
val waddr = Mux(rst, rst_cnt, io.write.bits.idx)
val wdata = Mux(rst, rstVal, io.write.bits.data).asUInt
val wmask = Mux(rst || Bool(nWays == 1), SInt(-1), io.write.bits.way_en.asSInt).toBools
val rmask = Mux(rst || Bool(nWays == 1), SInt(-1), io.read.bits.way_en.asSInt).toBools
when (rst) { rst_cnt := rst_cnt+UInt(1) }
val metabits = rstVal.getWidth
if (hasSplitMetadata) {
val tag_arrs = List.fill(nWays){ SeqMem(nSets, UInt(width = metabits)) }
val tag_readout = Wire(Vec(nWays,rstVal.cloneType))
(0 until nWays).foreach { (i) =>
when (rst || (io.write.valid && wmask(i))) {
tag_arrs(i).write(waddr, wdata)
}
io.resp(i) := rstVal.fromBits(tag_arrs(i).read(io.read.bits.idx, io.read.valid && rmask(i)))
}
} else {
val tag_arr = SeqMem(nSets, Vec(nWays, UInt(width = metabits)))
when (rst || io.write.valid) {
tag_arr.write(waddr, Vec.fill(nWays)(wdata), wmask)
}
io.resp := tag_arr.read(io.read.bits.idx, io.read.valid).map(rstVal.fromBits(_))
}
io.read.ready := !rst && !io.write.valid // so really this could be a 6T RAM
io.write.ready := !rst
}


@ -3,9 +3,10 @@
package rocket
import Chisel._
import util._
import Chisel.ImplicitConversions._
import config._
import tile._
import util._
class Instruction(implicit val p: Parameters) extends ParameterizedBundle with HasCoreParameters {
val pf0 = Bool() // page fault on first half of instruction


@ -6,25 +6,32 @@ package rocket
import Chisel._
import config._
import diplomacy._
import uncore.agents._
import tile._
import uncore.tilelink2._
import uncore.util._
import uncore.util.Code
import util._
import Chisel.ImplicitConversions._
trait HasL1CacheParameters extends HasCacheParameters with HasCoreParameters {
val cacheBlockBytes = p(CacheBlockBytes)
val lgCacheBlockBytes = log2Up(cacheBlockBytes)
val cacheDataBits = p(SharedMemoryTLEdge).bundle.dataBits
val cacheDataBeats = (cacheBlockBytes * 8) / cacheDataBits
val refillCycles = cacheDataBeats
case class ICacheParams(
nSets: Int = 64,
nWays: Int = 4,
rowBits: Int = 128,
nTLBEntries: Int = 8,
cacheIdBits: Int = 0,
splitMetadata: Boolean = false,
ecc: Option[Code] = None) extends L1CacheParams {
def replacement = new RandomReplacement(nWays)
}
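// Hedged sketch, not part of this commit: with the new [ModuleName]Params convention, a
// trimmed-down per-tile I$ could be described by overriding the defaults above. The value
// name below is hypothetical; the fields are those of ICacheParams.
val littleICacheParams = ICacheParams(nSets = 64, nWays = 1, nTLBEntries = 4)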
class ICacheReq(implicit p: Parameters) extends CoreBundle()(p) with HasL1CacheParameters {
trait HasL1ICacheParameters extends HasL1CacheParameters with HasCoreParameters {
val cacheParams = tileParams.icache.get
}
class ICacheReq(implicit p: Parameters) extends CoreBundle()(p) with HasL1ICacheParameters {
val addr = UInt(width = vaddrBits)
}
class ICacheResp(implicit p: Parameters) extends CoreBundle()(p) with HasL1CacheParameters {
class ICacheResp(implicit p: Parameters) extends CoreBundle()(p) with HasL1ICacheParameters {
val data = Bits(width = coreInstBits)
val datablock = Bits(width = rowBits)
}
@ -46,8 +53,7 @@ class ICacheBundle(outer: ICache) extends CoreBundle()(outer.p) {
}
class ICacheModule(outer: ICache) extends LazyModuleImp(outer)
with HasCoreParameters
with HasL1CacheParameters {
with HasL1ICacheParameters {
val io = new ICacheBundle(outer)
val edge = outer.node.edgesOut(0)
val tl_out = io.mem(0)


@ -4,12 +4,13 @@
package rocket
import Chisel._
import Chisel.ImplicitConversions._
import Instructions._
import uncore.constants.MemoryOpConstants._
import ALU._
import config._
import tile.HasCoreParameters
import util._
import Chisel.ImplicitConversions._
abstract trait DecodeConstants extends HasCoreParameters
{


@ -29,13 +29,13 @@ class MultiplierIO(dataBits: Int, tagBits: Int) extends Bundle {
val resp = Decoupled(new MultiplierResp(dataBits, tagBits))
}
case class MulDivConfig(
case class MulDivParams(
mulUnroll: Int = 1,
mulEarlyOut: Boolean = false,
divEarlyOut: Boolean = false
)
class MulDiv(cfg: MulDivConfig, width: Int, nXpr: Int = 32) extends Module {
class MulDiv(cfg: MulDivParams, width: Int, nXpr: Int = 32) extends Module {
val io = new MultiplierIO(width, log2Up(nXpr))
val w = io.req.bits.in1.getWidth
val mulw = (w + cfg.mulUnroll - 1) / cfg.mulUnroll * cfg.mulUnroll


@ -5,13 +5,12 @@ package rocket
import Chisel._
import Chisel.ImplicitConversions._
import config._
import diplomacy._
import uncore.constants._
import uncore.tilelink._
import uncore.tilelink2._
import uncore.util._
import util._
import config._
trait HasMissInfo extends HasL1HellaCacheParameters {
val tag_match = Bool()
@ -19,11 +18,6 @@ trait HasMissInfo extends HasL1HellaCacheParameters {
val way_en = Bits(width = nWays)
}
class HellaCacheReqInternal(implicit p: Parameters) extends CoreBundle()(p)
with HasCoreMemOp {
val phys = Bool()
}
class L1DataReadReq(implicit p: Parameters) extends L1HellaCacheBundle()(p) {
val way_en = Bits(width = nWays)
val addr = Bits(width = untagBits)
@ -38,18 +32,14 @@ class L1RefillReq(implicit p: Parameters) extends L1DataReadReq()(p)
class Replay(implicit p: Parameters) extends HellaCacheReqInternal()(p) with HasCoreData
class ReplayInternal(cfg: DCacheConfig)(implicit p: Parameters) extends HellaCacheReqInternal()(p) {
class ReplayInternal(implicit p: Parameters) extends HellaCacheReqInternal()(p)
with HasL1HellaCacheParameters {
val sdq_id = UInt(width = log2Up(cfg.nSDQ))
override def cloneType = new ReplayInternal(cfg)(p).asInstanceOf[this.type]
}
class MSHRReq(implicit p: Parameters) extends Replay()(p) with HasMissInfo
class MSHRReqInternal(cfg: DCacheConfig)(implicit p: Parameters)
extends ReplayInternal(cfg)(p) with HasMissInfo {
override def cloneType = new MSHRReqInternal(cfg)(p).asInstanceOf[this.type]
}
class MSHRReqInternal(implicit p: Parameters) extends ReplayInternal()(p) with HasMissInfo
class WritebackReq(params: TLBundleParameters)(implicit p: Parameters) extends L1HellaCacheBundle()(p) {
val tag = Bits(width = tagBits)
@ -145,13 +135,13 @@ class IOMSHR(id: Int)(implicit edge: TLEdgeOut, p: Parameters) extends L1HellaCa
}
}
class MSHR(id: Int)(implicit edge: TLEdgeOut, cfg: DCacheConfig, p: Parameters) extends L1HellaCacheModule()(p) {
class MSHR(id: Int)(implicit edge: TLEdgeOut, p: Parameters) extends L1HellaCacheModule()(p) {
val io = new Bundle {
val req_pri_val = Bool(INPUT)
val req_pri_rdy = Bool(OUTPUT)
val req_sec_val = Bool(INPUT)
val req_sec_rdy = Bool(OUTPUT)
val req_bits = new MSHRReqInternal(cfg).asInput
val req_bits = new MSHRReqInternal().asInput
val idx_match = Bool(OUTPUT)
val tag = Bits(OUTPUT, tagBits)
@ -163,7 +153,7 @@ class MSHR(id: Int)(implicit edge: TLEdgeOut, cfg: DCacheConfig, p: Parameters)
val refill = new L1RefillReq().asOutput // Data is bypassed
val meta_read = Decoupled(new L1MetaReadReq)
val meta_write = Decoupled(new L1MetaWriteReq)
val replay = Decoupled(new ReplayInternal(cfg))
val replay = Decoupled(new ReplayInternal)
val wb_req = Decoupled(new WritebackReq(edge.bundle))
val probe_rdy = Bool(OUTPUT)
}
@ -171,7 +161,7 @@ class MSHR(id: Int)(implicit edge: TLEdgeOut, cfg: DCacheConfig, p: Parameters)
val s_invalid :: s_wb_req :: s_wb_resp :: s_meta_clear :: s_refill_req :: s_refill_resp :: s_meta_write_req :: s_meta_write_resp :: s_drain_rpq :: Nil = Enum(UInt(), 9)
val state = Reg(init=s_invalid)
val req = Reg(new MSHRReqInternal(cfg))
val req = Reg(new MSHRReqInternal)
val req_idx = req.addr(untagBits-1,blockOffBits)
val req_tag = req.addr >> untagBits
val req_block_addr = (req.addr >> blockOffBits) << blockOffBits
@ -195,7 +185,7 @@ class MSHR(id: Int)(implicit edge: TLEdgeOut, cfg: DCacheConfig, p: Parameters)
(state.isOneOf(s_refill_req, s_refill_resp) &&
!cmd_requires_second_acquire && !refill_done))
val rpq = Module(new Queue(new ReplayInternal(cfg), cfg.nRPQ))
val rpq = Module(new Queue(new ReplayInternal, cfg.nRPQ))
rpq.io.enq.valid := (io.req_pri_val && io.req_pri_rdy || io.req_sec_val && sec_rdy) && !isPrefetch(io.req_bits.cmd)
rpq.io.enq.bits := io.req_bits
rpq.io.deq.ready := (io.replay.ready && state === s_drain_rpq) || state === s_invalid
@ -310,7 +300,7 @@ class MSHR(id: Int)(implicit edge: TLEdgeOut, cfg: DCacheConfig, p: Parameters)
}
}
class MSHRFile(implicit edge: TLEdgeOut, cfg: DCacheConfig, p: Parameters) extends L1HellaCacheModule()(p) {
class MSHRFile(implicit edge: TLEdgeOut, p: Parameters) extends L1HellaCacheModule()(p) {
val io = new Bundle {
val req = Decoupled(new MSHRReq).flip
val resp = Decoupled(new HellaCacheResp)
@ -350,7 +340,7 @@ class MSHRFile(implicit edge: TLEdgeOut, cfg: DCacheConfig, p: Parameters) exten
val meta_read_arb = Module(new Arbiter(new L1MetaReadReq, cfg.nMSHRs))
val meta_write_arb = Module(new Arbiter(new L1MetaWriteReq, cfg.nMSHRs))
val wb_req_arb = Module(new Arbiter(new WritebackReq(edge.bundle), cfg.nMSHRs))
val replay_arb = Module(new Arbiter(new ReplayInternal(cfg), cfg.nMSHRs))
val replay_arb = Module(new Arbiter(new ReplayInternal, cfg.nMSHRs))
val alloc_arb = Module(new Arbiter(Bool(), cfg.nMSHRs))
var idx_match = Bool(false)
@ -671,14 +661,14 @@ class DataArray(implicit p: Parameters) extends L1HellaCacheModule()(p) {
io.write.ready := Bool(true)
}
class NonBlockingDCache(cfg: DCacheConfig)(implicit p: Parameters) extends HellaCache(cfg)(p) {
class NonBlockingDCache(implicit p: Parameters) extends HellaCache()(p) {
override lazy val module = new NonBlockingDCacheModule(this)
}
class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule(outer) {
require(isPow2(nWays)) // TODO: relax this
require(p(DataScratchpadSize) == 0)
require(dataScratchpadSize == 0)
val wb = Module(new WritebackUnit)
val prober = Module(new ProbeUnit)
@ -706,7 +696,7 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule
val s1_write = isWrite(s1_req.cmd)
val s1_readwrite = s1_read || s1_write || isPrefetch(s1_req.cmd)
val dtlb = Module(new TLB)
val dtlb = Module(new TLB(nTLBEntries))
io.ptw <> dtlb.io.ptw
dtlb.io.req.valid := s1_valid_masked && s1_readwrite
dtlb.io.req.bits.passthrough := s1_req.phys
@ -754,8 +744,8 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule
// tags
def onReset = L1Metadata(UInt(0), ClientMetadata.onReset)
val meta = Module(new MetadataArray(onReset _))
val metaReadArb = Module(new Arbiter(new MetaReadReq, 5))
val meta = Module(new L1MetadataArray(onReset _))
val metaReadArb = Module(new Arbiter(new L1MetaReadReq, 5))
val metaWriteArb = Module(new Arbiter(new L1MetaWriteReq, 2))
meta.io.read <> metaReadArb.io.out
meta.io.write <> metaWriteArb.io.out
@ -852,7 +842,7 @@ class NonBlockingDCacheModule(outer: NonBlockingDCache) extends HellaCacheModule
writeArb.io.in(0).bits.way_en := s3_way
// replacement policy
val replacer = p(Replacer)()
val replacer = cacheParams.replacement
val s1_replaced_way_en = UIntToOH(replacer.way)
val s2_replaced_way_en = UIntToOH(RegEnable(replacer.way, s1_clk_en))
val s2_repl_meta = Mux1H(s2_replaced_way_en, wayMap((w: Int) => RegEnable(meta.io.resp(w), s1_clk_en && s1_replaced_way_en(w))).toSeq)


@ -4,11 +4,12 @@
package rocket
import Chisel._
import config._
import uncore.constants._
import uncore.util.PseudoLRU
import util._
import Chisel.ImplicitConversions._
import config._
import tile._
import uncore.constants._
import util._
import scala.collection.mutable.ListBuffer
class PTWReq(implicit p: Parameters) extends CoreBundle()(p) {
@ -224,11 +225,12 @@ trait CanHavePTW extends HasHellaCache {
implicit val p: Parameters
val module: CanHavePTWModule
var nPTWPorts = 1
nDCachePorts += usingPTW.toInt
}
trait CanHavePTWModule extends HasHellaCacheModule {
val outer: CanHavePTW
val ptwPorts = ListBuffer(outer.dcache.module.io.ptw)
val ptwOpt = if (outer.p(UseVM)) { Some(Module(new PTW(outer.nPTWPorts)(outer.p))) } else None
val ptwOpt = if (outer.usingPTW) { Some(Module(new PTW(outer.nPTWPorts)(outer.p))) } else None
ptwOpt foreach { ptw => dcachePorts += ptw.io.mem }
}


@ -4,8 +4,9 @@ package rocket
import Chisel._
import Chisel.ImplicitConversions._
import util._
import config._
import tile._
import util._
class ExpandedInstruction extends Bundle {
val bits = UInt(width = 32)
@ -150,14 +151,14 @@ class RVCDecoder(x: UInt)(implicit p: Parameters) {
}
}
class RVCExpander(implicit p: Parameters) extends Module {
class RVCExpander(implicit val p: Parameters) extends Module with HasCoreParameters {
val io = new Bundle {
val in = UInt(INPUT, 32)
val out = new ExpandedInstruction
val rvc = Bool(OUTPUT)
}
if (p(UseCompressed)) {
if (usingCompressed) {
io.rvc := io.in(1,0) =/= UInt(3)
io.out := new RVCDecoder(io.in).decode
} else {


@ -5,76 +5,57 @@ package rocket
import Chisel._
import config._
import tile._
import uncore.constants._
import util._
import Chisel.ImplicitConversions._
case class RocketConfig(xLen: Int)
// TODO replace some of below fields with above Config
case object XLen extends Field[Int]
case object FetchWidth extends Field[Int]
case object RetireWidth extends Field[Int]
case object FPUKey extends Field[Option[FPUConfig]]
case object MulDivKey extends Field[Option[MulDivConfig]]
case object UseVM extends Field[Boolean]
case object UseUser extends Field[Boolean]
case object UseDebug extends Field[Boolean]
case object UseAtomics extends Field[Boolean]
case object UseCompressed extends Field[Boolean]
case object FastLoadWord extends Field[Boolean]
case object FastLoadByte extends Field[Boolean]
case object FastJAL extends Field[Boolean]
case object CoreInstBits extends Field[Int]
case object NCustomMRWCSRs extends Field[Int]
case object MtvecWritable extends Field[Boolean]
case object MtvecInit extends Field[Option[BigInt]]
case object NBreakpoints extends Field[Int]
case object NPerfCounters extends Field[Int]
case object NPerfEvents extends Field[Int]
case object DataScratchpadSize extends Field[Int]
class RegFile(n: Int, w: Int, zero: Boolean = false) {
private val rf = Mem(n, UInt(width = w))
private def access(addr: UInt) = rf(~addr(log2Up(n)-1,0))
private val reads = collection.mutable.ArrayBuffer[(UInt,UInt)]()
private var canRead = true
def read(addr: UInt) = {
require(canRead)
reads += addr -> Wire(UInt())
reads.last._2 := Mux(Bool(zero) && addr === UInt(0), UInt(0), access(addr))
reads.last._2
}
def write(addr: UInt, data: UInt) = {
canRead = false
when (addr =/= UInt(0)) {
access(addr) := data
for ((raddr, rdata) <- reads)
when (addr === raddr) { rdata := data }
}
}
case class RocketCoreParams(
useVM: Boolean = true,
useUser: Boolean = false,
useDebug: Boolean = true,
useAtomics: Boolean = true,
useCompressed: Boolean = true,
nBreakpoints: Int = 1,
nPerfCounters: Int = 0,
nPerfEvents: Int = 0,
nCustomMRWCSRs: Int = 0,
mtvecInit: Option[BigInt] = Some(BigInt(0)),
mtvecWritable: Boolean = true,
fastLoadWord: Boolean = true,
fastLoadByte: Boolean = false,
fastJAL: Boolean = false,
mulDiv: Option[MulDivParams] = Some(MulDivParams()),
fpu: Option[FPUParams] = Some(FPUParams())
) extends CoreParams {
val fetchWidth: Int = if (useCompressed) 2 else 1
// fetchWidth doubled, but coreInstBytes halved, for RVC:
val decodeWidth: Int = fetchWidth / (if (useCompressed) 2 else 1)
val retireWidth: Int = 1
val instBits: Int = if (useCompressed) 16 else 32
}
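// Hedged sketch, not part of this commit: a reduced "little" core for the heterogeneous
// configs this commit introduces might override the defaults above roughly like this.
// The value name is hypothetical; the fields and their types come from RocketCoreParams.
val littleRocketCoreParams = RocketCoreParams(
  useVM = false,
  fpu = None,
  mulDiv = Some(MulDivParams(mulUnroll = 8)),
  nBreakpoints = 0)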
object ImmGen {
def apply(sel: UInt, inst: UInt) = {
val sign = Mux(sel === IMM_Z, SInt(0), inst(31).asSInt)
val b30_20 = Mux(sel === IMM_U, inst(30,20).asSInt, sign)
val b19_12 = Mux(sel =/= IMM_U && sel =/= IMM_UJ, sign, inst(19,12).asSInt)
val b11 = Mux(sel === IMM_U || sel === IMM_Z, SInt(0),
Mux(sel === IMM_UJ, inst(20).asSInt,
Mux(sel === IMM_SB, inst(7).asSInt, sign)))
val b10_5 = Mux(sel === IMM_U || sel === IMM_Z, Bits(0), inst(30,25))
val b4_1 = Mux(sel === IMM_U, Bits(0),
Mux(sel === IMM_S || sel === IMM_SB, inst(11,8),
Mux(sel === IMM_Z, inst(19,16), inst(24,21))))
val b0 = Mux(sel === IMM_S, inst(7),
Mux(sel === IMM_I, inst(20),
Mux(sel === IMM_Z, inst(15), Bits(0))))
trait HasRocketCoreParameters extends HasCoreParameters {
val rocketParams: RocketCoreParams = tileParams.core.asInstanceOf[RocketCoreParams]
Cat(sign, b30_20, b19_12, b11, b10_5, b4_1, b0).asSInt
}
val fastLoadWord = rocketParams.fastLoadWord
val fastLoadByte = rocketParams.fastLoadByte
val fastJAL = rocketParams.fastJAL
val nBreakpoints = rocketParams.nBreakpoints
val nPerfCounters = rocketParams.nPerfCounters
val nPerfEvents = rocketParams.nPerfEvents
val nCustomMrwCsrs = rocketParams.nCustomMRWCSRs
val mtvecInit = rocketParams.mtvecInit
val mtvecWritable = rocketParams.mtvecWritable
val mulDivParams = rocketParams.mulDiv.getOrElse(MulDivParams()) // TODO ask andrew about this
require(!fastLoadByte || fastLoadWord)
}
class Rocket(val c: RocketConfig)(implicit p: Parameters) extends CoreModule()(p) with HasCoreIO {
class Rocket(implicit p: Parameters) extends CoreModule()(p)
with HasRocketCoreParameters
with HasCoreIO {
val decode_table = {
(if (usingMulDiv) new MDecode +: (xLen > 32).option(new M64Decode).toSeq else Nil) ++:
@ -243,7 +224,7 @@ class Rocket(val c: RocketConfig)(implicit p: Parameters) extends CoreModule()(p
alu.io.in1 := ex_op1.asUInt
// multiplier and divider
val div = Module(new MulDiv(p(MulDivKey).getOrElse(MulDivConfig()), width = xLen))
val div = Module(new MulDiv(mulDivParams, width = xLen))
div.io.req.valid := ex_reg_valid && ex_ctrl.div
div.io.req.bits.dw := ex_ctrl.alu_dw
div.io.req.bits.fn := ex_ctrl.alu_fn
@ -324,9 +305,7 @@ class Rocket(val c: RocketConfig)(implicit p: Parameters) extends CoreModule()(p
val mem_int_wdata = Mux(!mem_reg_xcpt && (mem_ctrl.jalr ^ mem_npc_misaligned), mem_br_target, mem_reg_wdata.asSInt).asUInt
val mem_cfi = mem_ctrl.branch || mem_ctrl.jalr || mem_ctrl.jal
val mem_cfi_taken = (mem_ctrl.branch && mem_br_taken) || mem_ctrl.jalr || (Bool(!fastJAL) && mem_ctrl.jal)
val mem_misprediction =
if (p(BtbKey).nEntries == 0) mem_cfi_taken
else mem_wrong_npc
val mem_misprediction = if (usingBTB) mem_wrong_npc else mem_cfi_taken
take_pc_mem := mem_reg_valid && (mem_misprediction || mem_reg_flush_pipe)
mem_reg_valid := !ctrl_killx
@ -652,3 +631,44 @@ class Rocket(val c: RocketConfig)(implicit p: Parameters) extends CoreModule()(p
}
}
}
class RegFile(n: Int, w: Int, zero: Boolean = false) {
private val rf = Mem(n, UInt(width = w))
private def access(addr: UInt) = rf(~addr(log2Up(n)-1,0))
private val reads = collection.mutable.ArrayBuffer[(UInt,UInt)]()
private var canRead = true
def read(addr: UInt) = {
require(canRead)
reads += addr -> Wire(UInt())
reads.last._2 := Mux(Bool(zero) && addr === UInt(0), UInt(0), access(addr))
reads.last._2
}
def write(addr: UInt, data: UInt) = {
canRead = false
when (addr =/= UInt(0)) {
access(addr) := data
for ((raddr, rdata) <- reads)
when (addr === raddr) { rdata := data }
}
}
}
object ImmGen {
def apply(sel: UInt, inst: UInt) = {
val sign = Mux(sel === IMM_Z, SInt(0), inst(31).asSInt)
val b30_20 = Mux(sel === IMM_U, inst(30,20).asSInt, sign)
val b19_12 = Mux(sel =/= IMM_U && sel =/= IMM_UJ, sign, inst(19,12).asSInt)
val b11 = Mux(sel === IMM_U || sel === IMM_Z, SInt(0),
Mux(sel === IMM_UJ, inst(20).asSInt,
Mux(sel === IMM_SB, inst(7).asSInt, sign)))
val b10_5 = Mux(sel === IMM_U || sel === IMM_Z, Bits(0), inst(30,25))
val b4_1 = Mux(sel === IMM_U, Bits(0),
Mux(sel === IMM_S || sel === IMM_SB, inst(11,8),
Mux(sel === IMM_Z, inst(19,16), inst(24,21))))
val b0 = Mux(sel === IMM_S, inst(7),
Mux(sel === IMM_I, inst(20),
Mux(sel === IMM_Z, inst(15), Bits(0))))
Cat(sign, b30_20, b19_12, b11, b10_5, b4_1, b0).asSInt
}
}


@ -4,22 +4,24 @@ package rocket
import Chisel._
import Chisel.ImplicitConversions._
import junctions._
import diplomacy._
import config._
import coreplex.CacheBlockBytes
import diplomacy._
import tile._
import uncore.constants._
import uncore.tilelink2._
import uncore.util._
import util._
class ScratchpadSlavePort(implicit p: Parameters) extends LazyModule {
val coreDataBytes = p(XLen)/8
class ScratchpadSlavePort(sizeBytes: Int)(implicit p: Parameters) extends LazyModule
with HasCoreParameters {
val node = TLManagerNode(Seq(TLManagerPortParameters(
Seq(TLManagerParameters(
address = List(AddressSet(0x80000000L, BigInt(p(DataScratchpadSize)-1))),
address = List(AddressSet(0x80000000L, BigInt(sizeBytes-1))),
regionType = RegionType.UNCACHED,
executable = true,
supportsArithmetic = if (p(UseAtomics)) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsLogical = if (p(UseAtomics)) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsArithmetic = if (usingAtomics) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsLogical = if (usingAtomics) TransferSizes(1, coreDataBytes) else TransferSizes.none,
supportsPutPartial = TransferSizes(1, coreDataBytes),
supportsPutFull = TransferSizes(1, coreDataBytes),
supportsGet = TransferSizes(1, coreDataBytes),
@ -106,10 +108,11 @@ class ScratchpadSlavePort(implicit p: Parameters) extends LazyModule {
trait CanHaveScratchpad extends HasHellaCache with HasICacheFrontend {
val module: CanHaveScratchpadModule
val slaveNode = if (p(DataScratchpadSize) == 0) None else Some(TLInputNode())
val scratch = if (p(DataScratchpadSize) == 0) None else Some(LazyModule(new ScratchpadSlavePort()(dcacheParams)))
val sizeBytes = tileParams.dataScratchpadBytes
val slaveNode = TLInputNode()
val scratch = if (sizeBytes > 0) Some(LazyModule(new ScratchpadSlavePort(sizeBytes))) else None
(slaveNode zip scratch) foreach { case (node, lm) => lm.node := TLFragmenter(p(XLen)/8, p(CacheBlockBytes))(node) }
scratch foreach { lm => lm.node := TLFragmenter(p(XLen)/8, p(CacheBlockBytes))(slaveNode) }
def findScratchpadFromICache: Option[AddressSet] = scratch.map { s =>
val finalNode = frontend.node.edgesOut(0).manager.managers.find(_.nodePath.last == s.node)
@ -118,12 +121,12 @@ trait CanHaveScratchpad extends HasHellaCache with HasICacheFrontend {
finalNode.get.address(0)
}
nDCachePorts += 1 // core TODO dcachePorts += () => module.io.dmem ??
nDCachePorts += (sizeBytes > 0).toInt
}
trait CanHaveScratchpadBundle extends HasHellaCacheBundle with HasICacheFrontendBundle {
val outer: CanHaveScratchpad
val slave = outer.slaveNode.map(_.bundleIn)
val slave = outer.slaveNode.bundleIn
}
trait CanHaveScratchpadModule extends HasHellaCacheModule with HasICacheFrontendModule {


@ -4,24 +4,18 @@
package rocket
import Chisel._
import util._
import Chisel.ImplicitConversions._
import scala.math._
import config._
import diplomacy._
import uncore.util._
import coreplex.CacheBlockBytes
import tile.{XLen, CoreModule, CoreBundle}
import uncore.tilelink2._
import util._
case object PAddrBits extends Field[Int]
case object PgLevels extends Field[Int]
case object ASIdBits extends Field[Int]
trait HasTLBParameters extends HasL1CacheParameters {
val entries = p(p(CacheName)).nTLBEntries
val camAddrBits = log2Ceil(entries)
val camTagBits = asIdBits + vpnBits
}
class TLBReq(implicit p: Parameters) extends CoreBundle()(p) {
val vpn = UInt(width = vpnBitsExtended)
val passthrough = Bool()
@ -39,12 +33,15 @@ class TLBResp(implicit p: Parameters) extends CoreBundle()(p) {
val cacheable = Bool(OUTPUT)
}
class TLB(implicit edge: TLEdgeOut, val p: Parameters) extends Module with HasTLBParameters {
class TLB(entries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends CoreModule()(p) {
val io = new Bundle {
val req = Decoupled(new TLBReq).flip
val resp = new TLBResp
val ptw = new TLBPTWIO
}
val cacheBlockBytes = p(CacheBlockBytes)
val camAddrBits = log2Ceil(entries)
val camTagBits = asIdBits + vpnBits
val valid = Reg(init = UInt(0, entries))
val ppns = Reg(Vec(entries, UInt(width = ppnBits)))
@ -182,7 +179,7 @@ class TLB(implicit edge: TLEdgeOut, val p: Parameters) extends Module with HasTL
}
}
class DecoupledTLB(implicit edge: TLEdgeOut, p: Parameters) extends Module {
class DecoupledTLB(entries: Int)(implicit edge: TLEdgeOut, p: Parameters) extends Module {
val io = new Bundle {
val req = Decoupled(new TLBReq).flip
val resp = Decoupled(new TLBResp)
@ -191,7 +188,7 @@ class DecoupledTLB(implicit edge: TLEdgeOut, p: Parameters) extends Module {
val req = Reg(new TLBReq)
val resp = Reg(new TLBResp)
val tlb = Module(new TLB)
val tlb = Module(new TLB(entries))
val s_idle :: s_tlb_req :: s_tlb_resp :: s_done :: Nil = Enum(Bits(), 4)
val state = Reg(init = s_idle)


@ -7,12 +7,23 @@ import Chisel._
import config._
import coreplex._
import diplomacy._
import uncore.converters._
import tile._
import uncore.devices._
import uncore.tilelink2._
import util._
class RocketTile(val c: RocketConfig)(implicit p: Parameters) extends BaseTile()(p)
case class RocketTileParams(
core: RocketCoreParams = RocketCoreParams(),
icache: Option[ICacheParams] = Some(ICacheParams()),
dcache: Option[DCacheParams] = Some(DCacheParams()),
rocc: Seq[RoCCParams] = Nil,
btb: Option[BTBParams] = Some(BTBParams()),
dataScratchpadBytes: Int = 0) extends TileParams {
require(icache.isDefined)
require(dcache.isDefined)
}
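// Hedged sketch, not part of this commit: per the commit description, RocketTilesKey holds a
// Seq[RocketTileParams], one per tile, so a big/little pair could be described roughly as
// below. The config fragment and the ICacheParams/DCacheParams overrides shown here are
// illustrative only; the supported fragments are WithNBigCores/WithNSmallCores used elsewhere.
class WithOneBigOneLittle extends Config((site, here, up) => {
  case RocketTilesKey => Seq(
    RocketTileParams(),                                   // big: all defaults
    RocketTileParams(                                     // little: trimmed-down variant
      core = RocketCoreParams(useVM = false, fpu = None),
      icache = Some(ICacheParams(nWays = 1)),
      dcache = Some(DCacheParams(nWays = 1)),
      btb = None))
})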
class RocketTile(val rocketParams: RocketTileParams)(implicit p: Parameters) extends BaseTile(rocketParams)(p)
with CanHaveLegacyRoccs // implies CanHaveSharedFPU with CanHavePTW with HasHellaCache
with CanHaveScratchpad { // implies CanHavePTW with HasHellaCache with HasICacheFrontend
@ -28,7 +39,7 @@ class RocketTileModule(outer: RocketTile) extends BaseTileModule(outer, () => ne
with CanHaveLegacyRoccsModule
with CanHaveScratchpadModule {
val core = Module(p(BuildCore)(outer.c, outer.p))
val core = Module(p(BuildCore)(outer.p))
core.io.interrupts := io.interrupts
core.io.hartid := io.hartid
outer.frontend.module.io.cpu <> core.io.imem
@ -44,25 +55,34 @@ class RocketTileModule(outer: RocketTile) extends BaseTileModule(outer, () => ne
core.io.rocc.interrupt := lr.module.io.core.interrupt
}
// TODO eliminate this redundancy
val h = dcachePorts.size
val c = core.dcacheArbPorts
val o = outer.nDCachePorts
require(h == c, s"port list size was $h, core expected $c")
require(h == o, s"port list size was $h, outer counted $o")
// TODO figure out how to move the below into their respective mix-ins
require(dcachePorts.size == core.dcacheArbPorts)
dcacheArb.io.requestor <> dcachePorts
ptwOpt foreach { ptw => ptw.io.requestor <> ptwPorts }
}
class AsyncRocketTile(c: RocketConfig)(implicit p: Parameters) extends LazyModule {
val rocket = LazyModule(new RocketTile(c))
class AsyncRocketTile(rtp: RocketTileParams)(implicit p: Parameters) extends LazyModule {
val rocket = LazyModule(new RocketTile(rtp))
val masterNodes = rocket.masterNodes.map(_ => TLAsyncOutputNode())
val slaveNode = rocket.slaveNode.map(_ => TLAsyncInputNode())
val masterNode = TLAsyncOutputNode()
val source = LazyModule(new TLAsyncCrossingSource)
source.node :=* rocket.masterNode
masterNode :=* source.node
(rocket.masterNodes zip masterNodes) foreach { case (r,n) => n := TLAsyncCrossingSource()(r) }
(rocket.slaveNode zip slaveNode) foreach { case (r,n) => r := TLAsyncCrossingSink()(n) }
val slaveNode = TLAsyncInputNode()
val sink = LazyModule(new TLAsyncCrossingSink)
rocket.slaveNode :*= sink.node
sink.node :*= slaveNode
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val master = masterNodes.head.bundleOut // TODO fix after Chisel #366
val slave = slaveNode.map(_.bundleIn)
val master = masterNode.bundleOut
val slave = slaveNode.bundleIn
val hartid = UInt(INPUT, p(XLen))
val interrupts = new TileInterrupts()(p).asInput
val resetVector = UInt(INPUT, p(XLen))
@ -74,19 +94,23 @@ class AsyncRocketTile(c: RocketConfig)(implicit p: Parameters) extends LazyModul
}
}
class RationalRocketTile(c: RocketConfig)(implicit p: Parameters) extends LazyModule {
val rocket = LazyModule(new RocketTile(c))
class RationalRocketTile(rtp: RocketTileParams)(implicit p: Parameters) extends LazyModule {
val rocket = LazyModule(new RocketTile(rtp))
val masterNodes = rocket.masterNodes.map(_ => TLRationalOutputNode())
val slaveNode = rocket.slaveNode.map(_ => TLRationalInputNode())
val masterNode = TLRationalOutputNode()
val source = LazyModule(new TLRationalCrossingSource)
source.node :=* rocket.masterNode
masterNode :=* source.node
(rocket.masterNodes zip masterNodes) foreach { case (r,n) => n := TLRationalCrossingSource()(r) }
(rocket.slaveNode zip slaveNode) foreach { case (r,n) => r := TLRationalCrossingSink()(n) }
val slaveNode = TLRationalInputNode()
val sink = LazyModule(new TLRationalCrossingSink)
rocket.slaveNode :*= sink.node
sink.node :*= slaveNode
lazy val module = new LazyModuleImp(this) {
val io = new Bundle {
val master = masterNodes.head.bundleOut // TODO fix after Chisel #366
val slave = slaveNode.map(_.bundleIn)
val master = masterNode.bundleOut
val slave = slaveNode.bundleIn
val hartid = UInt(INPUT, p(XLen))
val interrupts = new TileInterrupts()(p).asInput
val resetVector = UInt(INPUT, p(XLen))


@ -38,33 +38,21 @@ class BasePlatformConfig extends Config((site, here, up) => {
case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
})
/** Actual elaboratable target Configs */
class BaseConfig extends Config(new BaseCoreplexConfig ++ new BasePlatformConfig)
class DefaultConfig extends Config(new WithBlockingL1 ++ new BaseConfig)
class DefaultConfig extends Config(new WithBlockingL1 ++ new WithNBigCores(1) ++ new BaseConfig)
class DefaultL2Config extends Config(new WithL2Cache ++ new BaseConfig)
class DefaultL2Config extends Config(new WithL2Cache ++ new WithNBigCores(1) ++ new BaseConfig)
class DefaultBufferlessConfig extends Config(
new WithBufferlessBroadcastHub ++ new BaseConfig)
class FPGAConfig extends Config ((site, here, up) => {
case NAcquireTransactors => 4
})
new WithBufferlessBroadcastHub ++ new WithNBigCores(1) ++ new BaseConfig)
class FPGAConfig extends Config(Parameters.empty)
class DefaultFPGAConfig extends Config(new FPGAConfig ++ new BaseConfig)
class DefaultL2FPGAConfig extends Config(
new WithL2Capacity(64) ++ new WithL2Cache ++ new DefaultFPGAConfig)
class WithNMemoryChannels(n: Int) extends Config((site, here, up) => {
case BankedL2Config => up(BankedL2Config, site).copy(nMemoryChannels = n)
})
class WithExtMemSize(n: Long) extends Config((site, here, up) => {
case ExtMem => up(ExtMem, site).copy(size = n)
})
class WithScratchpads extends Config(new WithNMemoryChannels(0) ++ new WithDataScratchpad(16384))
class DefaultFPGASmallConfig extends Config(new WithSmallCores ++ new DefaultFPGAConfig)
class DefaultSmallConfig extends Config(new WithSmallCores ++ new BaseConfig)
class DefaultSmallConfig extends Config(new WithNSmallCores(1) ++ new BaseConfig)
class DefaultRV32Config extends Config(new WithRV32 ++ new DefaultConfig)
class DualBankConfig extends Config(
@ -83,12 +71,7 @@ class DualChannelDualBankL2Config extends Config(
new WithNMemoryChannels(2) ++ new WithNBanksPerMemChannel(2) ++
new WithL2Cache ++ new BaseConfig)
class RoccExampleConfig extends Config(new WithRoccExample ++ new BaseConfig)
class WithEdgeDataBits(dataBits: Int) extends Config((site, here, up) => {
case ExtMem => up(ExtMem, site).copy(beatBytes = dataBits/8)
case ZeroConfig => up(ZeroConfig, site).copy(beatBytes = dataBits/8)
})
class RoccExampleConfig extends Config(new WithRoccExample ++ new DefaultConfig)
class Edge128BitConfig extends Config(
new WithEdgeDataBits(128) ++ new BaseConfig)
@ -107,12 +90,22 @@ class OctoChannelBenchmarkConfig extends Config(new WithNMemoryChannels(8) ++ ne
class EightChannelConfig extends Config(new WithNMemoryChannels(8) ++ new BaseConfig)
class DualCoreConfig extends Config(
new WithNCores(2) ++ new WithL2Cache ++ new BaseConfig)
new WithNBigCores(2) ++ new WithL2Cache ++ new BaseConfig)
class HeterogeneousDualCoreConfig extends Config(
new WithNSmallCores(1) ++ new WithNBigCores(1) ++ new WithL2Cache ++ new BaseConfig)
class TinyConfig extends Config(
new WithScratchpads ++
new WithSmallCores ++ new WithRV32 ++
new WithStatelessBridge ++ new BaseConfig)
new WithScratchpad ++
new WithRV32 ++
new WithStatelessBridge ++
new WithNSmallCores(1) ++ new BaseConfig)
/* Composable partial function Configs to set individual parameters */
class WithEdgeDataBits(dataBits: Int) extends Config((site, here, up) => {
case ExtMem => up(ExtMem, site).copy(beatBytes = dataBits/8)
case ZeroConfig => up(ZeroConfig, site).copy(beatBytes = dataBits/8)
})
class WithJtagDTM extends Config ((site, here, up) => {
case IncludeJtagDTM => true
@ -134,10 +127,18 @@ class WithNExtTopInterrupts(nExtInts: Int) extends Config((site, here, up) => {
case NExtTopInterrupts => nExtInts
})
class WithNBreakpoints(hwbp: Int) extends Config ((site, here, up) => {
case NBreakpoints => hwbp
})
class WithRTCPeriod(nCycles: Int) extends Config((site, here, up) => {
case RTCPeriod => nCycles
})
class WithNMemoryChannels(n: Int) extends Config((site, here, up) => {
case BankedL2Config => up(BankedL2Config, site).copy(nMemoryChannels = n)
})
class WithExtMemSize(n: Long) extends Config((site, here, up) => {
case ExtMem => up(ExtMem, site).copy(size = n)
})
class WithScratchpad extends Config(new WithNMemoryChannels(0) ++ new WithDataScratchpad(16384))
class DefaultFPGASmallConfig extends Config(new WithNSmallCores(1) ++ new DefaultFPGAConfig)


@ -2,7 +2,9 @@
package rocketchip
import rocket.{XLen, UseVM, UseAtomics, UseCompressed, FPUKey}
import tile.XLen
import coreplex.RocketTilesKey
import scala.collection.mutable.LinkedHashSet
/** A Generator for platforms containing Rocket Coreplexes */
@ -47,9 +49,11 @@ object Generator extends util.GeneratorApp {
override def addTestSuites {
import DefaultTestSuites._
val xlen = params(XLen)
val vm = params(UseVM)
// TODO: for now only generate tests for the first core in the first coreplex
val coreParams = params(RocketTilesKey).head.core
val vm = coreParams.useVM
val env = if (vm) List("p","v") else List("p")
params(FPUKey) foreach { case cfg =>
coreParams.fpu foreach { case cfg =>
if (xlen == 32) {
TestGeneration.addSuites(env.map(rv32ufNoDiv))
} else {
@ -62,8 +66,8 @@ object Generator extends util.GeneratorApp {
}
}
}
if (params(UseAtomics)) TestGeneration.addSuites(env.map(if (xlen == 64) rv64ua else rv32ua))
if (params(UseCompressed)) TestGeneration.addSuites(env.map(if (xlen == 64) rv64uc else rv32uc))
if (coreParams.useAtomics) TestGeneration.addSuites(env.map(if (xlen == 64) rv64ua else rv32ua))
if (coreParams.useCompressed) TestGeneration.addSuites(env.map(if (xlen == 64) rv64uc else rv32uc))
val (rvi, rvu) =
if (xlen == 64) ((if (vm) rv64i else rv64pi), rv64u)
else ((if (vm) rv32i else rv32pi), rv32u)


@ -4,20 +4,16 @@ package rocketchip
import Chisel._
import config._
import junctions._
import junctions.NastiConstants._
import coreplex._
import diplomacy._
import uncore.tilelink._
import tile.XLen
import uncore.tilelink2._
import uncore.axi4._
import uncore.converters._
import uncore.devices._
import uncore.agents._
import uncore.util._
import util._
import rocket.XLen
import scala.math.max
import coreplex._
/** Specifies the size of external memory */
case class MasterConfig(base: Long, size: Long, beatBytes: Int, idBits: Int)


@ -6,7 +6,7 @@ import config._
import junctions._
import diplomacy._
import uncore.devices._
import rocket._
import tile.XLen
import coreplex._
import uncore.tilelink2._
import util._
@ -59,14 +59,15 @@ object GenerateConfigString {
res append plic.globalConfigString
res append clint.globalConfigString
res append "core {\n"
for (i <- 0 until c.nTiles) { // TODO heterogeneous tiles
c.tilesParams.zipWithIndex.map { case(t, i) =>
val isa = {
val m = if (p(MulDivKey).nonEmpty) "m" else ""
val a = if (p(UseAtomics)) "a" else ""
val f = if (p(FPUKey).nonEmpty) "f" else ""
val d = if (p(FPUKey).nonEmpty && p(XLen) > 32) "d" else ""
val s = if (c.hasSupervisor) "s" else ""
s"rv${p(XLen)}i$m$a$f$d$s"
val m = if (t.core.mulDiv.nonEmpty) "m" else ""
val a = if (t.core.useAtomics) "a" else ""
val f = if (t.core.fpu.nonEmpty) "f" else ""
val d = if (t.core.fpu.nonEmpty && p(XLen) > 32) "d" else ""
val c = if (t.core.useCompressed) "c" else ""
val s = if (t.core.useVM) "s" else ""
s"rv${p(XLen)}i$m$a$f$d$c$s"
}
res append s" $i {\n"
res append " 0 {\n"


@ -0,0 +1,81 @@
// See LICENSE.SiFive for license details.
package tile
import Chisel._
import config._
import diplomacy._
import rocket._
import uncore.tilelink2._
import util._
case object SharedMemoryTLEdge extends Field[TLEdgeOut]
case object TileKey extends Field[TileParams]
trait TileParams {
val core: CoreParams
val icache: Option[ICacheParams]
val dcache: Option[DCacheParams]
val rocc: Seq[RoCCParams]
val btb: Option[BTBParams]
val dataScratchpadBytes: Int
}
trait HasTileParameters {
implicit val p: Parameters
val tileParams: TileParams = p(TileKey)
val usingVM = tileParams.core.useVM
val usingUser = tileParams.core.useUser || usingVM
val usingDebug = tileParams.core.useDebug
val usingRoCC = !tileParams.rocc.isEmpty
val usingBTB = tileParams.btb.isDefined && tileParams.btb.get.nEntries > 0
val usingPTW = usingVM
val usingDataScratchpad = tileParams.dcache.isDefined && tileParams.dataScratchpadBytes > 0
def dcacheArbPorts = 1 + usingVM.toInt + usingDataScratchpad.toInt + tileParams.rocc.size
}
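// Worked example (illustrative): for a default RocketTileParams -- useVM = true, no data
// scratchpad, no RoCC accelerators -- dcacheArbPorts = 1 + 1 + 0 + 0 = 2, i.e. one D$ port
// for the core plus one for the PTW; a scratchpad or each RoCC adds another arbiter port.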
abstract class BareTile(implicit p: Parameters) extends LazyModule
abstract class BareTileBundle[+L <: BareTile](_outer: L) extends GenericParameterizedBundle(_outer) {
val outer = _outer
implicit val p = outer.p
}
abstract class BareTileModule[+L <: BareTile, +B <: BareTileBundle[L]](_outer: L, _io: () => B) extends LazyModuleImp(_outer) {
val outer = _outer
val io = _io ()
}
// Uses TileLink master port to connect caches and accelerators to the coreplex
trait HasTileLinkMasterPort extends HasTileParameters {
implicit val p: Parameters
val module: HasTileLinkMasterPortModule
val masterNode = TLOutputNode()
}
trait HasTileLinkMasterPortBundle {
val outer: HasTileLinkMasterPort
val master = outer.masterNode.bundleOut
}
trait HasTileLinkMasterPortModule {
val outer: HasTileLinkMasterPort
val io: HasTileLinkMasterPortBundle
}
abstract class BaseTile(tileParams: TileParams)(implicit p: Parameters) extends BareTile
with HasTileLinkMasterPort {
override lazy val module = new BaseTileModule(this, () => new BaseTileBundle(this))
}
class BaseTileBundle[+L <: BaseTile](_outer: L) extends BareTileBundle(_outer)
with HasTileLinkMasterPortBundle {
val hartid = UInt(INPUT, p(XLen))
val interrupts = new TileInterrupts()(p).asInput
val resetVector = UInt(INPUT, p(XLen))
}
class BaseTileModule[+L <: BaseTile, +B <: BaseTileBundle[L]](_outer: L, _io: () => B) extends BareTileModule(_outer, _io)
with HasTileLinkMasterPortModule


@ -0,0 +1,88 @@
// See LICENSE.SiFive for license details.
package tile
import Chisel._
import config._
import rocket._
import util._
case object BuildCore extends Field[Parameters => CoreModule with HasCoreIO]
case object XLen extends Field[Int]
// These parameters can be varied per-core
trait CoreParams {
val useVM: Boolean
val useUser: Boolean
val useDebug: Boolean
val useAtomics: Boolean
val useCompressed: Boolean
val mulDiv: Option[MulDivParams]
val fpu: Option[FPUParams]
val fetchWidth: Int
val decodeWidth: Int
val retireWidth: Int
val instBits: Int
}
trait HasCoreParameters extends HasTileParameters {
val coreParams: CoreParams = tileParams.core
val xLen = p(XLen)
val fLen = xLen // TODO relax this
require(xLen == 32 || xLen == 64)
val usingMulDiv = coreParams.mulDiv.nonEmpty
val usingFPU = coreParams.fpu.nonEmpty
val usingAtomics = coreParams.useAtomics
val usingCompressed = coreParams.useCompressed
val retireWidth = coreParams.retireWidth
val fetchWidth = coreParams.fetchWidth
val decodeWidth = coreParams.decodeWidth
val coreInstBits = coreParams.instBits
val coreInstBytes = coreInstBits/8
val coreDataBits = xLen
val coreDataBytes = coreDataBits/8
val coreDCacheReqTagBits = 6
val dcacheReqTagBits = coreDCacheReqTagBits + log2Ceil(dcacheArbPorts)
def pgIdxBits = 12
def pgLevelBits = 10 - log2Ceil(xLen / 32)
def vaddrBits = pgIdxBits + pgLevels * pgLevelBits
val paddrBits = 32//p(PAddrBits)
def ppnBits = paddrBits - pgIdxBits
def vpnBits = vaddrBits - pgIdxBits
val pgLevels = p(PgLevels)
val asIdBits = p(ASIdBits)
val vpnBitsExtended = vpnBits + (vaddrBits < xLen).toInt
val vaddrBitsExtended = vpnBitsExtended + pgIdxBits
val coreMaxAddrBits = paddrBits max vaddrBitsExtended
val maxPAddrBits = xLen match { case 32 => 34; case 64 => 50 }
require(paddrBits <= maxPAddrBits)
// Print out log of committed instructions and their writeback values.
// Requires post-processing due to out-of-order writebacks.
val enableCommitLog = false
}
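// Worked example (illustrative, assuming xLen = 64, PgLevels = 3, and paddrBits = 32 as above):
// pgLevelBits = 10 - log2Ceil(64/32) = 9, vaddrBits = 12 + 3*9 = 39, ppnBits = 32 - 12 = 20,
// vpnBits = 39 - 12 = 27, vpnBitsExtended = 27 + 1 = 28 (since vaddrBits < xLen),
// vaddrBitsExtended = 40, coreMaxAddrBits = max(32, 40) = 40, and paddrBits <= maxPAddrBits(50) holds.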
abstract class CoreModule(implicit val p: Parameters) extends Module
with HasCoreParameters
abstract class CoreBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
with HasCoreParameters
trait HasCoreIO {
implicit val p: Parameters
val io = new Bundle {
val interrupts = new TileInterrupts().asInput
val hartid = UInt(INPUT, p(XLen))
val imem = new FrontendIO()(p)
val dmem = new HellaCacheIO()(p)
val ptw = new DatapathPTWIO().flip
val fpu = new FPUCoreIO().flip
val rocc = new RoCCCoreIO().flip
}
}


@ -1,17 +1,18 @@
// See LICENSE.Berkeley for license details.
// See LICENSE.SiFive for license details.
package rocket
package tile
import Chisel._
import Instructions._
import util._
import Chisel.ImplicitConversions._
import FPConstants._
import rocket.DecodeLogic
import rocket.Instructions._
import uncore.constants.MemoryOpConstants._
import config._
import util._
case class FPUConfig(
case class FPUParams(
divSqrt: Boolean = true,
sfmaLatency: Int = 3,
dfmaLatency: Int = 4
@ -496,7 +497,7 @@ class FPUFMAPipe(val latency: Int, expWidth: Int, sigWidth: Int)(implicit p: Par
io.out := Pipe(valid, res, latency-1)
}
class FPU(cfg: FPUConfig)(implicit p: Parameters) extends FPUModule()(p) {
class FPU(cfg: FPUParams)(implicit p: Parameters) extends FPUModule()(p) {
val io = new FPUIO
val ex_reg_valid = Reg(next=io.valid, init=Bool(false))
@ -742,12 +743,10 @@ class FPU(cfg: FPUConfig)(implicit p: Parameters) extends FPUModule()(p) {
}
/** Mix-ins for constructing tiles that may have an FPU external to the core pipeline */
trait CanHaveSharedFPU {
implicit val p: Parameters
}
trait CanHaveSharedFPU extends HasTileParameters
trait CanHaveSharedFPUModule {
val outer: CanHaveSharedFPU
val fpuOpt = outer.p(FPUKey).map(cfg => Module(new FPU(cfg)(outer.p)))
val fpuOpt = outer.tileParams.core.fpu.map(params => Module(new FPU(params)(outer.p)))
// TODO fpArb could go here instead of inside LegacyRoccComplex
}


@ -0,0 +1,15 @@
// See LICENSE.SiFive for license details.
package tile
import Chisel._
import config.Parameters
import util._
class TileInterrupts(implicit p: Parameters) extends CoreBundle()(p) {
val debug = Bool()
val mtip = Bool()
val msip = Bool()
val meip = Bool()
val seip = usingVM.option(Bool())
}


@ -0,0 +1,52 @@
// See LICENSE.SiFive for license details.
package tile
import Chisel._
import config.{Parameters, Field}
import coreplex.CacheBlockBytes
import rocket.PAddrBits
import uncore.tilelink2.ClientMetadata
import uncore.util.{Code, IdentityCode}
import util.ParameterizedBundle
trait L1CacheParams {
def nSets: Int
def nWays: Int
def rowBits: Int
def nTLBEntries: Int
def splitMetadata: Boolean
def ecc: Option[Code]
}
trait HasL1CacheParameters {
implicit val p: Parameters
val cacheParams: L1CacheParams
def cacheBlockBytes = p(CacheBlockBytes)
def lgCacheBlockBytes = log2Up(cacheBlockBytes)
def nSets = cacheParams.nSets
def blockOffBits = lgCacheBlockBytes
def idxBits = log2Up(cacheParams.nSets)
def untagBits = blockOffBits + idxBits
def tagBits = p(PAddrBits) - untagBits
def nWays = cacheParams.nWays
def wayBits = log2Up(nWays)
def isDM = nWays == 1
def rowBits = cacheParams.rowBits
def rowBytes = rowBits/8
def rowOffBits = log2Up(rowBytes)
def code = cacheParams.ecc.getOrElse(new IdentityCode)
def nTLBEntries = cacheParams.nTLBEntries
def hasSplitMetadata = cacheParams.splitMetadata
def cacheDataBits = p(SharedMemoryTLEdge).bundle.dataBits
def cacheDataBeats = (cacheBlockBytes * 8) / cacheDataBits
def refillCycles = cacheDataBeats
}
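// Worked example (illustrative, assuming CacheBlockBytes = 64 and PAddrBits = 32): a cache with
// nSets = 64 and nWays = 4 gives blockOffBits = lgCacheBlockBytes = 6, idxBits = 6,
// untagBits = 12 and tagBits = 32 - 12 = 20, which satisfies the untagBits <= pgIdxBits(12)
// requirement enforced for VM-enabled L1 data caches.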
abstract class L1CacheModule(implicit val p: Parameters) extends Module
with HasL1CacheParameters
abstract class L1CacheBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
with HasL1CacheParameters


@ -1,13 +1,14 @@
// See LICENSE.Berkeley for license details.
// See LICENSE.SiFive for license details.
package rocket
package tile
import Chisel._
import Chisel.ImplicitConversions._
import config._
import coreplex._
import diplomacy._
import rocket._
import uncore.constants._
import uncore.agents._
import uncore.coherence._
@ -20,9 +21,9 @@ import util._
case object RoccMaxTaggedMemXacts extends Field[Int]
case object RoccNMemChannels extends Field[Int]
case object RoccNPTWPorts extends Field[Int]
case object BuildRoCC extends Field[Seq[RoccParameters]]
case object BuildRoCC extends Field[Seq[RoCCParams]]
trait CanHaveLegacyRoccs extends CanHaveSharedFPU with CanHavePTW with TileNetwork {
trait CanHaveLegacyRoccs extends CanHaveSharedFPU with CanHavePTW with HasTileLinkMasterPort {
val module: CanHaveLegacyRoccsModule
val legacyRocc = if (p(BuildRoCC).isEmpty) None
else Some(LazyModule(new LegacyRoccComplex()(p.alter { (site, here, up) => {
@ -38,7 +39,7 @@ trait CanHaveLegacyRoccs extends CanHaveSharedFPU with CanHavePTW with TileNetwo
nCachingClients = 1,
nCachelessClients = 1,
maxClientXacts = List(
site(DCacheKey).nMSHRs + 1 /* IOMSHR */,
tileParams.dcache.get.nMSHRs + 1 /* IOMSHR */,
if (site(BuildRoCC).isEmpty) 1 else site(RoccMaxTaggedMemXacts)).max,
maxClientsPerPort = if (site(BuildRoCC).isEmpty) 1 else 2,
maxManagerXacts = 8,
@ -46,15 +47,16 @@ trait CanHaveLegacyRoccs extends CanHaveSharedFPU with CanHavePTW with TileNetwo
dataBits = site(CacheBlockBytes)*8)
}})))
// TODO for now, all legacy rocc mem ports mapped to one external node
legacyRocc foreach { lr =>
lr.masterNodes.foreach { l1backend.node := _ }
masterNode := lr.masterNode
nPTWPorts += lr.nPTWPorts
nDCachePorts += lr.nRocc
}
}
trait CanHaveLegacyRoccsModule extends CanHaveSharedFPUModule with CanHavePTWModule with TileNetworkModule {
trait CanHaveLegacyRoccsModule extends CanHaveSharedFPUModule
with CanHavePTWModule
with HasTileLinkMasterPortModule {
val outer: CanHaveLegacyRoccs
fpuOpt foreach { fpu =>
@ -84,13 +86,13 @@ class LegacyRoccComplex(implicit p: Parameters) extends LazyModule {
val nPTWPorts = buildRocc.map(_.nPTWPorts).sum
val roccOpcodes = buildRocc.map(_.opcodes)
val masterNode = TLOutputNode()
val legacies = List.fill(nMemChannels) { LazyModule(new TLLegacy()(p.alterPartial({ case PAddrBits => 32 }))) }
val masterNodes = legacies.map(_ => TLOutputNode())
legacies.zip(masterNodes).foreach { case(l,m) => m := TLHintHandler()(l.node) }
legacies.foreach { leg => masterNode := TLHintHandler()(leg.node) }
lazy val module = new LazyModuleImp(this) with HasCoreParameters {
val io = new Bundle {
val tl = masterNodes.map(_.bundleOut)
val tl = masterNode.bundleOut
val dcache = Vec(nRocc, new HellaCacheIO)
val fpu = new Bundle {
val cp_req = Decoupled(new FPInput())
@ -117,7 +119,7 @@ class LegacyRoccComplex(implicit p: Parameters) extends LazyModule {
case RoccNMemChannels => accelParams.nMemChannels
case RoccNPTWPorts => accelParams.nPTWPorts
}))
val dcIF = Module(new SimpleHellaCacheIF()(p.alterPartial({ case CacheName => CacheName("L1D") })))
val dcIF = Module(new SimpleHellaCacheIF)
rocc.io.cmd <> cmdRouter.io.out(i)
rocc.io.exception := io.core.exception
dcIF.io.requestor <> rocc.io.mem
@ -151,7 +153,7 @@ class LegacyRoccComplex(implicit p: Parameters) extends LazyModule {
}
}
case class RoccParameters(
case class RoCCParams(
opcodes: OpcodeSet,
generator: Parameters => RoCC,
nMemChannels: Int = 0,


@ -0,0 +1,3 @@
// See LICENSE.SiFive for license details.
package object tile extends rocket.constants.ScalarOpConstants


@ -1,980 +0,0 @@
// See LICENSE.Berkeley for license details.
// See LICENSE.SiFive for license details.
package uncore.agents
import Chisel._
import scala.reflect.ClassTag
import rocket.PAddrBits
import uncore.coherence._
import uncore.tilelink._
import uncore.constants._
import uncore.util._
import util._
import config.{Parameters, Field}
case object CacheId extends Field[Int]
case object L2DirectoryRepresentation extends Field[DirectoryRepresentation]
trait HasOuterCacheParameters extends HasCacheParameters with HasCoherenceAgentParameters {
val cacheId = p(CacheId)
val idxLSB = cacheIdBits
val idxMSB = idxLSB + idxBits - 1
val tagLSB = idxLSB + idxBits
val tagMSB = tagLSB + tagBits - 1
def inSameSet(block_a: HasCacheBlockAddress, block_b: HasCacheBlockAddress): Bool =
inSameSet(block_a, block_b.addr_block)
def inSameSet(block: HasCacheBlockAddress, addr: UInt): Bool =
inSet(block, addr(idxMSB, idxLSB))
def inSet(block: HasCacheBlockAddress, idx: UInt): Bool =
block.addr_block(idxMSB,idxLSB) === idx
def haveSameTag(block: HasCacheBlockAddress, addr: UInt): Bool =
hasTag(block, addr(tagMSB, tagLSB))
def hasTag(block: HasCacheBlockAddress, tag: UInt): Bool =
block.addr_block(tagMSB, tagLSB) === tag
def isSameBlock(block: HasCacheBlockAddress, tag: UInt, idx: UInt) =
hasTag(block, tag) && inSet(block, idx)
//val blockAddrBits = p(TLBlockAddrBits)
val refillCyclesPerBeat = outerDataBits/rowBits
val refillCycles = refillCyclesPerBeat*outerDataBeats
val internalDataBeats = p(CacheBlockBytes)*8/rowBits
require(refillCyclesPerBeat == 1)
val amoAluOperandBits = p(AmoAluOperandBits)
require(amoAluOperandBits <= innerDataBits)
require(rowBits == innerDataBits) // TODO: relax this by improving s_data_* states
val nSecondaryMisses = p(NSecondaryMisses)
val isLastLevelCache = true
val alwaysWriteFullBeat = !p(ECCCode).isEmpty
}
abstract class L2HellaCacheModule(implicit val p: Parameters) extends Module
with HasOuterCacheParameters {
def doInternalOutputArbitration[T <: Data : ClassTag](
out: DecoupledIO[T],
ins: Seq[DecoupledIO[T]],
block_transfer: T => Bool = (t: T) => Bool(false)) {
val arb = Module(new RRArbiter(out.bits, ins.size))
out.valid := arb.io.out.valid && !block_transfer(arb.io.out.bits)
out.bits := arb.io.out.bits
arb.io.out.ready := out.ready && !block_transfer(arb.io.out.bits)
arb.io.in <> ins
}
def doInternalInputRouting[T <: Bundle with HasL2Id](in: ValidIO[T], outs: Seq[ValidIO[T]]) {
outs.map(_.bits := in.bits)
outs.zipWithIndex.map { case (o,i) => o.valid := in.valid && in.bits.id === UInt(i) }
}
}
abstract class L2HellaCacheBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
with HasOuterCacheParameters
trait HasL2Id extends HasCoherenceAgentParameters {
val id = UInt(width = log2Up(nTransactors + 1))
}
trait HasL2InternalRequestState extends HasOuterCacheParameters {
val tag_match = Bool()
val meta = new L2Metadata
val way_en = Bits(width = nWays)
}
trait HasL2BeatAddr extends HasOuterCacheParameters {
val addr_beat = UInt(width = log2Up(refillCycles))
}
trait HasL2Data extends HasOuterCacheParameters
with HasL2BeatAddr {
val data = UInt(width = rowBits)
def hasData(dummy: Int = 0) = Bool(true)
def hasMultibeatData(dummy: Int = 0) = Bool(refillCycles > 1)
}
class L2Metadata(implicit p: Parameters) extends Metadata()(p) with HasOuterCacheParameters {
val coh = new HierarchicalMetadata
}
object L2Metadata {
def apply(tag: Bits, coh: HierarchicalMetadata)
(implicit p: Parameters): L2Metadata = {
val meta = Wire(new L2Metadata)
meta.tag := tag
meta.coh := coh
meta
}
def apply(
tag: Bits,
inner: ManagerMetadata,
outer: ClientMetadata)(implicit p: Parameters): L2Metadata = {
val coh = Wire(new HierarchicalMetadata)
coh.inner := inner
coh.outer := outer
apply(tag, coh)
}
}
class L2MetaReadReq(implicit p: Parameters) extends MetaReadReq()(p) with HasL2Id {
val tag = Bits(width = tagBits)
}
class L2MetaWriteReq(implicit p: Parameters) extends MetaWriteReq[L2Metadata](new L2Metadata)(p)
with HasL2Id {
override def cloneType = new L2MetaWriteReq().asInstanceOf[this.type]
}
class L2MetaResp(implicit p: Parameters) extends L2HellaCacheBundle()(p)
with HasL2Id
with HasL2InternalRequestState
trait HasL2MetaReadIO extends HasOuterCacheParameters {
val read = Decoupled(new L2MetaReadReq)
val resp = Valid(new L2MetaResp).flip
}
trait HasL2MetaWriteIO extends HasOuterCacheParameters {
val write = Decoupled(new L2MetaWriteReq)
}
class L2MetaRWIO(implicit p: Parameters) extends L2HellaCacheBundle()(p)
with HasL2MetaReadIO
with HasL2MetaWriteIO
class L2MetaReadOnlyIO(implicit p: Parameters) extends L2HellaCacheBundle()(p)
with HasL2MetaReadIO
trait HasL2MetaRWIO extends HasOuterCacheParameters {
val meta = new L2MetaRWIO
}
class L2MetadataArray(implicit p: Parameters) extends L2HellaCacheModule()(p) {
val io = new L2MetaRWIO().flip
def onReset = L2Metadata(UInt(0), HierarchicalMetadata.onReset)
val meta = Module(new MetadataArray(onReset _))
meta.io.read <> io.read
meta.io.write <> io.write
val way_en_1h = UInt((BigInt(1) << nWays) - 1)
val s1_way_en_1h = RegEnable(way_en_1h, io.read.valid)
meta.io.read.bits.way_en := way_en_1h
val s1_tag = RegEnable(io.read.bits.tag, io.read.valid)
val s1_id = RegEnable(io.read.bits.id, io.read.valid)
def wayMap[T <: Data](f: Int => T) = Vec((0 until nWays).map(f))
val s1_clk_en = Reg(next = io.read.fire())
val s1_tag_eq_way = wayMap((w: Int) => meta.io.resp(w).tag === s1_tag)
val s1_tag_match_way = wayMap((w: Int) => s1_tag_eq_way(w) && meta.io.resp(w).coh.outer.isValid() && s1_way_en_1h(w).toBool).asUInt
val s1_idx = RegEnable(io.read.bits.idx, io.read.valid) // deal with stalls?
val s2_tag_match_way = RegEnable(s1_tag_match_way, s1_clk_en)
val s2_tag_match = s2_tag_match_way.orR
val s2_hit_coh = Mux1H(s2_tag_match_way, wayMap((w: Int) => RegEnable(meta.io.resp(w).coh, s1_clk_en)))
val replacer = p(L2Replacer)()
val s1_hit_way = Wire(Bits())
s1_hit_way := Bits(0)
(0 until nWays).foreach(i => when (s1_tag_match_way(i)) { s1_hit_way := Bits(i) })
replacer.access(io.read.bits.idx)
replacer.update(s1_clk_en, s1_tag_match_way.orR, s1_idx, s1_hit_way)
val s1_replaced_way_en = UIntToOH(replacer.way)
val s2_replaced_way_en = UIntToOH(RegEnable(replacer.way, s1_clk_en))
val s2_repl_meta = Mux1H(s2_replaced_way_en, wayMap((w: Int) =>
RegEnable(meta.io.resp(w), s1_clk_en && s1_replaced_way_en(w))).toSeq)
io.resp.valid := Reg(next = s1_clk_en)
io.resp.bits.id := RegEnable(s1_id, s1_clk_en)
io.resp.bits.tag_match := s2_tag_match
io.resp.bits.meta := Mux(s2_tag_match,
L2Metadata(s2_repl_meta.tag, s2_hit_coh),
s2_repl_meta)
io.resp.bits.way_en := Mux(s2_tag_match, s2_tag_match_way, s2_replaced_way_en)
}
class L2DataReadReq(implicit p: Parameters) extends L2HellaCacheBundle()(p)
with HasL2BeatAddr
with HasL2Id {
val addr_idx = UInt(width = idxBits)
val way_en = Bits(width = nWays)
}
object L2DataReadReq {
def apply(
id: UInt,
way_en: UInt,
addr_idx: UInt,
addr_beat: UInt)(implicit p: Parameters) = {
val req = Wire(new L2DataReadReq)
req.id := id
req.way_en := way_en
req.addr_idx := addr_idx
req.addr_beat := addr_beat
req
}
}
class L2DataWriteReq(implicit p: Parameters) extends L2DataReadReq()(p)
with HasL2Data {
val wmask = Bits(width = rowBits/8)
}
object L2DataWriteReq {
def apply(
id: UInt,
way_en: UInt,
addr_idx: UInt,
addr_beat: UInt,
wmask: UInt,
data: UInt)(implicit p: Parameters) = {
val req = Wire(new L2DataWriteReq)
req.id := id
req.way_en := way_en
req.addr_idx := addr_idx
req.addr_beat := addr_beat
req.wmask := wmask
req.data := data
req
}
}
class L2DataResp(implicit p: Parameters) extends L2HellaCacheBundle()(p)
with HasL2Id
with HasL2Data
trait HasL2DataReadIO extends HasOuterCacheParameters {
val read = Decoupled(new L2DataReadReq)
val resp = Valid(new L2DataResp).flip
}
trait HasL2DataWriteIO extends HasOuterCacheParameters {
val write = Decoupled(new L2DataWriteReq)
}
class L2DataRWIO(implicit p: Parameters) extends L2HellaCacheBundle()(p)
with HasL2DataReadIO
with HasL2DataWriteIO
trait HasL2DataRWIO extends HasOuterCacheParameters {
val data = new L2DataRWIO
}
class L2DataArray(delay: Int)(implicit p: Parameters) extends L2HellaCacheModule()(p) {
val io = new L2DataRWIO().flip
val array = SeqMem(nWays*nSets*refillCycles, Vec(rowBits/8, Bits(width=8)))
val ren = !io.write.valid && io.read.valid
val raddr = Cat(OHToUInt(io.read.bits.way_en), io.read.bits.addr_idx, io.read.bits.addr_beat)
val waddr = Cat(OHToUInt(io.write.bits.way_en), io.write.bits.addr_idx, io.write.bits.addr_beat)
val wdata = Vec.tabulate(rowBits/8)(i => io.write.bits.data(8*(i+1)-1,8*i))
val wmask = io.write.bits.wmask.toBools
when (io.write.valid) { array.write(waddr, wdata, wmask) }
val r_req = Pipe(io.read.fire(), io.read.bits)
io.resp := Pipe(r_req.valid, r_req.bits, delay)
io.resp.bits.data := Pipe(r_req.valid, array.read(raddr, ren).asUInt, delay).bits
io.read.ready := !io.write.valid
io.write.ready := Bool(true)
}
class L2HellaCacheBank(implicit p: Parameters) extends HierarchicalCoherenceAgent()(p)
with HasOuterCacheParameters {
require(isPow2(nSets))
require(isPow2(nWays))
val meta = Module(new L2MetadataArray) // TODO: add delay knob
val data = Module(new L2DataArray(1))
val tshrfile = Module(new TSHRFile)
io.inner <> tshrfile.io.inner
io.outer <> tshrfile.io.outer
tshrfile.io.incoherent <> io.incoherent
meta.io <> tshrfile.io.meta
data.io <> tshrfile.io.data
disconnectOuterProbeAndFinish()
}
class TSHRFileIO(implicit p: Parameters) extends HierarchicalTLIO()(p)
with HasL2MetaRWIO
with HasL2DataRWIO
class TSHRFile(implicit p: Parameters) extends L2HellaCacheModule()(p)
with HasCoherenceAgentWiringHelpers {
val io = new TSHRFileIO
// Create TSHRs for outstanding transactions
val irelTrackerList =
(0 until nReleaseTransactors).map(id =>
Module(new CacheVoluntaryReleaseTracker(id)))
val iacqTrackerList =
(nReleaseTransactors until nTransactors).map(id =>
Module(new CacheAcquireTracker(id)))
val trackerList = irelTrackerList ++ iacqTrackerList
// Don't allow a writeback request to go through if we are taking
// a voluntary release for the same block.
// The writeback can go forward once the voluntary release is handled
def writebackConflictsWithVolRelease(wb: L2WritebackReq): Bool =
irelTrackerList
.map(tracker =>
!tracker.io.alloc.idle &&
isSameBlock(tracker.io.alloc, wb.tag, wb.idx))
.reduce(_ || _) ||
(io.inner.release.valid &&
isSameBlock(io.inner.release.bits, wb.tag, wb.idx))
// WritebackUnit evicts data from L2, including invalidating L1s
val wb = Module(new L2WritebackUnit(nTransactors))
val trackerAndWbIOs = trackerList.map(_.io) :+ wb.io
doInternalOutputArbitration(wb.io.wb.req, trackerList.map(_.io.wb.req),
block_transfer = writebackConflictsWithVolRelease _)
doInternalInputRouting(wb.io.wb.resp, trackerList.map(_.io.wb.resp))
// Propagate incoherence flags
(trackerList.map(_.io.incoherent) :+ wb.io.incoherent) foreach { _ := io.incoherent }
// Handle acquire transaction initiation
val irel_vs_iacq_conflict =
io.inner.acquire.valid &&
io.inner.release.valid &&
inSameSet(io.inner.acquire.bits, io.inner.release.bits)
doInputRoutingWithAllocation(
in = io.inner.acquire,
outs = trackerList.map(_.io.inner.acquire),
allocs = trackerList.map(_.io.alloc.iacq),
allocOverride = Some(!irel_vs_iacq_conflict))
assert(PopCount(trackerList.map(_.io.alloc.iacq.should)) <= UInt(1),
"At most a single tracker should now be allocated for any given Acquire")
// Wire releases from clients
doInputRoutingWithAllocation(
in = io.inner.release,
outs = trackerAndWbIOs.map(_.inner.release),
allocs = trackerAndWbIOs.map(_.alloc.irel))
assert(PopCount(trackerAndWbIOs.map(_.alloc.irel.should)) <= UInt(1),
"At most a single tracker should now be allocated for any given Release")
// Wire probe requests and grant reply to clients, finish acks from clients
doOutputArbitration(io.inner.probe, trackerList.map(_.io.inner.probe) :+ wb.io.inner.probe)
doOutputArbitration(io.inner.grant, trackerList.map(_.io.inner.grant) :+ wb.io.inner.grant)
doInputRouting(io.inner.finish, trackerList.map(_.io.inner.finish))
// Create an arbiter for the one memory port
val outerList = trackerList.map(_.io.outer) :+ wb.io.outer
val outer_arb = Module(new ClientTileLinkIOArbiter(outerList.size)
(p.alterPartial({ case TLId => p(OuterTLId)})))
outer_arb.io.in <> outerList
io.outer <> outer_arb.io.out
// Wire local memory arrays
doInternalOutputArbitration(io.meta.read, trackerList.map(_.io.meta.read) :+ wb.io.meta.read)
doInternalOutputArbitration(io.meta.write, trackerList.map(_.io.meta.write))
doInternalOutputArbitration(io.data.read, trackerList.map(_.io.data.read) :+ wb.io.data.read)
doInternalOutputArbitration(io.data.write, trackerList.map(_.io.data.write))
doInternalInputRouting(io.meta.resp, trackerList.map(_.io.meta.resp) :+ wb.io.meta.resp)
doInternalInputRouting(io.data.resp, trackerList.map(_.io.data.resp) :+ wb.io.data.resp)
}
class L2XactTrackerIO(implicit p: Parameters) extends HierarchicalXactTrackerIO()(p)
with HasL2DataRWIO
with HasL2MetaRWIO
with HasL2WritebackIO
trait HasRowBeatCounters extends HasOuterCacheParameters with HasPendingBitHelpers {
def mergeData(dataBits: Int)(beat: UInt, incoming: UInt): Unit
def connectDataBeatCounter[S <: L2HellaCacheBundle](inc: Bool, data: S, beat: UInt, full_block: Bool) = {
if(data.refillCycles > 1) {
val (multi_cnt, multi_done) = Counter(full_block && inc, data.refillCycles)
(Mux(!full_block, beat, multi_cnt), Mux(!full_block, inc, multi_done))
} else { (UInt(0), inc) }
}
def connectInternalDataBeatCounter[T <: L2HellaCacheBundle with HasL2BeatAddr](
in: DecoupledIO[T],
beat: UInt = UInt(0),
full_block: Bool = Bool(true)): (UInt, Bool) = {
connectDataBeatCounter(in.fire(), in.bits, beat, full_block)
}
def connectInternalDataBeatCounter[T <: L2HellaCacheBundle with HasL2Data](
in: ValidIO[T],
full_block: Bool): Bool = {
connectDataBeatCounter(in.valid, in.bits, UInt(0), full_block)._2
}
def addPendingBitInternal[T <: L2HellaCacheBundle with HasL2BeatAddr](in: DecoupledIO[T]) =
Fill(in.bits.refillCycles, in.fire()) & UIntToOH(in.bits.addr_beat)
def addPendingBitInternal[T <: L2HellaCacheBundle with HasL2BeatAddr](in: ValidIO[T]) =
Fill(in.bits.refillCycles, in.valid) & UIntToOH(in.bits.addr_beat)
def dropPendingBit[T <: L2HellaCacheBundle with HasL2BeatAddr] (in: DecoupledIO[T]) =
~Fill(in.bits.refillCycles, in.fire()) | ~UIntToOH(in.bits.addr_beat)
def dropPendingBitInternal[T <: L2HellaCacheBundle with HasL2BeatAddr] (in: ValidIO[T]) =
~Fill(in.bits.refillCycles, in.valid) | ~UIntToOH(in.bits.addr_beat)
// TODO: Deal with the possibility that rowBits != tlDataBits
def mergeDataInternal[T <: L2HellaCacheBundle with HasL2Data with HasL2BeatAddr](in: ValidIO[T]) {
when(in.valid) { mergeData(rowBits)(in.bits.addr_beat, in.bits.data) }
}
}
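The pending-bit helpers above maintain a per-beat scoreboard: the addPendingBit* forms produce a one-hot mask to OR in when a beat is issued, and the dropPendingBit* forms produce a mask to AND against when that beat retires. A minimal standalone sketch (plain Scala, not Chisel; the 4-beat refill and the concrete values are assumptions for illustration):
object PendingBitSketch extends App {
  val refillCycles = 4
  def oneHot(beat: Int): Int = 1 << beat                               // UIntToOH
  def fill(b: Boolean): Int = if (b) (1 << refillCycles) - 1 else 0    // Fill(refillCycles, b)
  def addPendingBit(fire: Boolean, beat: Int): Int  = fill(fire) & oneHot(beat)
  def dropPendingBit(fire: Boolean, beat: Int): Int = ~fill(fire) | ~oneHot(beat)
  var pendingReads = Integer.parseInt("0110", 2)     // beats 1 and 2 outstanding
  // Beat 3 becomes pending in the same cycle that the request for beat 1 issues:
  pendingReads = (pendingReads | addPendingBit(fire = true, beat = 3)) &
                 dropPendingBit(fire = true, beat = 1)
  println(pendingReads.toBinaryString)               // 1100 -> beats 2 and 3 remain
}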
trait ReadsFromOuterCacheDataArray extends HasCoherenceMetadataBuffer
with HasRowBeatCounters
with HasDataBuffer {
def io: HasL2DataRWIO
val pending_reads = Reg(init=Bits(0, width = innerDataBeats))
val pending_resps = Reg(init=Bits(0, width = innerDataBeats))
val curr_read_beat = PriorityEncoder(pending_reads)
def readDataArray(drop_pending_bit: UInt,
add_pending_bit: UInt = UInt(0),
block_pending_read: Bool = Bool(false),
can_update_pending: Bool = Bool(true)) {
val port = io.data
when (can_update_pending) {
pending_reads := (pending_reads | add_pending_bit) &
dropPendingBit(port.read) & drop_pending_bit
}
port.read.valid := state === s_busy && pending_reads.orR && !block_pending_read
port.read.bits := L2DataReadReq(
id = UInt(trackerId),
way_en = xact_way_en,
addr_idx = xact_addr_idx,
addr_beat = curr_read_beat)
pending_resps := (pending_resps & dropPendingBitInternal(port.resp)) |
addPendingBitInternal(port.read)
scoreboard += (pending_reads.orR, pending_resps.orR)
mergeDataInternal(port.resp)
}
}
trait WritesToOuterCacheDataArray extends HasCoherenceMetadataBuffer
with HasRowBeatCounters
with HasDataBuffer {
def io: HasL2DataRWIO
val pending_writes = Reg(init=Bits(0, width = innerDataBeats))
val curr_write_beat = PriorityEncoder(pending_writes)
def writeDataArray(add_pending_bit: UInt = UInt(0),
block_pending_write: Bool = Bool(false),
can_update_pending: Bool = Bool(true)) {
val port = io.data
when (can_update_pending) {
pending_writes := (pending_writes & dropPendingBit(port.write)) |
add_pending_bit
}
port.write.valid := state === s_busy && pending_writes.orR && !block_pending_write
port.write.bits := L2DataWriteReq(
id = UInt(trackerId),
way_en = xact_way_en,
addr_idx = xact_addr_idx,
addr_beat = curr_write_beat,
wmask = ~UInt(0, port.write.bits.wmask.getWidth),
data = data_buffer(curr_write_beat))
scoreboard += pending_writes.orR
}
}
trait HasAMOALU extends HasAcquireMetadataBuffer
with HasByteWriteMaskBuffer
with HasRowBeatCounters {
val io: L2XactTrackerIO
// Provide a single ALU per tracker to merge Puts and AMOs with data being
// refilled, written back, or extant in the cache
val amoalu = Module(new AMOALU(amoAluOperandBits, rhsIsAligned = true))
val amo_result = Reg(init = UInt(0, innerDataBits))
def initializeAMOALUIOs() {
amoalu.io.addr := Cat(xact_addr_block, xact_addr_beat, xact_addr_byte)
amoalu.io.cmd := xact_op_code
amoalu.io.typ := xact_op_size
amoalu.io.lhs := io.data.resp.bits.data // default, overwritten by calls to mergeData
amoalu.io.rhs := data_buffer.head // default, overwritten by calls to mergeData
}
// Utility function for applying any buffered stored data to the cache line
// before storing it back into the data array
override def mergeData(dataBits: Int)(beat: UInt, incoming: UInt) {
val old_data = incoming // Refilled, written back, or de-cached data
val new_data = data_buffer(beat) // Newly Put data is already in the buffer
val amo_shift_bits = xact_amo_shift_bytes << 3
amoalu.io.lhs := old_data >> amo_shift_bits
amoalu.io.rhs := new_data >> amo_shift_bits
val wmask = FillInterleaved(8, wmask_buffer(beat))
data_buffer(beat) := ~wmask & old_data |
wmask & Mux(xact_iacq.isAtomic(), amoalu.io.out << amo_shift_bits, new_data)
wmask_buffer(beat) := ~UInt(0, innerWriteMaskBits)
when(xact_iacq.isAtomic() && xact_addr_beat === beat) { amo_result := old_data }
}
}
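The merge in mergeData above is ordinary byte-lane selection: FillInterleaved(8, wmask_buffer(beat)) stretches each write-mask bit over the eight data bits of its byte, and the buffered Put (or AMO) data replaces only the masked lanes. A minimal standalone sketch of that step (plain Scala, not Chisel; the 32-bit beat and the values are made up for illustration):
object MergeDataSketch extends App {
  // FillInterleaved(8, m): stretch each write-mask bit over its byte lane.
  def fillInterleaved8(mask: Int, bytes: Int): BigInt =
    (0 until bytes).map(i => if (((mask >> i) & 1) == 1) BigInt(0xff) << (8 * i) else BigInt(0))
      .foldLeft(BigInt(0))(_ | _)
  val oldData  = BigInt("DEADBEEF", 16)               // refilled / written-back beat
  val newData  = BigInt("00005555", 16)               // buffered Put data
  val byteMask = Integer.parseInt("0011", 2)          // only the low two bytes were Put
  val wmask    = fillInterleaved8(byteMask, bytes = 4)    // 0x0000FFFF
  val merged   = (~wmask & oldData) | (wmask & newData)
  println(merged.toString(16))                        // dead5555
}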
trait HasCoherenceMetadataBuffer extends HasOuterCacheParameters
with HasBlockAddressBuffer
with HasXactTrackerStates {
def trackerId: Int
val xact_way_en = Reg{ Bits(width = nWays) }
val xact_old_meta = Reg{ new L2Metadata }
val pending_coh = Reg{ xact_old_meta.coh }
val pending_meta_write = Reg{ Bool() } // pending_meta_write has own state (s_meta_write)
val inner_coh = pending_coh.inner
val outer_coh = pending_coh.outer
val xact_addr_idx = xact_addr_block(idxMSB,idxLSB)
val xact_addr_tag = xact_addr_block >> UInt(tagLSB)
// Utility function for updating the metadata that will be kept in this cache
def updatePendingCohWhen(flag: Bool, next: HierarchicalMetadata) {
when(flag && pending_coh =/= next) {
pending_meta_write := Bool(true)
pending_coh := next
}
}
def metaRead(port: HasL2MetaReadIO, next_state: UInt, way_en_known: Bool = Bool(false)) {
port.read.valid := state === s_meta_read
port.read.bits.id := UInt(trackerId)
port.read.bits.idx := xact_addr_idx
port.read.bits.tag := xact_addr_tag
port.read.bits.way_en := Mux(way_en_known, xact_way_en, ~UInt(0, nWays))
when(state === s_meta_read && port.read.ready) { state := s_meta_resp }
when(state === s_meta_resp && port.resp.valid) {
xact_old_meta := port.resp.bits.meta
when (!way_en_known) { xact_way_en := port.resp.bits.way_en }
state := next_state
}
}
def metaWrite(port: HasL2MetaWriteIO, to_write: L2Metadata, next_state: UInt) {
port.write.valid := state === s_meta_write
port.write.bits.id := UInt(trackerId)
port.write.bits.idx := xact_addr_idx
port.write.bits.way_en := xact_way_en
port.write.bits.data := to_write
when(state === s_meta_write && port.write.ready) { state := next_state }
}
}
trait TriggersWritebacks extends HasCoherenceMetadataBuffer {
def triggerWriteback(wb: L2WritebackIO, next_state: UInt) {
wb.req.valid := state === s_wb_req
wb.req.bits.id := UInt(trackerId)
wb.req.bits.idx := xact_addr_idx
wb.req.bits.tag := xact_old_meta.tag
wb.req.bits.way_en := xact_way_en
when(state === s_wb_req && wb.req.ready) { state := s_wb_resp }
when(state === s_wb_resp && wb.resp.valid) { state := s_outer_acquire }
}
}
class CacheVoluntaryReleaseTracker(trackerId: Int)(implicit p: Parameters)
extends VoluntaryReleaseTracker(trackerId)(p)
with HasDataBuffer
with WritesToOuterCacheDataArray {
val io = new L2XactTrackerIO
pinAllReadyValidLow(io)
  // Avoid metadata races with writebacks
routeInParent(
iacqMatches = inSameSet(_, xact_addr_block),
irelCanAlloc = Bool(true))
// Initialize and accept pending Release beats
innerRelease(
block_vol_ignt = pending_writes.orR,
next = s_meta_read)
io.inner.release.ready := state === s_idle || irel_can_merge || irel_same_xact
// Begin a transaction by getting the current block metadata
metaRead(io.meta, s_busy)
// Write the voluntarily written back data to this cache
writeDataArray(add_pending_bit = addPendingBitWhenBeatHasData(io.inner.release),
can_update_pending = state =/= s_idle || io.alloc.irel.should)
// End a transaction by updating the block metadata
metaWrite(
io.meta,
L2Metadata(
tag = xact_addr_tag,
inner = xact_old_meta.coh.inner.onRelease(xact_vol_irel),
outer = Mux(xact_vol_irel.hasData(),
xact_old_meta.coh.outer.onHit(M_XWR),
xact_old_meta.coh.outer)),
s_idle)
mergeDataInner(io.inner.release)
when(irel_is_allocating) {
pending_writes := addPendingBitWhenBeatHasData(io.inner.release)
}
quiesce(s_meta_write) {}
// Checks for illegal behavior
assert(!(state === s_meta_resp && io.meta.resp.valid && !io.meta.resp.bits.tag_match),
"VoluntaryReleaseTracker accepted Release for a block not resident in this cache!")
}
class CacheAcquireTracker(trackerId: Int)(implicit p: Parameters)
extends AcquireTracker(trackerId)(p)
with HasByteWriteMaskBuffer
with HasAMOALU
with TriggersWritebacks
with ReadsFromOuterCacheDataArray
with WritesToOuterCacheDataArray {
val io = new L2XactTrackerIO
pinAllReadyValidLow(io)
initializeAMOALUIOs()
val pending_coh_on_ognt = HierarchicalMetadata(
ManagerMetadata.onReset,
pending_coh.outer.onGrant(io.outer.grant.bits, xact_op_code))
val pending_coh_on_ignt = HierarchicalMetadata(
pending_coh.inner.onGrant(io.ignt()),
Mux(ognt_counter.down.done,
pending_coh_on_ognt.outer,
pending_coh.outer))
val pending_coh_on_irel = HierarchicalMetadata(
pending_coh.inner.onRelease(io.irel()), // Drop sharer
Mux(io.irel().hasData(), // Dirty writeback
pending_coh.outer.onHit(M_XWR),
pending_coh.outer))
val pending_coh_on_hit = HierarchicalMetadata(
io.meta.resp.bits.meta.coh.inner,
io.meta.resp.bits.meta.coh.outer.onHit(xact_op_code))
val pending_coh_on_miss = HierarchicalMetadata.onReset
val before_wb_req = state.isOneOf(s_meta_read, s_meta_resp)
routeInParent(
iacqMatches = inSameSet(_, xact_addr_block),
irelMatches = (irel: HasCacheBlockAddress) =>
Mux(before_wb_req, inSameSet(irel, xact_addr_block), exactAddrMatch(irel)),
iacqCanAlloc = Bool(true))
// TileLink allows for Gets-under-Get
// and Puts-under-Put, and either may also merge with a preceding prefetch
// that requested the correct permissions (via op_code)
def acquiresAreMergeable(sec: AcquireMetadata): Bool = {
val allowedTypes = List((Acquire.getType, Acquire.getType),
(Acquire.putType, Acquire.putType),
(Acquire.putBlockType, Acquire.putBlockType),
(Acquire.getPrefetchType, Acquire.getPrefetchType),
(Acquire.putPrefetchType, Acquire.putPrefetchType),
(Acquire.getPrefetchType, Acquire.getType),
(Acquire.putPrefetchType, Acquire.putType),
(Acquire.putPrefetchType, Acquire.putBlockType))
allowedTypes.map { case(a, b) => xact_iacq.isBuiltInType(a) && sec.isBuiltInType(b) }.reduce(_||_) &&
xact_op_code === sec.op_code() &&
sec.conflicts(xact_addr_block) &&
xact_allocate
}
  // First, take care of accepting new acquires or secondary misses
// Handling of primary and secondary misses' data and write mask merging
def iacq_can_merge = acquiresAreMergeable(io.iacq()) &&
state =/= s_idle &&
state =/= s_meta_resp &&
state =/= s_meta_write &&
!all_pending_done &&
!io.inner.release.fire() &&
!io.outer.grant.fire() &&
!io.data.resp.valid &&
ignt_q.io.enq.ready && ignt_q.io.deq.valid
innerAcquire(
can_alloc = Bool(true),
next = s_meta_read)
io.inner.acquire.ready := state === s_idle || iacq_can_merge ||
iacq_same_xact_multibeat
// Begin a transaction by getting the current block metadata
// Defined here because of Chisel default wire demands, used in s_meta_resp
val coh = io.meta.resp.bits.meta.coh
val tag_match = io.meta.resp.bits.tag_match
val is_hit = (if(!isLastLevelCache) tag_match && coh.outer.isHit(xact_op_code)
else tag_match && coh.outer.isValid())
val needs_writeback = !tag_match &&
xact_allocate &&
(coh.outer.requiresVoluntaryWriteback() ||
coh.inner.requiresProbesOnVoluntaryWriteback())
val needs_inner_probes = tag_match && coh.inner.requiresProbes(xact_iacq)
val should_update_meta = !tag_match && xact_allocate ||
is_hit && pending_coh_on_hit =/= coh
def full_representation = coh.inner.full()
metaRead(
io.meta,
Mux(needs_writeback, s_wb_req,
Mux(needs_inner_probes, s_inner_probe,
Mux(!is_hit, s_outer_acquire, s_busy))))
updatePendingCohWhen(
io.meta.resp.valid,
Mux(is_hit, pending_coh_on_hit,
Mux(tag_match, coh, pending_coh_on_miss)))
// Issue a request to the writeback unit
triggerWriteback(io.wb, s_outer_acquire)
// Track which clients yet need to be probed and make Probe message
// If we're probing, we know the tag matches, so if this is the
// last level cache, we can use the data without upgrading permissions
val skip_outer_acquire =
(if(!isLastLevelCache) xact_old_meta.coh.outer.isHit(xact_op_code)
else xact_old_meta.coh.outer.isValid())
innerProbe(
inner_coh.makeProbe(curr_probe_dst, xact_iacq, xact_addr_block),
Mux(!skip_outer_acquire, s_outer_acquire, s_busy))
// Handle incoming releases from clients, which may reduce sharer counts
// and/or write back dirty data, and may be unexpected voluntary releases
innerRelease() // Don't block on pending_writes because they won't happen until s_busy
def irel_can_merge = io.irel().conflicts(xact_addr_block) &&
io.irel().isVoluntary() &&
!state.isOneOf(s_idle, s_meta_read, s_meta_resp, s_meta_write) &&
!all_pending_done &&
!io.outer.grant.fire() &&
!io.inner.grant.fire() &&
!vol_ignt_counter.pending
io.inner.release.ready := irel_can_merge || irel_same_xact
updatePendingCohWhen(io.inner.release.fire(), pending_coh_on_irel)
mergeDataInner(io.inner.release)
// Send outer request
outerAcquire(
caching = xact_allocate,
coh = xact_old_meta.coh.outer, // TODO outer_coh?
data = data_buffer(ognt_counter.up.idx),
wmask = wmask_buffer(ognt_counter.up.idx),
next = s_busy)
// Handle the response from outer memory
updatePendingCohWhen(ognt_counter.down.done, pending_coh_on_ognt)
mergeDataOuter(io.outer.grant)
// Send read request and get resp
// Going back to the original inner transaction:
  // We read from the cache at this level if data wasn't written back or refilled.
// We may still merge further Gets, requiring further beats to be read.
// If ECC requires a full writemask, we'll read out data on partial writes as well.
readDataArray(
drop_pending_bit = (dropPendingBitWhenBeatHasData(io.inner.release) &
dropPendingBitWhenBeatHasData(io.outer.grant)),
add_pending_bit = addPendingBitWhenBeatNeedsRead(
io.inner.acquire,
always = Bool(alwaysWriteFullBeat),
unless = data_valid(io.iacq().addr_beat)),
block_pending_read = ognt_counter.pending,
can_update_pending = state =/= s_idle || io.alloc.irel.should)
// No override for first accepted acquire
val alloc_override = xact_allocate && (state =/= s_idle)
// Do write
// We write data to the cache at this level if it was Put here with allocate flag,
// written back dirty, or refilled from outer memory.
writeDataArray(
add_pending_bit = (addPendingBitWhenBeatHasDataAndAllocs(io.inner.acquire, alloc_override) |
addPendingBitWhenBeatHasData(io.inner.release) |
addPendingBitWhenBeatHasData(io.outer.grant, xact_allocate)),
block_pending_write = (ognt_counter.pending ||
pending_put_data.orR ||
pending_reads(curr_write_beat) ||
pending_resps(curr_write_beat)),
can_update_pending = state =/= s_idle || io.alloc.iacq.should || io.alloc.irel.should)
// Acknowledge or respond with data
innerGrant(
data = Mux(xact_iacq.isAtomic(), amo_result, data_buffer(ignt_data_idx)),
external_pending = pending_writes.orR || ognt_counter.pending,
add_pending_bits = addPendingBitInternal(io.data.resp))
updatePendingCohWhen(io.inner.grant.fire() && io.ignt().last(), pending_coh_on_ignt)
// End a transaction by updating the block metadata
metaWrite(io.meta, L2Metadata(xact_addr_tag, pending_coh), s_idle)
// Initialization of some scoreboard logic based on the original
  // Acquire message and on the results of the metadata read:
when(state === s_meta_resp && io.meta.resp.valid) {
// If some kind of Put is marked no-allocate but is already in the cache,
// we need to write its data to the data array
when(is_hit && !xact_allocate && xact_iacq.hasData()) {
pending_writes := addPendingBitsFromAcquire(xact_iacq)
xact_allocate := Bool(true)
}
when (needs_inner_probes) { initializeProbes() }
pending_meta_write := should_update_meta //TODO what edge case was this covering?
}
  // Initialize more transaction metadata.
when(iacq_is_allocating) {
amo_result := UInt(0)
pending_meta_write := Bool(false)
pending_reads := Mux( // Pick out the specific beats of data that need to be read
io.iacq().isBuiltInType(Acquire.getBlockType) || !io.iacq().isBuiltInType(),
~UInt(0, width = innerDataBeats),
addPendingBitWhenBeatNeedsRead(io.inner.acquire, Bool(alwaysWriteFullBeat)))
pending_writes := addPendingBitWhenBeatHasDataAndAllocs(io.inner.acquire)
pending_resps := UInt(0)
}
initDataInner(io.inner.acquire, iacq_is_allocating || iacq_is_merging)
// Wait for everything to quiesce
quiesce(Mux(pending_meta_write, s_meta_write, s_idle)) { clearWmaskBuffer() }
}
class L2WritebackReq(implicit p: Parameters)
extends L2HellaCacheBundle()(p) with HasL2Id {
val tag = Bits(width = tagBits)
val idx = Bits(width = idxBits)
val way_en = Bits(width = nWays)
}
class L2WritebackResp(implicit p: Parameters) extends L2HellaCacheBundle()(p) with HasL2Id
class L2WritebackIO(implicit p: Parameters) extends L2HellaCacheBundle()(p) {
val req = Decoupled(new L2WritebackReq)
val resp = Valid(new L2WritebackResp).flip
}
trait HasL2WritebackIO extends HasOuterCacheParameters {
val wb = new L2WritebackIO()
}
class L2WritebackUnitIO(implicit p: Parameters)
extends HierarchicalXactTrackerIO()(p) with HasL2DataRWIO {
val wb = new L2WritebackIO().flip()
val meta = new L2MetaReadOnlyIO
}
class L2WritebackUnit(val trackerId: Int)(implicit p: Parameters) extends XactTracker()(p)
with AcceptsVoluntaryReleases
with EmitsVoluntaryReleases
with EmitsInnerProbes
with ReadsFromOuterCacheDataArray
with RoutesInParent {
val io = new L2WritebackUnitIO
pinAllReadyValidLow(io)
val xact_id = Reg{ io.wb.req.bits.id }
val pending_coh_on_irel = HierarchicalMetadata(
inner_coh.onRelease(io.irel()), // Drop sharer
Mux(io.irel().hasData(), // Dirty writeback
outer_coh.onHit(M_XWR),
outer_coh))
routeInParent()
// Start the writeback sub-transaction
io.wb.req.ready := state === s_idle
val coh = io.meta.resp.bits.meta.coh
val needs_inner_probes = coh.inner.requiresProbesOnVoluntaryWriteback()
val needs_outer_release = coh.outer.requiresVoluntaryWriteback()
def full_representation = coh.inner.full()
// Even though we already read the metadata in the acquire tracker that
// sent the writeback request, we have to read it again in the writeback
// unit, since it may have been updated in the meantime.
metaRead(io.meta,
next_state = Mux(needs_inner_probes, s_inner_probe, s_busy),
way_en_known = Bool(true))
// Track which clients yet need to be probed and make Probe message
innerProbe(
inner_coh.makeProbeForVoluntaryWriteback(curr_probe_dst, xact_addr_block),
s_busy)
// Handle incoming releases from clients, which may reduce sharer counts
// and/or write back dirty data
innerRelease()
def irel_can_merge = io.irel().conflicts(xact_addr_block) &&
io.irel().isVoluntary() &&
!state.isOneOf(s_idle, s_meta_read, s_meta_resp) &&
!(state === s_busy && all_pending_done) &&
!vol_ignt_counter.pending &&
!blockInnerRelease()
io.inner.release.ready := irel_can_merge || irel_same_xact
updatePendingCohWhen(io.inner.release.fire(), pending_coh_on_irel)
mergeDataInner(io.inner.release)
// If a release didn't write back data, have to read it from data array
readDataArray(
drop_pending_bit = dropPendingBitWhenBeatHasData(io.inner.release))
// Once the data is buffered we can write it back to outer memory
outerRelease(
coh = outer_coh,
data = data_buffer(vol_ognt_counter.up.idx),
add_pending_data_bits = addPendingBitInternal(io.data.resp),
add_pending_send_bit = io.meta.resp.valid && needs_outer_release)
// Respond to the initiating transaction handler signalling completion of the writeback
io.wb.resp.valid := state === s_busy && all_pending_done
io.wb.resp.bits.id := xact_id
quiesce() {}
  // State machine updates and transaction handler metadata initialization
when(state === s_idle && io.wb.req.valid) {
xact_id := io.wb.req.bits.id
xact_way_en := io.wb.req.bits.way_en
xact_addr_block := (if (cacheIdBits == 0) Cat(io.wb.req.bits.tag, io.wb.req.bits.idx)
else Cat(io.wb.req.bits.tag, io.wb.req.bits.idx, UInt(cacheId, cacheIdBits)))
state := s_meta_read
}
when (state === s_meta_resp && io.meta.resp.valid) {
pending_reads := Fill(innerDataBeats, needs_outer_release)
pending_coh := coh
when(needs_inner_probes) { initializeProbes() }
}
assert(!io.meta.resp.valid || io.meta.resp.bits.tag_match,
"L2 requested Writeback for block not present in cache")
}

View File

@@ -6,6 +6,7 @@ import Chisel._
import junctions._
import util._
import regmapper._
import tile.XLen
import uncore.tilelink2._
import config._
@@ -848,7 +849,7 @@ trait DebugModule extends Module with HasDebugModuleParameters with HasRegMap {
*/
class TLDebugModule(address: BigInt = 0)(implicit p: Parameters)
extends TLRegisterRouter(address, beatBytes=p(rocket.XLen)/8, executable=true)(
extends TLRegisterRouter(address, beatBytes=p(XLen)/8, executable=true)(
new TLRegBundle((), _ ) with DebugModuleBundle)(
new TLRegModule((), _, _) with DebugModule)

View File

@@ -11,6 +11,7 @@ import regmapper._
import uncore.tilelink2._
import config._
import scala.math.min
import tile.XLen
class GatewayPLICIO extends Bundle {
val valid = Bool(OUTPUT)
@@ -59,7 +60,7 @@ class TLPLIC(supervisor: Boolean, maxPriorities: Int, address: BigInt = 0xC00000
val node = TLRegisterNode(
address = AddressSet(address, PLICConsts.size-1),
beatBytes = p(rocket.XLen)/8,
beatBytes = p(XLen)/8,
undefZero = false)
val intnode = IntNexusNode(

View File

@@ -12,6 +12,7 @@ import uncore.util._
import util._
import scala.math.{min,max}
import config._
import tile.XLen
/** Number of tiles */
case object NTiles extends Field[Int]
@@ -83,7 +84,7 @@ trait CoreplexLocalInterrupterModule extends Module with HasRegMap with MixCorep
/** Power, Reset, Clock, Interrupt */
// Magic TL2 Incantation to create a TL2 Slave
class CoreplexLocalInterrupter(address: BigInt = 0x02000000)(implicit p: Parameters)
extends TLRegisterRouter(address, size = ClintConsts.size, beatBytes = p(rocket.XLen)/8, undefZero = true)(
extends TLRegisterRouter(address, size = ClintConsts.size, beatBytes = p(XLen)/8, undefZero = true)(
new TLRegBundle((), _) with CoreplexLocalInterrupterBundle)(
new TLRegModule((), _, _) with CoreplexLocalInterrupterModule)
{

View File

@@ -28,8 +28,10 @@ class TLCacheCork(unsafe: Boolean = false)(implicit p: Parameters) extends LazyM
}
((io.in zip io.out) zip (node.edgesIn zip node.edgesOut)) foreach { case ((in, out), (edgeIn, edgeOut)) =>
require (edgeIn.client.clients.size == 1 || unsafe, "Only one client can safely use a TLCacheCork")
require (edgeIn.client.clients.filter(_.supportsProbe).size == 1, "Only one caching client allowed")
val clients = edgeIn.client.clients
val caches = clients.filter(_.supportsProbe)
require (clients.size == 1 || caches.size == 0 || unsafe, "Only one client can safely use a TLCacheCork")
require (caches.size <= 1, "Only one caching client allowed")
edgeOut.manager.managers.foreach { case m =>
require (!m.supportsAcquireB, "Cannot support caches beyond the Cork")
}

View File

@@ -27,7 +27,7 @@ case class TLManagerParameters(
require (!address.isEmpty)
address.foreach { a => require (a.finite) }
address.combinations(2).foreach { case Seq(x,y) => require (!x.overlaps(y)) }
address.combinations(2).foreach { case Seq(x,y) => require (!x.overlaps(y), s"$x and $y overlap.") }
require (supportsPutFull.contains(supportsPutPartial))
require (supportsPutFull.contains(supportsArithmetic))
require (supportsPutFull.contains(supportsLogical))

View File

@@ -1,181 +0,0 @@
// See LICENSE.Berkeley for license details.
// See LICENSE.SiFive for license details.
package uncore.util
import Chisel._
import config.{Parameters, Field}
import rocket.PAddrBits
import util.ParameterizedBundle
import uncore.constants._
case class CacheConfig(
nSets: Int,
nWays: Int,
rowBits: Int,
nTLBEntries: Int,
cacheIdBits: Int,
splitMetadata: Boolean)
case class CacheName(id: String) extends Field[CacheConfig]
case object CacheName extends Field[CacheName]
case object Replacer extends Field[() => ReplacementPolicy]
case object L2Replacer extends Field[() => SeqReplacementPolicy]
case object NPrimaryMisses extends Field[Int]
case object NSecondaryMisses extends Field[Int]
case object CacheBlockBytes extends Field[Int]
case object ECCCode extends Field[Option[Code]]
trait HasCacheParameters {
implicit val p: Parameters
val cacheConfig = p(p(CacheName))
val nSets = cacheConfig.nSets
val blockOffBits = log2Up(p(CacheBlockBytes))
val cacheIdBits = cacheConfig.cacheIdBits
val idxBits = log2Up(cacheConfig.nSets)
val untagBits = blockOffBits + cacheIdBits + idxBits
val tagBits = p(PAddrBits) - untagBits
val nWays = cacheConfig.nWays
val wayBits = log2Up(nWays)
val isDM = nWays == 1
val rowBits = cacheConfig.rowBits
val rowBytes = rowBits/8
val rowOffBits = log2Up(rowBytes)
val code = p(ECCCode).getOrElse(new IdentityCode)
val hasSplitMetadata = cacheConfig.splitMetadata
}
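The derived widths above follow directly from the cache geometry: the block offset and set index consume the low address bits and the tag is whatever remains of the physical address. A minimal standalone sketch (plain Scala, with a simplified log2Up; the 64-set, 64-byte-block geometry and 32-bit physical address are assumptions for illustration):
object CacheGeometrySketch extends App {
  def log2Up(x: Int): Int = if (x <= 1) 0 else 32 - Integer.numberOfLeadingZeros(x - 1)
  val nSets = 64; val blockBytes = 64; val cacheIdBits = 0; val paddrBits = 32
  val blockOffBits = log2Up(blockBytes)                     // 6: byte offset within a block
  val idxBits      = log2Up(nSets)                          // 6: set index
  val untagBits    = blockOffBits + cacheIdBits + idxBits   // 12: bits below the tag
  val tagBits      = paddrBits - untagBits                  // 20: remaining address bits
  println(s"offset=$blockOffBits index=$idxBits tag=$tagBits")
}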
abstract class CacheModule(implicit val p: Parameters) extends Module
with HasCacheParameters
abstract class CacheBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
with HasCacheParameters
abstract class ReplacementPolicy {
def way: UInt
def miss: Unit
def hit: Unit
}
class RandomReplacement(ways: Int) extends ReplacementPolicy {
private val replace = Wire(Bool())
replace := Bool(false)
val lfsr = LFSR16(replace)
def way = if(ways == 1) UInt(0) else lfsr(log2Up(ways)-1,0)
def miss = replace := Bool(true)
def hit = {}
}
abstract class SeqReplacementPolicy {
def access(set: UInt): Unit
def update(valid: Bool, hit: Bool, set: UInt, way: UInt): Unit
def way: UInt
}
class SeqRandom(n_ways: Int) extends SeqReplacementPolicy {
val logic = new RandomReplacement(n_ways)
def access(set: UInt) = { }
def update(valid: Bool, hit: Bool, set: UInt, way: UInt) = {
when (valid && !hit) { logic.miss }
}
def way = logic.way
}
class PseudoLRU(n: Int)
{
require(isPow2(n))
val state_reg = Reg(Bits(width = n))
def access(way: UInt) {
state_reg := get_next_state(state_reg,way)
}
def get_next_state(state: UInt, way: UInt) = {
var next_state = state
var idx = UInt(1,1)
for (i <- log2Up(n)-1 to 0 by -1) {
val bit = way(i)
next_state = next_state.bitSet(idx, !bit)
idx = Cat(idx, bit)
}
next_state
}
def replace = get_replace_way(state_reg)
def get_replace_way(state: Bits) = {
var idx = UInt(1,1)
for (i <- 0 until log2Up(n))
idx = Cat(idx, state(idx))
idx(log2Up(n)-1,0)
}
}
class SeqPLRU(n_sets: Int, n_ways: Int) extends SeqReplacementPolicy {
val state = SeqMem(n_sets, Bits(width = n_ways-1))
val logic = new PseudoLRU(n_ways)
val current_state = Wire(Bits())
val plru_way = logic.get_replace_way(current_state)
val next_state = Wire(Bits())
def access(set: UInt) = {
current_state := Cat(state.read(set), Bits(0, width = 1))
}
def update(valid: Bool, hit: Bool, set: UInt, way: UInt) = {
val update_way = Mux(hit, way, plru_way)
next_state := logic.get_next_state(current_state, update_way)
when (valid) { state.write(set, next_state(n_ways-1,1)) }
}
def way = plru_way
}
abstract class Metadata(implicit p: Parameters) extends CacheBundle()(p) {
val tag = Bits(width = tagBits)
}
class MetaReadReq(implicit p: Parameters) extends CacheBundle()(p) {
val idx = Bits(width = idxBits)
val way_en = Bits(width = nWays)
}
class MetaWriteReq[T <: Metadata](gen: T)(implicit p: Parameters) extends MetaReadReq()(p) {
val data = gen.cloneType
override def cloneType = new MetaWriteReq(gen)(p).asInstanceOf[this.type]
}
class MetadataArray[T <: Metadata](onReset: () => T)(implicit p: Parameters) extends CacheModule()(p) {
val rstVal = onReset()
val io = new Bundle {
val read = Decoupled(new MetaReadReq).flip
val write = Decoupled(new MetaWriteReq(rstVal)).flip
val resp = Vec(nWays, rstVal.cloneType).asOutput
}
val rst_cnt = Reg(init=UInt(0, log2Up(nSets+1)))
val rst = rst_cnt < UInt(nSets)
val waddr = Mux(rst, rst_cnt, io.write.bits.idx)
val wdata = Mux(rst, rstVal, io.write.bits.data).asUInt
val wmask = Mux(rst || Bool(nWays == 1), SInt(-1), io.write.bits.way_en.asSInt).toBools
val rmask = Mux(rst || Bool(nWays == 1), SInt(-1), io.read.bits.way_en.asSInt).toBools
when (rst) { rst_cnt := rst_cnt+UInt(1) }
val metabits = rstVal.getWidth
if (hasSplitMetadata) {
val tag_arrs = List.fill(nWays){ SeqMem(nSets, UInt(width = metabits)) }
val tag_readout = Wire(Vec(nWays,rstVal.cloneType))
(0 until nWays).foreach { (i) =>
when (rst || (io.write.valid && wmask(i))) {
tag_arrs(i).write(waddr, wdata)
}
io.resp(i) := rstVal.fromBits(tag_arrs(i).read(io.read.bits.idx, io.read.valid && rmask(i)))
}
} else {
val tag_arr = SeqMem(nSets, Vec(nWays, UInt(width = metabits)))
when (rst || io.write.valid) {
tag_arr.write(waddr, Vec.fill(nWays)(wdata), wmask)
}
io.resp := tag_arr.read(io.read.bits.idx, io.read.valid).map(rstVal.fromBits(_))
}
io.read.ready := !rst && !io.write.valid // so really this could be a 6T RAM
io.write.ready := !rst
}

View File

@@ -0,0 +1,83 @@
// See LICENSE.Berkeley for license details.
// See LICENSE.SiFive for license details.
package util
import Chisel._
abstract class ReplacementPolicy {
def way: UInt
def miss: Unit
def hit: Unit
}
class RandomReplacement(ways: Int) extends ReplacementPolicy {
private val replace = Wire(Bool())
replace := Bool(false)
val lfsr = LFSR16(replace)
def way = if(ways == 1) UInt(0) else lfsr(log2Up(ways)-1,0)
def miss = replace := Bool(true)
def hit = {}
}
abstract class SeqReplacementPolicy {
def access(set: UInt): Unit
def update(valid: Bool, hit: Bool, set: UInt, way: UInt): Unit
def way: UInt
}
class SeqRandom(n_ways: Int) extends SeqReplacementPolicy {
val logic = new RandomReplacement(n_ways)
def access(set: UInt) = { }
def update(valid: Bool, hit: Bool, set: UInt, way: UInt) = {
when (valid && !hit) { logic.miss }
}
def way = logic.way
}
class PseudoLRU(n: Int)
{
require(isPow2(n))
val state_reg = Reg(Bits(width = n))
def access(way: UInt) {
state_reg := get_next_state(state_reg,way)
}
def get_next_state(state: UInt, way: UInt) = {
var next_state = state
var idx = UInt(1,1)
for (i <- log2Up(n)-1 to 0 by -1) {
val bit = way(i)
next_state = next_state.bitSet(idx, !bit)
idx = Cat(idx, bit)
}
next_state
}
def replace = get_replace_way(state_reg)
def get_replace_way(state: Bits) = {
var idx = UInt(1,1)
for (i <- 0 until log2Up(n))
idx = Cat(idx, state(idx))
idx(log2Up(n)-1,0)
}
}
class SeqPLRU(n_sets: Int, n_ways: Int) extends SeqReplacementPolicy {
val state = SeqMem(n_sets, Bits(width = n_ways-1))
val logic = new PseudoLRU(n_ways)
val current_state = Wire(Bits())
val plru_way = logic.get_replace_way(current_state)
val next_state = Wire(Bits())
def access(set: UInt) = {
current_state := Cat(state.read(set), Bits(0, width = 1))
}
def update(valid: Bool, hit: Bool, set: UInt, way: UInt) = {
val update_way = Mux(hit, way, plru_way)
next_state := logic.get_next_state(current_state, update_way)
when (valid) { state.write(set, next_state(n_ways-1,1)) }
}
def way = plru_way
}
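PseudoLRU above keeps one bit per internal node of a binary tree over the ways: an access flips the bits on its path to point away from the way just used, and get_replace_way follows the bits from the root down to a leaf to pick a victim. A minimal standalone trace (plain Scala, not Chisel; the 4-way set and the access sequence are assumptions for illustration):
object PlruTraceSketch extends App {
  val n = 4                                             // ways (power of two)
  val levels = Integer.numberOfTrailingZeros(n)         // log2(n) tree levels
  def bit(x: Int, i: Int): Int = (x >> i) & 1
  // Mirrors get_next_state: walk from the root, pointing each node away from the accessed way.
  def nextState(state: Int, way: Int): Int = {
    var s = state; var idx = 1
    for (i <- (levels - 1) to 0 by -1) {
      val b = bit(way, i)
      s = if (b == 0) s | (1 << idx) else s & ~(1 << idx)   // bitSet(idx, !b)
      idx = (idx << 1) | b                                   // Cat(idx, b)
    }
    s
  }
  // Mirrors get_replace_way: follow the node bits from the root to a leaf.
  def replaceWay(state: Int): Int = {
    var idx = 1
    for (_ <- 0 until levels) idx = (idx << 1) | bit(state, idx)
    idx & (n - 1)
  }
  var st = 0
  for (w <- Seq(0, 2, 1)) st = nextState(st, w)
  println(replaceWay(st))       // 3: the only way never touched
  st = nextState(st, 3)
  println(replaceWay(st))       // 0: now the least recently used way
}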