move junctions utils into top-level utils package
parent 7dd4492abb
commit 1882241493
@@ -5,8 +5,8 @@ import uncore.tilelink._
 import uncore.constants._
 import uncore.agents._
 import uncore.util._
-import junctions.{ParameterizedBundle, HasAddrMapParameters}
+import junctions.HasAddrMapParameters
-import util.Timer
+import util.{ParameterizedBundle, Timer}
 import rocket.HellaCacheIO
 import cde.{Parameters, Field}
 
@@ -3,9 +3,10 @@ package groundtest
 import Chisel._
 import rocket._
 import uncore.tilelink._
-import junctions._
 import scala.util.Random
 import scala.collection.mutable.ListBuffer
+import junctions.HasAddrMapParameters
+import util.ParameterizedBundle
 import cde.{Parameters, Field}
 
 case object BuildGroundTest extends Field[Parameters => GroundTest]
@@ -3,6 +3,7 @@ package junctions
 import Chisel._
 import cde.{Parameters, Field}
 import unittest.UnitTest
+import util.ParameterizedBundle
 
 object HastiConstants
 {
@@ -3,6 +3,7 @@
 package junctions
 import Chisel._
 import scala.math._
+import util.{HellaQueue, ParameterizedBundle}
 import cde.{Parameters, Field}
 
 case object MIFAddrBits extends Field[Int]
@@ -4,6 +4,7 @@ package junctions
 import Chisel._
 import scala.math.max
 import scala.collection.mutable.ArraySeq
+import util.{ParameterizedBundle, HellaPeekingArbiter}
 import cde.{Parameters, Field}
 
 case object NastiKey extends Field[NastiParameters]
@@ -449,7 +450,7 @@ class NastiRouter(nSlaves: Int, routeSel: UInt => UInt)(implicit p: Parameters)
   io.master.w.ready := w_ready || err_slave.io.w.ready
 
   val b_arb = Module(new RRArbiter(new NastiWriteResponseChannel, nSlaves + 1))
-  val r_arb = Module(new JunctionsPeekingArbiter(
+  val r_arb = Module(new HellaPeekingArbiter(
     new NastiReadDataChannel, nSlaves + 1,
     // we can unlock if it's the last beat
     (r: NastiReadDataChannel) => r.last))
@@ -1,246 +0,0 @@ (entire file removed)
/// See LICENSE for license details.

package junctions

import Chisel._
import cde.Parameters

class ParameterizedBundle(implicit p: Parameters) extends Bundle {
  override def cloneType = {
    try {
      this.getClass.getConstructors.head.newInstance(p).asInstanceOf[this.type]
    } catch {
      case e: java.lang.IllegalArgumentException =>
        throwException("Unable to use ParamaterizedBundle.cloneType on " +
          this.getClass + ", probably because " + this.getClass +
          "() takes more than one argument. Consider overriding " +
          "cloneType() on " + this.getClass, e)
    }
  }
}

class HellaFlowQueue[T <: Data](val entries: Int)(data: => T) extends Module {
  val io = new QueueIO(data, entries)
  require(entries > 1)

  val do_flow = Wire(Bool())
  val do_enq = io.enq.fire() && !do_flow
  val do_deq = io.deq.fire() && !do_flow

  val maybe_full = Reg(init=Bool(false))
  val enq_ptr = Counter(do_enq, entries)._1
  val (deq_ptr, deq_done) = Counter(do_deq, entries)
  when (do_enq =/= do_deq) { maybe_full := do_enq }

  val ptr_match = enq_ptr === deq_ptr
  val empty = ptr_match && !maybe_full
  val full = ptr_match && maybe_full
  val atLeastTwo = full || enq_ptr - deq_ptr >= UInt(2)
  do_flow := empty && io.deq.ready

  val ram = SeqMem(entries, data)
  when (do_enq) { ram.write(enq_ptr, io.enq.bits) }

  val ren = io.deq.ready && (atLeastTwo || !io.deq.valid && !empty)
  val raddr = Mux(io.deq.valid, Mux(deq_done, UInt(0), deq_ptr + UInt(1)), deq_ptr)
  val ram_out_valid = Reg(next = ren)

  io.deq.valid := Mux(empty, io.enq.valid, ram_out_valid)
  io.enq.ready := !full
  io.deq.bits := Mux(empty, io.enq.bits, ram.read(raddr, ren))
}

class HellaQueue[T <: Data](val entries: Int)(data: => T) extends Module {
  val io = new QueueIO(data, entries)

  val fq = Module(new HellaFlowQueue(entries)(data))
  fq.io.enq <> io.enq
  io.deq <> Queue(fq.io.deq, 1, pipe = true)
}

object HellaQueue {
  def apply[T <: Data](enq: DecoupledIO[T], entries: Int) = {
    val q = Module((new HellaQueue(entries)) { enq.bits })
    q.io.enq.valid := enq.valid // not using <> so that override is allowed
    q.io.enq.bits := enq.bits
    enq.ready := q.io.enq.ready
    q.io.deq
  }
}

/** A generalized locking RR arbiter that addresses the limitations of the
 *  version in the Chisel standard library */
abstract class JunctionsAbstractLockingArbiter[T <: Data](typ: T, arbN: Int, rr: Boolean = false)
    extends Module {

  val io = new Bundle {
    val in = Vec(arbN, Decoupled(typ.cloneType)).flip
    val out = Decoupled(typ.cloneType)
  }

  def rotateLeft[T <: Data](norm: Vec[T], rot: UInt): Vec[T] = {
    val n = norm.size
    Vec.tabulate(n) { i =>
      Mux(rot < UInt(n - i), norm(UInt(i) + rot), norm(rot - UInt(n - i)))
    }
  }

  val lockIdx = Reg(init = UInt(0, log2Up(arbN)))
  val locked = Reg(init = Bool(false))

  val choice = if (rr) {
    PriorityMux(
      rotateLeft(Vec(io.in.map(_.valid)), lockIdx + UInt(1)),
      rotateLeft(Vec((0 until arbN).map(UInt(_))), lockIdx + UInt(1)))
  } else {
    PriorityEncoder(io.in.map(_.valid))
  }

  val chosen = Mux(locked, lockIdx, choice)

  for (i <- 0 until arbN) {
    io.in(i).ready := io.out.ready && chosen === UInt(i)
  }

  io.out.valid := io.in(chosen).valid
  io.out.bits := io.in(chosen).bits
}

/** This locking arbiter determines when it is safe to unlock
 *  by peeking at the data */
class JunctionsPeekingArbiter[T <: Data](
    typ: T, arbN: Int,
    canUnlock: T => Bool,
    needsLock: Option[T => Bool] = None,
    rr: Boolean = false)
    extends JunctionsAbstractLockingArbiter(typ, arbN, rr) {

  def realNeedsLock(data: T): Bool =
    needsLock.map(_(data)).getOrElse(Bool(true))

  when (io.out.fire()) {
    when (!locked && realNeedsLock(io.out.bits)) {
      lockIdx := choice
      locked := Bool(true)
    }
    // the unlock statement takes precedent
    when (canUnlock(io.out.bits)) {
      locked := Bool(false)
    }
  }
}

/** This arbiter determines when it is safe to unlock by counting transactions */
class JunctionsCountingArbiter[T <: Data](
    typ: T, arbN: Int, count: Int,
    val needsLock: Option[T => Bool] = None,
    rr: Boolean = false)
    extends JunctionsAbstractLockingArbiter(typ, arbN, rr) {

  def realNeedsLock(data: T): Bool =
    needsLock.map(_(data)).getOrElse(Bool(true))

  // if count is 1, you should use a non-locking arbiter
  require(count > 1, "CountingArbiter cannot have count <= 1")

  val lock_ctr = Counter(count)

  when (io.out.fire()) {
    when (!locked && realNeedsLock(io.out.bits)) {
      lockIdx := choice
      locked := Bool(true)
      lock_ctr.inc()
    }

    when (locked) {
      when (lock_ctr.inc()) { locked := Bool(false) }
    }
  }
}

class ReorderQueueWrite[T <: Data](dType: T, tagWidth: Int) extends Bundle {
  val data = dType.cloneType
  val tag = UInt(width = tagWidth)

  override def cloneType =
    new ReorderQueueWrite(dType, tagWidth).asInstanceOf[this.type]
}

class ReorderEnqueueIO[T <: Data](dType: T, tagWidth: Int)
    extends DecoupledIO(new ReorderQueueWrite(dType, tagWidth)) {

  override def cloneType =
    new ReorderEnqueueIO(dType, tagWidth).asInstanceOf[this.type]
}

class ReorderDequeueIO[T <: Data](dType: T, tagWidth: Int) extends Bundle {
  val valid = Bool(INPUT)
  val tag = UInt(INPUT, tagWidth)
  val data = dType.cloneType.asOutput
  val matches = Bool(OUTPUT)

  override def cloneType =
    new ReorderDequeueIO(dType, tagWidth).asInstanceOf[this.type]
}

class ReorderQueue[T <: Data](dType: T, tagWidth: Int, size: Option[Int] = None)
    extends Module {
  val io = new Bundle {
    val enq = new ReorderEnqueueIO(dType, tagWidth).flip
    val deq = new ReorderDequeueIO(dType, tagWidth)
  }

  val tagSpaceSize = 1 << tagWidth
  val actualSize = size.getOrElse(tagSpaceSize)

  if (tagSpaceSize > actualSize) {
    val roq_data = Reg(Vec(actualSize, dType))
    val roq_tags = Reg(Vec(actualSize, UInt(width = tagWidth)))
    val roq_free = Reg(init = Vec.fill(actualSize)(Bool(true)))

    val roq_enq_addr = PriorityEncoder(roq_free)
    val roq_matches = roq_tags.zip(roq_free)
      .map { case (tag, free) => tag === io.deq.tag && !free }
    val roq_deq_onehot = PriorityEncoderOH(roq_matches)

    io.enq.ready := roq_free.reduce(_ || _)
    io.deq.data := Mux1H(roq_deq_onehot, roq_data)
    io.deq.matches := roq_matches.reduce(_ || _)

    when (io.enq.valid && io.enq.ready) {
      roq_data(roq_enq_addr) := io.enq.bits.data
      roq_tags(roq_enq_addr) := io.enq.bits.tag
      roq_free(roq_enq_addr) := Bool(false)
    }

    when (io.deq.valid) {
      roq_free(OHToUInt(roq_deq_onehot)) := Bool(true)
    }

    println(s"Warning - using a CAM for ReorderQueue, tagBits: ${tagWidth} size: ${actualSize}")
  } else {
    val roq_data = Mem(tagSpaceSize, dType)
    val roq_free = Reg(init = Vec.fill(tagSpaceSize)(Bool(true)))

    io.enq.ready := roq_free(io.enq.bits.tag)
    io.deq.data := roq_data(io.deq.tag)
    io.deq.matches := !roq_free(io.deq.tag)

    when (io.enq.valid && io.enq.ready) {
      roq_data(io.enq.bits.tag) := io.enq.bits.data
      roq_free(io.enq.bits.tag) := Bool(false)
    }

    when (io.deq.valid) {
      roq_free(io.deq.tag) := Bool(true)
    }
  }
}

object DecoupledHelper {
  def apply(rvs: Bool*) = new DecoupledHelper(rvs)
}

class DecoupledHelper(val rvs: Seq[Bool]) {
  def fire(exclude: Bool, includes: Bool*) = {
    (rvs.filter(_ ne exclude) ++ includes).reduce(_ && _)
  }
}
@@ -4,7 +4,7 @@ package rocket
 
 import Chisel._
 import cde.{Parameters, Field}
-import junctions.{ParameterizedBundle, DecoupledHelper}
+import util.{ParameterizedBundle, DecoupledHelper}
 
 class HellaCacheArbiter(n: Int)(implicit p: Parameters) extends Module
 {
@@ -3,11 +3,11 @@
 package rocket
 
 import Chisel._
-import junctions._
 import cde.{Parameters, Field}
 import Util._
 import uncore.util._
 import uncore.agents.PseudoLRU
+import util.ParameterizedBundle
 
 case object BtbKey extends Field[BtbParameters]
 
@@ -127,11 +127,11 @@ class DCache(implicit p: Parameters) extends L1HellaCacheModule()(p) {
     meta.io.read <> metaReadArb.io.out
     meta.io.write <> metaWriteArb.io.out
     val s1_meta = meta.io.resp
-    val s1_hit_way = s1_meta.map(r => r.coh.isValid() && r.tag === s1_tag).asUInt
+    val s1_meta_hit_way = s1_meta.map(r => r.coh.isValid() && r.tag === s1_tag).asUInt
-    val s1_hit_state = ClientMetadata.onReset.fromBits(
+    val s1_meta_hit_state = ClientMetadata.onReset.fromBits(
       s1_meta.map(r => Mux(r.tag === s1_tag, r.coh.asUInt, UInt(0)))
       .reduce (_|_))
-    (s1_hit_way, s1_hit_state, s1_meta(s1_victim_way))
+    (s1_meta_hit_way, s1_meta_hit_state, s1_meta(s1_victim_way))
   }
   val s1_data_way = Mux(inWriteback, releaseWay, s1_hit_way)
   val s1_data = Mux1H(s1_data_way, data.io.resp) // retime into s2 if critical
@@ -5,7 +5,7 @@ package rocket
 import Chisel._
 import Util._
 import cde.{Parameters, Field}
-import junctions._
+import util.ParameterizedBundle
 
 class Instruction(implicit val p: Parameters) extends ParameterizedBundle with HasCoreParameters {
   val pf0 = Bool() // page fault on first half of instruction
@@ -3,12 +3,12 @@
 package rocket
 
 import Chisel._
-import junctions._
 import uncore.tilelink._
 import uncore.coherence._
 import uncore.agents._
 import uncore.util._
 import uncore.constants._
+import util.{ParameterizedBundle, DecoupledHelper}
 import cde.{Parameters, Field}
 import Util._
 
@@ -44,7 +44,7 @@ trait HasL1HellaCacheParameters extends HasL1CacheParameters {
 
 abstract class L1HellaCacheModule(implicit val p: Parameters) extends Module
   with HasL1HellaCacheParameters
-abstract class L1HellaCacheBundle(implicit val p: Parameters) extends junctions.ParameterizedBundle()(p)
+abstract class L1HellaCacheBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
   with HasL1HellaCacheParameters
 
 trait HasCoreMemOp extends HasCoreParameters {
@@ -3,10 +3,11 @@
 package rocket
 
 import Chisel._
-import junctions._
 import uncore.devices._
 import uncore.agents.CacheName
 import uncore.constants._
+import junctions.HasAddrMapParameters
+import util.ParameterizedBundle
 import Util._
 import cde.{Parameters, Field}
 
@@ -8,6 +8,7 @@ import junctions._
 import uncore.tilelink._
 import uncore.tilelink2.{LazyModule, LazyModuleImp}
 import uncore.devices._
+import util.ParameterizedBundle
 import rocket._
 import rocket.Util._
 import coreplex._
@@ -4,7 +4,8 @@ package uncore.agents
 
 import Chisel._
 import cde.{Parameters, Field}
-import junctions._
+import junctions.PAddrBits
+import util.ParameterizedBundle
 import uncore.tilelink._
 import uncore.converters._
 import uncore.coherence._
@@ -41,7 +42,7 @@ trait HasCoherenceAgentParameters {
 
 abstract class CoherenceAgentModule(implicit val p: Parameters) extends Module
   with HasCoherenceAgentParameters
-abstract class CoherenceAgentBundle(implicit val p: Parameters) extends junctions.ParameterizedBundle()(p)
+abstract class CoherenceAgentBundle(implicit val p: Parameters) extends ParameterizedBundle()(p)
   with HasCoherenceAgentParameters
 
 trait HasCoherenceAgentWiringHelpers {
@@ -4,7 +4,8 @@ package uncore.agents
 
 import Chisel._
 import scala.reflect.ClassTag
-import junctions._
+import junctions.PAddrBits
+import util.ParameterizedBundle
 import uncore.util.AMOALU
 import uncore.coherence._
 import uncore.tilelink._
@@ -7,7 +7,7 @@ import uncore.coherence._
 import uncore.tilelink._
 import uncore.util._
 import uncore.util._
-import junctions._
+import util.ParameterizedBundle
 import cde.{Field, Parameters}
 import scala.math.max
 
@@ -2,6 +2,7 @@ package uncore.converters
 
 import Chisel._
 import junctions._
+import util.{ReorderQueue, DecoupledHelper}
 import uncore.tilelink._
 import uncore.constants._
 import cde.Parameters
@@ -1,7 +1,8 @@
 package uncore.converters
 
 import Chisel._
-import junctions._
+import util.{ReorderQueue, DecoupledHelper}
+import junctions.PAddrBits
 import uncore.tilelink._
 import uncore.util._
 import uncore.constants._
@@ -3,9 +3,10 @@
 package uncore.devices
 
 import Chisel._
+import junctions._
 import uncore.tilelink._
 import uncore.util._
-import junctions._
+import util.ParameterizedBundle
 import cde.{Parameters, Config, Field}
 
 // *****************************************
@@ -90,7 +90,7 @@ trait HasTileLinkParameters {
 
 abstract class TLModule(implicit val p: Parameters) extends Module
   with HasTileLinkParameters
-abstract class TLBundle(implicit val p: Parameters) extends junctions.ParameterizedBundle()(p)
+abstract class TLBundle(implicit val p: Parameters) extends util.ParameterizedBundle()(p)
   with HasTileLinkParameters
 
 /** Base trait for all TileLink channels */
@@ -61,7 +61,7 @@ class BasicCrossbar[T <: Data](conf: CrossbarConfig[T]) extends AbstractCrossbar
 
 abstract class LogicalNetwork extends Module
 
-class LogicalHeader(implicit p: Parameters) extends junctions.ParameterizedBundle()(p) {
+class LogicalHeader(implicit p: Parameters) extends util.ParameterizedBundle()(p) {
   val src = UInt(width = p(LNHeaderBits))
   val dst = UInt(width = p(LNHeaderBits))
 }
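// A minimal sketch (not part of this commit) of the pattern the rewrites above rely on:
// a bundle extending util.ParameterizedBundle captures the implicit cde.Parameters
// object so cloneType can rebuild it. The field name and width below are illustrative
// assumptions, not code from the repository.
import Chisel._
import cde.Parameters
import util.ParameterizedBundle

class ExampleHeader(implicit p: Parameters) extends ParameterizedBundle()(p) {
  val id = UInt(width = 8) // a width derived from p would normally go here
}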
src/main/scala/util/Arbiters.scala (new file)
@@ -0,0 +1,93 @@
package util

import Chisel._
import cde.Parameters

/** A generalized locking RR arbiter that addresses the limitations of the
 *  version in the Chisel standard library */
abstract class HellaLockingArbiter[T <: Data](typ: T, arbN: Int, rr: Boolean = false)
    extends Module {

  val io = new Bundle {
    val in = Vec(arbN, Decoupled(typ.cloneType)).flip
    val out = Decoupled(typ.cloneType)
  }

  def rotateLeft[T <: Data](norm: Vec[T], rot: UInt): Vec[T] = {
    val n = norm.size
    Vec.tabulate(n) { i =>
      Mux(rot < UInt(n - i), norm(UInt(i) + rot), norm(rot - UInt(n - i)))
    }
  }

  val lockIdx = Reg(init = UInt(0, log2Up(arbN)))
  val locked = Reg(init = Bool(false))

  val choice = if (rr) {
    PriorityMux(
      rotateLeft(Vec(io.in.map(_.valid)), lockIdx + UInt(1)),
      rotateLeft(Vec((0 until arbN).map(UInt(_))), lockIdx + UInt(1)))
  } else {
    PriorityEncoder(io.in.map(_.valid))
  }

  val chosen = Mux(locked, lockIdx, choice)

  for (i <- 0 until arbN) {
    io.in(i).ready := io.out.ready && chosen === UInt(i)
  }

  io.out.valid := io.in(chosen).valid
  io.out.bits := io.in(chosen).bits
}

/** This locking arbiter determines when it is safe to unlock
 *  by peeking at the data */
class HellaPeekingArbiter[T <: Data](
    typ: T, arbN: Int,
    canUnlock: T => Bool,
    needsLock: Option[T => Bool] = None,
    rr: Boolean = false)
    extends HellaLockingArbiter(typ, arbN, rr) {

  def realNeedsLock(data: T): Bool =
    needsLock.map(_(data)).getOrElse(Bool(true))

  when (io.out.fire()) {
    when (!locked && realNeedsLock(io.out.bits)) {
      lockIdx := choice
      locked := Bool(true)
    }
    // the unlock statement takes precedent
    when (canUnlock(io.out.bits)) {
      locked := Bool(false)
    }
  }
}

/** This arbiter determines when it is safe to unlock by counting transactions */
class HellaCountingArbiter[T <: Data](
    typ: T, arbN: Int, count: Int,
    val needsLock: Option[T => Bool] = None,
    rr: Boolean = false)
    extends HellaLockingArbiter(typ, arbN, rr) {

  def realNeedsLock(data: T): Bool =
    needsLock.map(_(data)).getOrElse(Bool(true))

  // if count is 1, you should use a non-locking arbiter
  require(count > 1, "CountingArbiter cannot have count <= 1")

  val lock_ctr = Counter(count)

  when (io.out.fire()) {
    when (!locked && realNeedsLock(io.out.bits)) {
      lockIdx := choice
      locked := Bool(true)
      lock_ctr.inc()
    }

    when (locked) {
      when (lock_ctr.inc()) { locked := Bool(false) }
    }
  }
}
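// A minimal usage sketch (not part of this commit): merging several streams of a
// multi-beat message type with HellaPeekingArbiter, holding the grant on one input
// until the peeked `last` bit allows unlocking, as the NastiRouter hunk above does.
// The Beat bundle and the round-robin setting are illustrative assumptions.
import Chisel._
import util.HellaPeekingArbiter

class Beat extends Bundle {
  val data = UInt(width = 64)
  val last = Bool()
}

class BeatMerger(n: Int) extends Module {
  val io = new Bundle {
    val in = Vec(n, Decoupled(new Beat)).flip
    val out = Decoupled(new Beat)
  }
  // lock onto the chosen input for a whole burst; unlock on its last beat
  val arb = Module(new HellaPeekingArbiter(new Beat, n, (b: Beat) => b.last, rr = true))
  arb.io.in <> io.in
  io.out <> arb.io.out
}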
src/main/scala/util/HellaQueue.scala (new file)
@@ -0,0 +1,55 @@
package util

import Chisel._
import cde.Parameters

class HellaFlowQueue[T <: Data](val entries: Int)(data: => T) extends Module {
  val io = new QueueIO(data, entries)
  require(entries > 1)

  val do_flow = Wire(Bool())
  val do_enq = io.enq.fire() && !do_flow
  val do_deq = io.deq.fire() && !do_flow

  val maybe_full = Reg(init=Bool(false))
  val enq_ptr = Counter(do_enq, entries)._1
  val (deq_ptr, deq_done) = Counter(do_deq, entries)
  when (do_enq =/= do_deq) { maybe_full := do_enq }

  val ptr_match = enq_ptr === deq_ptr
  val empty = ptr_match && !maybe_full
  val full = ptr_match && maybe_full
  val atLeastTwo = full || enq_ptr - deq_ptr >= UInt(2)
  do_flow := empty && io.deq.ready

  val ram = SeqMem(entries, data)
  when (do_enq) { ram.write(enq_ptr, io.enq.bits) }

  val ren = io.deq.ready && (atLeastTwo || !io.deq.valid && !empty)
  val raddr = Mux(io.deq.valid, Mux(deq_done, UInt(0), deq_ptr + UInt(1)), deq_ptr)
  val ram_out_valid = Reg(next = ren)

  io.deq.valid := Mux(empty, io.enq.valid, ram_out_valid)
  io.enq.ready := !full
  io.deq.bits := Mux(empty, io.enq.bits, ram.read(raddr, ren))
}

class HellaQueue[T <: Data](val entries: Int)(data: => T) extends Module {
  val io = new QueueIO(data, entries)

  val fq = Module(new HellaFlowQueue(entries)(data))
  fq.io.enq <> io.enq
  io.deq <> Queue(fq.io.deq, 1, pipe = true)
}

object HellaQueue {
  def apply[T <: Data](enq: DecoupledIO[T], entries: Int) = {
    val q = Module((new HellaQueue(entries)) { enq.bits })
    q.io.enq.valid := enq.valid // not using <> so that override is allowed
    q.io.enq.bits := enq.bits
    enq.ready := q.io.enq.ready
    q.io.deq
  }
}
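// A minimal usage sketch (not part of this commit): HellaQueue.apply wraps a
// DecoupledIO source in an entries-deep queue and returns the dequeue side.
// The 64-bit payload and depth of 4 are illustrative assumptions.
import Chisel._
import util.HellaQueue

class BufferedPassthrough extends Module {
  val io = new Bundle {
    val in = Decoupled(UInt(width = 64)).flip
    val out = Decoupled(UInt(width = 64))
  }
  // apply() drives io.in.ready internally and hands back the dequeue interface
  io.out <> HellaQueue(io.in, 4)
}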
src/main/scala/util/Misc.scala (new file)
@@ -0,0 +1,28 @@
package util

import Chisel._
import cde.Parameters

class ParameterizedBundle(implicit p: Parameters) extends Bundle {
  override def cloneType = {
    try {
      this.getClass.getConstructors.head.newInstance(p).asInstanceOf[this.type]
    } catch {
      case e: java.lang.IllegalArgumentException =>
        throwException("Unable to use ParamaterizedBundle.cloneType on " +
          this.getClass + ", probably because " + this.getClass +
          "() takes more than one argument. Consider overriding " +
          "cloneType() on " + this.getClass, e)
    }
  }
}

object DecoupledHelper {
  def apply(rvs: Bool*) = new DecoupledHelper(rvs)
}

class DecoupledHelper(val rvs: Seq[Bool]) {
  def fire(exclude: Bool, includes: Bool*) = {
    (rvs.filter(_ ne exclude) ++ includes).reduce(_ && _)
  }
}
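// A minimal usage sketch (not part of this commit): DecoupledHelper gathers all the
// ready/valid conditions of a multi-way handshake; fire(x) is the AND of every
// condition except x, which is the usual way to drive each ready/valid output
// without a combinational loop. The two-input join is an illustrative assumption.
import Chisel._
import util.DecoupledHelper

class Join extends Module {
  val io = new Bundle {
    val a = Decoupled(UInt(width = 32)).flip
    val b = Decoupled(UInt(width = 32)).flip
    val out = Decoupled(UInt(width = 32))
  }
  val helper = DecoupledHelper(io.a.valid, io.b.valid, io.out.ready)
  io.a.ready   := helper.fire(io.a.valid)   // b.valid && out.ready
  io.b.ready   := helper.fire(io.b.valid)   // a.valid && out.ready
  io.out.valid := helper.fire(io.out.ready) // a.valid && b.valid
  io.out.bits  := io.a.bits + io.b.bits
}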
src/main/scala/util/ReorderQueue.scala (new file)
@@ -0,0 +1,85 @@
package util

import Chisel._
import cde.Parameters

class ReorderQueueWrite[T <: Data](dType: T, tagWidth: Int) extends Bundle {
  val data = dType.cloneType
  val tag = UInt(width = tagWidth)

  override def cloneType =
    new ReorderQueueWrite(dType, tagWidth).asInstanceOf[this.type]
}

class ReorderEnqueueIO[T <: Data](dType: T, tagWidth: Int)
    extends DecoupledIO(new ReorderQueueWrite(dType, tagWidth)) {

  override def cloneType =
    new ReorderEnqueueIO(dType, tagWidth).asInstanceOf[this.type]
}

class ReorderDequeueIO[T <: Data](dType: T, tagWidth: Int) extends Bundle {
  val valid = Bool(INPUT)
  val tag = UInt(INPUT, tagWidth)
  val data = dType.cloneType.asOutput
  val matches = Bool(OUTPUT)

  override def cloneType =
    new ReorderDequeueIO(dType, tagWidth).asInstanceOf[this.type]
}

class ReorderQueue[T <: Data](dType: T, tagWidth: Int, size: Option[Int] = None)
    extends Module {
  val io = new Bundle {
    val enq = new ReorderEnqueueIO(dType, tagWidth).flip
    val deq = new ReorderDequeueIO(dType, tagWidth)
  }

  val tagSpaceSize = 1 << tagWidth
  val actualSize = size.getOrElse(tagSpaceSize)

  if (tagSpaceSize > actualSize) {
    val roq_data = Reg(Vec(actualSize, dType))
    val roq_tags = Reg(Vec(actualSize, UInt(width = tagWidth)))
    val roq_free = Reg(init = Vec.fill(actualSize)(Bool(true)))

    val roq_enq_addr = PriorityEncoder(roq_free)
    val roq_matches = roq_tags.zip(roq_free)
      .map { case (tag, free) => tag === io.deq.tag && !free }
    val roq_deq_onehot = PriorityEncoderOH(roq_matches)

    io.enq.ready := roq_free.reduce(_ || _)
    io.deq.data := Mux1H(roq_deq_onehot, roq_data)
    io.deq.matches := roq_matches.reduce(_ || _)

    when (io.enq.valid && io.enq.ready) {
      roq_data(roq_enq_addr) := io.enq.bits.data
      roq_tags(roq_enq_addr) := io.enq.bits.tag
      roq_free(roq_enq_addr) := Bool(false)
    }

    when (io.deq.valid) {
      roq_free(OHToUInt(roq_deq_onehot)) := Bool(true)
    }

    println(s"Warning - using a CAM for ReorderQueue, tagBits: ${tagWidth} size: ${actualSize}")
  } else {
    val roq_data = Mem(tagSpaceSize, dType)
    val roq_free = Reg(init = Vec.fill(tagSpaceSize)(Bool(true)))

    io.enq.ready := roq_free(io.enq.bits.tag)
    io.deq.data := roq_data(io.deq.tag)
    io.deq.matches := !roq_free(io.deq.tag)

    when (io.enq.valid && io.enq.ready) {
      roq_data(io.enq.bits.tag) := io.enq.bits.data
      roq_free(io.enq.bits.tag) := Bool(false)
    }

    when (io.deq.valid) {
      roq_free(io.deq.tag) := Bool(true)
    }
  }
}
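// A minimal usage sketch (not part of this commit): a ReorderQueue stores a payload
// under the tag of an outstanding request so a response arriving later, possibly out
// of order, can look its data up by tag, as the uncore.converters changes above now
// do via util.ReorderQueue. The 8-bit payload, 4-bit tag space and 4-entry size
// (which selects the CAM-based variant) are illustrative assumptions.
import Chisel._
import util.{ReorderQueue, ReorderEnqueueIO}

class TagTracker extends Module {
  val io = new Bundle {
    val req = new ReorderEnqueueIO(UInt(width = 8), 4).flip
    val resp_tag = UInt(INPUT, 4)
    val resp_valid = Bool(INPUT)
    val resp_data = UInt(OUTPUT, 8)
  }
  val roq = Module(new ReorderQueue(UInt(width = 8), 4, Some(4)))
  roq.io.enq <> io.req            // record (data, tag) when a request is accepted
  roq.io.deq.tag := io.resp_tag   // look the entry up and free it on response
  roq.io.deq.valid := io.resp_valid
  io.resp_data := roq.io.deq.data
}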