Merge pull request #975 from freechipsproject/async_reg

Cleanup some register primitives

This commit is contained in:
commit 929a924779
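
In short, the cleanup replaces hand-rolled synchronizer chains and the local UIntSyncChain helper with named shift-register primitives defined in the new src/main/scala/util/ShiftReg.scala. A minimal before/after sketch of the call-site migration, using only identifiers that appear in the hunks below:

    // Before: an anonymous chain of RegNext stages.
    out := (0 to sync).foldLeft(in) { case (a, _) => RegNext(a) }
    // After: a named synchronizer primitive that backend flows can recognize and constrain.
    out := SynchronizerShiftReg(in, sync)

    // Asynchronously reset call sites move from UIntSyncChain to
    // AsyncResetSynchronizerShiftReg, which takes an optional instance name.
    val ridx = AsyncResetSynchronizerShiftReg(io.ridx, sync, Some("ridx_gray"))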
@@ -6,6 +6,7 @@ import Chisel._
 import chisel3.internal.sourceinfo.SourceInfo
 import freechips.rocketchip.config.Parameters
 import freechips.rocketchip.diplomacy._
+import freechips.rocketchip.util.SynchronizerShiftReg
 import scala.collection.mutable.ListBuffer
 import scala.math.max
@@ -139,7 +140,7 @@ class IntXing(sync: Int = 3)(implicit p: Parameters) extends LazyModule
     }

     (io.in zip io.out) foreach { case (in, out) =>
-      out := (0 to sync).foldLeft(in) { case (a, _) => RegNext(a) }
+      out := SynchronizerShiftReg(in, sync)
     }
   }
 }
@@ -13,27 +13,12 @@ object GrayCounter {
   }
 }

-object UIntSyncChain {
-  def apply(in: UInt, sync: Int, name: String = "gray"): UInt = {
-    val syncv = List.tabulate(sync) { i =>
-      Module (new AsyncResetRegVec(w = in.getWidth, 0)).suggestName(s"${name}_sync_${i}")
-    }
-    syncv.last.io.d := in
-    syncv.last.io.en := Bool(true)
-    (syncv.init zip syncv.tail).foreach { case (sink, source) =>
-      sink.io.d := source.io.q
-      sink.io.en := Bool(true)
-    }
-    syncv.head.io.q
-  }
-}
-
 class AsyncValidSync(sync: Int, desc: String) extends Module {
   val io = new Bundle {
     val in = Bool(INPUT)
     val out = Bool(OUTPUT)
   }
-  io.out := UIntSyncChain(io.in.asUInt, sync, desc)(0)
+  io.out := AsyncResetSynchronizerShiftReg(io.in, sync, Some(desc))
 }

 class AsyncQueueSource[T <: Data](gen: T, depth: Int, sync: Int, safe: Boolean = true, narrowData: Boolean = false) extends Module {
@@ -55,7 +40,7 @@ class AsyncQueueSource[T <: Data](gen: T, depth: Int, sync: Int, safe: Boolean =
   val sink_ready = Wire(init = Bool(true))
   val mem = Reg(Vec(depth, gen)) // This does NOT need to be reset at all.
   val widx = GrayCounter(bits+1, io.enq.fire(), !sink_ready, "widx_bin")
-  val ridx = UIntSyncChain(io.ridx, sync, "ridx_gray")
+  val ridx = AsyncResetSynchronizerShiftReg(io.ridx, sync, Some("ridx_gray"))
   val ready = sink_ready && widx =/= (ridx ^ UInt(depth | depth >> 1))

   val index = if (depth == 1) UInt(0) else io.widx(bits-1, 0) ^ (io.widx(bits, bits) << (bits-1))
@@ -112,7 +97,7 @@ class AsyncQueueSink[T <: Data](gen: T, depth: Int, sync: Int, safe: Boolean = t

   val source_ready = Wire(init = Bool(true))
   val ridx = GrayCounter(bits+1, io.deq.fire(), !source_ready, "ridx_bin")
-  val widx = UIntSyncChain(io.widx, sync, "widx_gray")
+  val widx = AsyncResetSynchronizerShiftReg(io.widx, sync, Some("widx_gray"))
   val valid = source_ready && ridx =/= widx

   // The mux is safe because timing analysis ensures ridx has reached the register
|
|||||||
// be considered unless the asynchronously reset deq valid register is set.
|
// be considered unless the asynchronously reset deq valid register is set.
|
||||||
// It is possible that bits latches when the source domain is reset / has power cut
|
// It is possible that bits latches when the source domain is reset / has power cut
|
||||||
// This is safe, because isolation gates brought mem low before the zeroed widx reached us
|
// This is safe, because isolation gates brought mem low before the zeroed widx reached us
|
||||||
io.deq.bits := RegEnable(io.mem(if(narrowData) UInt(0) else index), valid)
|
val deq_bits_nxt = Mux(valid, io.mem(if(narrowData) UInt(0) else index), io.deq.bits)
|
||||||
|
io.deq.bits := SynchronizerShiftReg(deq_bits_nxt, sync = 1, name = Some("deq_bits_reg"))
|
||||||
|
|
||||||
val valid_reg = AsyncResetReg(valid.asUInt, "valid_reg")(0)
|
val valid_reg = AsyncResetReg(valid.asUInt, "valid_reg")(0)
|
||||||
io.deq.valid := valid_reg && source_ready
|
io.deq.valid := valid_reg && source_ready
|
||||||
|
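
For reference, the replacement above keeps the same register behavior: the RegEnable's enable becomes an explicit recirculating mux, and the flop itself becomes a one-stage SynchronizerShiftReg with a suggested name. A sketch of the two equivalent forms, reusing the identifiers from this hunk:

    // Old: enable-gated register; io.deq.bits holds its value while valid is low.
    io.deq.bits := RegEnable(io.mem(if(narrowData) UInt(0) else index), valid)

    // New: the same hold behavior, written as a recirculating mux feeding a plain,
    // named one-stage register (SynchronizerShiftReg with sync = 1).
    val deq_bits_nxt = Mux(valid, io.mem(if(narrowData) UInt(0) else index), io.deq.bits)
    io.deq.bits := SynchronizerShiftReg(deq_bits_nxt, sync = 1, name = Some("deq_bits_reg"))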
@@ -63,6 +63,9 @@ class AsyncResetRegVec(val w: Int, val init: BigInt) extends Module {
   }

   io.q := q.asUInt
+
+  override def desiredName = s"AsyncResetRegVec_w${w}_i${init}"
+
 }

 object AsyncResetReg {
@@ -103,3 +106,4 @@ object AsyncResetReg {
   def apply(updateData: UInt): UInt = apply(updateData, resetData=BigInt(0), enable=Bool(true))
   def apply(updateData: UInt, name:String): UInt = apply(updateData, resetData=BigInt(0), enable=Bool(true), Some(name))
 }
@@ -99,7 +99,7 @@ object LevelSyncCrossing {
     val out = Bool(OUTPUT)
   }

-  io.out := ShiftRegister(io.in, sync)
+  io.out := SynchronizerShiftReg(io.in, sync)
 }

 class SynchronizerFrontend(_clock: Clock) extends Module(Some(_clock)) {
@@ -15,12 +15,7 @@ class ResetCatchAndSync (sync: Int = 3) extends Module {
     val sync_reset = Bool(OUTPUT)
   }

-  val reset_n_catch_reg = Module (new AsyncResetRegVec(sync, 0))
-
-  reset_n_catch_reg.io.en := Bool(true)
-  reset_n_catch_reg.io.d := Cat(Bool(true), reset_n_catch_reg.io.q >> 1)
-
-  io.sync_reset := ~reset_n_catch_reg.io.q(0)
+  io.sync_reset := ~AsyncResetSynchronizerShiftReg(Bool(true), sync)

 }
src/main/scala/util/ShiftReg.scala (new file, 144 lines)
@@ -0,0 +1,144 @@
+// See LICENSE.SiFive for license details.
+
+package freechips.rocketchip.util
+
+import Chisel._
+
+// Similar to the Chisel ShiftRegister but allows the user to suggest a
+// name to the registers that get instantiated, and
+// to provide a reset value.
+object ShiftRegInit {
+  def apply[T <: Data](in: T, n: Int, init: T, name: Option[String] = None): T =
+    (0 until n).foldRight(in) {
+      case (i, next) => {
+        val r = Reg(next, next = next, init = init)
+        name.foreach { na => r.suggestName(s"${na}_${i}") }
+        r
+      }
+    }
+}
+
+/** These wrap behavioral
+  * shift registers into specific modules to allow for
+  * backend flows to replace or constrain
+  * them properly when used for CDC synchronization,
+  * rather than buffering.
+  *
+  * The different types vary in their reset behavior:
+  * AsyncResetShiftReg -- This is identical to the AsyncResetSynchronizerShiftReg,
+  *   it is just named differently to distinguish its use case.
+  *   This is an async ShiftRegister meant for timing,
+  *   not for synchronization.
+  * AsyncResetSynchronizerShiftReg -- asynchronously reset to specific value.
+  * SyncResetSynchronizerShiftReg -- reset to specific value.
+  * SynchronizerShiftReg -- no reset, pipeline only.
+  */
+
+abstract class AbstractPipelineReg(w: Int = 1) extends Module {
+  val io = new Bundle {
+    val d = UInt(INPUT, width = w)
+    val q = UInt(OUTPUT, width = w)
+  }
+}
+
+object AbstractPipelineReg {
+  def apply [T <: Chisel.Data](gen: => AbstractPipelineReg, in: T, name: Option[String] = None): T = {
+    val chain = Module(gen)
+    name.foreach{ chain.suggestName(_) }
+    chain.io.d := in.asUInt
+    chain.io.q.asTypeOf(in)
+  }
+}
+
+class AsyncResetShiftReg(w: Int = 1, depth: Int = 1, init: Int = 0, name: String = "pipe") extends AbstractPipelineReg(w) {
+  require(depth > 0, "Depth must be greater than 0.")
+
+  override def desiredName = s"AsyncResetShiftReg_w${w}_d${depth}_i${init}"
+
+  val chain = List.tabulate(depth) { i =>
+    Module (new AsyncResetRegVec(w, init)).suggestName(s"${name}_${i}")
+  }
+
+  chain.last.io.d := io.d
+  chain.last.io.en := Bool(true)
+
+  (chain.init zip chain.tail).foreach { case (sink, source) =>
+    sink.io.d := source.io.q
+    sink.io.en := Bool(true)
+  }
+  io.q := chain.head.io.q
+}
+
+object AsyncResetShiftReg {
+  def apply [T <: Chisel.Data](in: T, depth: Int, init: Int = 0, name: Option[String] = None): T =
+    AbstractPipelineReg(new AsyncResetShiftReg(in.getWidth, depth, init), in, name)
+
+  def apply [T <: Chisel.Data](in: T, depth: Int, name: Option[String]): T =
+    apply(in, depth, 0, name)
+
+  def apply [T <: Chisel.Data](in: T, depth: Int, init: T, name: Option[String]): T =
+    apply(in, depth, init.litValue.toInt, name)
+
+  def apply [T <: Chisel.Data](in: T, depth: Int, init: T): T =
+    apply (in, depth, init.litValue.toInt, None)
+}
+
+// Note that it is important to override "name" in order to ensure that the Chisel dedup does
+// not try to merge instances of this with instances of the superclass.
+class AsyncResetSynchronizerShiftReg(w: Int = 1, sync: Int = 3, init: Int = 0) extends AsyncResetShiftReg(w, depth = sync, init, name = "sync") {
+  require(sync > 0, "Sync must be greater than 0.")
+  override def desiredName = s"AsyncResetSynchronizerShiftReg_w${w}_d${sync}_i${init}"
+}
+
+object AsyncResetSynchronizerShiftReg {
+  def apply [T <: Chisel.Data](in: T, depth: Int, init: Int = 0, name: Option[String] = None): T =
+    AbstractPipelineReg(new AsyncResetSynchronizerShiftReg(in.getWidth, depth, init), in, name)
+
+  def apply [T <: Chisel.Data](in: T, depth: Int, name: Option[String]): T =
+    apply(in, depth, 0, name)
+
+  def apply [T <: Chisel.Data](in: T, depth: Int, init: T, name: Option[String]): T =
+    apply(in, depth, init.litValue.toInt, name)
+
+  def apply [T <: Chisel.Data](in: T, depth: Int, init: T): T =
+    apply (in, depth, init.litValue.toInt, None)
+}
+
+class SynchronizerShiftReg(w: Int = 1, sync: Int = 3) extends AbstractPipelineReg(w) {
+  require(sync > 0, "Sync must be greater than 0.")
+
+  override def desiredName = s"SynchronizerShiftReg_w${w}_d${sync}"
+
+  val syncv = List.tabulate(sync) { i =>
+    val r = Reg(UInt(width = w))
+    r.suggestName(s"sync_${i}")
+  }
+
+  syncv.last := io.d
+
+  (syncv.init zip syncv.tail).foreach { case (sink, source) =>
+    sink := source
+  }
+  io.q := syncv.head
+}
+
+object SynchronizerShiftReg {
+  def apply [T <: Chisel.Data](in: T, sync: Int = 3, name: Option[String] = None): T =
+    AbstractPipelineReg(new SynchronizerShiftReg(in.getWidth, sync), in, name)
+}
+
+class SyncResetSynchronizerShiftReg(w: Int = 1, sync: Int = 3, init: Int = 0) extends AbstractPipelineReg(w) {
+  require (sync >= 0, "Sync must be greater than or equal to 0")
+
+  override def desiredName = s"SyncResetSynchronizerShiftReg_w${w}_d${sync}_i${init}"
+
+  io.q := ShiftRegInit(io.d, n = sync, init = init.U, name = Some("sync"))
+}
+
+object SyncResetSynchronizerShiftReg {
+  def apply [T <: Chisel.Data](in: T, sync: Int = 3, init: T, name: Option[String] = None): T =
+    AbstractPipelineReg(new SyncResetSynchronizerShiftReg(in.getWidth, sync, init.litValue.toInt), in, name)
+}
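
To illustrate how the new primitives are meant to be used, a usage sketch follows; CrossingExample and its ports are hypothetical, while SynchronizerShiftReg and AsyncResetSynchronizerShiftReg are the objects defined in ShiftReg.scala above:

    import Chisel._
    import freechips.rocketchip.util._

    // Hypothetical consumer module, shown only to demonstrate the apply signatures.
    class CrossingExample extends Module {
      val io = new Bundle {
        val d     = UInt(INPUT, width = 4)
        val q     = UInt(OUTPUT, width = 4)
        val q_rst = UInt(OUTPUT, width = 4)
      }
      // No-reset, 3-deep synchronizer; the wrapper instance is suggested the name "d_sync".
      io.q := SynchronizerShiftReg(io.d, sync = 3, name = Some("d_sync"))
      // Asynchronously reset 3-deep synchronizer, reset value 0, instance named "d_rst_sync".
      io.q_rst := AsyncResetSynchronizerShiftReg(io.d, depth = 3, init = 0, name = Some("d_rst_sync"))
    }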