// See LICENSE for license details.

package rocketchip
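
// Helper classes and generator objects for the rocket-chip top level: index
// range and resource bookkeeping, global address map construction, config
// string emission, and boot ROM generation.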

import cde.{Parameters, Dump}
import junctions._
import diplomacy._
import uncore.devices._
import rocket._
import coreplex._
import uncore.tilelink2._
import util._

import java.nio.file.{Files, Paths}
import java.nio.{ByteBuffer, ByteOrder}
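
/** Hands out contiguous index ranges to named clients. add() registers a name
  * with a count; rangeMap/range report each name's (start, end) slice; the
  * list is frozen once it has been read. */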
class RangeManager {
  private var finalized = false
  private val l = collection.mutable.ListBuffer[(String, Int)]()
  def add(name: String, element: Int) = { require(!finalized); l += (name -> element) }
  def rangeMap = {
    finalized = true
    (l map {
      var sum = 0
      x => { sum += x._2; (x._1 -> (sum-x._2, sum)) }
    }).toMap
  }
  def range(name: String) = rangeMap(name)
  def print = {
    rangeMap.toSeq.sortBy(_._2).foreach { case (name, (start, end)) =>
      println(s"${name} on int ${start}-${end-1}")
    }
  }
  def sum = {
    finalized = true
    l.map(_._2).sum
  }
}
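
/** Accumulates elements until get is called, after which no further additions
  * are permitted. */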
class ResourceManager[T] {
  private var finalized = false
  private val l = collection.mutable.ArrayBuffer[T]()
  def add(element: T) = { require(!finalized); l += element }
  def add(list: Seq[T]) = { require(!finalized); l ++= list }
  def get: Seq[T] = { finalized = true; l }
}
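
/** A write-once variable: assign() may be called at most once, and get
  * requires that an assignment has already occurred. */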
class GlobalVariable[T] {
  private var assigned = false
  private var variable: T = _
  def assign(value: T) = { require(!assigned); assigned = true; variable = value }
  def get: T = { require(assigned); variable }
}
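
/** Derives the global AddrMap from the TileLink2 periphery managers' address
  * sets, adds the external memory region when memory channels are present,
  * and dumps MEM_BASE. */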
object GenerateGlobalAddrMap {
  def apply(p: Parameters, peripheryManagers: Seq[TLManagerParameters]) = {
    val tl2Devices = peripheryManagers.map { manager =>
      val cacheable = manager.regionType match {
        case RegionType.CACHED  => true
        case RegionType.TRACKED => true
        case _ => false
      }
      val attr = MemAttr(
        (if (manager.supportsGet)     AddrMapProt.R else 0) |
        (if (manager.supportsPutFull) AddrMapProt.W else 0) |
        (if (manager.executable)      AddrMapProt.X else 0), cacheable)
      val multi = manager.address.size > 1
      manager.address.zipWithIndex.map { case (address, i) =>
        require (address.contiguous) // TL1 needs this
        val name = manager.name + (if (multi) ".%d".format(i) else "")
        AddrMapEntry(name, MemRange(address.base, address.mask+1, attr))
      }
    }.flatten
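
    // Managers that share a name get a numeric suffix so AddrMap keys stay unique.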
    val uniquelyNamedTL2Devices =
      tl2Devices.groupBy(_.name).values.map(_.zipWithIndex.map {
        case (e, i) => if (i == 0) e else e.copy(name = e.name + "_" + i)
      }).flatten.toList

    val memBase = 0x80000000L
    val memSize = p(ExtMemSize)
    Dump("MEM_BASE", memBase)

    val tl2 = AddrMapEntry("TL2", new AddrMap(uniquelyNamedTL2Devices, collapse = true))
    val mem = AddrMapEntry("mem", MemRange(memBase, memSize, MemAttr(AddrMapProt.RWX, true)))
    AddrMap((tl2 +: (p(NMemoryChannels) > 0).option(mem).toSeq):_*)
  }
}
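
/** Emits the NUL-terminated config string: PLIC and RTC addresses, the memory
  * region (when present), per-core parameters, and the device entries
  * contributed by the periphery managers. */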
object GenerateConfigString {
  def apply(p: Parameters, peripheryManagers: Seq[TLManagerParameters]) = {
    val c = CoreplexParameters()(p)
    val addrMap = p(GlobalAddrMap)
    val plicAddr = addrMap("TL2:plic").start
    val clint = CoreplexLocalInterrupterConfig()
    val xLen = p(XLen)
    val res = new StringBuilder
    res append  "plic {\n"
    res append s"  priority 0x${plicAddr.toString(16)};\n"
    res append s"  pending 0x${(plicAddr + PLICConsts.pendingBase).toString(16)};\n"
    res append s"  ndevs ${c.plicKey.nDevices};\n"
    res append  "};\n"
    res append  "rtc {\n"
    res append s"  addr 0x${clint.timeAddress.toString(16)};\n"
    res append  "};\n"
    if (addrMap contains "mem") {
      res append  "ram {\n"
      res append  "  0 {\n"
      res append s"    addr 0x${addrMap("mem").start.toString(16)};\n"
      res append s"    size 0x${addrMap("mem").size.toString(16)};\n"
      res append  "  };\n"
      res append  "};\n"
    }
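
    // One entry per tile: ISA string, CLINT timecmp/ipi addresses, and the
    // per-privilege-mode PLIC context addresses.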
res append "core {\n"
|
|
|
|
for (i <- 0 until c.nTiles) { // TODO heterogeneous tiles
|
|
|
|
val isa = {
|
|
|
|
val m = if (p(MulDivKey).nonEmpty) "m" else ""
|
|
|
|
val a = if (p(UseAtomics)) "a" else ""
|
|
|
|
val f = if (p(FPUKey).nonEmpty) "f" else ""
|
|
|
|
val d = if (p(FPUKey).nonEmpty && p(XLen) > 32) "d" else ""
|
|
|
|
val s = if (c.hasSupervisor) "s" else ""
|
|
|
|
s"rv${p(XLen)}i$m$a$f$d$s"
|
|
|
|
}
|
|
|
|
res append s" $i {\n"
|
|
|
|
res append " 0 {\n"
|
|
|
|
res append s" isa $isa;\n"
|
2016-09-16 23:26:34 +02:00
|
|
|
res append s" timecmp 0x${clint.timecmpAddress(i).toString(16)};\n"
|
|
|
|
res append s" ipi 0x${clint.msipAddress(i).toString(16)};\n"
|
2016-09-11 08:39:29 +02:00
|
|
|
res append s" plic {\n"
|
|
|
|
res append s" m {\n"
|
|
|
|
res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'M')).toString(16)};\n"
|
|
|
|
res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'M')).toString(16)};\n"
|
|
|
|
res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'M')).toString(16)};\n"
|
|
|
|
res append s" };\n"
|
|
|
|
if (c.hasSupervisor) {
|
|
|
|
res append s" s {\n"
|
|
|
|
res append s" ie 0x${(plicAddr + c.plicKey.enableAddr(i, 'S')).toString(16)};\n"
|
|
|
|
res append s" thresh 0x${(plicAddr + c.plicKey.threshAddr(i, 'S')).toString(16)};\n"
|
|
|
|
res append s" claim 0x${(plicAddr + c.plicKey.claimAddr(i, 'S')).toString(16)};\n"
|
|
|
|
res append s" };\n"
|
|
|
|
}
|
|
|
|
res append " };\n"
|
|
|
|
res append " };\n"
|
|
|
|
res append " };\n"
|
|
|
|
}
|
|
|
|
res append "};\n"
|
2016-09-17 02:27:49 +02:00
|
|
|
peripheryManagers.foreach { manager => res append manager.dts }
|
2016-09-11 08:39:29 +02:00
|
|
|
res append '\u0000'
|
|
|
|
res.toString
|
|
|
|
}
|
|
|
|
}
|
2016-09-15 01:09:59 +02:00
|
|
|
|
|
|
|
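
/** Reads the boot ROM image from BootROMFile, patches in the config string
  * address, and returns the ROM contents with the config string appended. */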
object GenerateBootROM {
  def apply(p: Parameters, address: BigInt) = {
    val romdata = Files.readAllBytes(Paths.get(p(BootROMFile)))
    val rom = ByteBuffer.wrap(romdata)

    rom.order(ByteOrder.LITTLE_ENDIAN)
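
    // The config string lives immediately after the ROM image; its address is
    // patched into the word at ROM offset 12, which the ROM code must leave zero.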
    require(address == address.toInt)
    val configStringAddr = address.toInt + rom.capacity
    require(rom.getInt(12) == 0,
      "Config string address position should not be occupied by code")
    rom.putInt(12, configStringAddr)
    rom.array() ++ (ConfigStringOutput.contents.get.getBytes.toSeq)
  }
}