Merge branch 'master' of github.com:ucb-bar/reference-chip into dse
Conflicts: src/main/scala/ReferenceChip.scala
		
							
								
								
									
.gitignore (1 change, vendored)
@@ -1,2 +1,3 @@
 target/
 project/target
+*.swp
							
								
								
									
Makefrag (45 changes)
@@ -114,6 +114,7 @@ asm_p_tests = \
 	rv64uf-p-fcmp \
 	rv64uf-p-fcvt \
 	rv64uf-p-fcvt_w \
+	rv64uf-p-fclass \
 	rv64uf-p-fadd \
 	rv64uf-p-fmin \
 	rv64uf-p-fmadd \
@@ -210,6 +211,7 @@ asm_v_tests = \
 	rv64uf-v-fcmp \
 	rv64uf-v-fcvt \
 	rv64uf-v-fcvt_w \
+	rv64uf-v-fclass \
 	rv64uf-v-fadd \
 	rv64uf-v-fmin \
 	rv64uf-v-fmadd \
@@ -222,9 +224,12 @@ vecasm_p_tests = \
 	rv64uv-p-vmsv \
 	rv64uv-p-vmvv \
 	rv64uv-p-vfmvv \
+	rv64uv-p-vfmsv_d \
+	rv64uv-p-vfmsv_s \
 	rv64uv-p-vsetcfg \
 	rv64uv-p-vsetcfgi \
 	rv64uv-p-vsetvl \
+	rv64uv-p-keepcfg \
 	rv64uv-p-movz \
 	rv64uv-p-movn \
 	rv64uv-p-fmovz \
@@ -270,6 +275,7 @@ vecasm_p_tests = \
 	rv64uv-p-amoswap_w \
 	rv64uv-p-imul \
 	rv64uv-p-fma \
+	rv64uv-p-fma_many \
 	rv64ui-p-vec-mul \
 	rv64ui-p-vec-mulw \
 	rv64ui-p-vec-mulh \
@@ -311,15 +317,18 @@ vecasm_p_tests = \
 	rv64uf-p-vec-fcvt_w \
 	rv64uf-p-vec-fcvt \
 	rv64uf-p-vec-fcmp \
-#	rv64sv-p-illegal_tvec_cmd \
-#	rv64sv-p-illegal_tvec_regid \
-#	rv64sv-p-illegal_vt_inst \
-#	rv64sv-p-illegal_vt_regid \
-#	rv64sv-p-ma_utld \
-#	rv64sv-p-ma_utsd \
-#	rv64sv-p-ma_vld \
-#	rv64sv-p-ma_vsd \
-#	rv64sv-p-ma_vt_inst \
+	rv64sv-p-illegal_cfg_nxpr \
+	rv64sv-p-illegal_cfg_nfpr \
+	rv64sv-p-illegal_inst \
+	rv64sv-p-illegal_tvec_regid \
+	rv64sv-p-illegal_vt_inst \
+	rv64sv-p-illegal_vt_regid \
+	rv64sv-p-ma_utld \
+	rv64sv-p-ma_utsd \
+	rv64sv-p-ma_vld \
+	rv64sv-p-ma_vsd \
+	rv64sv-p-ma_vt_inst \
+	rv64sv-p-privileged_inst \
 
 vecasm_v_tests = \
 	rv64uv-v-wakeup \
@@ -328,6 +337,12 @@ vecasm_v_tests = \
 	rv64uv-v-vmsv \
 	rv64uv-v-vmvv \
 	rv64uv-v-vfmvv \
+	rv64uv-v-vfmsv_d \
+	rv64uv-v-vfmsv_s \
+	rv64uv-v-vsetcfg \
+	rv64uv-v-vsetcfgi \
+	rv64uv-v-vsetvl \
+	rv64uv-v-keepcfg \
 	rv64uv-v-movz \
 	rv64uv-v-movn \
 	rv64uv-v-fmovz \
@@ -371,6 +386,7 @@ vecasm_v_tests = \
 	rv64uv-v-amominu_w \
 	rv64uv-v-imul \
 	rv64uv-v-fma \
+	rv64uv-v-fma_many \
 	rv64ui-v-vec-mul \
 	rv64ui-v-vec-mulw \
 	rv64ui-v-vec-mulh \
@@ -426,6 +442,12 @@ vecasm_pt_tests = \
 	rv64uv-pt-vmvv \
 	rv64uv-pt-vmsv \
 	rv64uv-pt-vfmvv \
+	rv64uv-pt-vfmsv_d \
+	rv64uv-pt-vfmsv_s \
+	rv64uv-pt-vsetcfg \
+	rv64uv-pt-vsetcfgi \
+	rv64uv-pt-vsetvl \
+	rv64uv-pt-keepcfg \
 	rv64uv-pt-movz \
 	rv64uv-pt-movn \
 	rv64uv-pt-fmovz \
@@ -463,6 +485,7 @@ vecasm_pt_tests = \
 	rv64uv-pt-amominu_w \
 	rv64uv-pt-imul \
 	rv64uv-pt-fma \
+	rv64uv-pt-fma_many \
 	rv64ui-pt-vec-mul \
 	rv64ui-pt-vec-mulw \
 	rv64ui-pt-vec-mulh \
@@ -514,7 +537,7 @@ bmarks = \
 	qsort.riscv \
 	towers.riscv \
 	vvadd.riscv \
-	dgemm.riscv \
+	mm.riscv \
 	dhrystone.riscv \
 	spmv.riscv \
 	#vec-vvadd.riscv \
@@ -603,5 +626,5 @@ bt_vvadd.riscv\
 disasm := 2>
 which_disasm := $(shell which riscv-dis)
 ifneq ($(which_disasm),)
-	disasm := 3>&1 1>&2 2>&3 | $(which_disasm) >
+	disasm := 3>&1 1>&2 2>&3 | $(which_disasm) --extension=hwacha >
 endif

Submodule chisel updated: c5794df83a...54ad639f11
@@ -125,7 +125,7 @@ int main(int argc, char** argv)
     }
 
     if (log)
-      tile.print(stderr, stderr);
+      tile.print(stderr);
 
     if (vcd)
       tile.dump(vcdfile, trace_count);
@@ -139,7 +139,7 @@ int main(int argc, char** argv)
 
   if (htif->exit_code())
   {
-    fprintf(stderr, "*** FAILED *** (code = %d) after %lld cycles\n", htif->exit_code(), (long long)trace_count);
+    fprintf(stderr, "*** FAILED *** (code = %d, seed %d) after %lld cycles\n", htif->exit_code(), random_seed, (long long)trace_count);
     ret = htif->exit_code();
   }
   else if (trace_count == max_cycles)
@@ -8,12 +8,41 @@
 #include <sstream>
 #include <iterator>
 
-static htif_emulator_t* htif = NULL;
-static unsigned htif_bytes;
-static mm_t* mm = NULL;
-
 extern "C" {
 
+extern int vcs_main(int argc, char** argv);
+
+static htif_emulator_t* htif;
+static unsigned htif_bytes;
+static mm_t* mm;
+static const char* loadmem;
+
+void htif_fini(vc_handle failure)
+{
+  delete htif;
+  htif = NULL;
+  exit(vc_getScalar(failure));
+}
+
+int main(int argc, char** argv)
+{
+  bool dramsim = false;
+
+  for (int i = 1; i < argc; i++)
+  {
+    if (!strcmp(argv[i], "+dramsim"))
+      dramsim = true;
+    else if (!strncmp(argv[i], "+loadmem=", 9))
+      loadmem = argv[i]+9;
+  }
+
+  mm = dramsim ? (mm_t*)(new mm_dramsim2_t) : (mm_t*)(new mm_magic_t);
+  htif = new htif_emulator_t(std::vector<std::string>(argv + 1, argv + argc));
+
+  vcs_main(argc, argv);
+  abort(); // should never get here
+}
+
 void memory_tick(
   vc_handle mem_req_val,
   vc_handle mem_req_rdy,
@@ -62,55 +91,18 @@ void memory_tick(
   );
 }
 
-void htif_init
-(
-  vc_handle htif_width,
-  vc_handle mem_width,
-  vc_handle argv,
-  vc_handle loadmem,
-  vc_handle dramsim
-)
+void htif_init(vc_handle htif_width, vc_handle mem_width)
 {
   int mw = vc_4stVectorRef(mem_width)->d;
-  mm = vc_getScalar(dramsim) ? (mm_t*)(new mm_dramsim2_t) : (mm_t*)(new mm_magic_t);
   assert(mw && (mw & (mw-1)) == 0);
   mm->init(MEM_SIZE, mw/8, LINE_SIZE);
 
+  if (loadmem)
+    load_mem(mm->get_data(), loadmem);
+
   vec32* w = vc_4stVectorRef(htif_width);
   assert(w->d <= 32 && w->d % 8 == 0); // htif_tick assumes data fits in a vec32
   htif_bytes = w->d/8;
-
-  char loadmem_str[1024];
-  vc_VectorToString(loadmem, loadmem_str);
-  if (*loadmem_str)
-    load_mem(mm->get_data(), loadmem_str);
-
-  char argv_str[1024];
-  vc_VectorToString(argv, argv_str);
-  if (!*argv_str)
-  {
-    if (*loadmem_str)
-      strcpy(argv_str, "none");
-    else
-    {
-      fprintf(stderr, "Usage: ./simv [host options] +argv=\"<target program> [target args]\"\n");
-      exit(-1);
-    }
-  }
-
-  std::vector<std::string> args;
-  std::stringstream ss(argv_str);
-  std::istream_iterator<std::string> begin(ss), end;
-  std::copy(begin, end, std::back_inserter<std::vector<std::string>>(args));
-
-  htif = new htif_emulator_t(args);
-}
-
-void htif_fini(vc_handle failure)
-{
-  delete htif;
-  htif = NULL;
-  exit(vc_getScalar(failure));
 }
 
 void htif_tick

Submodule hardfloat updated: d126925915...4a938b1aae
@@ -31,7 +31,8 @@ object BuildSettings extends Build {
   lazy val uncore = Project("uncore", file("uncore"), settings = buildSettings) dependsOn(hardfloat)
   lazy val rocket = Project("rocket", file("rocket"), settings = buildSettings) dependsOn(uncore)
   lazy val hwacha = Project("hwacha", file("hwacha"), settings = buildSettings) dependsOn(uncore, rocket)
-  lazy val referencechip = Project("referencechip", file("."), settings = buildSettings ++ chipSettings) dependsOn(rocket, hwacha)
+  lazy val rekall = Project("rekall", file("rekall"), settings = buildSettings) dependsOn(chisel)
+  lazy val referencechip = Project("referencechip", file("."), settings = buildSettings ++ chipSettings) dependsOn(rocket, hwacha, rekall)
 
   val elaborateTask = InputKey[Unit]("elaborate", "convert chisel components into backend source code")
   val makeTask = InputKey[Unit]("make", "trigger backend-specific makefile command")

Submodule riscv-tests updated: 0c98ef833d...83ed3f519d
Submodule riscv-tools updated: 16908b2a8d...1f62b9b6b6
Submodule rocket updated: 41023dc10f...fd9bea861c
							
							
								
								
									
src/main/scala/Backends.scala (new file, 69 lines)
@@ -0,0 +1,69 @@
+package referencechip
+
+import Chisel._
+import ReferenceChipBackend._
+import scala.collection.mutable.HashMap
+
+object ReferenceChipBackend {
+  val initMap = new HashMap[Module, Bool]()
+}
+
+class ReferenceChipBackend extends VerilogBackend
+{
+  initMap.clear()
+  override def emitPortDef(m: MemAccess, idx: Int) = {
+    val res = new StringBuilder()
+    for (node <- m.mem.inputs) {
+      if(node.name.contains("init"))
+         res.append("    .init(" + node.name + "),\n")
+    }
+    (if (idx == 0) res.toString else "") + super.emitPortDef(m, idx)
+  }
+
+  def addMemPin(c: Module) = {
+    for (mod <- Module.components; node <- mod.nodes) {
+      if (node.isInstanceOf[Mem[ _ ]] && node.component != null && node.asInstanceOf[Mem[_]].seqRead) {
+        connectMemPin(c, node.component, node)
+      }
+    }
+  }
+
+  def connectMemPin(topC: Module, c: Module, p: Node): Unit = {
+    var isNewPin = false
+    val compInitPin = 
+      if (initMap.contains(c)) {
+        initMap(c)
+      } else {
+        isNewPin = true
+        val res = Bool(INPUT)
+        res.isIo = true
+        res
+      }
+
+    p.inputs += compInitPin
+
+    if (isNewPin) {
+      compInitPin.setName("init")
+      c.io.asInstanceOf[Bundle] += compInitPin
+      compInitPin.component = c
+      initMap += (c -> compInitPin)
+      connectMemPin(topC, c.parent, compInitPin)
+    }
+  }
+
+  def addTopLevelPin(c: Module) = {
+    val init = Bool(INPUT)
+    init.isIo = true
+    init.setName("init")
+    init.component = c
+    c.io.asInstanceOf[Bundle] += init
+    initMap += (c -> init)
+  }
+
+  transforms += ((c: Module) => addTopLevelPin(c))
+  transforms += ((c: Module) => addMemPin(c))
+  transforms += ((c: Module) => collectNodesIntoComp(initializeDFS))
+}
+
+class Fame1ReferenceChipBackend extends ReferenceChipBackend with Fame1Transform
+
@@ -4,122 +4,89 @@ import Chisel._
 import uncore._
 import rocket._
 import rocket.Util._
-import ReferenceChipBackend._
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.mutable.HashMap
 
-object DummyTopLevelConstants {
+object DesignSpaceConstants {
   val NTILES = 1
   val NBANKS = 1
   val HTIF_WIDTH = 16
   val ENABLE_SHARING = true
   val ENABLE_CLEAN_EXCLUSIVE = true
+  val USE_DRAMSIDE_LLC = true
   val HAS_FPU = true
   val NL2_REL_XACTS = 1
   val NL2_ACQ_XACTS = 7
   val NMSHRS = 2
 }
 
-import DummyTopLevelConstants._
-
-object ReferenceChipBackend {
-  val initMap = new HashMap[Module, Bool]()
+object MemoryConstants {
+  val CACHE_DATA_SIZE_IN_BYTES = 1 << 6 //TODO: How configurable is this really?
+  val OFFSET_BITS = log2Up(CACHE_DATA_SIZE_IN_BYTES)
+  val PADDR_BITS = 32
+  val VADDR_BITS = 43
+  val PGIDX_BITS = 13
+  val ASID_BITS = 7
+  val PERM_BITS = 6
+  val MEM_TAG_BITS = 5
+  val MEM_DATA_BITS = 128
+  val MEM_ADDR_BITS = PADDR_BITS - OFFSET_BITS
+  val MEM_DATA_BEATS = 4
 }
 
-class ReferenceChipBackend extends VerilogBackend
-{
-  initMap.clear()
-  override def emitPortDef(m: MemAccess, idx: Int) = {
-    val res = new StringBuilder()
-    for (node <- m.mem.inputs) {
-      if(node.name.contains("init"))
-         res.append("    .init(" + node.name + "),\n")
-    }
-    (if (idx == 0) res.toString else "") + super.emitPortDef(m, idx)
+object TileLinkSizeConstants {
+  val WRITE_MASK_BITS = 6
+  val SUBWORD_ADDR_BITS = 3
+  val ATOMIC_OP_BITS = 4
 }
 
-  def addMemPin(c: Module) = {
-    for (node <- Module.nodes) {
-      if (node.isInstanceOf[Mem[ _ ]] && node.component != null && node.asInstanceOf[Mem[_]].seqRead) {
-        connectMemPin(c, node.component, node)
-      }
-    }
-  }
-
-  def connectMemPin(topC: Module, c: Module, p: Node): Unit = {
-    var isNewPin = false
-    val compInitPin = 
-      if (initMap.contains(c)) {
-        initMap(c)
-      } else {
-        isNewPin = true
-        val res = Bool(INPUT)
-        res.isIo = true
-        res
-      }
-
-    p.inputs += compInitPin
-
-    if (isNewPin) {
-      compInitPin.setName("init")
-      c.io.asInstanceOf[Bundle] += compInitPin
-      compInitPin.component = c
-      initMap += (c -> compInitPin)
-      connectMemPin(topC, c.parent, compInitPin)
-    }
-  }
-
-  def addTopLevelPin(c: Module) = {
-    val init = Bool(INPUT)
-    init.isIo = true
-    init.setName("init")
-    init.component = c
-    c.io.asInstanceOf[Bundle] += init
-    initMap += (c -> init)
-  }
-
-  transforms += ((c: Module) => addTopLevelPin(c))
-  transforms += ((c: Module) => addMemPin(c))
-  transforms += ((c: Module) => collectNodesIntoComp(initializeDFS))
-}
+import DesignSpaceConstants._
+import MemoryConstants._
+import TileLinkSizeConstants._
 
 class OuterMemorySystem(htif_width: Int)(implicit conf: UncoreConfiguration) extends Module
 {
-  implicit val (tl, ln, l2) = (conf.tl, conf.tl.ln, conf.l2)
+  implicit val (tl, ln, l2, mif) = (conf.tl, conf.tl.ln, conf.l2, conf.mif)
   val io = new Bundle {
     val tiles = Vec.fill(conf.nTiles){new TileLinkIO}.flip
     val htif = (new TileLinkIO).flip
     val incoherent = Vec.fill(ln.nClients){Bool()}.asInput
-    val mem = new ioMem
-    val mem_backup = new ioMemSerialized(htif_width)
+    val mem = new MemIO
+    val mem_backup = new MemSerializedIO(htif_width)
    val mem_backup_en = Bool(INPUT)
   }
 
+  val refill_cycles = tl.dataBits/mif.dataBits
+  val (llc, masterEndpoints) = if(conf.useDRAMSideLLC) {
     val llc_tag_leaf = Mem(Bits(width = 152), 512, seqRead = true)
     val llc_data_leaf = Mem(Bits(width = 64), 4096, seqRead = true)
-  val llc = Module(new DRAMSideLLC(sets=512, ways=8, outstanding=16, tagLeaf=llc_tag_leaf, dataLeaf=llc_data_leaf))
-  //val llc = Module(new DRAMSideLLCNull(NL2_REL_XACTS+NL2_ACQ_XACTS, REFILL_CYCLES))
-  val mem_serdes = Module(new MemSerdes(htif_width))
-  val masterEndpoints = (0 until ln.nMasters).map(i => Module(new L2CoherenceAgent(i)))
+    val llc = Module(new DRAMSideLLC(sets=512, ways=8, outstanding=16, 
+      refill_cycles=refill_cycles, tagLeaf=llc_tag_leaf, dataLeaf=llc_data_leaf))
+    val mes = (0 until ln.nMasters).map(i => Module(new L2CoherenceAgent(i)))
+    (llc, mes)
+  } else {
+    val llc = Module(new DRAMSideLLCNull(16, refill_cycles))
+    val mes = (0 until ln.nMasters).map(i => Module(new L2HellaCache(i)))
+    (llc, mes)
+  }
+
   val net = Module(new ReferenceChipCrossbarNetwork)
   net.io.clients zip (io.tiles :+ io.htif) map { case (net, end) => net <> end }
-  net.io.masters zip (masterEndpoints.map(_.io.client)) map { case (net, end) => net <> end }
+  net.io.masters zip (masterEndpoints.map(_.io.inner)) map { case (net, end) => net <> end }
   masterEndpoints.map{ _.io.incoherent zip io.incoherent map { case (m, c) => m := c } }
 
   val conv = Module(new MemIOUncachedTileLinkIOConverter(2))
   if(ln.nMasters > 1) {
     val arb = Module(new UncachedTileLinkIOArbiterThatAppendsArbiterId(ln.nMasters))
-    arb.io.in zip masterEndpoints.map(_.io.master) map { case (arb, cache) => arb <> cache }
+    arb.io.in zip masterEndpoints.map(_.io.outer) map { case (arb, cache) => arb <> cache }
     conv.io.uncached <> arb.io.out
   } else {
-    conv.io.uncached <> masterEndpoints.head.io.master
+    conv.io.uncached <> masterEndpoints.head.io.outer
   }
   llc.io.cpu.req_cmd <> Queue(conv.io.mem.req_cmd)
-  llc.io.cpu.req_data <> Queue(conv.io.mem.req_data, REFILL_CYCLES)
+  llc.io.cpu.req_data <> Queue(conv.io.mem.req_data, refill_cycles)
   conv.io.mem.resp <> llc.io.cpu.resp
 
   // mux between main and backup memory ports
+  val mem_serdes = Module(new MemSerdes(htif_width))
   val mem_cmdq = Module(new Queue(new MemReqCmd, 2))
   mem_cmdq.io.enq <> llc.io.mem.req_cmd
   mem_cmdq.io.deq.ready := Mux(io.mem_backup_en, mem_serdes.io.wide.req_cmd.ready, io.mem.req_cmd.ready)
@@ -128,7 +95,7 @@ class OuterMemorySystem(htif_width: Int)(implicit conf: UncoreConfiguration) ext
   mem_serdes.io.wide.req_cmd.valid := mem_cmdq.io.deq.valid && io.mem_backup_en
   mem_serdes.io.wide.req_cmd.bits := mem_cmdq.io.deq.bits
 
-  val mem_dataq = Module(new Queue(new MemData, REFILL_CYCLES))
+  val mem_dataq = Module(new Queue(new MemData, refill_cycles))
   mem_dataq.io.enq <> llc.io.mem.req_data
   mem_dataq.io.deq.ready := Mux(io.mem_backup_en, mem_serdes.io.wide.req_data.ready, io.mem.req_data.ready)
   io.mem.req_data.valid := mem_dataq.io.deq.valid && !io.mem_backup_en
@@ -143,21 +110,21 @@ class OuterMemorySystem(htif_width: Int)(implicit conf: UncoreConfiguration) ext
   io.mem_backup <> mem_serdes.io.narrow
 }
 
-case class UncoreConfiguration(l2: L2CoherenceAgentConfiguration, tl: TileLinkConfiguration, nTiles: Int, nBanks: Int, bankIdLsb: Int, nSCR: Int)
+case class UncoreConfiguration(l2: L2CacheConfig, tl: TileLinkConfiguration, mif: MemoryIFConfiguration, nTiles: Int, nBanks: Int, bankIdLsb: Int, nSCR: Int, offsetBits: Int, useDRAMSideLLC: Boolean)
 
 class Uncore(htif_width: Int)(implicit conf: UncoreConfiguration) extends Module
 {
-  implicit val tl = conf.tl
+  implicit val (tl, mif) = (conf.tl, conf.mif)
   val io = new Bundle {
     val host = new HostIO(htif_width)
-    val mem = new ioMem
+    val mem = new MemIO
     val tiles = Vec.fill(conf.nTiles){new TileLinkIO}.flip
     val htif = Vec.fill(conf.nTiles){new HTIFIO(conf.nTiles)}.flip
     val incoherent = Vec.fill(conf.nTiles){Bool()}.asInput
-    val mem_backup = new ioMemSerialized(htif_width)
+    val mem_backup = new MemSerializedIO(htif_width)
     val mem_backup_en = Bool(INPUT)
   }
-  val htif = Module(new HTIF(htif_width, CSRs.reset, conf.nSCR))
+  val htif = Module(new HTIF(htif_width, CSRs.reset, conf.nSCR, conf.offsetBits))
   val outmemsys = Module(new OuterMemorySystem(htif_width))
   val incoherentWithHtif = (io.incoherent :+ Bool(true).asInput)
   outmemsys.io.incoherent := incoherentWithHtif
@@ -167,21 +134,15 @@ class Uncore(htif_width: Int)(implicit conf: UncoreConfiguration) extends Module
 
   // Add networking headers and endpoint queues
   def convertAddrToBank(addr: Bits): UInt = {
-    require(conf.bankIdLsb + log2Up(conf.nBanks) < MEM_ADDR_BITS, {println("Invalid bits for bank multiplexing.")})
+    require(conf.bankIdLsb + log2Up(conf.nBanks) < conf.mif.addrBits, {println("Invalid bits for bank multiplexing.")})
     addr(conf.bankIdLsb + log2Up(conf.nBanks) - 1, conf.bankIdLsb)
   }
 
   (outmemsys.io.tiles :+ outmemsys.io.htif).zip(io.tiles :+ htif.io.mem).zipWithIndex.map { 
     case ((outer, client), i) => 
-      outer.acquire <> TileLinkHeaderAppender(client.acquire, i, conf.nBanks, convertAddrToBank _)
-      outer.release <> TileLinkHeaderAppender(client.release, i, conf.nBanks, convertAddrToBank _)
-      val grant_ack_q = Queue(client.grant_ack)
-      outer.grant_ack.valid := grant_ack_q.valid
-      outer.grant_ack.bits := grant_ack_q.bits
-      outer.grant_ack.bits.header.src := UInt(i)
-      grant_ack_q.ready := outer.grant_ack.ready
-
+      outer.acquire <> Queue(TileLinkHeaderOverwriter(client.acquire, i, conf.nBanks, convertAddrToBank _))
+      outer.release <> Queue(TileLinkHeaderOverwriter(client.release, i, conf.nBanks, convertAddrToBank _))
+      outer.finish <> Queue(TileLinkHeaderOverwriter(client.finish, i, true))
       client.grant <> Queue(outer.grant, 1, pipe = true)
      client.probe <> Queue(outer.probe)
   }
@@ -216,12 +177,12 @@ class Uncore(htif_width: Int)(implicit conf: UncoreConfiguration) extends Module
   io.host.debug_stats_pcr := htif.io.host.debug_stats_pcr
 }
 
-class TopIO(htifWidth: Int) extends Bundle  {
+class TopIO(htifWidth: Int)(implicit conf: MemoryIFConfiguration) extends Bundle  {
   val host    = new HostIO(htifWidth)
-  val mem     = new ioMem
+  val mem     = new MemIO
 }
 
-class VLSITopIO(htifWidth: Int) extends TopIO(htifWidth) {
+class VLSITopIO(htifWidth: Int)(implicit conf: MemoryIFConfiguration) extends TopIO(htifWidth)(conf) {
   val mem_backup_en = Bool(INPUT)
   val in_mem_ready = Bool(OUTPUT)
   val in_mem_valid = Bool(INPUT)
@@ -231,38 +192,48 @@ class VLSITopIO(htifWidth: Int) extends TopIO(htifWidth) {
 
 
 class MemDessert extends Module {
+  implicit val mif = MemoryIFConfiguration(MEM_ADDR_BITS, MEM_DATA_BITS, MEM_TAG_BITS, MEM_DATA_BEATS)
   val io = new MemDesserIO(HTIF_WIDTH)
   val x = Module(new MemDesser(HTIF_WIDTH))
   io.narrow <> x.io.narrow
   io.wide <> x.io.wide
 }
 
 
 class Top extends Module {
+  val dir = new FullRepresentation(NTILES+1)
   val co = if(ENABLE_SHARING) {
-              if(ENABLE_CLEAN_EXCLUSIVE) new MESICoherence
-              else new MSICoherence
+              if(ENABLE_CLEAN_EXCLUSIVE) new MESICoherence(dir)
+              else new MSICoherence(dir)
             } else {
-              if(ENABLE_CLEAN_EXCLUSIVE) new MEICoherence
-              else new MICoherence
+              if(ENABLE_CLEAN_EXCLUSIVE) new MEICoherence(dir)
+              else new MICoherence(dir)
             }
 
   implicit val ln = LogicalNetworkConfiguration(log2Up(NTILES)+1, NBANKS, NTILES+1)
-  implicit val tl = TileLinkConfiguration(co, ln, log2Up(NL2_REL_XACTS+NL2_ACQ_XACTS), 2*log2Up(NMSHRS*NTILES+1), MEM_DATA_BITS)
-  implicit val l2 = L2CoherenceAgentConfiguration(tl, NL2_REL_XACTS, NL2_ACQ_XACTS)
-  implicit val uc = UncoreConfiguration(l2, tl, NTILES, NBANKS, bankIdLsb = 5, nSCR = 64)
+  implicit val as = AddressSpaceConfiguration(PADDR_BITS, VADDR_BITS, PGIDX_BITS, ASID_BITS, PERM_BITS)
+  implicit val tl = TileLinkConfiguration(co = co, ln = ln, 
+                                          addrBits = as.paddrBits-OFFSET_BITS, 
+                                          clientXactIdBits = log2Up(NL2_REL_XACTS+NL2_ACQ_XACTS), 
+                                          masterXactIdBits = 2*log2Up(NMSHRS*NTILES+1), 
+                                          dataBits = CACHE_DATA_SIZE_IN_BYTES*8, 
+                                          writeMaskBits = WRITE_MASK_BITS, 
+                                          wordAddrBits = SUBWORD_ADDR_BITS, 
+                                          atomicOpBits = ATOMIC_OP_BITS)
+  implicit val l2 = L2CacheConfig(512, 8, 1, 1, NL2_REL_XACTS, NL2_ACQ_XACTS, tl, as)
+  implicit val mif = MemoryIFConfiguration(MEM_ADDR_BITS, MEM_DATA_BITS, MEM_TAG_BITS, MEM_DATA_BEATS)
+  implicit val uc = UncoreConfiguration(l2, tl, mif, NTILES, NBANKS, bankIdLsb = 5, nSCR = 64, offsetBits = OFFSET_BITS, useDRAMSideLLC = USE_DRAMSIDE_LLC)
 
-  val isize = RangeParam("i",7,7,9)
-  val ic = ICacheConfig(math.pow(2, isize.getValue).toInt, 2, ntlb = 8, nbtb = 38)
-  val dsize = RangeParam("d",7,7,9)
-  val dc = DCacheConfig(math.pow(2, dsize.getValue).toInt, 4, ntlb = 8, nmshr = NMSHRS, nrpq = 16, nsdq = 17, states = co.nClientStates)
-  //val dc = DCacheConfig(128, 4, ntlb = 8, nmshr = NMSHRS, nrpq = 16, nsdq = 17, states = co.nClientStates)
-  val vic = ICacheConfig(128, 1)
-  val hc = hwacha.HwachaConfiguration(vic, dc, 8, 256, ndtlb = 8, nptlb = 2)
-  val rc = RocketConfiguration(tl, ic, dc,
-                               fpu = HAS_FPU
-                               //,rocc = (c: RocketConfiguration) => (new hwacha.Hwacha(hc, c))
+  val ic = ICacheConfig(sets = 128, assoc = 2, ntlb = 8, tl = tl, as = as, btb = BTBConfig(as, 64, 2))
+  val dc = DCacheConfig(sets = 128, ways = 4, 
+                        tl = tl, as = as,
+                        ntlb = 8, nmshr = NMSHRS, nrpq = 16, nsdq = 17, 
+                        reqtagbits = -1, databits = -1)
+  val vic = ICacheConfig(sets = 128, assoc = 1, tl = tl, as = as, btb = BTBConfig(as, 8))
+  val hc = hwacha.HwachaConfiguration(as, vic, dc, 8, 256, ndtlb = 8, nptlb = 2)
+  val fpu = if (HAS_FPU) Some(FPUConfig(sfmaLatency = 2, dfmaLatency = 3)) else None
+  val rc = RocketConfiguration(tl, as, ic, dc, fpu
+  //                             rocc = (c: RocketConfiguration) => (new hwacha.Hwacha(hc, c))
                               )
 
   val io = new VLSITopIO(HTIF_WIDTH)
@@ -278,11 +249,12 @@ class Top extends Module {
 
     resetSigs(i) := hl.reset
     val tile = tileList(i)
 
     tile.io.tilelink <> tl
     il := hl.reset
+    tile.io.host.id := UInt(i)
     tile.io.host.reset := Reg(next=Reg(next=hl.reset))
     tile.io.host.pcr_req <> Queue(hl.pcr_req, 1)
-    tile.io.host.id := i
     hl.pcr_rep <> Queue(tile.io.host.pcr_rep, 1)
     hl.ipi_req <> Queue(tile.io.host.ipi_req, 1)
     tile.io.host.ipi_rep <> Queue(hl.ipi_rep, 1)
@@ -1,51 +1,47 @@
 package referencechip
 
 import Chisel._
-import Node._
 import uncore._
 import rocket._
+import DRAMModel._
+import DRAMModel.MemModelConstants._
 
-class FPGAOuterMemorySystem(htif_width: Int)(implicit conf: UncoreConfiguration) extends Module
-{
-  implicit val (tl, ln, l2) = (conf.tl, conf.tl.ln, conf.l2)
+class FPGAOuterMemorySystem(htif_width: Int)(implicit conf: FPGAUncoreConfiguration) 
+  extends Module {
+  implicit val (tl, ln, l2, mif) = (conf.tl, conf.tl.ln, conf.l2, conf.mif)
   val io = new Bundle {
     val tiles = Vec.fill(conf.nTiles){new TileLinkIO}.flip
     val htif = (new TileLinkIO).flip
     val incoherent = Vec.fill(ln.nClients){Bool()}.asInput
-    val mem = new ioMem
+    val mem = new MemIO
   }
 
-  val masterEndpoints = (0 until ln.nMasters).map(i => Module(new L2CoherenceAgent(i)))
-
+  val master = Module(new L2CoherenceAgent(0))
   val net = Module(new ReferenceChipCrossbarNetwork)
   net.io.clients zip (io.tiles :+ io.htif) map { case (net, end) => net <> end }
-  net.io.masters zip (masterEndpoints.map(_.io.client)) map { case (net, end) => net <> end }
-  masterEndpoints.map{ _.io.incoherent zip io.incoherent map { case (m, c) => m := c } }
+  net.io.masters.head <> master.io.inner
+  master.io.incoherent zip io.incoherent map { case (m, c) => m := c }
 
   val conv = Module(new MemIOUncachedTileLinkIOConverter(2))
-  if(ln.nMasters > 1) {
-    val arb = Module(new UncachedTileLinkIOArbiterThatAppendsArbiterId(ln.nMasters))
-    arb.io.in zip masterEndpoints.map(_.io.master) map { case (arb, cache) => arb <> cache }
-    conv.io.uncached <> arb.io.out
-  } else {
-    conv.io.uncached <> masterEndpoints.head.io.master
-  }
-  io.mem.req_cmd <> Queue(conv.io.mem.req_cmd)
-  io.mem.req_data <> Queue(conv.io.mem.req_data, REFILL_CYCLES)
+  conv.io.uncached <> master.io.outer
+  io.mem.req_cmd <> Queue(conv.io.mem.req_cmd, 2)
+  io.mem.req_data <> Queue(conv.io.mem.req_data, tl.dataBits/mif.dataBits)
   conv.io.mem.resp <> Queue(io.mem.resp)
 }
 
-class FPGAUncore(htif_width: Int)(implicit conf: UncoreConfiguration) extends Module
-{
-  implicit val (tl, ln) = (conf.tl, conf.tl.ln)
+case class FPGAUncoreConfiguration(l2: L2CoherenceAgentConfiguration, tl: TileLinkConfiguration, mif: MemoryIFConfiguration, nTiles: Int, nSCR: Int, offsetBits: Int)
+
+class FPGAUncore(htif_width: Int)(implicit conf: FPGAUncoreConfiguration) 
+  extends Module {
+  implicit val (tl, ln, mif) = (conf.tl, conf.tl.ln, conf.mif)
   val io = new Bundle {
     val host = new HostIO(htif_width)
-    val mem = new ioMem
+    val mem = new MemIO
     val tiles = Vec.fill(conf.nTiles){new TileLinkIO}.flip
     val htif = Vec.fill(conf.nTiles){new HTIFIO(conf.nTiles)}.flip
     val incoherent = Vec.fill(conf.nTiles){Bool()}.asInput
   }
-  val htif = Module(new HTIF(htif_width, CSRs.reset, conf.nSCR))
+  val htif = Module(new HTIF(htif_width, CSRs.reset, conf.nSCR, conf.offsetBits))
   val outmemsys = Module(new FPGAOuterMemorySystem(htif_width))
   val incoherentWithHtif = (io.incoherent :+ Bool(true).asInput)
   outmemsys.io.incoherent := incoherentWithHtif
@@ -53,22 +49,11 @@ class FPGAUncore(htif_width: Int)(implicit conf: UncoreConfiguration) extends Mo
   outmemsys.io.mem <> io.mem
 
   // Add networking headers and endpoint queues
-  def convertAddrToBank(addr: Bits): UInt = {
-    require(conf.bankIdLsb + log2Up(conf.nBanks) < MEM_ADDR_BITS, {println("Invalid bits for bank multiplexing.")})
-    addr(conf.bankIdLsb + log2Up(conf.nBanks) - 1, conf.bankIdLsb)
-  }
-
   (outmemsys.io.tiles :+ outmemsys.io.htif).zip(io.tiles :+ htif.io.mem).zipWithIndex.map { 
     case ((outer, client), i) => 
-      outer.acquire <> TileLinkHeaderAppender(client.acquire, i, conf.nBanks, convertAddrToBank _)
-      outer.release <> TileLinkHeaderAppender(client.release, i, conf.nBanks, convertAddrToBank _)
-      val grant_ack_q = Queue(client.grant_ack)
-      outer.grant_ack.valid := grant_ack_q.valid
-      outer.grant_ack.bits := grant_ack_q.bits
-      outer.grant_ack.bits.header.src := UInt(i)
-      grant_ack_q.ready := outer.grant_ack.ready
-
+      outer.acquire <> Queue(TileLinkHeaderOverwriter(client.acquire, i, false))
+      outer.release <> Queue(TileLinkHeaderOverwriter(client.release, i, false))
+      outer.finish <> Queue(TileLinkHeaderOverwriter(client.finish, i, true))
       client.grant <> Queue(outer.grant, 1, pipe = true)
       client.probe <> Queue(outer.probe)
   }
@@ -77,24 +62,37 @@ class FPGAUncore(htif_width: Int)(implicit conf: UncoreConfiguration) extends Mo
   htif.io.host.in <> io.host.in
 }

-class FPGATopIO(htifWidth: Int) extends TopIO(htifWidth)
+import MemoryConstants._
+import TileLinkSizeConstants._
+
+import MemoryConstants._
+
+class FPGATopIO(htifWidth: Int)(implicit conf: MemoryIFConfiguration) extends TopIO(htifWidth)(conf)
+
 class FPGATop extends Module {
-  val htif_width = 16
-  val co = new MESICoherence
   val ntiles = 1
-  val nbanks = 1
   val nmshrs = 2
-  implicit val ln = LogicalNetworkConfiguration(log2Up(ntiles)+1, nbanks, ntiles+1)
-  implicit val tl = TileLinkConfiguration(co, ln, log2Up(1+8), 2*log2Up(nmshrs*ntiles+1), MEM_DATA_BITS)
-  implicit val l2 = L2CoherenceAgentConfiguration(tl, 1, 8)
-  implicit val uc = UncoreConfiguration(l2, tl, ntiles, nbanks, bankIdLsb = 5, nSCR = 64)
+  val htif_width = 16
  
-  val ic = ICacheConfig(64, 1, ntlb = 4, nbtb = 4)
-  val dc = DCacheConfig(64, 1, ntlb = 4, nmshr = 2, nrpq = 16, nsdq = 17, states = co.nClientStates)
-  val rc = RocketConfiguration(tl, ic, dc,
-                               fastMulDiv = false,
-                               fpu = false)
+  val co = new MESICoherence(new FullRepresentation(ntiles+1))
+  implicit val ln = LogicalNetworkConfiguration(log2Up(ntiles)+1, 1, ntiles+1)
+  implicit val as = AddressSpaceConfiguration(PADDR_BITS, VADDR_BITS, PGIDX_BITS, ASID_BITS, PERM_BITS)
+  implicit val tl = TileLinkConfiguration(co = co, ln = ln,
+                                          addrBits = as.paddrBits-OFFSET_BITS,
+                                          clientXactIdBits = log2Up(1+8),
+                                          masterXactIdBits = 2*log2Up(2*1+1),
+                                          dataBits = CACHE_DATA_SIZE_IN_BYTES*8,
+                                          writeMaskBits = WRITE_MASK_BITS,
+                                          wordAddrBits = SUBWORD_ADDR_BITS,
+                                          atomicOpBits = ATOMIC_OP_BITS)
+  implicit val l2 = L2CoherenceAgentConfiguration(tl, 1, 8)
+  implicit val mif = MemoryIFConfiguration(MEM_ADDR_BITS, MEM_DATA_BITS, MEM_TAG_BITS, 4)
+  implicit val uc = FPGAUncoreConfiguration(l2, tl, mif, ntiles, nSCR = 64, offsetBits = OFFSET_BITS)
+
+  val ic = ICacheConfig(64, 1, ntlb = 4, tl = tl, as = as, btb = BTBConfig(as, 8, 2))
+  val dc = DCacheConfig(64, 1, ntlb = 4, nmshr = 2, nrpq = 16, nsdq = 17, tl = tl, as = as, reqtagbits = -1, databits = -1)
+  val rc = RocketConfiguration(tl, as, ic, dc, fpu = None,
+                               fastMulDiv = false)
+
   val io = new FPGATopIO(htif_width)
 
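Unpacking the derived widths in the new TileLinkConfiguration above, for this single-tile FPGA target: clientXactIdBits = log2Up(1+8) = 4 bits and masterXactIdBits = 2*log2Up(2*1+1) = 2*2 = 4 bits, while the payload width that used to be the hard-coded MEM_DATA_BITS is now expressed as CACHE_DATA_SIZE_IN_BYTES*8.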
@@ -120,8 +118,8 @@ class FPGATop extends Module {
     tile.io.host.ipi_rep <> Queue(hl.ipi_rep)
   }
 
-  io.host <> uncore.io.host
-  io.mem <> uncore.io.mem
+  uncore.io.host <> io.host
+  uncore.io.mem <> io.mem
 }

 abstract class AXISlave extends Module {
@@ -175,7 +173,7 @@ class Slave extends AXISlave

   // write cr1 -> mem.resp (nonblocking)
   val in_count = Reg(init=UInt(0, log2Up(memw/dw)))
-  val rf_count = Reg(init=UInt(0, log2Up(REFILL_CYCLES)))
+  val rf_count = Reg(init=UInt(0, log2Up(CACHE_DATA_SIZE_IN_BYTES*8/memw)))
   require(memw % dw == 0 && isPow2(memw/dw))
   val in_reg = Reg(top.io.mem.resp.bits.data)
   top.io.mem.resp.bits.data := Cat(io.in.bits, in_reg(in_reg.getWidth-1,dw))
 
@@ -5,115 +5,43 @@ import uncore._
 import scala.reflect._
 import scala.reflect.runtime.universe._

-object TileLinkHeaderAppender {
-  def apply[T <: ClientSourcedMessage with HasPhysicalAddress, U <: ClientSourcedMessage with HasTileLinkData](in: PairedDataIO[LogicalNetworkIO[T],LogicalNetworkIO[U]], clientId: Int, nBanks: Int, addrConvert: Bits => UInt)(implicit conf: TileLinkConfiguration) = {
-    val shim = Module(new TileLinkHeaderAppender(in.meta.bits.payload, in.data.bits.payload, clientId, nBanks, addrConvert))
-    shim.io.in <> in
-    shim.io.out
+object TileLinkHeaderOverwriter {
+  def apply[T <: ClientSourcedMessage](in: DecoupledIO[LogicalNetworkIO[T]], clientId: Int, passThrough: Boolean)(implicit conf: TileLinkConfiguration): DecoupledIO[LogicalNetworkIO[T]] = {
+    val out = in.clone.asDirectionless
+    out.bits.payload := in.bits.payload
+    out.bits.header.src := UInt(clientId)
+    out.bits.header.dst := (if(passThrough) in.bits.header.dst else UInt(0))
+    out.valid := in.valid
+    in.ready := out.ready
+    out
   }
-  def apply[T <: ClientSourcedMessage with HasPhysicalAddress](in: DecoupledIO[LogicalNetworkIO[T]], clientId: Int, nBanks: Int, addrConvert: Bits => UInt)(implicit conf: TileLinkConfiguration) = {
-    val shim = Module(new TileLinkHeaderAppender(in.bits.payload.clone, new AcquireData, clientId, nBanks, addrConvert))
-    shim.io.in.meta <> in
-    shim.io.out.meta
+  def apply[T <: ClientSourcedMessage with HasPhysicalAddress](in: DecoupledIO[LogicalNetworkIO[T]], clientId: Int, nBanks: Int, addrConvert: UInt => UInt)(implicit conf: TileLinkConfiguration): DecoupledIO[LogicalNetworkIO[T]] = {
+    val out: DecoupledIO[LogicalNetworkIO[T]] = apply(in, clientId, false)
+    out.bits.header.dst := (if(nBanks > 1) addrConvert(in.bits.payload.addr) else UInt(0))
+    out
   }
 }

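The two overloads above split the old appender's job: the first simply stamps the client's source id into the header (optionally preserving the destination already carried by the message, as the finish channel does), while the second routes by physical address when there is more than one bank. A minimal usage sketch, assuming the Chisel 2 and uncore types in this repository; ClientHookup, clientId and addrToBank are illustrative names, not part of this diff:

    import Chisel._
    import uncore._

    object ClientHookup {
      // Wire one client's TileLink channels out through the header overwriter.
      def apply(outer: TileLinkIO, client: TileLinkIO, clientId: Int, nBanks: Int,
                addrToBank: UInt => UInt)(implicit conf: TileLinkConfiguration) {
        // Client-sourced channels get this client's id stamped into header.src;
        // acquire/release are routed to a bank by address, while finish keeps
        // the destination of the transaction it completes (passThrough = true).
        outer.acquire <> Queue(TileLinkHeaderOverwriter(client.acquire, clientId, nBanks, addrToBank))
        outer.release <> Queue(TileLinkHeaderOverwriter(client.release, clientId, nBanks, addrToBank))
        outer.finish <> Queue(TileLinkHeaderOverwriter(client.finish, clientId, true))
        // Master-sourced channels flow back to the client unchanged.
        client.grant <> Queue(outer.grant, 1, pipe = true)
        client.probe <> Queue(outer.probe)
      }
    }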
-class TileLinkHeaderAppender[T <: ClientSourcedMessage with HasPhysicalAddress, U <: ClientSourcedMessage with HasTileLinkData](mType: T, dType: U, clientId: Int, nBanks: Int, addrConvert: Bits => UInt)(implicit conf: TileLinkConfiguration) extends Module {
-  implicit val ln = conf.ln
-  val io = new Bundle {
-    val in = new PairedDataIO(new LogicalNetworkIO(mType), new LogicalNetworkIO(dType)).flip
-    val out = new PairedDataIO(new LogicalNetworkIO(mType), new LogicalNetworkIO(dType))
-  }
-
-  val meta_q = Queue(io.in.meta)
-  val data_q = Queue(io.in.data)
-  if(nBanks == 1) {
-    io.out.meta.bits.payload := meta_q.bits.payload
-    io.out.meta.bits.header.src := UInt(clientId)
-    io.out.meta.bits.header.dst := UInt(0)
-    io.out.meta.valid := meta_q.valid
-    meta_q.ready := io.out.meta.ready
-    io.out.data.bits.payload := data_q.bits.payload
-    io.out.data.bits.header.src := UInt(clientId)
-    io.out.data.bits.header.dst := UInt(0)
-    io.out.data.valid := data_q.valid
-    data_q.ready := io.out.data.ready
-  } else {
-    val meta_has_data = conf.co.messageHasData(meta_q.bits.payload)
-    val addr_q = Module(new Queue(io.in.meta.bits.payload.addr.clone, 2, pipe = true, flow = true))
-    val data_cnt = Reg(init=UInt(0, width = log2Up(REFILL_CYCLES)))
-    val data_cnt_up = data_cnt + UInt(1)
-
-    io.out.meta.bits.payload := meta_q.bits.payload
-    io.out.meta.bits.header.src := UInt(clientId)
-    io.out.meta.bits.header.dst := addrConvert(meta_q.bits.payload.addr)
-    io.out.data.bits.payload := meta_q.bits.payload
-    io.out.data.bits.header.src := UInt(clientId)
-    io.out.data.bits.header.dst := addrConvert(addr_q.io.deq.bits)
-    addr_q.io.enq.bits := meta_q.bits.payload.addr
-
-    io.out.meta.valid := meta_q.valid && addr_q.io.enq.ready
-    meta_q.ready := io.out.meta.ready && addr_q.io.enq.ready
-    io.out.data.valid := data_q.valid && addr_q.io.deq.valid
-    data_q.ready := io.out.data.ready && addr_q.io.deq.valid
-    addr_q.io.enq.valid := meta_q.valid && io.out.meta.ready && meta_has_data
-    addr_q.io.deq.ready := Bool(false)
-
-    when(data_q.valid && data_q.ready) {
-      data_cnt := data_cnt_up
-      when(data_cnt_up === UInt(0)) {
-        addr_q.io.deq.ready := Bool(true)
-      }
-    }
-  }
-}
-
-//Adapter betweewn an UncachedTileLinkIO and a mem controller MemIO
-class MemIOUncachedTileLinkIOConverter(qDepth: Int)(implicit conf: TileLinkConfiguration) extends Module {
-  val io = new Bundle {
-    val uncached = new UncachedTileLinkIO().flip
-    val mem = new ioMem
-  }
-  val mem_cmd_q = Module(new Queue(new MemReqCmd, qDepth))
-  val mem_data_q = Module(new Queue(new MemData, qDepth))
-  mem_cmd_q.io.enq.valid := io.uncached.acquire.meta.valid
-  io.uncached.acquire.meta.ready := mem_cmd_q.io.enq.ready
-  mem_cmd_q.io.enq.bits.rw := conf.co.needsOuterWrite(io.uncached.acquire.meta.bits.payload.a_type, UInt(0))
-  mem_cmd_q.io.enq.bits.tag := io.uncached.acquire.meta.bits.payload.client_xact_id
-  mem_cmd_q.io.enq.bits.addr := io.uncached.acquire.meta.bits.payload.addr
-  mem_data_q.io.enq.valid := io.uncached.acquire.data.valid
-  io.uncached.acquire.data.ready := mem_data_q.io.enq.ready
-  mem_data_q.io.enq.bits.data := io.uncached.acquire.data.bits.payload.data
-  io.uncached.grant.valid := io.mem.resp.valid
-  io.mem.resp.ready := io.uncached.grant.ready
-  io.uncached.grant.bits.payload.data := io.mem.resp.bits.data
-  io.uncached.grant.bits.payload.client_xact_id := io.mem.resp.bits.tag
-  io.uncached.grant.bits.payload.master_xact_id := UInt(0) // DNC
-  io.uncached.grant.bits.payload.g_type := UInt(0) // DNC
-  io.mem.req_cmd <> mem_cmd_q.io.deq
-  io.mem.req_data <> mem_data_q.io.deq
-}
-
-class ReferenceChipCrossbarNetwork(implicit conf: UncoreConfiguration) extends LogicalNetwork[TileLinkIO]()(conf.tl.ln) {
-  implicit val (tl, ln, co) = (conf.tl, conf.tl.ln, conf.tl.co)
+class ReferenceChipCrossbarNetwork(implicit conf: TileLinkConfiguration)
+  extends LogicalNetwork[TileLinkIO]()(conf.ln) {
+  implicit val (ln, co) = (conf.ln, conf.co)
   val io = new Bundle {
     val clients = Vec.fill(ln.nClients){(new TileLinkIO).flip}
     val masters = Vec.fill(ln.nMasters){new TileLinkIO}
   }

   implicit val pconf = new PhysicalNetworkConfiguration(ln.nEndpoints, ln.idBits) // Same config for all networks

   // Actually instantiate the particular networks required for TileLink
-  val acqNet = Module(new PairedCrossbar(new Acquire, new AcquireData, REFILL_CYCLES, (acq: PhysicalNetworkIO[Acquire]) => co.messageHasData(acq.payload)))
-  val relNet = Module(new PairedCrossbar(new Release, new ReleaseData, REFILL_CYCLES, (rel: PhysicalNetworkIO[Release]) => co.messageHasData(rel.payload)))
-  val probeNet = Module(new BasicCrossbar(new Probe))
-  val grantNet = Module(new BasicCrossbar(new Grant))
-  val ackNet = Module(new BasicCrossbar(new GrantAck))
+  val acqNet = Module(new BasicCrossbar(new Acquire))
+  val relNet = Module(new BasicCrossbar(new Release))
+  val prbNet = Module(new BasicCrossbar(new Probe))
+  val gntNet = Module(new BasicCrossbar(new Grant))
+  val ackNet = Module(new BasicCrossbar(new Finish))

   // Aliases for the various network IO bundle types
   type FBCIO[T <: Data] = DecoupledIO[PhysicalNetworkIO[T]]
   type FLNIO[T <: Data] = DecoupledIO[LogicalNetworkIO[T]]
-  type PBCIO[M <: Data, D <: Data] = PairedDataIO[PhysicalNetworkIO[M], PhysicalNetworkIO[D]]
-  type PLNIO[M <: Data, D <: Data] = PairedDataIO[LogicalNetworkIO[M], LogicalNetworkIO[D]]
   type FromCrossbar[T <: Data] = FBCIO[T] => FLNIO[T]
   type ToCrossbar[T <: Data] = FLNIO[T] => FBCIO[T]

@@ -179,7 +107,7 @@ class ReferenceChipCrossbarNetwork(implicit conf: UncoreConfiguration) extends L
     else doFIFOOutputHookup(physIn, physOut, logIO, outShim)
   }

-  //Hookup all instances of a particular subbundle of 
+  //Hookup all instances of a particular subbundle of TileLink
   def doFIFOHookups[T <: Data: TypeTag](physIO: BasicCrossbarIO[T], getLogIO: TileLinkIO => FLNIO[T]) = {
     typeTag[T].tpe match{
       case t if t <:< typeTag[ClientSourcedMessage].tpe => {
@@ -194,33 +122,9 @@ class ReferenceChipCrossbarNetwork(implicit conf: UncoreConfiguration) extends L
     }
   }

-  def doPairedDataHookup[T <: Data, R <: Data](isEndpointSourceOfMessage: Boolean, physIn: PBCIO[T,R], physOut: PBCIO[T,R], logIO: PLNIO[T,R], inShim: ToCrossbar[T], outShim: FromCrossbar[T], inShimD: ToCrossbar[R], outShimD: FromCrossbar[R]) = {
-    if(isEndpointSourceOfMessage) {
-      doFIFOInputHookup[T](physIn.meta, physOut.meta, logIO.meta, inShim)
-      doFIFOInputHookup[R](physIn.data, physOut.data, logIO.data, inShimD)
-    } else {
-      doFIFOOutputHookup[T](physIn.meta, physOut.meta, logIO.meta, outShim)
-      doFIFOOutputHookup[R](physIn.data, physOut.data, logIO.data, outShimD)
-    }
-  }
-
-  def doPairedDataHookups[T <: Data: TypeTag, R <: Data](physIO: PairedCrossbarIO[T,R], getLogIO: TileLinkIO => PLNIO[T,R]) = {
-    typeTag[T].tpe match{
-      case t if t <:< typeTag[ClientSourcedMessage].tpe => {
-        io.masters.zipWithIndex.map{ case (i, id) => doPairedDataHookup[T,R](false, physIO.in(id), physIO.out(id), getLogIO(i), ClientToCrossbarShim, CrossbarToMasterShim, ClientToCrossbarShim, CrossbarToMasterShim) }
-        io.clients.zipWithIndex.map{ case (i, id) => doPairedDataHookup[T,R](true, physIO.in(id+ln.nMasters), physIO.out(id+ln.nMasters), getLogIO(i), ClientToCrossbarShim, CrossbarToMasterShim, ClientToCrossbarShim, CrossbarToMasterShim) }
-      }
-      case t if t <:< typeTag[MasterSourcedMessage].tpe => {
-        io.masters.zipWithIndex.map{ case (i, id) => doPairedDataHookup[T,R](true, physIO.in(id), physIO.out(id), getLogIO(i), MasterToCrossbarShim, CrossbarToClientShim, MasterToCrossbarShim, CrossbarToClientShim) }
-        io.clients.zipWithIndex.map{ case (i, id) => doPairedDataHookup[T,R](false, physIO.in(id+ln.nMasters), physIO.out(id+ln.nMasters), getLogIO(i), MasterToCrossbarShim, CrossbarToClientShim, MasterToCrossbarShim, CrossbarToClientShim) }
-      }
-      case _ => require(false, "Unknown message sourcing.")
-    }
-  }
-
-  doPairedDataHookups(acqNet.io, (tl: TileLinkIO) => tl.acquire)
-  doPairedDataHookups(relNet.io, (tl: TileLinkIO) => tl.release)
-  doFIFOHookups(probeNet.io, (tl: TileLinkIO) => tl.probe)
-  doFIFOHookups(grantNet.io, (tl: TileLinkIO) => tl.grant)
-  doFIFOHookups(ackNet.io, (tl: TileLinkIO) => tl.grant_ack)
+  doFIFOHookups(acqNet.io, (tl: TileLinkIO) => tl.acquire)
+  doFIFOHookups(relNet.io, (tl: TileLinkIO) => tl.release)
+  doFIFOHookups(prbNet.io, (tl: TileLinkIO) => tl.probe)
+  doFIFOHookups(gntNet.io, (tl: TileLinkIO) => tl.grant)
+  doFIFOHookups(ackNet.io, (tl: TileLinkIO) => tl.finish)
 }
 
Submodule uncore updated: 803308e917...ebe0f493a6