sergeykhbr |
Entering 'chisel'
Entering 'context-dependent-environments'
Entering 'dramsim2'
Entering 'fpga-zynq'
Entering 'groundtest'
Entering 'hardfloat'
Entering 'junctions'
Entering 'riscv-tools'
Entering 'riscv-tools/riscv-tests'
Entering 'riscv-tools/riscv-tests/env'
Entering 'rocket'
diff --git a/src/main/scala/csr.scala b/src/main/scala/csr.scala
index 62f81ff..ae649b0 100644
--- a/src/main/scala/csr.scala
+++ b/src/main/scala/csr.scala
@@ -155,9 +155,9 @@ class CSRFile(implicit p: Parameters) extends CoreModule()(p)
   val system_insn = io.rw.cmd === CSR.I
   val cpu_ren = io.rw.cmd =/= CSR.N && !system_insn

-  val host_csr_req_valid = Reg(Bool()) // don't reset
+  val host_csr_req_valid = Reg(init=Bool(false)) // don't reset
   val host_csr_req_fire = host_csr_req_valid && !cpu_ren
-  val host_csr_rep_valid = Reg(Bool()) // don't reset
+  val host_csr_rep_valid = Reg(init=Bool(false)) // don't reset
   val host_csr_bits = Reg(io.host.csr.req.bits)
   io.host.csr.req.ready := !host_csr_req_valid && !host_csr_rep_valid
   io.host.csr.resp.valid := host_csr_rep_valid
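
The csr.scala hunk above replaces registers declared without a reset value by registers initialized to false. A minimal, self-contained Chisel 2 sketch of that idiom follows; the module and signal names are illustrative and not taken from the patch.

import Chisel._

// Illustrative module, not part of the patch: a flag declared with
// Reg(Bool()) has no reset value and is undefined until first written,
// while Reg(init = Bool(false)) comes out of reset cleared.
class ValidFlag extends Module {
  val io = new Bundle {
    val set   = Bool(INPUT)
    val clear = Bool(INPUT)
    val out   = Bool(OUTPUT)
  }
  // val flag = Reg(Bool())             // no reset: unknown until first write
  val flag = Reg(init = Bool(false))    // reset to false, as the patch does
  when (io.set)   { flag := Bool(true) }
  when (io.clear) { flag := Bool(false) }
  io.out := flag
}
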
diff --git a/src/main/scala/nbdcache.scala b/src/main/scala/nbdcache.scala
index 2d0eee2..a2d8bbe 100644
--- a/src/main/scala/nbdcache.scala
+++ b/src/main/scala/nbdcache.scala
@@ -395,7 +395,8 @@ class MSHRFile(implicit p: Parameters) extends L1HellaCacheModule()(p) {
   }

   // determine if the request is in the memory region or mmio region
-  val cacheable = io.req.bits.addr < UInt(mmioBase)
+  //val cacheable = io.req.bits.addr < UInt(mmioBase)
+  val cacheable = Bool(false)

   val sdq_val = Reg(init=Bits(0, sdqDepth))
   val sdq_alloc_id = PriorityEncoder(~sdq_val(sdqDepth-1,0))
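
The nbdcache.scala hunk stops deciding cacheability by address and pins cacheable to false, so every MSHR request takes the uncacheable/MMIO path. A small illustrative Chisel 2 sketch of the two predicates; the module name, mmioBase parameter, and the 32-bit address width are assumptions of this sketch, not values from nbdcache.scala.

import Chisel._

// Illustrative only: contrasts the address-based predicate that was removed
// with the constant that replaced it.
class CacheablePredicate(mmioBase: BigInt) extends Module {
  val io = new Bundle {
    val addr            = UInt(INPUT, 32)
    val by_address      = Bool(OUTPUT)   // original behaviour
    val always_uncached = Bool(OUTPUT)   // patched behaviour
  }
  io.by_address      := io.addr < UInt(mmioBase)  // logic of the removed line
  io.always_uncached := Bool(false)               // what the patch pins it to
}
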
diff --git a/src/main/scala/rocket.scala b/src/main/scala/rocket.scala
index d965709..dfdf549 100644
--- a/src/main/scala/rocket.scala
+++ b/src/main/scala/rocket.scala
@@ -163,6 +163,10 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p) {
   val wb_reg_wdata = Reg(Bits())
   val wb_reg_rs2 = Reg(Bits())
   val take_pc_wb = Wire(Bool())
+  //SH
+  val reg_ll_wdata_postponed = Reg(Bits())
+  val reg_ll_waddr_postponed = Reg(Bits())
+  val reg_ll_wen_postponed = Reg(init = Bool(false))

   val take_pc_mem_wb = take_pc_wb || take_pc_mem
   val take_pc = take_pc_mem_wb
@@ -410,12 +414,36 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p) {

   val wb_valid = wb_reg_valid && !replay_wb && !csr.io.csr_xcpt
   val wb_wen = wb_valid && wb_ctrl.wxd
-  val rf_wen = wb_wen || ll_wen
-  val rf_waddr = Mux(ll_wen, ll_waddr, wb_waddr)
+
+  //SH
+  val stall_wen = ll_wen && wb_wen// && (wb_waddr === UInt(0x1))
+  when (stall_wen) {
+    reg_ll_wen_postponed := Bool(true)
+    reg_ll_waddr_postponed := wb_waddr
+    reg_ll_wdata_postponed := wb_reg_wdata
+  }
+  when (!wb_wen || (!ll_wen && wb_wen && wb_waddr === reg_ll_waddr_postponed)) {
+    reg_ll_wen_postponed := Bool(false)
+    reg_ll_waddr_postponed := UInt(0)
+    reg_ll_wdata_postponed := UInt(0)
+  }
+  val rf_wen = wb_wen || ll_wen || reg_ll_wen_postponed
+  val rf_waddr = Mux(ll_wen, ll_waddr,
+                 Mux(wb_wen, wb_waddr,
+                 reg_ll_waddr_postponed))
+
   val rf_wdata = Mux(dmem_resp_valid && dmem_resp_xpu, io.dmem.resp.bits.data,
                  Mux(ll_wen, ll_wdata,
                  Mux(wb_ctrl.csr =/= CSR.N, csr.io.rw.rdata,
-                 wb_reg_wdata)))
+                 Mux(wb_wen, wb_reg_wdata,
+                 reg_ll_wdata_postponed))))
+
+  //val rf_wen = wb_wen || ll_wen
+  //val rf_waddr = Mux(ll_wen, ll_waddr, wb_waddr)
+  //val rf_wdata = Mux(dmem_resp_valid && dmem_resp_xpu, io.dmem.resp.bits.data,
+  //               Mux(ll_wen, ll_wdata,
+  //               Mux(wb_ctrl.csr != CSR.N, csr.io.rw.rdata,
+  //               wb_reg_wdata)))
   when (rf_wen) { rf.write(rf_waddr, rf_wdata) }

   // hook up control/status regfile
@@ -484,7 +512,8 @@ class Rocket(implicit p: Parameters) extends CoreModule()(p) {
     id_ctrl.mem && !io.dmem.req.ready ||
     Bool(usingRoCC) && wb_reg_rocc_pending && id_ctrl.rocc && !io.rocc.cmd.ready ||
     id_do_fence ||
-    csr.io.csr_stall
+    csr.io.csr_stall ||
+    stall_wen || reg_ll_wen_postponed //SH
   ctrl_killd := !io.imem.resp.valid || take_pc || ctrl_stalld || csr.io.interrupt

   io.imem.req.valid := take_pc
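
The rocket.scala changes handle a collision on the single register-file write port: when a long-latency writeback (ll_wen) and the WB-stage writeback (wb_wen) fire in the same cycle, the WB value is parked in the *_postponed registers and written later while the front end stalls. Below is a self-contained Chisel 2 sketch of that kind of arbitration; all names, widths, and the exact drain condition are chosen for illustration rather than copied from rocket.scala.

import Chisel._

// One-write-port writeback path with a single-entry "postponed" buffer:
// when both producers want the port in the same cycle, the long-latency
// write wins and the WB-stage write is parked, then drained on a cycle
// when the port is free. io.stall is assumed to hold off the pipeline so
// a second collision cannot overwrite the parked value.
class PostponedWriteback extends Module {
  val io = new Bundle {
    val ll_wen   = Bool(INPUT)
    val ll_waddr = UInt(INPUT, 5)
    val ll_wdata = UInt(INPUT, 64)
    val wb_wen   = Bool(INPUT)
    val wb_waddr = UInt(INPUT, 5)
    val wb_wdata = UInt(INPUT, 64)
    val stall    = Bool(OUTPUT)
    val rf_wen   = Bool(OUTPUT)
    val rf_waddr = UInt(OUTPUT, 5)
    val rf_wdata = UInt(OUTPUT, 64)
  }

  val post_wen   = Reg(init = Bool(false))
  val post_waddr = Reg(UInt(width = 5))
  val post_wdata = Reg(UInt(width = 64))

  val collide = io.ll_wen && io.wb_wen
  when (collide) {                           // park the WB-stage write
    post_wen   := Bool(true)
    post_waddr := io.wb_waddr
    post_wdata := io.wb_wdata
  } .elsewhen (!io.ll_wen && !io.wb_wen) {   // port is free: parked write drains
    post_wen   := Bool(false)
  }

  // Port priority: long-latency first, then WB stage, then the parked write.
  io.rf_wen   := io.ll_wen || io.wb_wen || post_wen
  io.rf_waddr := Mux(io.ll_wen, io.ll_waddr,
                 Mux(io.wb_wen, io.wb_waddr, post_waddr))
  io.rf_wdata := Mux(io.ll_wen, io.ll_wdata,
                 Mux(io.wb_wen, io.wb_wdata, post_wdata))
  io.stall    := collide || post_wen
}
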
diff --git a/src/main/scala/tlb.scala b/src/main/scala/tlb.scala
index 55e7359..5ff3fda 100644
--- a/src/main/scala/tlb.scala
+++ b/src/main/scala/tlb.scala
@@ -148,14 +148,10 @@ class TLB(implicit p: Parameters) extends TLBModule()(p) {
     plru.access(OHToUInt(tag_cam.io.hits))
   }

-  val paddr = Cat(io.resp.ppn, UInt(0, pgIdxBits))
-  val addr_ok = addrMap.isValid(paddr)
-  val addr_prot = addrMap.getProt(paddr)
-
   io.req.ready := state === s_ready
-  io.resp.xcpt_ld := !addr_ok || !addr_prot.r || bad_va || tlb_hit && !(r_array & tag_cam.io.hits).orR
-  io.resp.xcpt_st := !addr_ok || !addr_prot.w || bad_va || tlb_hit && !(w_array & tag_cam.io.hits).orR
-  io.resp.xcpt_if := !addr_ok || !addr_prot.x || bad_va || tlb_hit && !(x_array & tag_cam.io.hits).orR
+  io.resp.xcpt_ld := bad_va || tlb_hit && !(r_array & tag_cam.io.hits).orR
+  io.resp.xcpt_st := bad_va || tlb_hit && !(w_array & tag_cam.io.hits).orR
+  io.resp.xcpt_if := bad_va || tlb_hit && !(x_array & tag_cam.io.hits).orR
   io.resp.miss := tlb_miss
   io.resp.ppn := Mux(vm_enabled, Mux1H(tag_cam.io.hits, tag_ram), io.req.bits.vpn(ppnBits-1,0))
   io.resp.hit_idx := tag_cam.io.hits
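
The tlb.scala hunk drops the address-map validity and protection terms from the exception outputs, leaving only the bad-VA check and the per-entry TLB permission bits. A small combinational Chisel 2 sketch of the simplified load-fault condition, with hypothetical port names and widths:

import Chisel._

// Illustrative reduction of the load-fault expression after the patch:
// only a bad virtual address, or a TLB hit without read permission, faults;
// the physical-address map checks are gone.
class LoadFaultCheck(entries: Int) extends Module {
  val io = new Bundle {
    val bad_va  = Bool(INPUT)
    val tlb_hit = Bool(INPUT)
    val r_array = Bits(INPUT, entries)   // per-entry read-permission bits
    val hits    = Bits(INPUT, entries)   // one-hot CAM hit vector
    val xcpt_ld = Bool(OUTPUT)
  }
  io.xcpt_ld := io.bad_va || io.tlb_hit && !(io.r_array & io.hits).orR
}
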
Entering 'torture'
Entering 'uncore'
Entering 'zscale'