// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//
// Register Top module auto-generated by `reggen`
<%
  from reggen import gen_rtl
  from reggen.access import HwAccess, SwRdAccess, SwWrAccess
  from reggen.lib import get_basename
  from reggen.register import Register
  from reggen.multi_register import MultiRegister

  num_wins = len(rb.windows)
  num_wins_width = ((num_wins+1).bit_length()) - 1
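  # num_wins_width + 1 bits are enough for reg_steer (declared below) to
  # address every window plus the register destination.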
  num_reg_dsp = 1 if rb.all_regs else 0
  num_dsp = num_wins + num_reg_dsp
  regs_flat = rb.flat_regs
  max_regs_char = len("{}".format(len(regs_flat) - 1))
  addr_width = rb.get_addr_width()

  lblock = block.name.lower()
  ublock = lblock.upper()

  u_mod_base = mod_base.upper()

  reg2hw_t = gen_rtl.get_iface_tx_type(block, if_name, False)
  hw2reg_t = gen_rtl.get_iface_tx_type(block, if_name, True)

  # Calculate whether we're going to need an AW parameter. We use it if there
  # are any registers (obviously), if there is more than one window, or if the
  # single window doesn't start at zero and span the whole 1 << addr_width
  # address space (see the "addr_checks" calculation below for where that
  # comes from).
  needs_aw = (bool(regs_flat) or
              num_wins > 1 or
              rb.windows and (
                rb.windows[0].offset != 0 or
                rb.windows[0].size_in_bytes != (1 << addr_width)))
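  # Exactly one of the common response integrity generator (u_rsp_intg_gen
  # below) and the register adapter / per-window generators creates the data
  # integrity bits: if any window passes data integrity through untouched,
  # generation is pushed out to the adapter and to the windows that need it.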
  common_data_intg_gen = 0 if rb.has_data_intg_passthru else 1
  adapt_data_intg_gen = 1 if rb.has_data_intg_passthru else 0
  assert common_data_intg_gen != adapt_data_intg_gen
%>
`include "prim_assert.sv"

module ${mod_name} (
  input clk_i,
  input rst_ni,

  input  tlul_pkg::tl_h2d_t tl_i,
  output tlul_pkg::tl_d2h_t tl_o,
% if num_wins != 0:

  // Output port for window
  output tlul_pkg::tl_h2d_t tl_win_o [${num_wins}],
  input  tlul_pkg::tl_d2h_t tl_win_i [${num_wins}],

% endif
  // To HW
% if rb.get_n_bits(["q","qe","re"]):
  output ${lblock}_reg_pkg::${reg2hw_t} reg2hw, // Write
% endif
% if rb.get_n_bits(["d","de"]):
  input  ${lblock}_reg_pkg::${hw2reg_t} hw2reg, // Read
% endif

  // Integrity check errors
  output logic intg_err_o,

  // Config
  input devmode_i // If 1, explicit error return for unmapped register access
);

  import ${lblock}_reg_pkg::* ;

% if needs_aw:
  localparam int AW = ${addr_width};
% endif
% if rb.all_regs:
  localparam int DW = ${block.regwidth};
  localparam int DBW = DW/8; // Byte Width

  // register signals
  logic           reg_we;
  logic           reg_re;
  logic [AW-1:0]  reg_addr;
  logic [DW-1:0]  reg_wdata;
  logic [DBW-1:0] reg_be;
  logic [DW-1:0]  reg_rdata;
  logic           reg_error;

  logic addrmiss, wr_err;

  logic [DW-1:0] reg_rdata_next;

  tlul_pkg::tl_h2d_t tl_reg_h2d;
  tlul_pkg::tl_d2h_t tl_reg_d2h;
% endif

  // incoming payload check
  logic intg_err;
  tlul_cmd_intg_chk u_chk (
    .tl_i,
    .err_o(intg_err)
  );

  logic intg_err_q;
  always_ff @(posedge clk_i or negedge rst_ni) begin
    if (!rst_ni) begin
      intg_err_q <= '0;
    end else if (intg_err) begin
      intg_err_q <= 1'b1;
    end
  end

  // integrity error output is permanent and should be used for alert generation
  // register errors are transactional
  assign intg_err_o = intg_err_q | intg_err;

  // outgoing integrity generation
  tlul_pkg::tl_d2h_t tl_o_pre;
  tlul_rsp_intg_gen #(
    .EnableRspIntgGen(1),
    .EnableDataIntgGen(${common_data_intg_gen})
  ) u_rsp_intg_gen (
    .tl_i(tl_o_pre),
    .tl_o
  );

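  ## With a single destination the TL-UL port is wired straight through;
  ## with several, a 1:N socket plus steering logic is generated below.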
% if num_dsp == 1:
  ## Either no windows (and just registers) or no registers and only
  ## one window.
  % if num_wins == 0:
  assign tl_reg_h2d = tl_i;
  assign tl_o_pre   = tl_reg_d2h;
  % else:
  assign tl_win_o[0] = tl_i;
  assign tl_o_pre    = tl_win_i[0];
  % endif
% else:
  tlul_pkg::tl_h2d_t tl_socket_h2d [${num_dsp}];
  tlul_pkg::tl_d2h_t tl_socket_d2h [${num_dsp}];

  logic [${num_wins_width}:0] reg_steer;

  // socket_1n connection
  % if rb.all_regs:
  assign tl_reg_h2d = tl_socket_h2d[${num_wins}];
  assign tl_socket_d2h[${num_wins}] = tl_reg_d2h;

  % endif
  % for i,t in enumerate(rb.windows):
  assign tl_win_o[${i}] = tl_socket_h2d[${i}];
    % if common_data_intg_gen == 0 and rb.windows[i].data_intg_passthru == False:
    ## If there are multiple windows, and not every window has data integrity
    ## passthrough, we must generate data integrity for it here.
  tlul_rsp_intg_gen #(
    .EnableRspIntgGen(0),
    .EnableDataIntgGen(1)
  ) u_win${i}_data_intg_gen (
    .tl_i(tl_win_i[${i}]),
    .tl_o(tl_socket_d2h[${i}])
  );
    % else:
  assign tl_socket_d2h[${i}] = tl_win_i[${i}];
    % endif
  % endfor

  // Create Socket_1n
  tlul_socket_1n #(
    .N (${num_dsp}),
    .HReqPass (1'b1),
    .HRspPass (1'b1),
    .DReqPass ({${num_dsp}{1'b1}}),
    .DRspPass ({${num_dsp}{1'b1}}),
    .HReqDepth (4'h0),
    .HRspDepth (4'h0),
    .DReqDepth ({${num_dsp}{4'h0}}),
    .DRspDepth ({${num_dsp}{4'h0}})
  ) u_socket (
    .clk_i,
    .rst_ni,
    .tl_h_i (tl_i),
    .tl_h_o (tl_o_pre),
    .tl_d_o (tl_socket_h2d),
    .tl_d_i (tl_socket_d2h),
    .dev_select_i (reg_steer)
  );

  // Create steering logic
  always_comb begin
    reg_steer = ${num_dsp-1};       // Default set to register

    // TODO: Could the checks below be written as a unique case () instead?
  % for i,w in enumerate(rb.windows):
<%
    base_addr = w.offset
    limit_addr = w.offset + w.size_in_bytes

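    # Build the address-match expression for this window, dropping comparisons
    # that are trivially true (a window starting at offset 0 or ending at
    # 1 << addr_width).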
    hi_check = 'tl_i.a_address[AW-1:0] < {}'.format(limit_addr)
    addr_checks = []
    if base_addr > 0:
      addr_checks.append('tl_i.a_address[AW-1:0] >= {}'.format(base_addr))
    if limit_addr < 2**addr_width:
      addr_checks.append('tl_i.a_address[AW-1:0] < {}'.format(limit_addr))

    addr_test = ' && '.join(addr_checks)
%>\
  % if addr_test:
    if (${addr_test}) begin
  % endif
      reg_steer = ${i};
  % if addr_test:
    end
  % endif
  % endfor
    if (intg_err) begin
      reg_steer = ${num_dsp-1};
    end
  end
% endif
% if rb.all_regs:

  tlul_adapter_reg #(
    .RegAw(AW),
    .RegDw(DW),
    .EnableDataIntgGen(${adapt_data_intg_gen})
  ) u_reg_if (
    .clk_i,
    .rst_ni,

    .tl_i (tl_reg_h2d),
    .tl_o (tl_reg_d2h),

    .we_o    (reg_we),
    .re_o    (reg_re),
    .addr_o  (reg_addr),
    .wdata_o (reg_wdata),
    .be_o    (reg_be),
    .rdata_i (reg_rdata),
    .error_i (reg_error)
  );

  assign reg_rdata = reg_rdata_next ;
  assign reg_error = (devmode_i & addrmiss) | wr_err | intg_err;

  // Define SW related signals
  // Format: <reg>_<field>_{wd|we|qs}
  //         or <reg>_{wd|we|qs} if field == 1 or 0
% for r in regs_flat:
  % if len(r.fields) == 1:
${sig_gen(r.fields[0], r.name.lower(), r.hwext, r.shadowed)}\
  % else:
    % for f in r.fields:
${sig_gen(f, r.name.lower() + "_" + f.name.lower(), r.hwext, r.shadowed)}\
    % endfor
  % endif
% endfor

  // Register instances
% for r in rb.all_regs:
  ######################## multiregister ###########################
  % if isinstance(r, MultiRegister):
<%
    k = 0
%>
    % for sr in r.regs:
  // Subregister ${k} of Multireg ${r.reg.name.lower()}
  // R[${sr.name.lower()}]: V(${str(sr.hwext)})
      % if len(sr.fields) == 1:
<%
        f = sr.fields[0]
        finst_name = sr.name.lower()
        fsig_name = r.reg.name.lower() + "[%d]" % k
        k = k + 1
%>
${finst_gen(f, finst_name, fsig_name, sr.hwext, sr.regwen, sr.shadowed)}
      % else:
        % for f in sr.fields:
<%
          finst_name = sr.name.lower() + "_" + f.name.lower()
          if r.is_homogeneous():
            fsig_name = r.reg.name.lower() + "[%d]" % k
            k = k + 1
          else:
            fsig_name = r.reg.name.lower() + "[%d]" % k + "." + get_basename(f.name.lower())
%>
  // F[${f.name.lower()}]: ${f.bits.msb}:${f.bits.lsb}
${finst_gen(f, finst_name, fsig_name, sr.hwext, sr.regwen, sr.shadowed)}
        % endfor
<%
        if not r.is_homogeneous():
          k += 1
%>
      % endif
  ## for: mreg_flat
    % endfor
  ######################## register with single field ###########################
  % elif len(r.fields) == 1:
  // R[${r.name.lower()}]: V(${str(r.hwext)})
<%
    f = r.fields[0]
    finst_name = r.name.lower()
    fsig_name = r.name.lower()
%>
${finst_gen(f, finst_name, fsig_name, r.hwext, r.regwen, r.shadowed)}
  ######################## register with multiple fields ###########################
  % else:
  // R[${r.name.lower()}]: V(${str(r.hwext)})
    % for f in r.fields:
<%
      finst_name = r.name.lower() + "_" + f.name.lower()
      fsig_name = r.name.lower() + "." + f.name.lower()
%>
  // F[${f.name.lower()}]: ${f.bits.msb}:${f.bits.lsb}
${finst_gen(f, finst_name, fsig_name, r.hwext, r.regwen, r.shadowed)}
    % endfor
  % endif

  ## for: rb.all_regs
% endfor


  logic [${len(regs_flat)-1}:0] addr_hit;
  always_comb begin
    addr_hit = '0;
% for i,r in enumerate(regs_flat):
    addr_hit[${"{}".format(i).rjust(max_regs_char)}] = (reg_addr == ${ublock}_${r.name.upper()}_OFFSET);
% endfor
  end
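  ## As an illustration, for a hypothetical block `uart` whose first register
  ## is `ctrl`, the loop above expands to lines such as:
  ##   addr_hit[0] = (reg_addr == UART_CTRL_OFFSET);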

  assign addrmiss = (reg_re || reg_we) ? ~|addr_hit : 1'b0 ;

% if regs_flat:
<%
    # We want to signal wr_err for sub-word writes that don't enable every
    # byte lane the register actually uses. That's true if addr_hit[i] is set
    # and a bit is set in *_PERMIT[i] but clear in reg_be.

    wr_err_terms = ['(addr_hit[{idx}] & (|({mod}_PERMIT[{idx}] & ~reg_be)))'
                    .format(idx=str(i).rjust(max_regs_char),
                            mod=u_mod_base)
                    for i in range(len(regs_flat))]
    wr_err_expr = (' |\n' + (' ' * 15)).join(wr_err_terms)
%>\
  // Check sub-word write is permitted
  always_comb begin
    wr_err = (reg_we &
              (${wr_err_expr}));
  end
% else:
  assign wr_err = 1'b0;
% endif\

% for i, r in enumerate(regs_flat):
  % if len(r.fields) == 1:
${we_gen(r.fields[0], r.name.lower(), r.hwext, r.shadowed, i)}\
  % else:
    % for f in r.fields:
${we_gen(f, r.name.lower() + "_" + f.name.lower(), r.hwext, r.shadowed, i)}\
    % endfor
  % endif
% endfor

  // Read data return
  always_comb begin
    reg_rdata_next = '0;
    unique case (1'b1)
% for i, r in enumerate(regs_flat):
  % if len(r.fields) == 1:
      addr_hit[${i}]: begin
${rdata_gen(r.fields[0], r.name.lower())}\
      end

  % else:
      addr_hit[${i}]: begin
    % for f in r.fields:
${rdata_gen(f, r.name.lower() + "_" + f.name.lower())}\
    % endfor
      end

  % endif
% endfor
      default: begin
        reg_rdata_next = '1;
      end
    endcase
  end
% endif

  // Unused signal tieoff
% if rb.all_regs:

  // wdata / byte enable are not always fully used
  // add a blanket unused statement to handle lint waivers
  logic unused_wdata;
  logic unused_be;
  assign unused_wdata = ^reg_wdata;
  assign unused_be = ^reg_be;
% else:
  // devmode_i is not used if there are no registers
  logic unused_devmode;
  assign unused_devmode = ^devmode_i;
% endif
% if rb.all_regs:

  // Assertions for Register Interface
  `ASSERT_PULSE(wePulse, reg_we)
  `ASSERT_PULSE(rePulse, reg_re)

  `ASSERT(reAfterRv, $rose(reg_re || reg_we) |=> tl_o.d_valid)

  `ASSERT(en2addrHit, (reg_we || reg_re) |-> $onehot0(addr_hit))

  // this is formulated as an assumption such that the FPV testbenches do not
  // disprove this property by mistake
  //`ASSUME(reqParity, tl_reg_h2d.a_valid |-> tl_reg_h2d.a_user.chk_en == tlul_pkg::CheckDis)

% endif
endmodule
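## str_bits_sv: render a field's bit range as "msb:lsb" (or just "msb" for a
## single-bit field) for use in part-selects.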
<%def name="str_bits_sv(bits)">\
  % if bits.msb != bits.lsb:
${bits.msb}:${bits.lsb}\
  % else:
${bits.msb}\
  % endif
</%def>\
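## str_arr_sv: render the "[width-1:0] " prefix for a multi-bit signal
## declaration (empty for single-bit fields).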
<%def name="str_arr_sv(bits)">\
  % if bits.msb != bits.lsb:
[${bits.msb-bits.lsb}:0] \
  % endif
</%def>\
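## sig_gen: declare the per-field software interface signals (_qs, _wd/_we,
## _re) that the field's software access type requires.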
<%def name="sig_gen(field, sig_name, hwext, shadowed)">\
  % if field.swaccess.allows_read():
  logic ${str_arr_sv(field.bits)}${sig_name}_qs;
  % endif
  % if field.swaccess.allows_write():
  logic ${str_arr_sv(field.bits)}${sig_name}_wd;
  logic ${sig_name}_we;
  % endif
  % if (field.swaccess.allows_read() and hwext) or shadowed:
  logic ${sig_name}_re;
  % endif
</%def>\
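## finst_gen: instantiate the storage for one field: prim_subreg_ext for hwext
## registers, prim_subreg or prim_subreg_shadowed otherwise, or a plain
## constant assignment for fields that only ever return their reset value.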
<%def name="finst_gen(field, finst_name, fsig_name, hwext, regwen, shadowed)">\
<%
    re_expr = f'{finst_name}_re' if field.swaccess.allows_read() else "1'b0"

    if field.swaccess.allows_write():
      if regwen:
        we_expr = f'{finst_name}_we & {regwen.lower()}_qs'
      else:
        we_expr = f'{finst_name}_we'
      wd_expr = f'{finst_name}_wd'
    else:
      we_expr = "1'b0"
      wd_expr = "'0"

    if field.hwaccess.allows_write():
      de_expr = f'hw2reg.{fsig_name}.de'
      d_expr = f'hw2reg.{fsig_name}.d'
    else:
      de_expr = "1'b0"
      d_expr = "'0"

    qre_expr = f'reg2hw.{fsig_name}.re' if field.hwre or shadowed else ""

    if field.hwaccess.allows_read():
      qe_expr = f'reg2hw.{fsig_name}.qe' if field.hwqe else ''
      q_expr = f'reg2hw.{fsig_name}.q'
    else:
      qe_expr = ''
      q_expr = ''

    qs_expr = f'{finst_name}_qs' if field.swaccess.allows_read() else ''
%>\
  % if hwext:       ## if hwext, instantiate prim_subreg_ext
  prim_subreg_ext #(
    .DW    (${field.bits.width()})
  ) u_${finst_name} (
    .re     (${re_expr}),
    .we     (${we_expr}),
    .wd     (${wd_expr}),
    .d      (${d_expr}),
    .qre    (${qre_expr}),
    .qe     (${qe_expr}),
    .q      (${q_expr}),
    .qs     (${qs_expr})
  );
  % else:
<%
      # This isn't a field in a hwext register. Instantiate prim_subreg,
      # prim_subreg_shadowed or a constant assignment.

      resval_expr = f"{field.bits.width()}'h{field.resval or 0:x}"
      is_const_reg = not (field.hwaccess.allows_read() or
                          field.hwaccess.allows_write() or
                          field.swaccess.allows_write() or
                          field.swaccess.swrd() != SwRdAccess.RD)

      subreg_block = 'prim_subreg' + ('_shadowed' if shadowed else '')
%>\
  % if is_const_reg:
  // constant-only read
  assign ${finst_name}_qs = ${resval_expr};
  % else:
  ${subreg_block} #(
    .DW      (${field.bits.width()}),
    .SWACCESS("${field.swaccess.value[1].name.upper()}"),
    .RESVAL  (${resval_expr})
  ) u_${finst_name} (
    .clk_i   (clk_i),
    .rst_ni  (rst_ni),

    // from register interface
    % if shadowed:
    .re     (${re_expr}),
    % endif
    .we     (${we_expr}),
    .wd     (${wd_expr}),

    // from internal hardware
    .de     (${de_expr}),
    .d      (${d_expr}),

    // to internal hardware
    .qe     (${qe_expr}),
    .q      (${q_expr}),

    // to register interface (read)
    % if not shadowed:
    .qs     (${qs_expr})
    % else:
    .qs     (${qs_expr}),

    // Shadow register error conditions
    .err_update  (reg2hw.${fsig_name}.err_update),
    .err_storage (reg2hw.${fsig_name}.err_storage)
    % endif
  );
  % endif  ## end non-constant prim_subreg
  % endif
</%def>\
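## we_gen: drive the per-field write-enable / read-enable strobes, qualified by
## the address decode and suppressed when the transaction carries an error.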
<%def name="we_gen(field, sig_name, hwext, shadowed, idx)">\
<%
    needs_we = field.swaccess.allows_write()
    needs_re = (field.swaccess.allows_read() and hwext) or shadowed
    space = '\n' if needs_we or needs_re else ''
%>\
${space}\
  % if needs_we:
    % if field.swaccess.swrd() != SwRdAccess.RC:
  assign ${sig_name}_we = addr_hit[${idx}] & reg_we & !reg_error;
  assign ${sig_name}_wd = reg_wdata[${str_bits_sv(field.bits)}];
    % else:
      ## Generate WE based on read request, read should clear
  assign ${sig_name}_we = addr_hit[${idx}] & reg_re & !reg_error;
  assign ${sig_name}_wd = '1;
    % endif
  % endif
  % if needs_re:
  assign ${sig_name}_re = addr_hit[${idx}] & reg_re & !reg_error;
  % endif
</%def>\
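## rdata_gen: mux one field into reg_rdata_next; software-readable fields
## return their _qs value, anything else reads back as zero.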
<%def name="rdata_gen(field, sig_name)">\
  % if field.swaccess.allows_read():
        reg_rdata_next[${str_bits_sv(field.bits)}] = ${sig_name}_qs;
  % else:
        reg_rdata_next[${str_bits_sv(field.bits)}] = '0;
  % endif
</%def>\