1 |
2 |
ZTEX |
//*****************************************************************************
|
2 |
|
|
// (c) Copyright 2009 - 2013 Xilinx, Inc. All rights reserved.
|
3 |
|
|
//
|
4 |
|
|
// This file contains confidential and proprietary information
|
5 |
|
|
// of Xilinx, Inc. and is protected under U.S. and
|
6 |
|
|
// international copyright and other intellectual property
|
7 |
|
|
// laws.
|
8 |
|
|
//
|
9 |
|
|
// DISCLAIMER
|
10 |
|
|
// This disclaimer is not a license and does not grant any
|
11 |
|
|
// rights to the materials distributed herewith. Except as
|
12 |
|
|
// otherwise provided in a valid license issued to you by
|
13 |
|
|
// Xilinx, and to the maximum extent permitted by applicable
|
14 |
|
|
// law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
|
15 |
|
|
// WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
|
16 |
|
|
// AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
|
17 |
|
|
// BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
|
18 |
|
|
// INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
|
19 |
|
|
// (2) Xilinx shall not be liable (whether in contract or tort,
|
20 |
|
|
// including negligence, or under any other theory of
|
21 |
|
|
// liability) for any loss or damage of any kind or nature
|
22 |
|
|
// related to, arising under or in connection with these
|
23 |
|
|
// materials, including for any direct, or any indirect,
|
24 |
|
|
// special, incidental, or consequential loss or damage
|
25 |
|
|
// (including loss of data, profits, goodwill, or any type of
|
26 |
|
|
// loss or damage suffered as a result of any action brought
|
27 |
|
|
// by a third party) even if such damage or loss was
|
28 |
|
|
// reasonably foreseeable or Xilinx had been advised of the
|
29 |
|
|
// possibility of the same.
|
30 |
|
|
//
|
31 |
|
|
// CRITICAL APPLICATIONS
|
32 |
|
|
// Xilinx products are not designed or intended to be fail-
|
33 |
|
|
// safe, or for use in any application requiring fail-safe
|
34 |
|
|
// performance, such as life-support or safety devices or
|
35 |
|
|
// systems, Class III medical devices, nuclear facilities,
|
36 |
|
|
// applications related to the deployment of airbags, or any
|
37 |
|
|
// other applications that could lead to death, personal
|
38 |
|
|
// injury, or severe property or environmental damage
|
39 |
|
|
// (individually and collectively, "Critical
|
40 |
|
|
// Applications"). Customer assumes the sole risk and
|
41 |
|
|
// liability of any use of Xilinx products in Critical
|
42 |
|
|
// Applications, subject only to applicable laws and
|
43 |
|
|
// regulations governing limitations on product liability.
|
44 |
|
|
//
|
45 |
|
|
// THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
|
46 |
|
|
// PART OF THIS FILE AT ALL TIMES.
|
47 |
|
|
//
|
48 |
|
|
//*****************************************************************************
|
49 |
|
|
// ____ ____
|
50 |
|
|
// / /\/ /
|
51 |
|
|
// /___/ \ / Vendor: Xilinx
|
52 |
|
|
// \ \ \/ Version:
|
53 |
|
|
// \ \ Application: MIG
|
54 |
|
|
// / / Filename: ddr_phy_wrcal.v
|
55 |
|
|
// /___/ /\ Date Last Modified: $Date: 2011/06/02 08:35:09 $
|
56 |
|
|
// \ \ / \ Date Created:
|
57 |
|
|
// \___\/\___\
|
58 |
|
|
//
|
59 |
|
|
//Device: 7 Series
|
60 |
|
|
//Design Name: DDR3 SDRAM
|
61 |
|
|
//Purpose:
|
62 |
|
|
// Write calibration logic to align DQS to correct CK edge
|
63 |
|
|
//Reference:
|
64 |
|
|
//Revision History:
|
65 |
|
|
//*****************************************************************************
|
66 |
|
|
|
67 |
|
|
/******************************************************************************
|
68 |
|
|
**$Id: ddr_phy_wrcal.v,v 1.1 2011/06/02 08:35:09 mishra Exp $
|
69 |
|
|
**$Date: 2011/06/02 08:35:09 $
|
70 |
|
|
**$Author:
|
71 |
|
|
**$Revision:
|
72 |
|
|
**$Source:
|
73 |
|
|
******************************************************************************/
|
74 |
|
|
|
75 |
|
|
`timescale 1ps/1ps
|
76 |
|
|
|
77 |
|
|
module mig_7series_v2_3_ddr_phy_wrcal #
|
78 |
|
|
(
|
79 |
|
|
parameter TCQ = 100, // clk->out delay (sim only)
|
80 |
|
|
parameter nCK_PER_CLK = 2, // # of memory clocks per CLK
|
81 |
|
|
parameter CLK_PERIOD = 2500,
|
82 |
|
|
parameter DQ_WIDTH = 64, // # of DQ (data)
|
83 |
|
|
parameter DQS_CNT_WIDTH = 3, // = ceil(log2(DQS_WIDTH))
|
84 |
|
|
parameter DQS_WIDTH = 8, // # of DQS (strobe)
|
85 |
|
|
parameter DRAM_WIDTH = 8, // # of DQ per DQS
|
86 |
|
|
parameter PRE_REV3ES = "OFF", // Delay O/Ps using Phaser_Out fine dly
|
87 |
|
|
parameter SIM_CAL_OPTION = "NONE" // Skip various calibration steps
|
88 |
|
|
)
|
89 |
|
|
(
|
90 |
|
|
input clk,
|
91 |
|
|
input rst,
|
92 |
|
|
// Calibration status, control signals
|
93 |
|
|
input wrcal_start,
|
94 |
|
|
input wrcal_rd_wait,
|
95 |
|
|
input wrcal_sanity_chk,
|
96 |
|
|
input dqsfound_retry_done,
|
97 |
|
|
input phy_rddata_en,
|
98 |
|
|
output dqsfound_retry,
|
99 |
|
|
output wrcal_read_req,
|
100 |
|
|
output reg wrcal_act_req,
|
101 |
|
|
output reg wrcal_done,
|
102 |
|
|
output reg wrcal_pat_err,
|
103 |
|
|
output reg wrcal_prech_req,
|
104 |
|
|
output reg temp_wrcal_done,
|
105 |
|
|
output reg wrcal_sanity_chk_done,
|
106 |
|
|
input prech_done,
|
107 |
|
|
// Captured data in resync clock domain
|
108 |
|
|
input [2*nCK_PER_CLK*DQ_WIDTH-1:0] rd_data,
|
109 |
|
|
// Write level values of Phaser_Out coarse and fine
|
110 |
|
|
// delay taps required to load Phaser_Out register
|
111 |
|
|
input [3*DQS_WIDTH-1:0] wl_po_coarse_cnt,
|
112 |
|
|
input [6*DQS_WIDTH-1:0] wl_po_fine_cnt,
|
113 |
|
|
input wrlvl_byte_done,
|
114 |
|
|
output reg wrlvl_byte_redo,
|
115 |
|
|
output reg early1_data,
|
116 |
|
|
output reg early2_data,
|
117 |
|
|
// DQ IDELAY
|
118 |
|
|
output reg idelay_ld,
|
119 |
|
|
output reg wrcal_pat_resume, // to phy_init for write
|
120 |
|
|
output reg [DQS_CNT_WIDTH:0] po_stg2_wrcal_cnt,
|
121 |
|
|
output phy_if_reset,
|
122 |
|
|
|
123 |
|
|
// Debug Port
|
124 |
|
|
output [6*DQS_WIDTH-1:0] dbg_final_po_fine_tap_cnt,
|
125 |
|
|
output [3*DQS_WIDTH-1:0] dbg_final_po_coarse_tap_cnt,
|
126 |
|
|
output [99:0] dbg_phy_wrcal
|
127 |
|
|
);
|
128 |
|
|
|
129 |
|
|
// Length of calibration sequence (in # of words)
|
130 |
|
|
//localparam CAL_PAT_LEN = 8;
|
131 |
|
|
|
132 |
|
|
// Read data shift register length
|
133 |
|
|
localparam RD_SHIFT_LEN = 1; //(nCK_PER_CLK == 4) ? 1 : 2;
|
134 |
|
|
|
135 |
|
|
// # of reads for reliable read capture
|
136 |
|
|
localparam NUM_READS = 2;
|
137 |
|
|
|
138 |
|
|
// # of cycles to wait after changing RDEN count value
|
139 |
|
|
localparam RDEN_WAIT_CNT = 12;
|
140 |
|
|
|
141 |
|
|
localparam COARSE_CNT = (CLK_PERIOD/nCK_PER_CLK <= 2500) ? 3 : 6;
|
142 |
|
|
localparam FINE_CNT = (CLK_PERIOD/nCK_PER_CLK <= 2500) ? 22 : 44;
|
143 |
|
|
|
144 |
|
|
|
145 |
|
|
localparam CAL2_IDLE = 4'h0;
|
146 |
|
|
localparam CAL2_READ_WAIT = 4'h1;
|
147 |
|
|
localparam CAL2_NEXT_DQS = 4'h2;
|
148 |
|
|
localparam CAL2_WRLVL_WAIT = 4'h3;
|
149 |
|
|
localparam CAL2_IFIFO_RESET = 4'h4;
|
150 |
|
|
localparam CAL2_DQ_IDEL_DEC = 4'h5;
|
151 |
|
|
localparam CAL2_DONE = 4'h6;
|
152 |
|
|
localparam CAL2_SANITY_WAIT = 4'h7;
|
153 |
|
|
localparam CAL2_ERR = 4'h8;
|
154 |
|
|
|
155 |
|
|
integer i,j,k,l,m,p,q,d;
|
156 |
|
|
|
157 |
|
|
reg [2:0] po_coarse_tap_cnt [0:DQS_WIDTH-1];
|
158 |
|
|
reg [3*DQS_WIDTH-1:0] po_coarse_tap_cnt_w;
|
159 |
|
|
reg [5:0] po_fine_tap_cnt [0:DQS_WIDTH-1];
|
160 |
|
|
reg [6*DQS_WIDTH-1:0] po_fine_tap_cnt_w;
|
161 |
|
|
reg [DQS_CNT_WIDTH:0] wrcal_dqs_cnt_r/* synthesis syn_maxfan = 10 */;
|
162 |
|
|
reg [4:0] not_empty_wait_cnt;
|
163 |
|
|
reg [3:0] tap_inc_wait_cnt;
|
164 |
|
|
reg cal2_done_r;
|
165 |
|
|
reg cal2_done_r1;
|
166 |
|
|
reg cal2_prech_req_r;
|
167 |
|
|
reg [3:0] cal2_state_r;
|
168 |
|
|
reg [3:0] cal2_state_r1;
|
169 |
|
|
reg [2:0] wl_po_coarse_cnt_w [0:DQS_WIDTH-1];
|
170 |
|
|
reg [5:0] wl_po_fine_cnt_w [0:DQS_WIDTH-1];
|
171 |
|
|
reg cal2_if_reset;
|
172 |
|
|
reg wrcal_pat_resume_r;
|
173 |
|
|
reg wrcal_pat_resume_r1;
|
174 |
|
|
reg wrcal_pat_resume_r2;
|
175 |
|
|
reg wrcal_pat_resume_r3;
|
176 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_fall0_r;
|
177 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_fall1_r;
|
178 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_rise0_r;
|
179 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_rise1_r;
|
180 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_fall2_r;
|
181 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_fall3_r;
|
182 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_rise2_r;
|
183 |
|
|
reg [DRAM_WIDTH-1:0] mux_rd_rise3_r;
|
184 |
|
|
reg pat_data_match_r;
|
185 |
|
|
reg pat1_data_match_r;
|
186 |
|
|
reg pat1_data_match_r1;
|
187 |
|
|
reg pat2_data_match_r;
|
188 |
|
|
reg pat_data_match_valid_r;
|
189 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_fall0 [3:0];
|
190 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_fall1 [3:0];
|
191 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_fall2 [3:0];
|
192 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_fall3 [3:0];
|
193 |
|
|
wire [RD_SHIFT_LEN-1:0] pat1_fall0 [3:0];
|
194 |
|
|
wire [RD_SHIFT_LEN-1:0] pat1_fall1 [3:0];
|
195 |
|
|
wire [RD_SHIFT_LEN-1:0] pat2_fall0 [3:0];
|
196 |
|
|
wire [RD_SHIFT_LEN-1:0] pat2_fall1 [3:0];
|
197 |
|
|
wire [RD_SHIFT_LEN-1:0] early_fall0 [3:0];
|
198 |
|
|
wire [RD_SHIFT_LEN-1:0] early_fall1 [3:0];
|
199 |
|
|
wire [RD_SHIFT_LEN-1:0] early_fall2 [3:0];
|
200 |
|
|
wire [RD_SHIFT_LEN-1:0] early_fall3 [3:0];
|
201 |
|
|
wire [RD_SHIFT_LEN-1:0] early1_fall0 [3:0];
|
202 |
|
|
wire [RD_SHIFT_LEN-1:0] early1_fall1 [3:0];
|
203 |
|
|
wire [RD_SHIFT_LEN-1:0] early2_fall0 [3:0];
|
204 |
|
|
wire [RD_SHIFT_LEN-1:0] early2_fall1 [3:0];
|
205 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_fall0_r;
|
206 |
|
|
reg pat_match_fall0_and_r;
|
207 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_fall1_r;
|
208 |
|
|
reg pat_match_fall1_and_r;
|
209 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_fall2_r;
|
210 |
|
|
reg pat_match_fall2_and_r;
|
211 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_fall3_r;
|
212 |
|
|
reg pat_match_fall3_and_r;
|
213 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_rise0_r;
|
214 |
|
|
reg pat_match_rise0_and_r;
|
215 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_rise1_r;
|
216 |
|
|
reg pat_match_rise1_and_r;
|
217 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_rise2_r;
|
218 |
|
|
reg pat_match_rise2_and_r;
|
219 |
|
|
reg [DRAM_WIDTH-1:0] pat_match_rise3_r;
|
220 |
|
|
reg pat_match_rise3_and_r;
|
221 |
|
|
reg [DRAM_WIDTH-1:0] pat1_match_rise0_r;
|
222 |
|
|
reg [DRAM_WIDTH-1:0] pat1_match_rise1_r;
|
223 |
|
|
reg [DRAM_WIDTH-1:0] pat1_match_fall0_r;
|
224 |
|
|
reg [DRAM_WIDTH-1:0] pat1_match_fall1_r;
|
225 |
|
|
reg [DRAM_WIDTH-1:0] pat2_match_rise0_r;
|
226 |
|
|
reg [DRAM_WIDTH-1:0] pat2_match_rise1_r;
|
227 |
|
|
reg [DRAM_WIDTH-1:0] pat2_match_fall0_r;
|
228 |
|
|
reg [DRAM_WIDTH-1:0] pat2_match_fall1_r;
|
229 |
|
|
reg pat1_match_rise0_and_r;
|
230 |
|
|
reg pat1_match_rise1_and_r;
|
231 |
|
|
reg pat1_match_fall0_and_r;
|
232 |
|
|
reg pat1_match_fall1_and_r;
|
233 |
|
|
reg pat2_match_rise0_and_r;
|
234 |
|
|
reg pat2_match_rise1_and_r;
|
235 |
|
|
reg pat2_match_fall0_and_r;
|
236 |
|
|
reg pat2_match_fall1_and_r;
|
237 |
|
|
reg early1_data_match_r;
|
238 |
|
|
reg early1_data_match_r1;
|
239 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_fall0_r;
|
240 |
|
|
reg early1_match_fall0_and_r;
|
241 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_fall1_r;
|
242 |
|
|
reg early1_match_fall1_and_r;
|
243 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_fall2_r;
|
244 |
|
|
reg early1_match_fall2_and_r;
|
245 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_fall3_r;
|
246 |
|
|
reg early1_match_fall3_and_r;
|
247 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_rise0_r;
|
248 |
|
|
reg early1_match_rise0_and_r;
|
249 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_rise1_r;
|
250 |
|
|
reg early1_match_rise1_and_r;
|
251 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_rise2_r;
|
252 |
|
|
reg early1_match_rise2_and_r;
|
253 |
|
|
reg [DRAM_WIDTH-1:0] early1_match_rise3_r;
|
254 |
|
|
reg early1_match_rise3_and_r;
|
255 |
|
|
reg early2_data_match_r;
|
256 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_fall0_r;
|
257 |
|
|
reg early2_match_fall0_and_r;
|
258 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_fall1_r;
|
259 |
|
|
reg early2_match_fall1_and_r;
|
260 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_fall2_r;
|
261 |
|
|
reg early2_match_fall2_and_r;
|
262 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_fall3_r;
|
263 |
|
|
reg early2_match_fall3_and_r;
|
264 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_rise0_r;
|
265 |
|
|
reg early2_match_rise0_and_r;
|
266 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_rise1_r;
|
267 |
|
|
reg early2_match_rise1_and_r;
|
268 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_rise2_r;
|
269 |
|
|
reg early2_match_rise2_and_r;
|
270 |
|
|
reg [DRAM_WIDTH-1:0] early2_match_rise3_r;
|
271 |
|
|
reg early2_match_rise3_and_r;
|
272 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_rise0 [3:0];
|
273 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_rise1 [3:0];
|
274 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_rise2 [3:0];
|
275 |
|
|
wire [RD_SHIFT_LEN-1:0] pat_rise3 [3:0];
|
276 |
|
|
wire [RD_SHIFT_LEN-1:0] pat1_rise0 [3:0];
|
277 |
|
|
wire [RD_SHIFT_LEN-1:0] pat1_rise1 [3:0];
|
278 |
|
|
wire [RD_SHIFT_LEN-1:0] pat2_rise0 [3:0];
|
279 |
|
|
wire [RD_SHIFT_LEN-1:0] pat2_rise1 [3:0];
|
280 |
|
|
wire [RD_SHIFT_LEN-1:0] early_rise0 [3:0];
|
281 |
|
|
wire [RD_SHIFT_LEN-1:0] early_rise1 [3:0];
|
282 |
|
|
wire [RD_SHIFT_LEN-1:0] early_rise2 [3:0];
|
283 |
|
|
wire [RD_SHIFT_LEN-1:0] early_rise3 [3:0];
|
284 |
|
|
wire [RD_SHIFT_LEN-1:0] early1_rise0 [3:0];
|
285 |
|
|
wire [RD_SHIFT_LEN-1:0] early1_rise1 [3:0];
|
286 |
|
|
wire [RD_SHIFT_LEN-1:0] early2_rise0 [3:0];
|
287 |
|
|
wire [RD_SHIFT_LEN-1:0] early2_rise1 [3:0];
|
288 |
|
|
wire [DQ_WIDTH-1:0] rd_data_rise0;
|
289 |
|
|
wire [DQ_WIDTH-1:0] rd_data_fall0;
|
290 |
|
|
wire [DQ_WIDTH-1:0] rd_data_rise1;
|
291 |
|
|
wire [DQ_WIDTH-1:0] rd_data_fall1;
|
292 |
|
|
wire [DQ_WIDTH-1:0] rd_data_rise2;
|
293 |
|
|
wire [DQ_WIDTH-1:0] rd_data_fall2;
|
294 |
|
|
wire [DQ_WIDTH-1:0] rd_data_rise3;
|
295 |
|
|
wire [DQ_WIDTH-1:0] rd_data_fall3;
|
296 |
|
|
reg [DQS_CNT_WIDTH:0] rd_mux_sel_r;
|
297 |
|
|
reg rd_active_posedge_r;
|
298 |
|
|
reg rd_active_r;
|
299 |
|
|
reg rd_active_r1;
|
300 |
|
|
reg rd_active_r2;
|
301 |
|
|
reg rd_active_r3;
|
302 |
|
|
reg rd_active_r4;
|
303 |
|
|
reg rd_active_r5;
|
304 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_fall0_r [DRAM_WIDTH-1:0];
|
305 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_fall1_r [DRAM_WIDTH-1:0];
|
306 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_rise0_r [DRAM_WIDTH-1:0];
|
307 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_rise1_r [DRAM_WIDTH-1:0];
|
308 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_fall2_r [DRAM_WIDTH-1:0];
|
309 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_fall3_r [DRAM_WIDTH-1:0];
|
310 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_rise2_r [DRAM_WIDTH-1:0];
|
311 |
|
|
reg [RD_SHIFT_LEN-1:0] sr_rise3_r [DRAM_WIDTH-1:0];
|
312 |
|
|
reg wrlvl_byte_done_r;
|
313 |
|
|
reg idelay_ld_done;
|
314 |
|
|
reg pat1_detect;
|
315 |
|
|
reg early1_detect;
|
316 |
|
|
reg wrcal_sanity_chk_r;
|
317 |
|
|
reg wrcal_sanity_chk_err;
|
318 |
|
|
|
319 |
|
|
|
320 |
|
|
//***************************************************************************
|
321 |
|
|
// Debug
|
322 |
|
|
//***************************************************************************
|
323 |
|
|
|
324 |
|
|
// Flatten the per-DQS-group Phaser_Out tap-count arrays into packed
// vectors for the debug port (dbg_final_po_*_tap_cnt).
always @(*) begin
  for (d = 0; d < DQS_WIDTH; d = d + 1) begin
    po_coarse_tap_cnt_w[(3*d)+:3] = po_coarse_tap_cnt[d];  // 3 bits/group
    po_fine_tap_cnt_w[(6*d)+:6]   = po_fine_tap_cnt[d];    // 6 bits/group
  end
end
330 |
|
|
|
331 |
|
|
// Static tie-offs: retry/read-request handshakes are unused by this
// implementation, and the IN_FIFO reset is driven by the cal2 FSM.
assign dqsfound_retry = 1'b0;
assign wrcal_read_req = 1'b0;
assign phy_if_reset   = cal2_if_reset;

// Final (post-calibration) Phaser_Out tap counts, packed for debug.
assign dbg_final_po_fine_tap_cnt   = po_fine_tap_cnt_w;
assign dbg_final_po_coarse_tap_cnt = po_coarse_tap_cnt_w;

// Debug bus layout. NOTE(review): bits [12:9] and [99:93] are not
// driven here — presumably reserved; confirm against the debug spec.
assign dbg_phy_wrcal[0]                 = pat_data_match_r;
assign dbg_phy_wrcal[4:1]               = cal2_state_r1[3:0];
assign dbg_phy_wrcal[5]                 = wrcal_sanity_chk_err;
assign dbg_phy_wrcal[6]                 = wrcal_start;
assign dbg_phy_wrcal[7]                 = wrcal_done;
assign dbg_phy_wrcal[8]                 = pat_data_match_valid_r;
assign dbg_phy_wrcal[13+:DQS_CNT_WIDTH] = wrcal_dqs_cnt_r;
assign dbg_phy_wrcal[17+:5]             = not_empty_wait_cnt;
assign dbg_phy_wrcal[22]                = early1_data;
assign dbg_phy_wrcal[23]                = early2_data;
// Captured read data for the byte lane currently under calibration.
assign dbg_phy_wrcal[24+:8]             = mux_rd_rise0_r;
assign dbg_phy_wrcal[32+:8]             = mux_rd_fall0_r;
assign dbg_phy_wrcal[40+:8]             = mux_rd_rise1_r;
assign dbg_phy_wrcal[48+:8]             = mux_rd_fall1_r;
assign dbg_phy_wrcal[56+:8]             = mux_rd_rise2_r;
assign dbg_phy_wrcal[64+:8]             = mux_rd_fall2_r;
assign dbg_phy_wrcal[72+:8]             = mux_rd_rise3_r;
assign dbg_phy_wrcal[80+:8]             = mux_rd_fall3_r;
assign dbg_phy_wrcal[88]                = early1_data_match_r;
assign dbg_phy_wrcal[89]                = early2_data_match_r;
assign dbg_phy_wrcal[90]                = wrcal_sanity_chk_r & pat_data_match_valid_r;
assign dbg_phy_wrcal[91]                = wrcal_sanity_chk_r;
assign dbg_phy_wrcal[92]                = wrcal_sanity_chk_done;
|
361 |
|
|
|
362 |
|
|
//**************************************************************************
|
363 |
|
|
// DQS count to hard PHY during write calibration using Phaser_OUT Stage2
|
364 |
|
|
// coarse delay
|
365 |
|
|
//**************************************************************************
|
366 |
|
|
|
367 |
|
|
// Register the DQS-group count sent to the hard PHY (Phaser_Out stage-2
// coarse delay select) and pipeline a couple of control inputs.
always @(posedge clk) begin
  wrcal_sanity_chk_r <= #TCQ wrcal_sanity_chk;
  wrlvl_byte_done_r  <= #TCQ wrlvl_byte_done;
  po_stg2_wrcal_cnt  <= #TCQ wrcal_dqs_cnt_r;
end
372 |
|
|
|
373 |
|
|
//***************************************************************************
|
374 |
|
|
// Data mux to route appropriate byte to calibration logic - i.e. calibration
|
375 |
|
|
// is done sequentially, one byte (or DQS group) at a time
|
376 |
|
|
//***************************************************************************
|
377 |
|
|
|
378 |
|
|
// Split the concatenated capture bus into per-phase rise/fall words.
// rd_data packs 2*nCK_PER_CLK words of DQ_WIDTH bits, ordered
// rise0, fall0, rise1, fall1, ... ; use indexed part-selects.
generate
  if (nCK_PER_CLK == 4) begin: gen_rd_data_div4
    assign rd_data_rise0 = rd_data[0*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_fall0 = rd_data[1*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_rise1 = rd_data[2*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_fall1 = rd_data[3*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_rise2 = rd_data[4*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_fall2 = rd_data[5*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_rise3 = rd_data[6*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_fall3 = rd_data[7*DQ_WIDTH+:DQ_WIDTH];
  end else if (nCK_PER_CLK == 2) begin: gen_rd_data_div2
    assign rd_data_rise0 = rd_data[0*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_fall0 = rd_data[1*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_rise1 = rd_data[2*DQ_WIDTH+:DQ_WIDTH];
    assign rd_data_fall1 = rd_data[3*DQ_WIDTH+:DQ_WIDTH];
  end
endgenerate
|
395 |
|
|
|
396 |
|
|
//**************************************************************************
|
397 |
|
|
// Final Phaser OUT coarse and fine delay taps after write calibration
|
398 |
|
|
// Sum of taps used during write leveling taps and write calibration
|
399 |
|
|
//**************************************************************************
|
400 |
|
|
|
401 |
|
|
// Unpack the flattened write-leveling tap counts into per-DQS-group
// arrays for use when latching the final tap values.
always @(*) begin
  for (m = 0; m < DQS_WIDTH; m = m + 1) begin
    wl_po_fine_cnt_w[m]   = wl_po_fine_cnt[6*m+:6];
    wl_po_coarse_cnt_w[m] = wl_po_coarse_cnt[3*m+:3];
  end
end
|
407 |
|
|
|
408 |
|
|
//**************************************************************************
// Latch the final Phaser_Out coarse and fine tap counts per DQS group on
// the rising edge of cal2_done_r (write calibration complete), for debug
// readback via dbg_final_po_*_tap_cnt. Cleared to zero on reset.
//**************************************************************************
always @(posedge clk) begin
  if (rst) begin
    for (p = 0; p < DQS_WIDTH; p = p + 1) begin
      po_coarse_tap_cnt[p] <= #TCQ {3{1'b0}};
      po_fine_tap_cnt[p]   <= #TCQ {6{1'b0}};
    end
  end else if (cal2_done_r && ~cal2_done_r1) begin
    // BUG FIX: the original indexed the write-leveling count arrays with
    // 'i' — a stale loop variable never set in this block — instead of the
    // loop index 'q', so every DQS group captured the same (undefined)
    // entry. Index with 'q' so each group latches its own tap counts.
    for (q = 0; q < DQS_WIDTH; q = q + 1) begin
      po_coarse_tap_cnt[q] <= #TCQ wl_po_coarse_cnt_w[q];
      po_fine_tap_cnt[q]   <= #TCQ wl_po_fine_cnt_w[q];
    end
  end
end
|
421 |
|
|
|
422 |
|
|
// Pipeline the DQS-group select used by the read-data byte mux
// (registered copy of wrcal_dqs_cnt_r for timing).
always @(posedge clk)
  rd_mux_sel_r <= #TCQ wrcal_dqs_cnt_r;
|
425 |
|
|
|
426 |
|
|
// Register outputs for improved timing.
|
427 |
|
|
// NOTE: Will need to change when per-bit DQ deskew is supported.
|
428 |
|
|
// Currenly all bits in DQS group are checked in aggregate
|
429 |
|
|
// Route the byte lane under calibration to the match logic. Calibration
// is sequential, one DQS group at a time: rd_mux_sel_r selects which
// DRAM_WIDTH-bit slice of each captured word is registered.
// NOTE: will need rework if per-bit DQ deskew is ever supported; today
// all bits of the selected group are checked in aggregate.
generate
  genvar mux_i;
  if (nCK_PER_CLK == 4) begin: gen_mux_rd_div4
    for (mux_i = 0; mux_i < DRAM_WIDTH; mux_i = mux_i + 1) begin: gen_mux_rd
      always @(posedge clk) begin
        // rise captures, phases 0..3
        mux_rd_rise0_r[mux_i] <= #TCQ rd_data_rise0[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_rise1_r[mux_i] <= #TCQ rd_data_rise1[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_rise2_r[mux_i] <= #TCQ rd_data_rise2[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_rise3_r[mux_i] <= #TCQ rd_data_rise3[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        // fall captures, phases 0..3
        mux_rd_fall0_r[mux_i] <= #TCQ rd_data_fall0[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_fall1_r[mux_i] <= #TCQ rd_data_fall1[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_fall2_r[mux_i] <= #TCQ rd_data_fall2[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_fall3_r[mux_i] <= #TCQ rd_data_fall3[DRAM_WIDTH*rd_mux_sel_r + mux_i];
      end
    end
  end else if (nCK_PER_CLK == 2) begin: gen_mux_rd_div2
    for (mux_i = 0; mux_i < DRAM_WIDTH; mux_i = mux_i + 1) begin: gen_mux_rd
      always @(posedge clk) begin
        mux_rd_rise0_r[mux_i] <= #TCQ rd_data_rise0[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_rise1_r[mux_i] <= #TCQ rd_data_rise1[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_fall0_r[mux_i] <= #TCQ rd_data_fall0[DRAM_WIDTH*rd_mux_sel_r + mux_i];
        mux_rd_fall1_r[mux_i] <= #TCQ rd_data_fall1[DRAM_WIDTH*rd_mux_sel_r + mux_i];
      end
    end
  end
endgenerate
|
455 |
|
|
|
456 |
|
|
//***************************************************************************
|
457 |
|
|
// generate request to PHY_INIT logic to issue precharged. Required when
|
458 |
|
|
// calibration can take a long time (during which there are only constant
|
459 |
|
|
// reads present on this bus). In this case need to issue perioidic
|
460 |
|
|
// precharges to avoid tRAS violation. This signal must meet the following
|
461 |
|
|
// requirements: (1) only transition from 0->1 when prech is first needed,
|
462 |
|
|
// (2) stay at 1 and only transition 1->0 when RDLVL_PRECH_DONE asserted
|
463 |
|
|
//***************************************************************************
|
464 |
|
|
|
465 |
|
|
// Forward the cal2 precharge request to PHY_INIT. Periodic precharges
// are needed during long calibration (constant reads) to avoid tRAS
// violation: the request must only rise when precharge is first needed
// and only fall once PRECH_DONE is observed (handled upstream).
always @(posedge clk) begin
  if (rst)
    wrcal_prech_req <= #TCQ 1'b0;
  else
    // Single source today; OR additional stage requests here if added.
    wrcal_prech_req <= #TCQ cal2_prech_req_r;
end
|
471 |
|
|
|
472 |
|
|
//***************************************************************************
|
473 |
|
|
// Shift register to store last RDDATA_SHIFT_LEN cycles of data from ISERDES
|
474 |
|
|
// NOTE: Written using discrete flops, but SRL can be used if the matching
|
475 |
|
|
// logic does the comparison sequentially, rather than parallel
|
476 |
|
|
//***************************************************************************
|
477 |
|
|
|
478 |
|
|
// Capture stage for the muxed read data (RD_SHIFT_LEN = 1, so each
// "shift register" is a single flop per bit). Discrete flops are used
// so the match logic can compare all bits in parallel.
generate
  genvar rd_i;
  if (nCK_PER_CLK == 4) begin: gen_sr_div4
    for (rd_i = 0; rd_i < DRAM_WIDTH; rd_i = rd_i + 1) begin: gen_sr
      always @(posedge clk) begin
        // rise phases 0..3
        sr_rise0_r[rd_i] <= #TCQ mux_rd_rise0_r[rd_i];
        sr_rise1_r[rd_i] <= #TCQ mux_rd_rise1_r[rd_i];
        sr_rise2_r[rd_i] <= #TCQ mux_rd_rise2_r[rd_i];
        sr_rise3_r[rd_i] <= #TCQ mux_rd_rise3_r[rd_i];
        // fall phases 0..3
        sr_fall0_r[rd_i] <= #TCQ mux_rd_fall0_r[rd_i];
        sr_fall1_r[rd_i] <= #TCQ mux_rd_fall1_r[rd_i];
        sr_fall2_r[rd_i] <= #TCQ mux_rd_fall2_r[rd_i];
        sr_fall3_r[rd_i] <= #TCQ mux_rd_fall3_r[rd_i];
      end
    end
  end else if (nCK_PER_CLK == 2) begin: gen_sr_div2
    for (rd_i = 0; rd_i < DRAM_WIDTH; rd_i = rd_i + 1) begin: gen_sr
      always @(posedge clk) begin
        sr_rise0_r[rd_i] <= #TCQ mux_rd_rise0_r[rd_i];
        sr_rise1_r[rd_i] <= #TCQ mux_rd_rise1_r[rd_i];
        sr_fall0_r[rd_i] <= #TCQ mux_rd_fall0_r[rd_i];
        sr_fall1_r[rd_i] <= #TCQ mux_rd_fall1_r[rd_i];
      end
    end
  end
endgenerate
|
504 |
|
|
|
505 |
|
|
//***************************************************************************
|
506 |
|
|
// Write calibration:
|
507 |
|
|
// During write leveling DQS is aligned to the nearest CK edge that may not
|
508 |
|
|
// be the correct CK edge. Write calibration is required to align the DQS to
|
509 |
|
|
// the correct CK edge that clocks the write command.
|
510 |
|
|
// The Phaser_Out coarse delay line is adjusted if required to add a memory
|
511 |
|
|
// clock cycle of delay in order to read back the expected pattern.
|
512 |
|
|
//***************************************************************************
|
513 |
|
|
|
514 |
|
|
// Delay line for the read-data-valid strobe: rd_active_rN is
// phy_rddata_en delayed by N+1 cycles, matching the capture/compare
// pipeline latency.
always @(posedge clk) begin
  rd_active_r <= #TCQ phy_rddata_en;
  {rd_active_r5, rd_active_r4, rd_active_r3, rd_active_r2, rd_active_r1}
    <= #TCQ {rd_active_r4, rd_active_r3, rd_active_r2, rd_active_r1, rd_active_r};
end
|
522 |
|
|
|
523 |
|
|
//*****************************************************************
|
524 |
|
|
// Expected data pattern when properly received by read capture
|
525 |
|
|
// logic:
|
526 |
|
|
// Based on pattern of ({rise,fall}) =
|
527 |
|
|
// 0xF, 0x0, 0xA, 0x5, 0x5, 0xA, 0x9, 0x6
|
528 |
|
|
// Each nibble will look like:
|
529 |
|
|
// bit3: 1, 0, 1, 0, 0, 1, 1, 0
|
530 |
|
|
// bit2: 1, 0, 0, 1, 1, 0, 0, 1
|
531 |
|
|
// bit1: 1, 0, 1, 0, 0, 1, 0, 1
|
532 |
|
|
// bit0: 1, 0, 0, 1, 1, 0, 1, 0
|
533 |
|
|
// Change the hard-coded pattern below accordingly as RD_SHIFT_LEN
|
534 |
|
|
// and the actual training pattern contents change
|
535 |
|
|
//*****************************************************************
|
536 |
|
|
|
537 |
|
|
generate
|
538 |
|
|
if (nCK_PER_CLK == 4) begin: gen_pat_div4
|
539 |
|
|
// FF00AA5555AA9966
|
540 |
|
|
assign pat_rise0[3] = 1'b1;
|
541 |
|
|
assign pat_fall0[3] = 1'b0;
|
542 |
|
|
assign pat_rise1[3] = 1'b1;
|
543 |
|
|
assign pat_fall1[3] = 1'b0;
|
544 |
|
|
assign pat_rise2[3] = 1'b0;
|
545 |
|
|
assign pat_fall2[3] = 1'b1;
|
546 |
|
|
assign pat_rise3[3] = 1'b1;
|
547 |
|
|
assign pat_fall3[3] = 1'b0;
|
548 |
|
|
|
549 |
|
|
assign pat_rise0[2] = 1'b1;
|
550 |
|
|
assign pat_fall0[2] = 1'b0;
|
551 |
|
|
assign pat_rise1[2] = 1'b0;
|
552 |
|
|
assign pat_fall1[2] = 1'b1;
|
553 |
|
|
assign pat_rise2[2] = 1'b1;
|
554 |
|
|
assign pat_fall2[2] = 1'b0;
|
555 |
|
|
assign pat_rise3[2] = 1'b0;
|
556 |
|
|
assign pat_fall3[2] = 1'b1;
|
557 |
|
|
|
558 |
|
|
assign pat_rise0[1] = 1'b1;
|
559 |
|
|
assign pat_fall0[1] = 1'b0;
|
560 |
|
|
assign pat_rise1[1] = 1'b1;
|
561 |
|
|
assign pat_fall1[1] = 1'b0;
|
562 |
|
|
assign pat_rise2[1] = 1'b0;
|
563 |
|
|
assign pat_fall2[1] = 1'b1;
|
564 |
|
|
assign pat_rise3[1] = 1'b0;
|
565 |
|
|
assign pat_fall3[1] = 1'b1;
|
566 |
|
|
|
567 |
|
|
assign pat_rise0[0] = 1'b1;
|
568 |
|
|
assign pat_fall0[0] = 1'b0;
|
569 |
|
|
assign pat_rise1[0] = 1'b0;
|
570 |
|
|
assign pat_fall1[0] = 1'b1;
|
571 |
|
|
assign pat_rise2[0] = 1'b1;
|
572 |
|
|
assign pat_fall2[0] = 1'b0;
|
573 |
|
|
assign pat_rise3[0] = 1'b1;
|
574 |
|
|
assign pat_fall3[0] = 1'b0;
|
575 |
|
|
|
576 |
|
|
// Pattern to distinguish between early write and incorrect read
|
577 |
|
|
// BB11EE4444EEDD88
|
578 |
|
|
assign early_rise0[3] = 1'b1;
|
579 |
|
|
assign early_fall0[3] = 1'b0;
|
580 |
|
|
assign early_rise1[3] = 1'b1;
|
581 |
|
|
assign early_fall1[3] = 1'b0;
|
582 |
|
|
assign early_rise2[3] = 1'b0;
|
583 |
|
|
assign early_fall2[3] = 1'b1;
|
584 |
|
|
assign early_rise3[3] = 1'b1;
|
585 |
|
|
assign early_fall3[3] = 1'b1;
|
586 |
|
|
|
587 |
|
|
assign early_rise0[2] = 1'b0;
|
588 |
|
|
assign early_fall0[2] = 1'b0;
|
589 |
|
|
assign early_rise1[2] = 1'b1;
|
590 |
|
|
assign early_fall1[2] = 1'b1;
|
591 |
|
|
assign early_rise2[2] = 1'b1;
|
592 |
|
|
assign early_fall2[2] = 1'b1;
|
593 |
|
|
assign early_rise3[2] = 1'b1;
|
594 |
|
|
assign early_fall3[2] = 1'b0;
|
595 |
|
|
|
596 |
|
|
assign early_rise0[1] = 1'b1;
|
597 |
|
|
assign early_fall0[1] = 1'b0;
|
598 |
|
|
assign early_rise1[1] = 1'b1;
|
599 |
|
|
assign early_fall1[1] = 1'b0;
|
600 |
|
|
assign early_rise2[1] = 1'b0;
|
601 |
|
|
assign early_fall2[1] = 1'b1;
|
602 |
|
|
assign early_rise3[1] = 1'b0;
|
603 |
|
|
assign early_fall3[1] = 1'b0;
|
604 |
|
|
|
605 |
|
|
assign early_rise0[0] = 1'b1;
|
606 |
|
|
assign early_fall0[0] = 1'b1;
|
607 |
|
|
assign early_rise1[0] = 1'b0;
|
608 |
|
|
assign early_fall1[0] = 1'b0;
|
609 |
|
|
assign early_rise2[0] = 1'b0;
|
610 |
|
|
assign early_fall2[0] = 1'b0;
|
611 |
|
|
assign early_rise3[0] = 1'b1;
|
612 |
|
|
assign early_fall3[0] = 1'b0;
|
613 |
|
|
|
614 |
|
|
end else if (nCK_PER_CLK == 2) begin: gen_pat_div2
|
615 |
|
|
// First cycle pattern FF00AA55
|
616 |
|
|
assign pat1_rise0[3] = 1'b1;
|
617 |
|
|
assign pat1_fall0[3] = 1'b0;
|
618 |
|
|
assign pat1_rise1[3] = 1'b1;
|
619 |
|
|
assign pat1_fall1[3] = 1'b0;
|
620 |
|
|
|
621 |
|
|
assign pat1_rise0[2] = 1'b1;
|
622 |
|
|
assign pat1_fall0[2] = 1'b0;
|
623 |
|
|
assign pat1_rise1[2] = 1'b0;
|
624 |
|
|
assign pat1_fall1[2] = 1'b1;
|
625 |
|
|
|
626 |
|
|
assign pat1_rise0[1] = 1'b1;
|
627 |
|
|
assign pat1_fall0[1] = 1'b0;
|
628 |
|
|
assign pat1_rise1[1] = 1'b1;
|
629 |
|
|
assign pat1_fall1[1] = 1'b0;
|
630 |
|
|
|
631 |
|
|
assign pat1_rise0[0] = 1'b1;
|
632 |
|
|
assign pat1_fall0[0] = 1'b0;
|
633 |
|
|
assign pat1_rise1[0] = 1'b0;
|
634 |
|
|
assign pat1_fall1[0] = 1'b1;
|
635 |
|
|
|
636 |
|
|
// Second cycle pattern 55AA9966
|
637 |
|
|
assign pat2_rise0[3] = 1'b0;
|
638 |
|
|
assign pat2_fall0[3] = 1'b1;
|
639 |
|
|
assign pat2_rise1[3] = 1'b1;
|
640 |
|
|
assign pat2_fall1[3] = 1'b0;
|
641 |
|
|
|
642 |
|
|
assign pat2_rise0[2] = 1'b1;
|
643 |
|
|
assign pat2_fall0[2] = 1'b0;
|
644 |
|
|
assign pat2_rise1[2] = 1'b0;
|
645 |
|
|
assign pat2_fall1[2] = 1'b1;
|
646 |
|
|
|
647 |
|
|
assign pat2_rise0[1] = 1'b0;
|
648 |
|
|
assign pat2_fall0[1] = 1'b1;
|
649 |
|
|
assign pat2_rise1[1] = 1'b0;
|
650 |
|
|
assign pat2_fall1[1] = 1'b1;
|
651 |
|
|
|
652 |
|
|
assign pat2_rise0[0] = 1'b1;
|
653 |
|
|
assign pat2_fall0[0] = 1'b0;
|
654 |
|
|
assign pat2_rise1[0] = 1'b1;
|
655 |
|
|
assign pat2_fall1[0] = 1'b0;
|
656 |
|
|
|
657 |
|
|
//Pattern to distinguish between early write and incorrect read
|
658 |
|
|
// First cycle pattern AA5555AA
|
659 |
|
|
assign early1_rise0[3] = 2'b1;
|
660 |
|
|
assign early1_fall0[3] = 2'b0;
|
661 |
|
|
assign early1_rise1[3] = 2'b0;
|
662 |
|
|
assign early1_fall1[3] = 2'b1;
|
663 |
|
|
|
664 |
|
|
assign early1_rise0[2] = 2'b0;
|
665 |
|
|
assign early1_fall0[2] = 2'b1;
|
666 |
|
|
assign early1_rise1[2] = 2'b1;
|
667 |
|
|
assign early1_fall1[2] = 2'b0;
|
668 |
|
|
|
669 |
|
|
assign early1_rise0[1] = 2'b1;
|
670 |
|
|
assign early1_fall0[1] = 2'b0;
|
671 |
|
|
assign early1_rise1[1] = 2'b0;
|
672 |
|
|
assign early1_fall1[1] = 2'b1;
|
673 |
|
|
|
674 |
|
|
assign early1_rise0[0] = 2'b0;
|
675 |
|
|
assign early1_fall0[0] = 2'b1;
|
676 |
|
|
assign early1_rise1[0] = 2'b1;
|
677 |
|
|
assign early1_fall1[0] = 2'b0;
|
678 |
|
|
|
679 |
|
|
// Second cycle pattern 9966BB11
// (per-beat nibbles: rise0 = 9, fall0 = 6, rise1 = B, fall1 = 1)
// FIX: replaced mis-sized 2-bit literals (2'b1 / 2'b0) on these 1-bit
// nets with 1'b1 / 1'b0 (same truncated value, lint-clean).
assign early2_rise0[3] = 1'b1;
assign early2_fall0[3] = 1'b0;
assign early2_rise1[3] = 1'b1;
assign early2_fall1[3] = 1'b0;

assign early2_rise0[2] = 1'b0;
assign early2_fall0[2] = 1'b1;
assign early2_rise1[2] = 1'b0;
assign early2_fall1[2] = 1'b0;

assign early2_rise0[1] = 1'b0;
assign early2_fall0[1] = 1'b1;
assign early2_rise1[1] = 1'b1;
assign early2_fall1[1] = 1'b0;

assign early2_rise0[0] = 1'b1;
assign early2_fall0[0] = 1'b0;
assign early2_rise1[0] = 1'b1;
assign early2_fall1[0] = 1'b1;
|
699 |
|
|
end
|
700 |
|
|
endgenerate
|
701 |
|
|
|
702 |
|
|
// Each bit of each byte is compared to expected pattern.
// This was done to prevent (and "drastically decrease") the chance that
// invalid data clocked in when the DQ bus is tri-state (along with a
// combination of the correct data) will resemble the expected data
// pattern. A better fix for this is to change the training pattern and/or
// make the pattern longer.
|
708 |
|
|
generate
|
709 |
|
|
genvar pt_i;
|
710 |
|
|
if (nCK_PER_CLK == 4) begin: gen_pat_match_div4
|
711 |
|
|
for (pt_i = 0; pt_i < DRAM_WIDTH; pt_i = pt_i + 1) begin: gen_pat_match

  // Per-DQ-bit compare of the captured shift-register data against the
  // expected pattern for the same nibble bit position (pt_i%4).
  // On-time match: beat N compared against pattern beat N.
  always @(posedge clk) begin
    if (sr_rise0_r[pt_i] == pat_rise0[pt_i%4])
      pat_match_rise0_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_rise0_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall0_r[pt_i] == pat_fall0[pt_i%4])
      pat_match_fall0_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_fall0_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise1_r[pt_i] == pat_rise1[pt_i%4])
      pat_match_rise1_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_rise1_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall1_r[pt_i] == pat_fall1[pt_i%4])
      pat_match_fall1_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_fall1_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise2_r[pt_i] == pat_rise2[pt_i%4])
      pat_match_rise2_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_rise2_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall2_r[pt_i] == pat_fall2[pt_i%4])
      pat_match_fall2_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_fall2_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise3_r[pt_i] == pat_rise3[pt_i%4])
      pat_match_rise3_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_rise3_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall3_r[pt_i] == pat_fall3[pt_i%4])
      pat_match_fall3_r[pt_i] <= #TCQ 1'b1;
    else
      pat_match_fall3_r[pt_i] <= #TCQ 1'b0;
  end

  // One-cycle-early match: captured beat N compared against pattern
  // beat N+1 (last beats wrap onto the early_* pattern values).
  always @(posedge clk) begin
    if (sr_rise0_r[pt_i] == pat_rise1[pt_i%4])
      early1_match_rise0_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_rise0_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall0_r[pt_i] == pat_fall1[pt_i%4])
      early1_match_fall0_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_fall0_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise1_r[pt_i] == pat_rise2[pt_i%4])
      early1_match_rise1_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_rise1_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall1_r[pt_i] == pat_fall2[pt_i%4])
      early1_match_fall1_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_fall1_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise2_r[pt_i] == pat_rise3[pt_i%4])
      early1_match_rise2_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_rise2_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall2_r[pt_i] == pat_fall3[pt_i%4])
      early1_match_fall2_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_fall2_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise3_r[pt_i] == early_rise0[pt_i%4])
      early1_match_rise3_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_rise3_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall3_r[pt_i] == early_fall0[pt_i%4])
      early1_match_fall3_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_fall3_r[pt_i] <= #TCQ 1'b0;
  end

  // Two-cycles-early match: captured beat N compared against pattern
  // beat N+2 (last beats wrap onto the early_* pattern values).
  always @(posedge clk) begin
    if (sr_rise0_r[pt_i] == pat_rise2[pt_i%4])
      early2_match_rise0_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_rise0_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall0_r[pt_i] == pat_fall2[pt_i%4])
      early2_match_fall0_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_fall0_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise1_r[pt_i] == pat_rise3[pt_i%4])
      early2_match_rise1_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_rise1_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall1_r[pt_i] == pat_fall3[pt_i%4])
      early2_match_fall1_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_fall1_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise2_r[pt_i] == early_rise0[pt_i%4])
      early2_match_rise2_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_rise2_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall2_r[pt_i] == early_fall0[pt_i%4])
      early2_match_fall2_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_fall2_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise3_r[pt_i] == early_rise1[pt_i%4])
      early2_match_rise3_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_rise3_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall3_r[pt_i] == early_fall1[pt_i%4])
      early2_match_fall3_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_fall3_r[pt_i] <= #TCQ 1'b0;
  end
end
|
838 |
|
|
|
839 |
|
|
|
840 |
|
|
// Second pipeline stage: AND-reduce the per-bit match flags across the
// DRAM_WIDTH bits of the byte, then AND the eight beat results into a
// single on-time match flag. Valid flag is rd_active_r3 delayed to line
// up with this two-stage match pipeline.
always @(posedge clk) begin
  pat_match_rise0_and_r <= #TCQ &pat_match_rise0_r;
  pat_match_fall0_and_r <= #TCQ &pat_match_fall0_r;
  pat_match_rise1_and_r <= #TCQ &pat_match_rise1_r;
  pat_match_fall1_and_r <= #TCQ &pat_match_fall1_r;
  pat_match_rise2_and_r <= #TCQ &pat_match_rise2_r;
  pat_match_fall2_and_r <= #TCQ &pat_match_fall2_r;
  pat_match_rise3_and_r <= #TCQ &pat_match_rise3_r;
  pat_match_fall3_and_r <= #TCQ &pat_match_fall3_r;
  pat_data_match_r      <= #TCQ (pat_match_rise0_and_r &&
                                 pat_match_fall0_and_r &&
                                 pat_match_rise1_and_r &&
                                 pat_match_fall1_and_r &&
                                 pat_match_rise2_and_r &&
                                 pat_match_fall2_and_r &&
                                 pat_match_rise3_and_r &&
                                 pat_match_fall3_and_r);
  pat_data_match_valid_r <= #TCQ rd_active_r3;
end
|
859 |
|
|
|
860 |
|
|
// Same two-stage reduction as the on-time match, but for the
// one-cycle-early comparison results.
always @(posedge clk) begin
  early1_match_rise0_and_r <= #TCQ &early1_match_rise0_r;
  early1_match_fall0_and_r <= #TCQ &early1_match_fall0_r;
  early1_match_rise1_and_r <= #TCQ &early1_match_rise1_r;
  early1_match_fall1_and_r <= #TCQ &early1_match_fall1_r;
  early1_match_rise2_and_r <= #TCQ &early1_match_rise2_r;
  early1_match_fall2_and_r <= #TCQ &early1_match_fall2_r;
  early1_match_rise3_and_r <= #TCQ &early1_match_rise3_r;
  early1_match_fall3_and_r <= #TCQ &early1_match_fall3_r;
  early1_data_match_r      <= #TCQ (early1_match_rise0_and_r &&
                                    early1_match_fall0_and_r &&
                                    early1_match_rise1_and_r &&
                                    early1_match_fall1_and_r &&
                                    early1_match_rise2_and_r &&
                                    early1_match_fall2_and_r &&
                                    early1_match_rise3_and_r &&
                                    early1_match_fall3_and_r);
end
|
878 |
|
|
|
879 |
|
|
// Same two-stage reduction for the two-cycles-early comparison results.
always @(posedge clk) begin
  early2_match_rise0_and_r <= #TCQ &early2_match_rise0_r;
  early2_match_fall0_and_r <= #TCQ &early2_match_fall0_r;
  early2_match_rise1_and_r <= #TCQ &early2_match_rise1_r;
  early2_match_fall1_and_r <= #TCQ &early2_match_fall1_r;
  early2_match_rise2_and_r <= #TCQ &early2_match_rise2_r;
  early2_match_fall2_and_r <= #TCQ &early2_match_fall2_r;
  early2_match_rise3_and_r <= #TCQ &early2_match_rise3_r;
  early2_match_fall3_and_r <= #TCQ &early2_match_fall3_r;
  early2_data_match_r      <= #TCQ (early2_match_rise0_and_r &&
                                    early2_match_fall0_and_r &&
                                    early2_match_rise1_and_r &&
                                    early2_match_fall1_and_r &&
                                    early2_match_rise2_and_r &&
                                    early2_match_fall2_and_r &&
                                    early2_match_rise3_and_r &&
                                    early2_match_fall3_and_r);
end
|
897 |
|
|
|
898 |
|
|
end else if (nCK_PER_CLK == 2) begin: gen_pat_match_div2
|
899 |
|
|
|
900 |
|
|
for (pt_i = 0; pt_i < DRAM_WIDTH; pt_i = pt_i + 1) begin: gen_pat_match

  // 2:1 mode: per-DQ-bit compare against the first-cycle pattern
  // (pat1_*), nibble bit position selected by pt_i%4.
  always @(posedge clk) begin
    if (sr_rise0_r[pt_i] == pat1_rise0[pt_i%4])
      pat1_match_rise0_r[pt_i] <= #TCQ 1'b1;
    else
      pat1_match_rise0_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall0_r[pt_i] == pat1_fall0[pt_i%4])
      pat1_match_fall0_r[pt_i] <= #TCQ 1'b1;
    else
      pat1_match_fall0_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise1_r[pt_i] == pat1_rise1[pt_i%4])
      pat1_match_rise1_r[pt_i] <= #TCQ 1'b1;
    else
      pat1_match_rise1_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall1_r[pt_i] == pat1_fall1[pt_i%4])
      pat1_match_fall1_r[pt_i] <= #TCQ 1'b1;
    else
      pat1_match_fall1_r[pt_i] <= #TCQ 1'b0;
  end

  // Compare against the second-cycle pattern (pat2_*).
  always @(posedge clk) begin
    if (sr_rise0_r[pt_i] == pat2_rise0[pt_i%4])
      pat2_match_rise0_r[pt_i] <= #TCQ 1'b1;
    else
      pat2_match_rise0_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall0_r[pt_i] == pat2_fall0[pt_i%4])
      pat2_match_fall0_r[pt_i] <= #TCQ 1'b1;
    else
      pat2_match_fall0_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise1_r[pt_i] == pat2_rise1[pt_i%4])
      pat2_match_rise1_r[pt_i] <= #TCQ 1'b1;
    else
      pat2_match_rise1_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall1_r[pt_i] == pat2_fall1[pt_i%4])
      pat2_match_fall1_r[pt_i] <= #TCQ 1'b1;
    else
      pat2_match_fall1_r[pt_i] <= #TCQ 1'b0;
  end

  // Compare against the first early-write detection pattern (early1_*).
  always @(posedge clk) begin
    if (sr_rise0_r[pt_i] == early1_rise0[pt_i%4])
      early1_match_rise0_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_rise0_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall0_r[pt_i] == early1_fall0[pt_i%4])
      early1_match_fall0_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_fall0_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise1_r[pt_i] == early1_rise1[pt_i%4])
      early1_match_rise1_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_rise1_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall1_r[pt_i] == early1_fall1[pt_i%4])
      early1_match_fall1_r[pt_i] <= #TCQ 1'b1;
    else
      early1_match_fall1_r[pt_i] <= #TCQ 1'b0;
  end

  // early2 in this case does not mean 2 cycles early but
  // the second cycle of read data in 2:1 mode
  always @(posedge clk) begin
    if (sr_rise0_r[pt_i] == early2_rise0[pt_i%4])
      early2_match_rise0_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_rise0_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall0_r[pt_i] == early2_fall0[pt_i%4])
      early2_match_fall0_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_fall0_r[pt_i] <= #TCQ 1'b0;

    if (sr_rise1_r[pt_i] == early2_rise1[pt_i%4])
      early2_match_rise1_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_rise1_r[pt_i] <= #TCQ 1'b0;

    if (sr_fall1_r[pt_i] == early2_fall1[pt_i%4])
      early2_match_fall1_r[pt_i] <= #TCQ 1'b1;
    else
      early2_match_fall1_r[pt_i] <= #TCQ 1'b0;
  end
end
|
991 |
|
|
|
992 |
|
|
// 2:1 mode reduction stage. pat1 (first read cycle) is reduced
// ungated; pat2 (second read cycle) is additionally gated with
// rd_active_r3 so the second-cycle result only counts while read
// data is actually valid.
always @(posedge clk) begin
  pat1_match_rise0_and_r <= #TCQ &pat1_match_rise0_r;
  pat1_match_fall0_and_r <= #TCQ &pat1_match_fall0_r;
  pat1_match_rise1_and_r <= #TCQ &pat1_match_rise1_r;
  pat1_match_fall1_and_r <= #TCQ &pat1_match_fall1_r;
  pat1_data_match_r      <= #TCQ (pat1_match_rise0_and_r &&
                                  pat1_match_fall0_and_r &&
                                  pat1_match_rise1_and_r &&
                                  pat1_match_fall1_and_r);
  // Extra register so the first-cycle result lines up with the
  // second-cycle result one clock later.
  pat1_data_match_r1 <= #TCQ pat1_data_match_r;

  pat2_match_rise0_and_r <= #TCQ &pat2_match_rise0_r && rd_active_r3;
  pat2_match_fall0_and_r <= #TCQ &pat2_match_fall0_r && rd_active_r3;
  pat2_match_rise1_and_r <= #TCQ &pat2_match_rise1_r && rd_active_r3;
  pat2_match_fall1_and_r <= #TCQ &pat2_match_fall1_r && rd_active_r3;
  pat2_data_match_r      <= #TCQ (pat2_match_rise0_and_r &&
                                  pat2_match_fall0_and_r &&
                                  pat2_match_rise1_and_r &&
                                  pat2_match_fall1_and_r);

  // For 2:1 mode, read valid is asserted for 2 clock cycles -
  // here we generate a "match valid" pulse that is only 1 clock
  // cycle wide that is simultaneous with when the match calculation
  // is complete
  pat_data_match_valid_r <= #TCQ rd_active_r4 & ~rd_active_r5;
end
|
1018 |
|
|
|
1019 |
|
|
// Same 2:1-mode reduction for the early-write detection patterns:
// early1 ungated (plus one alignment register), early2 gated with
// rd_active_r3, mirroring the pat1/pat2 reduction above.
always @(posedge clk) begin
  early1_match_rise0_and_r <= #TCQ &early1_match_rise0_r;
  early1_match_fall0_and_r <= #TCQ &early1_match_fall0_r;
  early1_match_rise1_and_r <= #TCQ &early1_match_rise1_r;
  early1_match_fall1_and_r <= #TCQ &early1_match_fall1_r;
  early1_data_match_r      <= #TCQ (early1_match_rise0_and_r &&
                                    early1_match_fall0_and_r &&
                                    early1_match_rise1_and_r &&
                                    early1_match_fall1_and_r);
  early1_data_match_r1 <= #TCQ early1_data_match_r;

  early2_match_rise0_and_r <= #TCQ &early2_match_rise0_r && rd_active_r3;
  early2_match_fall0_and_r <= #TCQ &early2_match_fall0_r && rd_active_r3;
  early2_match_rise1_and_r <= #TCQ &early2_match_rise1_r && rd_active_r3;
  early2_match_fall1_and_r <= #TCQ &early2_match_fall1_r && rd_active_r3;
  early2_data_match_r      <= #TCQ (early2_match_rise0_and_r &&
                                    early2_match_fall0_and_r &&
                                    early2_match_rise1_and_r &&
                                    early2_match_fall1_and_r);
end
|
1039 |
|
|
|
1040 |
|
|
end
|
1041 |
|
|
endgenerate
|
1042 |
|
|
|
1043 |
|
|
// Need to delay it by 3 cycles in order to wait for Phaser_Out
// coarse delay to take effect before issuing a write command
|
1045 |
|
|
// Three-flop delay line on the resume request, written as a single
// shift-register assignment: r -> r1 -> r2 -> module output. Gives the
// Phaser_Out coarse delay time to settle before a write is issued.
always @(posedge clk)
  {wrcal_pat_resume, wrcal_pat_resume_r2, wrcal_pat_resume_r1} <= #TCQ
    {wrcal_pat_resume_r2, wrcal_pat_resume_r1, wrcal_pat_resume_r};
|
1050 |
|
|
|
1051 |
|
|
// Wait counter shared by the tap-adjust / FIFO-reset / sanity-wait
// states: counts up while the FSM sits in one of those states and
// clears to zero otherwise (and on reset).
always @(posedge clk) begin
  if (rst)
    tap_inc_wait_cnt <= #TCQ 'd0;
  else
    case (cal2_state_r)
      CAL2_DQ_IDEL_DEC,
      CAL2_IFIFO_RESET,
      CAL2_SANITY_WAIT: tap_inc_wait_cnt <= #TCQ tap_inc_wait_cnt + 1;
      default:          tap_inc_wait_cnt <= #TCQ 'd0;
    endcase
end
|
1061 |
|
|
|
1062 |
|
|
// Counts consecutive cycles spent in CAL2_READ_WAIT while still waiting
// for read data; when it reaches 'd31 the FSM takes the timeout path to
// CAL2_ERR (see CAL2_READ_WAIT below). Clears whenever not waiting.
always @(posedge clk) begin
  if (rst)
    not_empty_wait_cnt <= #TCQ 'd0;
  else if ((cal2_state_r == CAL2_READ_WAIT) && wrcal_rd_wait)
    not_empty_wait_cnt <= #TCQ not_empty_wait_cnt + 1;
  else
    not_empty_wait_cnt <= #TCQ 'd0;
end
|
1070 |
|
|
|
1071 |
|
|
// One-cycle delayed copy of the calibration FSM state
// (consumers of cal2_state_r1 are outside this chunk).
always @(posedge clk)
  cal2_state_r1 <= #TCQ cal2_state_r;
|
1073 |
|
|
|
1074 |
|
|
//*****************************************************************
// Write Calibration state machine
//*****************************************************************

// When calibrating, check to see if the expected pattern is received.
// Otherwise delay DQS to align to correct CK edge.
// NOTES:
// 1. An error condition can occur due to two reasons:
//    a. If the matching logic does not receive the expected data
//       pattern. However, the error may be "recoverable" because
//       the write calibration is still in progress. If an error is
//       found the write calibration logic delays DQS by an additional
//       clock cycle and restarts the pattern detection process.
//       By design, if the write path timing is incorrect, the correct
//       data pattern will never be detected.
//    b. Valid data not found even after incrementing Phaser_Out
//       coarse delay line.
|
|
// Write-calibration (stage 2) state machine. Walks through each DQS
// group: waits for the read-back training pattern, and on mismatch
// either redoes write leveling for the byte (writes one/two cycles
// early), clears the IDELAY (read arriving late), or flags an error.
always @(posedge clk) begin
  if (rst) begin
    // Synchronous reset of all FSM state and outputs.
    wrcal_dqs_cnt_r       <= #TCQ 'b0;
    cal2_done_r           <= #TCQ 1'b0;
    cal2_prech_req_r      <= #TCQ 1'b0;
    cal2_state_r          <= #TCQ CAL2_IDLE;
    wrcal_pat_err         <= #TCQ 1'b0;
    wrcal_pat_resume_r    <= #TCQ 1'b0;
    wrcal_act_req         <= #TCQ 1'b0;
    cal2_if_reset         <= #TCQ 1'b0;
    temp_wrcal_done       <= #TCQ 1'b0;
    wrlvl_byte_redo       <= #TCQ 1'b0;
    early1_data           <= #TCQ 1'b0;
    early2_data           <= #TCQ 1'b0;
    idelay_ld             <= #TCQ 1'b0;
    idelay_ld_done        <= #TCQ 1'b0;
    pat1_detect           <= #TCQ 1'b0;
    early1_detect         <= #TCQ 1'b0;
    wrcal_sanity_chk_done <= #TCQ 1'b0;
    wrcal_sanity_chk_err  <= #TCQ 1'b0;
  end else begin
    // Default: precharge request is a single-cycle pulse.
    cal2_prech_req_r <= #TCQ 1'b0;
    case (cal2_state_r)
      // Wait for the start trigger, then begin (or skip) calibration.
      CAL2_IDLE: begin
        wrcal_pat_err <= #TCQ 1'b0;
        if (wrcal_start) begin
          cal2_if_reset <= #TCQ 1'b0;
          if (SIM_CAL_OPTION == "SKIP_CAL")
            // If skip write calibration, then proceed to end.
            cal2_state_r <= #TCQ CAL2_DONE;
          else
            cal2_state_r <= #TCQ CAL2_READ_WAIT;
        end
      end

      // General wait state to wait for read data to be output by the
      // IN_FIFO. Wait until read data is received, and pattern matching
      // calculation is complete. NOTE: not_empty_wait_cnt == 'd31 acts
      // as a timeout in case data is never seen.
      CAL2_READ_WAIT: begin
        wrcal_pat_resume_r <= #TCQ 1'b0;
        cal2_if_reset <= #TCQ 1'b0;
        if (pat_data_match_valid_r && (nCK_PER_CLK == 4)) begin
          if (pat_data_match_r)
            // If found data match, then move on to next DQS group
            cal2_state_r <= #TCQ CAL2_NEXT_DQS;
          else begin
            if (wrcal_sanity_chk_r)
              cal2_state_r <= #TCQ CAL2_ERR;
            // If writes are one or two cycles early then redo
            // write leveling for the byte
            else if (early1_data_match_r) begin
              early1_data     <= #TCQ 1'b1;
              early2_data     <= #TCQ 1'b0;
              wrlvl_byte_redo <= #TCQ 1'b1;
              cal2_state_r    <= #TCQ CAL2_WRLVL_WAIT;
            end else if (early2_data_match_r) begin
              early1_data     <= #TCQ 1'b0;
              early2_data     <= #TCQ 1'b1;
              wrlvl_byte_redo <= #TCQ 1'b1;
              cal2_state_r    <= #TCQ CAL2_WRLVL_WAIT;
            // Read late due to incorrect MPR idelay value
            // Decrement Idelay to '0' for the current byte
            end else if (~idelay_ld_done) begin
              cal2_state_r <= #TCQ CAL2_DQ_IDEL_DEC;
              idelay_ld    <= #TCQ 1'b1;
            end else
              cal2_state_r <= #TCQ CAL2_ERR;
          end
        end else if (pat_data_match_valid_r && (nCK_PER_CLK == 2)) begin
          // 2:1 mode: the pattern spans two read cycles, so a full match
          // needs cycle-1 (registered, or previously latched via
          // pat1_detect) AND cycle-2 results.
          if ((pat1_data_match_r1 && pat2_data_match_r) ||
              (pat1_detect && pat2_data_match_r))
            // If found data match, then move on to next DQS group
            cal2_state_r <= #TCQ CAL2_NEXT_DQS;
          else if (pat1_data_match_r1 && ~pat2_data_match_r) begin
            // Remember the first-cycle match and keep waiting for the
            // second cycle.
            cal2_state_r <= #TCQ CAL2_READ_WAIT;
            pat1_detect  <= #TCQ 1'b1;
          end else begin
            // If writes are one or two cycles early then redo
            // write leveling for the byte
            if (wrcal_sanity_chk_r)
              cal2_state_r <= #TCQ CAL2_ERR;
            else if ((early1_data_match_r1 && early2_data_match_r) ||
                     (early1_detect && early2_data_match_r)) begin
              early1_data     <= #TCQ 1'b1;
              early2_data     <= #TCQ 1'b0;
              wrlvl_byte_redo <= #TCQ 1'b1;
              cal2_state_r    <= #TCQ CAL2_WRLVL_WAIT;
            end else if (early1_data_match_r1 && ~early2_data_match_r) begin
              early1_detect <= #TCQ 1'b1;
              cal2_state_r  <= #TCQ CAL2_READ_WAIT;
            // Read late due to incorrect MPR idelay value
            // Decrement Idelay to '0' for the current byte
            end else if (~idelay_ld_done) begin
              cal2_state_r <= #TCQ CAL2_DQ_IDEL_DEC;
              idelay_ld    <= #TCQ 1'b1;
            end else
              cal2_state_r <= #TCQ CAL2_ERR;
          end
        end else if (not_empty_wait_cnt == 'd31)
          // Timeout: no read data ever arrived.
          cal2_state_r <= #TCQ CAL2_ERR;
      end

      // Wait for the per-byte write-leveling redo to finish, then reset
      // the IN_FIFO and retry pattern detection.
      CAL2_WRLVL_WAIT: begin
        early1_detect <= #TCQ 1'b0;
        if (wrlvl_byte_done && ~wrlvl_byte_done_r)
          // Drop the redo request on the done rising edge.
          wrlvl_byte_redo <= #TCQ 1'b0;
        if (wrlvl_byte_done) begin
          // Wait for the falling edge of read activity before resetting.
          if (rd_active_r1 && ~rd_active_r) begin
            cal2_state_r  <= #TCQ CAL2_IFIFO_RESET;
            cal2_if_reset <= #TCQ 1'b1;
            early1_data   <= #TCQ 1'b0;
            early2_data   <= #TCQ 1'b0;
          end
        end
      end

      // Hold idelay_ld long enough for the IDELAY load to take effect,
      // then reset the IN_FIFO.
      CAL2_DQ_IDEL_DEC: begin
        if (tap_inc_wait_cnt == 'd4) begin
          idelay_ld      <= #TCQ 1'b0;
          cal2_state_r   <= #TCQ CAL2_IFIFO_RESET;
          cal2_if_reset  <= #TCQ 1'b1;
          idelay_ld_done <= #TCQ 1'b1;
        end
      end

      // Hold the IN_FIFO reset for 16 cycles, then resume reads (or
      // finish, during the sanity check).
      CAL2_IFIFO_RESET: begin
        if (tap_inc_wait_cnt == 'd15) begin
          cal2_if_reset <= #TCQ 1'b0;
          if (wrcal_sanity_chk_r)
            cal2_state_r <= #TCQ CAL2_DONE;
          else if (idelay_ld_done) begin
            wrcal_pat_resume_r <= #TCQ 1'b1;
            cal2_state_r       <= #TCQ CAL2_READ_WAIT;
          end else
            cal2_state_r <= #TCQ CAL2_IDLE;
        end
      end

      // Final processing for current DQS group. Move on to next group.
      // At this point, we've just found the correct pattern for the
      // current DQS group.
      // Request bank/row precharge, and wait for its completion. Always
      // precharge after each DQS group to avoid tRAS(max) violation.
      CAL2_NEXT_DQS: begin
        //verilint STARC-2.2.3.3 off
        if (wrcal_sanity_chk_r && (wrcal_dqs_cnt_r != DQS_WIDTH-1)) begin
          // Sanity-check mode: skip the precharge and step straight to
          // the next byte after a short wait.
          cal2_prech_req_r <= #TCQ 1'b0;
          wrcal_dqs_cnt_r  <= #TCQ wrcal_dqs_cnt_r + 1;
          cal2_state_r     <= #TCQ CAL2_SANITY_WAIT;
        end else
          cal2_prech_req_r <= #TCQ 1'b1;
        idelay_ld_done <= #TCQ 1'b0;
        pat1_detect    <= #TCQ 1'b0;
        if (prech_done)
          if (((DQS_WIDTH == 1) || (SIM_CAL_OPTION == "FAST_CAL")) ||
              (wrcal_dqs_cnt_r == DQS_WIDTH-1)) begin
            // If either FAST_CAL is enabled and first DQS group is
            // finished, or if the last DQS group was just finished,
            // then end of write calibration
            if (wrcal_sanity_chk_r) begin
              cal2_if_reset <= #TCQ 1'b1;
              cal2_state_r  <= #TCQ CAL2_IFIFO_RESET;
            end else
              cal2_state_r <= #TCQ CAL2_DONE;
          end else begin
            // Continue to next DQS group
            wrcal_dqs_cnt_r <= #TCQ wrcal_dqs_cnt_r + 1;
            cal2_state_r    <= #TCQ CAL2_READ_WAIT;
          end
      end
      //verilint STARC-2.2.3.3 on

      // Short settle wait used only by the sanity-check flow.
      CAL2_SANITY_WAIT: begin
        if (tap_inc_wait_cnt == 'd15) begin
          cal2_state_r       <= #TCQ CAL2_READ_WAIT;
          wrcal_pat_resume_r <= #TCQ 1'b1;
        end
      end

      // Finished with write calibration (restarts once if the sanity
      // check is requested after the normal pass).
      CAL2_DONE: begin
        if (wrcal_sanity_chk && ~wrcal_sanity_chk_r) begin
          cal2_done_r     <= #TCQ 1'b0;
          wrcal_dqs_cnt_r <= #TCQ 'd0;
          cal2_state_r    <= #TCQ CAL2_IDLE;
        end else
          cal2_done_r <= #TCQ 1'b1;
        cal2_prech_req_r <= #TCQ 1'b0;
        cal2_if_reset    <= #TCQ 1'b0;
        if (wrcal_sanity_chk_r)
          wrcal_sanity_chk_done <= #TCQ 1'b1;
      end

      // Assert error signal indicating that writes timing is incorrect.
      // Terminal state (no exit other than reset).
      CAL2_ERR: begin
        wrcal_pat_resume_r <= #TCQ 1'b0;
        if (wrcal_sanity_chk_r)
          wrcal_sanity_chk_err <= #TCQ 1'b1;
        else
          wrcal_pat_err <= #TCQ 1'b1;
        cal2_state_r <= #TCQ CAL2_ERR;
      end
    endcase
  end
end
|
1301 |
|
|
|
1302 |
|
|
// Delay assertion of wrcal_done for write calibration by a few cycles after
// we've reached CAL2_DONE
|
1304 |
|
|
// One-cycle delayed copy of the FSM done flag.
always @(posedge clk)
  if (rst)
    cal2_done_r1 <= #TCQ 1'b0;
  else
    cal2_done_r1 <= #TCQ cal2_done_r;
|
1309 |
|
|
|
1310 |
|
|
// Sticky module-level done output: set once the FSM reports done,
// cleared on reset or when the sanity-check pass is (re)started.
always @(posedge clk)
  if (rst || (wrcal_sanity_chk && ~wrcal_sanity_chk_r))
    wrcal_done <= #TCQ 1'b0;
  else if (cal2_done_r)
    wrcal_done <= #TCQ 1'b1;
|
1315 |
|
|
|
1316 |
|
|
endmodule
|