1 /************************************************************************
3 * Copyright (c) 2013-2015 Intel Corporation.
5 * This program and the accompanying materials
6 * are licensed and made available under the terms and conditions of the BSD License
7 * which accompanies this distribution. The full text of the license may be found at
8 * http://opensource.org/licenses/bsd-license.php
10 * THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
13 * This file contains all of the Cat Mountain Memory Reference Code (MRC).
15 * These functions are generic and should work for any Cat Mountain config.
17 * MRC requires two data structures to be passed in which are initialised by "PreMemInit()".
19 * The basic flow is as follows:
20 * 01) Check for supported DDR speed configuration
21 * 02) Set up MEMORY_MANAGER buffer as pass-through (POR)
22 * 03) Set Channel Interleaving Mode and Channel Stride to the most aggressive setting possible
23 * 04) Set up the MCU logic
24 * 05) Set up the DDR_PHY logic
25 * 06) Initialise the DRAMs (JEDEC)
26 * 07) Perform the Receive Enable Calibration algorithm
27 * 08) Perform the Write Leveling algorithm
28 * 09) Perform the Read Training algorithm (includes internal Vref)
29 * 10) Perform the Write Training algorithm
30 * 11) Set Channel Interleaving Mode and Channel Stride to the desired settings
32 * Dunit configuration based on Valleyview MRC.
34 ***************************************************************************/
37 #include "memory_options.h"
40 #include "meminit_utils.h"
44 // Override ODT to off state if requested
// NOTE(review): function-like use of a plain object macro — it reads a
// `mrc_params` pointer that must already be in scope at every expansion
// site; sets BIT12 in the DRMC value when read ODT is disabled.
45 #define DRMC_DEFAULT (mrc_params->rd_odt_value==0?BIT12:0)
48 // tRFC values (in picoseconds) per density
// Indexed by the DRAMParams_t DENSITY encoding (see prog_dra_drb below);
// consumed by prog_ddr_timing_control via MCEIL(tRFC[DENSITY], TCK).
// NOTE(review): the 5 initialiser lines are missing from this extraction —
// TODO restore from the original file before building.
49 const uint32_t tRFC
[5] =
58 // tCK clock period in picoseconds per speed index 800, 1066, 1333
// Indexed by mrc_params->ddr_speed (0..2); used as the divisor for all
// MCEIL() picosecond-to-clock conversions in prog_ddr_timing_control.
// NOTE(review): the 3 initialiser lines are missing from this extraction —
// TODO restore from the original file before building.
59 const uint32_t tCK
[3] =
67 // Select static timings specific to simulation environment
70 // Select static timings specific to ClantonPeek platform
// Static per-platform delay tables, indexed by PLATFORM_ID and consumed by
// the set_wclk/set_wctl/set_wcmd (etc.) calls in ddrphy_init.
// NOTE(review): the surrounding #if/#else guards and all initialiser lines
// are missing from this extraction — TODO restore from the original file.
76 const uint16_t ddr_wclk
[] =
79 const uint16_t ddr_wctl
[] =
82 const uint16_t ddr_wcmd
[] =
87 const uint16_t ddr_rcvn
[] =
92 const uint16_t ddr_wdqs
[] =
97 const uint8_t ddr_rdqs
[] =
102 const uint16_t ddr_wdq
[] =
108 // Select MEMORY_MANAGER as the source for PRI interface
// Read-modify-write of the MCU DCO register: clears PMICTL so the PRI
// (Private Request Interface) is owned by the MEMORY_MANAGER instead of HTE.
// Mirror function of select_hte() below.
// NOTE(review): the opening brace and the local `Dco` register-union
// declaration are missing from this extraction.
109 static void select_memory_manager(
110 MRCParams_t
*mrc_params
)
116 Dco
.raw
= isbR32m(MCU
, DCO
);
117 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
118 isbW32m(MCU
, DCO
, Dco
.raw
);
123 // Select HTE as the source for PRI interface
// Read-modify-write of the MCU DCO register: sets PMICTL so the PRI is
// owned by the HTE (Hardware Training Engine). Mirror of
// select_memory_manager() above.
// NOTE(review): the `static void select_hte(` signature line, opening brace
// and the local `Dco` declaration are missing from this extraction.
125 MRCParams_t
*mrc_params
)
131 Dco
.raw
= isbR32m(MCU
, DCO
);
132 Dco
.field
.PMICTL
= 1; //1 - PRI owned by HTE
133 isbW32m(MCU
, DCO
, Dco
.raw
);
138 // Send DRAM command, data should be formatted
139 // using DCMD_Xxxx macro or emrsXCommand structure.
// NOTE(review): only the signature opening is visible here — the parameter
// list and the entire body are missing from this extraction.
140 static void dram_init_command(
146 // Send DRAM wake command using special MCU side-band WAKE opcode
// Writes a side-band WAKE packet for the MCU to the SB_PACKET_REG of PCI
// device 0:0.0 via MMIO. Called after COLDWAKE is set (see
// perform_ddr_reset) and before JEDEC init.
// NOTE(review): parameter list, opening brace and any surrounding lines are
// missing from this extraction.
147 static void dram_wake_command(
152 Wr32(MMIO
, PCIADDR(0,0,0,SB_PACKET_REG
),
153 (uint32_t) SB_COMMAND(SB_WAKE_CMND_OPCODE
, MCU
, 0));
158 // Stop self refresh driven by MCU
// Clears the channel self-refresh status by writing BIT0 of the MCU PMSTS
// register (read-modify-write via isbM32m with BIT0 as the mask).
159 static void clear_self_refresh(
160 MRCParams_t
*mrc_params
)
164 // clear the PMSTS Channel Self Refresh bits
165 isbM32m(MCU
, PMSTS
, BIT0
, BIT0
);
170 // Configure MCU before jedec init sequence
// Puts the Dunit in a quiescent, training-friendly state: power management
// off (DPMC0), in-order scheduling only (DSCH), refresh disabled (DRFC),
// ZQ short calibration disabled (DCAL), and rank population (DRP) set from
// mrc_params->rank_enables.
// NOTE(review): opening brace and the local register-union declarations
// (Dpmc0/Dsch/Drfc/Dcal/Drp) are missing from this extraction.
171 static void prog_decode_before_jedec(
172 MRCParams_t
*mrc_params
)
182 // Disable power saving features
183 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
184 Dpmc0
.field
.CLKGTDIS
= 1;
185 Dpmc0
.field
.DISPWRDN
= 1;
186 Dpmc0
.field
.DYNSREN
= 0;
187 Dpmc0
.field
.PCLSTO
= 0;
188 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
190 // Disable out of order transactions
191 Dsch
.raw
= isbR32m(MCU
, DSCH
);
192 Dsch
.field
.OOODIS
= 1;
193 Dsch
.field
.NEWBYPDIS
= 1;
194 isbW32m(MCU
, DSCH
, Dsch
.raw
);
196 // Disable issuing the REF command
197 Drfc
.raw
= isbR32m(MCU
, DRFC
);
// tREFI = 0 means no periodic refresh is issued during training.
198 Drfc
.field
.tREFI
= 0;
199 isbW32m(MCU
, DRFC
, Drfc
.raw
);
201 // Disable ZQ calibration short
202 Dcal
.raw
= isbR32m(MCU
, DCAL
);
203 Dcal
.field
.ZQCINT
= 0;
204 Dcal
.field
.SRXZQCL
= 0;
205 isbW32m(MCU
, DCAL
, Dcal
.raw
);
207 // Training performed in address mode 0, rank population has limited impact, however
208 // simulator complains if enabled non-existing rank.
// rank_enables is a bitmask: bit0 = rank 0 present, bit1 = rank 1 present.
210 if (mrc_params
->rank_enables
& 1)
211 Drp
.field
.rank0Enabled
= 1;
212 if (mrc_params
->rank_enables
& 2)
213 Drp
.field
.rank1Enabled
= 1;
214 isbW32m(MCU
, DRP
, Drp
.raw
);
219 // After Cold Reset, BIOS should set COLDWAKE bit to 1 before
220 // sending the WAKE message to the Dunit.
221 // For Standby Exit, or any other mode in which the DRAM is in
222 // SR, this bit must be set to 0.
// Sets DRMC.COLDWAKE (BIT16), then (in lines not visible here) sends the
// wake command, and finally restores DRMC to DRMC_DEFAULT, which also
// applies the ODT override from mrc_params->rd_odt_value.
223 static void perform_ddr_reset(
224 MRCParams_t
*mrc_params
)
228 // Set COLDWAKE bit before sending the WAKE message
229 isbM32m(MCU
, DRMC
, BIT16
, BIT16
);
231 // Send wake command to DUNIT (MUST be done before JEDEC)
// NOTE(review): the call that actually sends the wake command (presumably
// dram_wake_command/perform_wake) is missing from this extraction.
235 isbW32m(MCU
, DRMC
, DRMC_DEFAULT
);
240 // Dunit Initialisation Complete.
241 // Indicates that initialisation of the Dunit has completed.
242 // Memory accesses are permitted and maintenance operation
243 // begins. Until this bit is set to a 1, the memory controller will
244 // not accept DRAM requests from the MEMORY_MANAGER or HTE.
// Final handover: returns PRI ownership to the MEMORY_MANAGER (PMICTL=0)
// and sets DCO.IC so the controller starts accepting DRAM requests.
245 static void set_ddr_init_complete(
246 MRCParams_t
*mrc_params
)
252 Dco
.raw
= isbR32m(MCU
, DCO
);
253 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
254 Dco
.field
.IC
= 1; //1 - initialisation complete
255 isbW32m(MCU
, DCO
, Dco
.raw
);
// Configure page-close policy: sets DPMC0.PCLSTO (page close timeout) to 4
// and enables pre-emptive auto-precharge power down (PREAPWDEN).
// NOTE(review): opening brace and the local `Dpmc0` declaration are missing
// from this extraction.
260 static void prog_page_ctrl(
261 MRCParams_t
*mrc_params
)
267 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
269 Dpmc0
.field
.PCLSTO
= 0x4;
270 Dpmc0
.field
.PREAPWDEN
= 1;
272 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
275 // Configure MCU Power Management Control Register
276 // and Scheduler Control Register.
// Post-training counterpart of prog_decode_before_jedec: re-enables clock
// gating and out-of-order scheduling, and honours the caller's
// power_down_disable preference. Also sets DPMC1.CMDTRIST.
277 static void prog_ddr_control(
278 MRCParams_t
*mrc_params
)
285 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
286 Dsch
.raw
= isbR32m(MCU
, DSCH
);
288 Dpmc0
.field
.DISPWRDN
= mrc_params
->power_down_disable
;
289 Dpmc0
.field
.CLKGTDIS
= 0;
290 Dpmc0
.field
.PCLSTO
= 4;
291 Dpmc0
.field
.PREAPWDEN
= 1;
293 Dsch
.field
.OOODIS
= 0;
294 Dsch
.field
.OOOST3DIS
= 0;
295 Dsch
.field
.NEWBYPDIS
= 0;
297 isbW32m(MCU
, DSCH
, Dsch
.raw
);
298 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
300 // CMDTRIST = 2h - CMD/ADDR are tristated when no valid command
301 isbM32m(MCU
, DPMC1
, 2 << 4, BIT5
|BIT4
);
306 // After training complete configure MCU Rank Population Register
307 // specifying: ranks enabled, device width, density, address mode.
// Also toggles DCO around the DRP write and re-asserts PMICTL=0 / IC=1 at
// the end (same handover as set_ddr_init_complete).
// NOTE(review): opening brace, the local Dco/Drp declarations, and the
// DCO.IC clear before the first DCO write are missing from this extraction.
308 static void prog_dra_drb(
309 MRCParams_t
*mrc_params
)
316 Dco
.raw
= isbR32m(MCU
, DCO
);
318 isbW32m(MCU
, DCO
, Dco
.raw
);
// rank_enables bitmask: bit0 = rank 0, bit1 = rank 1.
321 if (mrc_params
->rank_enables
& 1)
322 Drp
.field
.rank0Enabled
= 1;
323 if (mrc_params
->rank_enables
& 2)
324 Drp
.field
.rank1Enabled
= 1;
// dimmXDevWidth = 1 selects x16 devices (0 presumably x8 — TODO confirm
// against the DRP register definition).
325 if (mrc_params
->dram_width
== x16
)
327 Drp
.field
.dimm0DevWidth
= 1;
328 Drp
.field
.dimm1DevWidth
= 1;
330 // Density encoding in DRAMParams_t 0=512Mb, 1=Gb, 2=2Gb, 3=4Gb
331 // has to be mapped RANKDENSx encoding (0=1Gb)
332 Drp
.field
.dimm0DevDensity
= mrc_params
->params
.DENSITY
- 1;
333 Drp
.field
.dimm1DevDensity
= mrc_params
->params
.DENSITY
- 1;
335 // Address mode can be overwritten if ECC enabled
336 Drp
.field
.addressMap
= mrc_params
->address_mode
;
338 isbW32m(MCU
, DRP
, Drp
.raw
);
340 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
341 Dco
.field
.IC
= 1; //1 - initialisation complete
342 isbW32m(MCU
, DCO
, Dco
.raw
);
347 // Configure refresh rate and short ZQ calibration interval.
348 // Activate dynamic self refresh.
// Re-enables the maintenance traffic that prog_decode_before_jedec turned
// off: programs tREFI from mrc_params->refresh_rate (clearing the refresh
// debt counter), sets the ZQCS interval, and enables PHY clock gating plus
// dynamic self refresh in DPMC0.
349 static void change_refresh_period(
350 MRCParams_t
*mrc_params
)
358 Drfc
.raw
= isbR32m(MCU
, DRFC
);
359 Drfc
.field
.tREFI
= mrc_params
->refresh_rate
;
360 Drfc
.field
.REFDBTCLR
= 1;
361 isbW32m(MCU
, DRFC
, Drfc
.raw
);
363 Dcal
.raw
= isbR32m(MCU
, DCAL
);
364 Dcal
.field
.ZQCINT
= 3; // 63ms
365 isbW32m(MCU
, DCAL
, Dcal
.raw
);
367 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
368 Dpmc0
.field
.ENPHYCLKGATE
= 1;
369 Dpmc0
.field
.DYNSREN
= 1;
370 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
375 // Send DRAM wake command
// NOTE(review): only the signature is visible — the body (presumably a call
// to dram_wake_command) is missing from this extraction.
376 static void perform_wake(
377 MRCParams_t
*mrc_params
)
386 // prog_ddr_timing_control (aka mcu_init):
387 // POST_CODE[major] == 0x02
389 // It will initialise timing registers in the MCU (DTR0..DTR4).
// Converts JEDEC picosecond timings (from mrc_params->params) into DRAM
// clocks using MCEIL(ps, TCK), then into the VLV/Dunit bit-field encodings
// (each field stores value - bias, per the per-line comments below).
// NOTE(review): opening brace and the declarations of TCK/TCL/WL and the
// Dtr0..Dtr4 register unions are missing from this extraction.
390 static void prog_ddr_timing_control(
391 MRCParams_t
*mrc_params
)
394 uint8_t TRP
, TRCD
, TRAS
, TRFC
, TWR
, TWTR
, TRRD
, TRTP
, TFAW
;
406 post_code(0x02, 0x00);
// Read current DTR0..DTR4 so unset fields keep their reset values.
408 Dtr0
.raw
= isbR32m(MCU
, DTR0
);
409 Dtr1
.raw
= isbR32m(MCU
, DTR1
);
410 Dtr2
.raw
= isbR32m(MCU
, DTR2
);
411 Dtr3
.raw
= isbR32m(MCU
, DTR3
);
412 Dtr4
.raw
= isbR32m(MCU
, DTR4
);
414 TCK
= tCK
[mrc_params
->ddr_speed
]; // Clock in picoseconds
415 TCL
= mrc_params
->params
.tCL
; // CAS latency in clocks
416 TRP
= TCL
; // Per CAT MRC
417 TRCD
= TCL
; // Per CAT MRC
418 TRAS
= MCEIL(mrc_params
->params
.tRAS
, TCK
);
419 TRFC
= MCEIL(tRFC
[mrc_params
->params
.DENSITY
], TCK
);
420 TWR
= MCEIL(15000, TCK
); // Per JEDEC: tWR=15000ps DDR2/3 from 800-1600
422 TWTR
= MCEIL(mrc_params
->params
.tWTR
, TCK
);
423 TRRD
= MCEIL(mrc_params
->params
.tRRD
, TCK
);
424 TRTP
= 4; // Valid for 800 and 1066, use 5 for 1333
425 TFAW
= MCEIL(mrc_params
->params
.tFAW
, TCK
);
// Write latency: WL = 5/6/7 clocks for speed index 0/1/2.
427 WL
= 5 + mrc_params
->ddr_speed
;
429 Dtr0
.field
.dramFrequency
= mrc_params
->ddr_speed
;
431 Dtr0
.field
.tCL
= TCL
- 5; //Convert from TCL (DRAM clocks) to VLV indx
432 Dtr0
.field
.tRP
= TRP
- 5; //5 bit DRAM Clock
433 Dtr0
.field
.tRCD
= TRCD
- 5; //5 bit DRAM Clock
435 Dtr1
.field
.tWCL
= WL
- 3; //Convert from WL (DRAM clocks) to VLV indx
436 Dtr1
.field
.tWTP
= WL
+ 4 + TWR
- 14; //Change to tWTP
437 Dtr1
.field
.tRTP
= MMAX(TRTP
, 4) - 3; //4 bit DRAM Clock
438 Dtr1
.field
.tRRD
= TRRD
- 4; //4 bit DRAM Clock
439 Dtr1
.field
.tCMD
= 1; //2N
440 Dtr1
.field
.tRAS
= TRAS
- 14; //6 bit DRAM Clock
442 Dtr1
.field
.tFAW
= ((TFAW
+ 1) >> 1) - 5; //4 bit DRAM Clock
443 Dtr1
.field
.tCCD
= 0; //Set 4 Clock CAS to CAS delay (multi-burst)
444 Dtr2
.field
.tRRDR
= 1;
445 Dtr2
.field
.tWWDR
= 2;
446 Dtr2
.field
.tRWDR
= 2;
447 Dtr3
.field
.tWRDR
= 2;
448 Dtr3
.field
.tWRDD
= 2;
// NOTE(review): both speed branches below program the same tRWSR value;
// a distinct non-extended path (and any final else) is not visible in this
// extraction — TODO confirm against the original file.
450 if (mrc_params
->ddr_speed
== DDRFREQ_800
)
452 // Extended RW delay (+1)
453 Dtr3
.field
.tRWSR
= TCL
- 5 + 1;
455 else if(mrc_params
->ddr_speed
== DDRFREQ_1066
)
457 // Extended RW delay (+1)
458 Dtr3
.field
.tRWSR
= TCL
- 5 + 1;
461 Dtr3
.field
.tWRSR
= 4 + WL
+ TWTR
- 11;
463 if (mrc_params
->ddr_speed
== DDRFREQ_800
)
465 Dtr3
.field
.tXP
= MMAX(0, 1 - Dtr1
.field
.tCMD
);
469 Dtr3
.field
.tXP
= MMAX(0, 2 - Dtr1
.field
.tCMD
);
// ODT windows derived from command/CAS/write-CAS fields programmed above.
472 Dtr4
.field
.WRODTSTRT
= Dtr1
.field
.tCMD
;
473 Dtr4
.field
.WRODTSTOP
= Dtr1
.field
.tCMD
;
474 Dtr4
.field
.RDODTSTRT
= Dtr1
.field
.tCMD
+ Dtr0
.field
.tCL
- Dtr1
.field
.tWCL
+ 2; //Convert from WL (DRAM clocks) to VLV indx
475 Dtr4
.field
.RDODTSTOP
= Dtr1
.field
.tCMD
+ Dtr0
.field
.tCL
- Dtr1
.field
.tWCL
+ 2;
476 Dtr4
.field
.TRGSTRDIS
= 0;
477 Dtr4
.field
.ODTDIS
= 0;
// Commit all five timing registers.
479 isbW32m(MCU
, DTR0
, Dtr0
.raw
);
480 isbW32m(MCU
, DTR1
, Dtr1
.raw
);
481 isbW32m(MCU
, DTR2
, Dtr2
.raw
);
482 isbW32m(MCU
, DTR3
, Dtr3
.raw
);
483 isbW32m(MCU
, DTR4
, Dtr4
.raw
);
489 // POST_CODE[major] == 0x03
491 // This function performs some initialisation on the DDRIO unit.
492 // This function is dependent on BOARD_ID, DDR_SPEED, and CHANNEL_ENABLES.
493 static void ddrphy_init(MRCParams_t
*mrc_params
)
495 uint32_t tempD
; // temporary DWORD
496 uint8_t channel_i
; // channel counter
497 uint8_t rank_i
; // rank counter
498 uint8_t bl_grp_i
; // byte lane group counter (2 BLs per module)
500 uint8_t bl_divisor
= /*(mrc_params->channel_width==x16)?2:*/1; // byte lane divisor
501 uint8_t speed
= mrc_params
->ddr_speed
& (BIT1
|BIT0
); // For DDR3 --> 0 == 800, 1 == 1066, 2 == 1333
507 tCAS
= mrc_params
->params
.tCL
;
508 tCWL
= 5 + mrc_params
->ddr_speed
;
510 // ddrphy_init starts
511 post_code(0x03, 0x00);
514 // Make sure IOBUFACT is deasserted before initialising the DDR PHY.
516 // Make sure WRPTRENABLE is deasserted before initialising the DDR PHY.
517 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
518 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
519 // Deassert DDRPHY Initialisation Complete
520 isbM32m(DDRPHY
, (CMDPMCONFIG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ~BIT20
, BIT20
); // SPID_INIT_COMPLETE=0
522 isbM32m(DDRPHY
, (CMDCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ~BIT2
, BIT2
); // IOBUFACTRST_N=0
524 isbM32m(DDRPHY
, (CMDPTRREG
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ~BIT0
, BIT0
); // WRPTRENABLE=0
525 } // if channel enabled
529 isbM32m(DDRPHY
, MASTERRSTN
, 0, BIT0
); // PHYRSTN=0
531 // Initialise DQ01,DQ23,CMD,CLK-CTL,COMP modules
533 post_code(0x03, 0x10);
534 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
535 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
538 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
539 isbM32m(DDRPHY
, (DQOBSCKEBBCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((bl_grp_i
) ? (0x00) : (BIT22
)), (BIT22
)); // Analog MUX select - IO2xCLKSEL
542 switch (mrc_params
->rd_odt_value
) {
543 case 1: tempD
= 0x3; break; // 60 ohm
544 case 2: tempD
= 0x3; break; // 120 ohm
545 case 3: tempD
= 0x3; break; // 180 ohm
546 default: tempD
= 0x3; break; // 120 ohm
548 isbM32m(DDRPHY
, (B0RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (tempD
<<5), (BIT6
|BIT5
)); // ODT strength
549 isbM32m(DDRPHY
, (B1RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (tempD
<<5), (BIT6
|BIT5
)); // ODT strength
550 // Dynamic ODT/DIFFAMP
551 tempD
= (((tCAS
)<<24)|((tCAS
)<<16)|((tCAS
)<<8)|((tCAS
)<<0));
553 case 0: tempD
-= 0x01010101; break; // 800
554 case 1: tempD
-= 0x02020202; break; // 1066
555 case 2: tempD
-= 0x03030303; break; // 1333
556 case 3: tempD
-= 0x04040404; break; // 1600
558 isbM32m(DDRPHY
, (B01LATCTL1
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Launch Time: ODT, DIFFAMP, ODT, DIFFAMP
561 case 0: tempD
= ((0x06<<16)|(0x07<<8)); break; // 800
562 case 1: tempD
= ((0x07<<16)|(0x08<<8)); break; // 1066
563 case 2: tempD
= ((0x09<<16)|(0x0A<<8)); break; // 1333
564 case 3: tempD
= ((0x0A<<16)|(0x0B<<8)); break; // 1600
566 isbM32m(DDRPHY
, (B0ONDURCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
))); // On Duration: ODT, DIFFAMP
567 isbM32m(DDRPHY
, (B1ONDURCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
))); // On Duration: ODT, DIFFAMP
569 switch (mrc_params
->rd_odt_value
) {
570 case 0: tempD
= ((0x3F<<16)|(0x3f<<10)); break; // override DIFFAMP=on, ODT=off
571 default: tempD
= ((0x3F<<16)|(0x2A<<10)); break; // override DIFFAMP=on, ODT=on
573 isbM32m(DDRPHY
, (B0OVRCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
))); // Override: DIFFAMP, ODT
574 isbM32m(DDRPHY
, (B1OVRCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
))); // Override: DIFFAMP, ODT
577 // 1xCLK Domain Timings: tEDP,RCVEN,WDQS (PO)
578 isbM32m(DDRPHY
, (B0LATCTL0
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (((tCAS
+7)<<16)|((tCAS
-4)<<8)|((tCWL
-2)<<0)), ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // 1xCLK: tEDP, RCVEN, WDQS
579 isbM32m(DDRPHY
, (B1LATCTL0
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (((tCAS
+7)<<16)|((tCAS
-4)<<8)|((tCWL
-2)<<0)), ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // 1xCLK: tEDP, RCVEN, WDQS
582 isbM32m(DDRPHY
, (B0RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x0<<7)|(0x0<<0)), (BIT7
|BIT0
)); // AFE Bypass, RCVEN DIFFAMP
583 isbM32m(DDRPHY
, (B1RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x0<<7)|(0x0<<0)), (BIT7
|BIT0
)); // AFE Bypass, RCVEN DIFFAMP
585 isbM32m(DDRPHY
, (DQCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT16
), (BIT16
)); // 0 means driving DQ during DQS-preamble
586 isbM32m(DDRPHY
, (B01PTRCTL1
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT8
), (BIT8
)); // WR_LVL mode disable
588 isbM32m(DDRPHY
, (B0VREFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x03<<2)|(0x0<<1)|(0x0<<0)), ((BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
)|BIT1
|BIT0
)); // Internal Vref Code, Enable#, Ext_or_Int (1=Ext)
589 isbM32m(DDRPHY
, (B1VREFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x03<<2)|(0x0<<1)|(0x0<<0)), ((BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
)|BIT1
|BIT0
)); // Internal Vref Code, Enable#, Ext_or_Int (1=Ext)
590 isbM32m(DDRPHY
, (B0RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (0), (BIT4
)); // Per-Bit De-Skew Enable
591 isbM32m(DDRPHY
, (B1RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (0), (BIT4
)); // Per-Bit De-Skew Enable
594 isbM32m(DDRPHY
, (CMDOBSCKEBBCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), 0, (BIT23
));
596 // Enable tristate control of cmd/address bus
597 isbM32m(DDRPHY
, (CMDCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), 0, (BIT1
|BIT0
));
600 isbM32m(DDRPHY
, (CMDRCOMPODT
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x03<<5)|(0x03<<0)), ((BIT9
|BIT8
|BIT7
|BIT6
|BIT5
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
)));
602 // CMDPM* registers must be programmed in this order...
603 isbM32m(DDRPHY
, (CMDPMDLYREG4
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFFFU
<<16)|(0xFFFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
|BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
|BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Turn On Delays: SFR (regulator), MPLL
604 isbM32m(DDRPHY
, (CMDPMDLYREG3
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFU
<<28)|(0xFFF<<16)|(0xF<<12)|(0x616<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
)|(BIT27
|BIT26
|BIT25
|BIT24
|BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
|BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Delays: ASSERT_IOBUFACT_to_ALLON0_for_PM_MSG_3, VREG (MDLL) Turn On, ALLON0_to_DEASSERT_IOBUFACT_for_PM_MSG_gt0, MDLL Turn On
605 isbM32m(DDRPHY
, (CMDPMDLYREG2
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFU
<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // MPLL Divider Reset Delays
606 isbM32m(DDRPHY
, (CMDPMDLYREG1
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFU
<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Turn Off Delays: VREG, Staggered MDLL, MDLL, PI
607 isbM32m(DDRPHY
, (CMDPMDLYREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFU
<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Turn On Delays: MPLL, Staggered MDLL, PI, IOBUFACT
608 isbM32m(DDRPHY
, (CMDPMCONFIG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x6<<8)|BIT6
|(0x4<<0)), (BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
|BIT23
|BIT22
|BIT21
|(BIT11
|BIT10
|BIT9
|BIT8
)|BIT6
|(BIT3
|BIT2
|BIT1
|BIT0
))); // Allow PUnit signals
609 isbM32m(DDRPHY
, (CMDMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x3<<4)|(0x7<<0)), ((BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // DLL_VREG Bias Trim, VREF Tuning for DLL_VREG
611 isbM32m(DDRPHY
, (CCOBSCKEBBCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), 0, (BIT24
)); // CLKEBB
612 isbM32m(DDRPHY
, (CCCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x0<<16)|(0x0<<12)|(0x0<<8)|(0xF<<4)|BIT0
), ((BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|BIT0
)); // Buffer Enable: CS,CKE,ODT,CLK
613 isbM32m(DDRPHY
, (CCRCOMPODT
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x03<<8)|(0x03<<0)), ((BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT RCOMP
614 isbM32m(DDRPHY
, (CCMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x3<<4)|(0x7<<0)), ((BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // DLL_VREG Bias Trim, VREF Tuning for DLL_VREG
616 // COMP (RON channel specific)
617 // - DQ/DQS/DM RON: 32 Ohm
618 // - CTRL/CMD RON: 27 Ohm
620 isbM32m(DDRPHY
, (DQVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x08<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
621 isbM32m(DDRPHY
, (CMDVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0C<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
622 isbM32m(DDRPHY
, (CLKVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0F<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
623 isbM32m(DDRPHY
, (DQSVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x08<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
624 isbM32m(DDRPHY
, (CTLVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0C<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
626 // DQS Swapped Input Enable
627 isbM32m(DDRPHY
, (COMPEN1CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT19
|BIT17
), ((BIT31
|BIT30
)|BIT19
|BIT17
|(BIT15
|BIT14
)));
629 // ODT VREF = 1.5 x 274/360+274 = 0.65V (code of ~50)
630 isbM32m(DDRPHY
, (DQVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x32<<8)|(0x03<<0)), ((BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT Vref PU/PD
631 isbM32m(DDRPHY
, (DQSVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x32<<8)|(0x03<<0)), ((BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT Vref PU/PD
632 isbM32m(DDRPHY
, (CLKVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0E<<8)|(0x05<<0)), ((BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT Vref PU/PD
634 // Slew rate settings are frequency specific, numbers below are for 800Mhz (speed == 0)
635 // - DQ/DQS/DM/CLK SR: 4V/ns,
636 // - CTRL/CMD SR: 1.5V/ns
637 tempD
= (0x0E<<16)|(0x0E<<12)|(0x08<<8)|(0x0B<<4)|(0x0B<<0);
638 isbM32m(DDRPHY
, (DLYSELCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (tempD
), ((BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // DCOMP Delay Select: CTL,CMD,CLK,DQS,DQ
639 isbM32m(DDRPHY
, (TCOVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x05<<16)|(0x05<<8)|(0x05<<0)), ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // TCO Vref CLK,DQS,DQ
640 isbM32m(DDRPHY
, (CCBUFODTCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x03<<8)|(0x03<<0)), ((BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODTCOMP CMD/CTL PU/PD
641 isbM32m(DDRPHY
, (COMPEN0CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (0), ((BIT31
|BIT30
)|BIT8
)); // COMP
645 isbM32m(DDRPHY
, (DQDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
646 isbM32m(DDRPHY
, (DQDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
647 isbM32m(DDRPHY
, (DQDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
648 isbM32m(DDRPHY
, (DQDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
649 isbM32m(DDRPHY
, (DQODTPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PU
650 isbM32m(DDRPHY
, (DQODTPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PD
651 isbM32m(DDRPHY
, (DQTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PU
652 isbM32m(DDRPHY
, (DQTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PD
653 // DQS COMP Overrides
654 isbM32m(DDRPHY
, (DQSDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
655 isbM32m(DDRPHY
, (DQSDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
656 isbM32m(DDRPHY
, (DQSDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
657 isbM32m(DDRPHY
, (DQSDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
658 isbM32m(DDRPHY
, (DQSODTPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PU
659 isbM32m(DDRPHY
, (DQSODTPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PD
660 isbM32m(DDRPHY
, (DQSTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PU
661 isbM32m(DDRPHY
, (DQSTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PD
662 // CLK COMP Overrides
663 isbM32m(DDRPHY
, (CLKDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0C<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
664 isbM32m(DDRPHY
, (CLKDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0C<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
665 isbM32m(DDRPHY
, (CLKDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x07<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
666 isbM32m(DDRPHY
, (CLKDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x07<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
667 isbM32m(DDRPHY
, (CLKODTPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PU
668 isbM32m(DDRPHY
, (CLKODTPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PD
669 isbM32m(DDRPHY
, (CLKTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PU
670 isbM32m(DDRPHY
, (CLKTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PD
671 // CMD COMP Overrides
672 isbM32m(DDRPHY
, (CMDDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
673 isbM32m(DDRPHY
, (CMDDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
674 isbM32m(DDRPHY
, (CMDDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
675 isbM32m(DDRPHY
, (CMDDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
676 // CTL COMP Overrides
677 isbM32m(DDRPHY
, (CTLDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
678 isbM32m(DDRPHY
, (CTLDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
679 isbM32m(DDRPHY
, (CTLDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
680 isbM32m(DDRPHY
, (CTLDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
682 // DQ TCOCOMP Overrides
683 isbM32m(DDRPHY
, (DQTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PU
684 isbM32m(DDRPHY
, (DQTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PD
685 // DQS TCOCOMP Overrides
686 isbM32m(DDRPHY
, (DQSTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PU
687 isbM32m(DDRPHY
, (DQSTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PD
688 // CLK TCOCOMP Overrides
689 isbM32m(DDRPHY
, (CLKTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PU
690 isbM32m(DDRPHY
, (CLKTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PD
691 #endif // BACKUP_COMPS
692 // program STATIC delays
694 set_wcmd(channel_i
, ddr_wcmd
[PLATFORM_ID
]);
696 set_wcmd(channel_i
, ddr_wclk
[PLATFORM_ID
] + HALF_CLK
);
697 #endif // BACKUP_WCMD
698 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++) {
699 if (mrc_params
->rank_enables
& (1<<rank_i
)) {
700 set_wclk(channel_i
, rank_i
, ddr_wclk
[PLATFORM_ID
]);
702 set_wctl(channel_i
, rank_i
, ddr_wctl
[PLATFORM_ID
]);
704 set_wctl(channel_i
, rank_i
, ddr_wclk
[PLATFORM_ID
] + HALF_CLK
);
705 #endif // BACKUP_WCTL
710 // COMP (non channel specific)
711 //isbM32m(DDRPHY, (), (), ());
712 isbM32m(DDRPHY
, (DQANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
713 isbM32m(DDRPHY
, (DQANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
714 isbM32m(DDRPHY
, (CMDANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
715 isbM32m(DDRPHY
, (CMDANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
716 isbM32m(DDRPHY
, (CLKANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
717 isbM32m(DDRPHY
, (CLKANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
718 isbM32m(DDRPHY
, (DQSANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
719 isbM32m(DDRPHY
, (DQSANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
720 isbM32m(DDRPHY
, (CTLANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
721 isbM32m(DDRPHY
, (CTLANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
722 isbM32m(DDRPHY
, (DQANAODTPUCTL
), (BIT30
), (BIT30
)); // ODT: Dither PU Enable
723 isbM32m(DDRPHY
, (DQANAODTPDCTL
), (BIT30
), (BIT30
)); // ODT: Dither PD Enable
724 isbM32m(DDRPHY
, (CLKANAODTPUCTL
), (BIT30
), (BIT30
)); // ODT: Dither PU Enable
725 isbM32m(DDRPHY
, (CLKANAODTPDCTL
), (BIT30
), (BIT30
)); // ODT: Dither PD Enable
726 isbM32m(DDRPHY
, (DQSANAODTPUCTL
), (BIT30
), (BIT30
)); // ODT: Dither PU Enable
727 isbM32m(DDRPHY
, (DQSANAODTPDCTL
), (BIT30
), (BIT30
)); // ODT: Dither PD Enable
728 isbM32m(DDRPHY
, (DQANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
729 isbM32m(DDRPHY
, (DQANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
730 isbM32m(DDRPHY
, (CMDANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
731 isbM32m(DDRPHY
, (CMDANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
732 isbM32m(DDRPHY
, (CLKANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
733 isbM32m(DDRPHY
, (CLKANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
734 isbM32m(DDRPHY
, (DQSANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
735 isbM32m(DDRPHY
, (DQSANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
736 isbM32m(DDRPHY
, (CTLANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
737 isbM32m(DDRPHY
, (CTLANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
738 isbM32m(DDRPHY
, (DQANATCOPUCTL
), (BIT30
), (BIT30
)); // TCO: Dither PU Enable
739 isbM32m(DDRPHY
, (DQANATCOPDCTL
), (BIT30
), (BIT30
)); // TCO: Dither PD Enable
740 isbM32m(DDRPHY
, (CLKANATCOPUCTL
), (BIT30
), (BIT30
)); // TCO: Dither PU Enable
741 isbM32m(DDRPHY
, (CLKANATCOPDCTL
), (BIT30
), (BIT30
)); // TCO: Dither PD Enable
742 isbM32m(DDRPHY
, (DQSANATCOPUCTL
), (BIT30
), (BIT30
)); // TCO: Dither PU Enable
743 isbM32m(DDRPHY
, (DQSANATCOPDCTL
), (BIT30
), (BIT30
)); // TCO: Dither PD Enable
744 isbM32m(DDRPHY
, (TCOCNTCTRL
), (0x1<<0), (BIT1
|BIT0
)); // TCOCOMP: Pulse Count
745 isbM32m(DDRPHY
, (CHNLBUFSTATIC
), ((0x03<<24)|(0x03<<16)), ((BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODT: CMD/CTL PD/PU
746 isbM32m(DDRPHY
, (MSCNTR
), (0x64<<0), (BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
)); // Set 1us counter
747 isbM32m(DDRPHY
, (LATCH1CTL
), (0x1<<28), (BIT30
|BIT29
|BIT28
)); // ???
749 // Release PHY from reset
750 isbM32m(DDRPHY
, MASTERRSTN
, BIT0
, BIT0
); // PHYRSTN=1
753 post_code(0x03, 0x11);
754 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
755 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
757 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
758 isbM32m(DDRPHY
, (DQMDLLCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT13
), (BIT13
)); // Enable VREG
762 isbM32m(DDRPHY
, (ECCMDLLCTL
), (BIT13
), (BIT13
)); // Enable VREG
765 isbM32m(DDRPHY
, (CMDMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT13
), (BIT13
)); // Enable VREG
768 isbM32m(DDRPHY
, (CCMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT13
), (BIT13
)); // Enable VREG
774 post_code(0x03, 0x12);
776 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
777 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
779 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
780 isbM32m(DDRPHY
, (DQMDLLCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT17
), (BIT17
)); // Enable MCDLL
784 isbM32m(DDRPHY
, (ECCMDLLCTL
), (BIT17
), (BIT17
)); // Enable MCDLL
787 isbM32m(DDRPHY
, (CMDMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT18
), (BIT18
)); // Enable MCDLL
790 isbM32m(DDRPHY
, (CCMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT18
), (BIT18
)); // Enable MCDLL
796 post_code(0x03, 0x13);
798 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
799 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
801 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
802 #ifdef FORCE_16BIT_DDRIO
803 tempD
= ((bl_grp_i
) && (mrc_params
->channel_width
== x16
)) ? ((0x1<<12)|(0x1<<8)|(0xF<<4)|(0xF<<0)) : ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
805 tempD
= ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
807 isbM32m(DDRPHY
, (DQDLLTXCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (tempD
), ((BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // Enable TXDLL
809 isbM32m(DDRPHY
, (DQDLLRXCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT3
|BIT2
|BIT1
|BIT0
), (BIT3
|BIT2
|BIT1
|BIT0
)); // Enable RXDLL
811 isbM32m(DDRPHY
, (B0OVRCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT3
|BIT2
|BIT1
|BIT0
), (BIT3
|BIT2
|BIT1
|BIT0
)); // Enable RXDLL Overrides BL0
815 tempD
= ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
816 isbM32m(DDRPHY
, (ECCDLLTXCTL
), (tempD
), ((BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // Enable TXDLL
820 isbM32m(DDRPHY
, (CMDDLLTXCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0)), ((BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // Enable TXDLL
827 post_code(0x03, 0x14);
828 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
829 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
830 // Host To Memory Clock Alignment (HMC) for 800/1066
831 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
832 isbM32m(DDRPHY
, (DQCLKALIGNREG2
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((bl_grp_i
)?(0x3):(0x1)), (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
834 isbM32m(DDRPHY
, (ECCCLKALIGNREG2
+ (channel_i
* DDRIODQ_CH_OFFSET
)), 0x2, (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
835 isbM32m(DDRPHY
, (CMDCLKALIGNREG2
+ (channel_i
* DDRIODQ_CH_OFFSET
)), 0x0, (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
836 isbM32m(DDRPHY
, (CCCLKALIGNREG2
+ (channel_i
* DDRIODQ_CH_OFFSET
)), 0x2, (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
837 isbM32m(DDRPHY
, (CMDCLKALIGNREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (0x2<<4), (BIT5
|BIT4
)); // CLK_ALIGN_MODE
838 isbM32m(DDRPHY
, (CMDCLKALIGNREG1
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x18<<16)|(0x10<<8)|(0x8<<2)|(0x1<<0)), ((BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
)|(BIT1
|BIT0
))); // NUM_SAMPLES, MAX_SAMPLES, MACRO_PI_STEP, MICRO_PI_STEP
839 isbM32m(DDRPHY
, (CMDCLKALIGNREG2
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x10<<16)|(0x4<<8)|(0x2<<4)), ((BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
))); // ???, TOTAL_NUM_MODULES, FIRST_U_PARTITION
841 isbM32m(DDRPHY
, (CMDCLKALIGNREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT24
, BIT24
); // START_CLK_ALIGN=1
842 while (isbR32m(DDRPHY
, (CMDCLKALIGNREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
))) & BIT24
); // wait for START_CLK_ALIGN=0
845 // Set RD/WR Pointer Seperation & COUNTEN & FIFOPTREN
846 isbM32m(DDRPHY
, (CMDPTRREG
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT0
, BIT0
); // WRPTRENABLE=1
850 // comp is not working on simulator
853 isbM32m(DDRPHY
, (COMPEN0CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), BIT5
, BIT5
); // enable bypass for CLK buffer (PO)
854 isbM32m(DDRPHY
, (CMPCTRL
), (BIT0
), (BIT0
)); // Initial COMP Enable
855 while (isbR32m(DDRPHY
, (CMPCTRL
)) & BIT0
); // wait for Initial COMP Enable = 0
856 isbM32m(DDRPHY
, (COMPEN0CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ~BIT5
, BIT5
); // disable bypass for CLK buffer (PO)
861 isbM32m(DDRPHY
, (CMDCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT2
, BIT2
); // IOBUFACTRST_N=1
863 // DDRPHY initialisation complete
864 isbM32m(DDRPHY
, (CMDPMCONFIG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT20
, BIT20
); // SPID_INIT_COMPLETE=1
872 // jedec_init (aka PerformJedecInit):
873 // This function performs JEDEC initialisation on all enabled channels.
//
// NOTE(review): the original source numbering embedded in this extract has
// gaps (e.g. 875 -> 878, 886 -> 893), so braces and lines were lost during
// extraction; the code text below is preserved byte-for-byte, not reflowed.
//
// Sequence (all visible below):
//   1) toggle DDR3 RESET# via CCDDR3RESETCTL (assert, then de-assert)
//   2) raise CKE via DRMC (BIT4) and issue a NOP to each populated rank,
//      then restore DRMC to its default
//   3) build MR2 (CWL, SRT), MR3, MR1 (DIC/Ron, Rtt_nom, DLL enable) and
//      MR0 (CL, write recovery, DLL reset, PPD)
//   4) per populated rank: issue MR2, MR3, MR1, MR0, then ZQCL
//
// Parameters:
//   mrc_params - in/out MRC context; reads rank_enables, ddr_speed,
//                ron_value, rtt_nom_value, sr_temp_range; writes mrs1
//                (MR1 value with control fields stripped, reused later by
//                wr_level() to exit write-levelling mode).
874 static void jedec_init(
875 MRCParams_t
*mrc_params
,
878 uint8_t TWR
, WL
, Rank
;
883 DramInitDDR3MRS0 mrs0Command
;
884 DramInitDDR3EMR1 emrs1Command
;
885 DramInitDDR3EMR2 emrs2Command
;
886 DramInitDDR3EMR3 emrs3Command
;
893 post_code(0x04, 0x00);
896 // Assert RESET# for 200us
897 isbM32m(DDRPHY
, CCDDR3RESETCTL
, BIT1
, (BIT8
|BIT1
)); // DDR3_RESET_SET=0, DDR3_RESET_RESET=1
899 // Don't waste time during simulation
904 isbM32m(DDRPHY
, CCDDR3RESETCTL
, BIT8
, (BIT8
|BIT1
)); // DDR3_RESET_SET=1, DDR3_RESET_RESET=0
906 DTR0reg
.raw
= isbR32m(MCU
, DTR0
);
908 // Set CKEVAL for populated ranks
909 // then send NOP to each rank (#4550197)
914 DRPbuffer
= isbR32m(MCU
, DRP
);
916 DRMCbuffer
= isbR32m(MCU
, DRMC
);
917 DRMCbuffer
&= 0xFFFFFFFC;
918 DRMCbuffer
|= (BIT4
| DRPbuffer
);
920 isbW32m(MCU
, DRMC
, DRMCbuffer
);
// NOP each populated rank so CKE is exercised before mode-register writes.
922 for (Rank
= 0; Rank
< NUM_RANKS
; Rank
++)
924 // Skip to next populated rank
925 if ((mrc_params
->rank_enables
& (1 << Rank
)) == 0)
930 dram_init_command(DCMD_NOP(Rank
));
// Restore DRMC (DRMC_DEFAULT also folds in the rd_odt_value override macro).
933 isbW32m(MCU
, DRMC
, DRMC_DEFAULT
);
937 // BIT[15:11] --> Always "0"
938 // BIT[10:09] --> Rtt_WR: want "Dynamic ODT Off" (0)
939 // BIT[08] --> Always "0"
940 // BIT[07] --> SRT: use sr_temp_range
941 // BIT[06] --> ASR: want "Manual SR Reference" (0)
942 // BIT[05:03] --> CWL: use oem_tCWL
943 // BIT[02:00] --> PASR: want "Full Array" (0)
944 emrs2Command
.raw
= 0;
945 emrs2Command
.field
.bankAddress
= 2;
// Write latency from the speed index (assumes ddr_speed is the same 0/1/2
// index used for the tCK[] table above -- TODO confirm): WL = 5 + index.
947 WL
= 5 + mrc_params
->ddr_speed
;
948 emrs2Command
.field
.CWL
= WL
- 5;
949 emrs2Command
.field
.SRT
= mrc_params
->sr_temp_range
;
952 // BIT[15:03] --> Always "0"
953 // BIT[02] --> MPR: want "Normal Operation" (0)
954 // BIT[01:00] --> MPR_Loc: want "Predefined Pattern" (0)
955 emrs3Command
.raw
= 0;
956 emrs3Command
.field
.bankAddress
= 3;
959 // BIT[15:13] --> Always "0"
960 // BIT[12:12] --> Qoff: want "Output Buffer Enabled" (0)
961 // BIT[11:11] --> TDQS: want "Disabled" (0)
962 // BIT[10:10] --> Always "0"
963 // BIT[09,06,02] --> Rtt_nom: use rtt_nom_value
964 // BIT[08] --> Always "0"
965 // BIT[07] --> WR_LVL: want "Disabled" (0)
966 // BIT[05,01] --> DIC: use ron_value
967 // BIT[04:03] --> AL: additive latency want "0" (0)
968 // BIT[00] --> DLL: want "Enable" (0)
970 // (BIT5|BIT1) set Ron value
971 // 00 --> RZQ/6 (40ohm)
972 // 01 --> RZQ/7 (34ohm)
975 // (BIT9|BIT6|BIT2) set Rtt_nom value
977 // 001 --> RZQ/4 ( 60ohm)
978 // 010 --> RZQ/2 (120ohm)
979 // 011 --> RZQ/6 ( 40ohm)
981 emrs1Command
.raw
= 0;
982 emrs1Command
.field
.bankAddress
= 1;
983 emrs1Command
.field
.dllEnabled
= 0; // 0 = Enable , 1 = Disable
985 if (mrc_params
->ron_value
== 0)
987 emrs1Command
.field
.DIC0
= DDR3_EMRS1_DIC_34
;
991 emrs1Command
.field
.DIC0
= DDR3_EMRS1_DIC_40
;
995 if (mrc_params
->rtt_nom_value
== 0)
997 emrs1Command
.raw
|= (DDR3_EMRS1_RTTNOM_40
<< 6);
999 else if (mrc_params
->rtt_nom_value
== 1)
1001 emrs1Command
.raw
|= (DDR3_EMRS1_RTTNOM_60
<< 6);
1003 else if (mrc_params
->rtt_nom_value
== 2)
1005 emrs1Command
.raw
|= (DDR3_EMRS1_RTTNOM_120
<< 6);
1008 // save MRS1 value (excluding control fields)
1009 mrc_params
->mrs1
= emrs1Command
.raw
>> 6;
1012 // BIT[15:13] --> Always "0"
1013 // BIT[12] --> PPD: for Quark (1)
1014 // BIT[11:09] --> WR: use oem_tWR
1015 // BIT[08] --> DLL: want "Reset" (1, self clearing)
1016 // BIT[07] --> MODE: want "Normal" (0)
1017 // BIT[06:04,02] --> CL: use oem_tCAS
1018 // BIT[03] --> RD_BURST_TYPE: want "Interleave" (1)
1019 // BIT[01:00] --> BL: want "8 Fixed" (0)
1030 // BIT[02:02] "0" if oem_tCAS <= 11 (1866?)
1031 // BIT[06:04] use oem_tCAS-4
1032 mrs0Command
.raw
= 0;
1033 mrs0Command
.field
.bankAddress
= 0;
1034 mrs0Command
.field
.dllReset
= 1;
1035 mrs0Command
.field
.BL
= 0;
1036 mrs0Command
.field
.PPD
= 1;
1037 mrs0Command
.field
.casLatency
= DTR0reg
.field
.tCL
+ 1;
// Convert the 15000 ps JEDEC tWR minimum into clocks (round up via MCEIL).
1039 TCK
= tCK
[mrc_params
->ddr_speed
];
1040 TWR
= MCEIL(15000, TCK
); // Per JEDEC: tWR=15000ps DDR2/3 from 800-1600
1041 mrs0Command
.field
.writeRecovery
= TWR
- 4;
// Program mode registers per populated rank in the order MR2 -> MR3 -> MR1
// -> MR0, then issue ZQ long calibration (ZQCL) for that rank.
1043 for (Rank
= 0; Rank
< NUM_RANKS
; Rank
++)
1045 // Skip to next populated rank
1046 if ((mrc_params
->rank_enables
& (1 << Rank
)) == 0)
1051 emrs2Command
.field
.rankSelect
= Rank
;
1052 dram_init_command(emrs2Command
.raw
);
1054 emrs3Command
.field
.rankSelect
= Rank
;
1055 dram_init_command(emrs3Command
.raw
);
1057 emrs1Command
.field
.rankSelect
= Rank
;
1058 dram_init_command(emrs1Command
.raw
);
1060 mrs0Command
.field
.rankSelect
= Rank
;
1061 dram_init_command(mrs0Command
.raw
);
1063 dram_init_command(DCMD_ZQCL(Rank
));
1071 // POST_CODE[major] == 0x05
1073 // This function will perform our RCVEN Calibration Algorithm.
1074 // We will only use the 2xCLK domain timings to perform RCVEN Calibration.
1075 // All byte lanes will be calibrated "simultaneously" per channel per rank.
//
// Receive-enable (RCVEN) calibration, per enabled channel and rank:
//   1) temporarily set DTR1.tCCD=1 so each burst's DQS preamble is sampled
//   2) seed each byte lane's RCVEN delay, then find_rising_edge()
//   3) add 1/4 CLK to centre in the DQS high pulse
//   4) while any lane still samples "1", back that lane off one full clock
//      (underflow below one clock is a fatal training error, post 0xEE)
//   5) add a final 1/4 CLK to centre in the preamble (rolling-averaged
//      across ranks in the R2R_SHARING build)
//   6) restore DTR1 and optionally tune RDCMD2DATAVALID/DIFFAMP timings.
//
// NOTE(review): the embedded original numbering has gaps (braces, #if lines
// and a "do {" opener were lost in extraction); code text is kept verbatim.
//
// Parameters:
//   mrc_params - in/out MRC context (channel/rank enables, channel_width,
//                tune_rcvn are read here).
1076 static void rcvn_cal(
1077 MRCParams_t
*mrc_params
)
1079 uint8_t channel_i
; // channel counter
1080 uint8_t rank_i
; // rank counter
1081 uint8_t bl_i
; // byte lane counter
1082 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1085 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1087 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1088 #endif // BACKUP_RCVN
1089 #endif // R2R_SHARING
1093 uint32_t tempD
; // temporary DWORD
1094 uint32_t delay
[NUM_BYTE_LANES
]; // absolute PI value to be programmed on the byte lane
1097 #endif // BACKUP_RCVN
1101 post_code(0x05, 0x00);
1104 // need separate burst to sample DQS preamble
1105 dtr1
.raw
= dtr1save
.raw
= isbR32m(MCU
, DTR1
);
1106 dtr1
.field
.tCCD
= 1;
1107 isbW32m(MCU
, DTR1
, dtr1
.raw
);
1111 // need to set "final_delay[][]" elements to "0"
1112 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1113 #endif // R2R_SHARING
1115 // loop through each enabled channel
1116 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1118 if (mrc_params
->channel_enables
& (1 << channel_i
))
1120 // perform RCVEN Calibration on a per rank basis
1121 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1123 if (mrc_params
->rank_enables
& (1 << rank_i
))
1125 // POST_CODE here indicates the current channel and rank being calibrated
1126 post_code(0x05, (0x10 + ((channel_i
<< 4) | rank_i
)));
1129 // set hard-coded timing values
1130 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1132 set_rcvn(channel_i
, rank_i
, bl_i
, ddr_rcvn
[PLATFORM_ID
]);
// Enable the FIFO pointer (B01PTRCTL1 BIT8 cleared == enabled) for each
// byte-lane pair before training.
1136 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
+= 2)
1138 isbM32m(DDRPHY
, (B01PTRCTL1
+ ((bl_i
>> 1) * DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), 0,
1139 BIT8
); // 0 is enabled
1141 // initialise the starting delay to 128 PI (tCAS +1 CLK)
1142 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1145 // Original value was late at the end of DQS sequence
1146 delay
[bl_i
] = 3 * FULL_CLK
;
1148 delay
[bl_i
] = (4 + 1) * FULL_CLK
; // 1x CLK domain timing is tCAS-4
1151 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1154 // now find the rising edge
1155 find_rising_edge(mrc_params
, delay
, channel_i
, rank_i
, true);
1156 // Now increase delay by 32 PI (1/4 CLK) to place in center of high pulse.
1157 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1159 delay
[bl_i
] += QRTR_CLK
;
1160 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1162 // Now decrement delay by 128 PI (1 CLK) until we sample a "0"
// Body of a do/while loop (the "do {" opener was lost in extraction; it is
// closed at "1184 } while"): sample DQS across all lanes, and for each lane
// still reading "1", retreat one full clock if possible.
1166 tempD
= sample_dqs(mrc_params
, channel_i
, rank_i
, true);
1167 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1169 if (tempD
& (1 << bl_i
))
1171 if (delay
[bl_i
] >= FULL_CLK
)
1173 delay
[bl_i
] -= FULL_CLK
;
1174 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
// Error path: delay would underflow while the lane still samples "1";
// 0xEE appears to be the fatal-training post code -- TODO confirm.
1179 training_message(channel_i
, rank_i
, bl_i
);
1180 post_code(0xEE, 0x50);
1184 } while (tempD
& 0xFF);
1187 // increment "num_ranks_enabled"
1188 num_ranks_enabled
++;
1189 // Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble.
1190 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1192 delay
[bl_i
] += QRTR_CLK
;
1193 // add "delay[]" values to "final_delay[][]" for rolling average
1194 final_delay
[channel_i
][bl_i
] += delay
[bl_i
];
1195 // set timing based on rolling average values
1196 set_rcvn(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
1199 // Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble.
1200 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1202 delay
[bl_i
] += QRTR_CLK
;
1203 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1206 #endif // R2R_SHARING
// Disable the per-pair FIFO pointer again (BIT8 set == disabled).
1209 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
+= 2)
1211 isbM32m(DDRPHY
, (B01PTRCTL1
+ ((bl_i
>> 1) * DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), BIT8
,
1212 BIT8
); // 1 is disabled
1215 #endif // BACKUP_RCVN
1217 } // if rank is enabled
1219 } // if channel is enabled
// Restore the original DTR1 (undo the tCCD=1 preamble-sampling override).
1224 isbW32m(MCU
, DTR1
, dtr1save
.raw
);
// Optional post-training tuning of RDCMD2DATAVALID and DIFFAMP timings.
1228 if (mrc_params
->tune_rcvn
)
1230 uint32_t rcven
, val
;
1231 uint32_t rdcmd2rcven
;
// NOTE(review): the "Formulas ..." lines below were comment text in the
// original source; their comment markers were lost in extraction.
1234 Formulas for RDCMD2DATAVALID & DIFFAMP dynamic timings
1236 1. Set after RCVEN training
1238 //Tune RDCMD2DATAVALID
1241 MAX OF 2 RANKS : round up (rdcmd2rcven (rcven 1x) + 2x x 2 + PI/128) + 5
1243 //rdcmd2rcven x80/84[12:8]
1244 //rcven 2x x70[23:20] & [11:8]
1246 //Tune DIFFAMP Timings
1248 //diffampen launch x88[20:16] & [4:0] -- B01LATCTL1
1249 MIN OF 2 RANKS : round down (rcven 1x + 2x x 2 + PI/128) - 1
1251 //diffampen length x8C/x90 [13:8] -- B0ONDURCTL B1ONDURCTL
1252 MAX OF 2 RANKS : roundup (rcven 1x + 2x x 2 + PI/128) + 5
1255 2. need to do a fiforst after settings these values
1258 DPF(D_INFO
, "BEFORE\n");
1259 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0LATCTL0
));
1260 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B01LATCTL1
));
1261 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0ONDURCTL
));
1263 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1LATCTL0
));
1264 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1ONDURCTL
));
// Byte lane 0: recompute RDCMD2DATAVALID (+6), diffamp launch (-1) and
// diffamp length (+5) from the trained RCVEN delay in whole clocks.
1266 rcven
= get_rcvn(0, 0, 0) / 128;
1267 rdcmd2rcven
= (isbR32m(DDRPHY
, B0LATCTL0
) >> 8) & 0x1F;
1268 val
= rdcmd2rcven
+ rcven
+ 6;
1269 isbM32m(DDRPHY
, B0LATCTL0
, val
<< 16, (BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
));
1271 val
= rdcmd2rcven
+ rcven
- 1;
1272 isbM32m(DDRPHY
, B01LATCTL1
, val
<< 0, (BIT4
|BIT3
|BIT2
|BIT1
|BIT0
));
1274 val
= rdcmd2rcven
+ rcven
+ 5;
1275 isbM32m(DDRPHY
, B0ONDURCTL
, val
<< 8, (BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
));
// Byte lane 1: same three adjustments against the B1* registers.
1277 rcven
= get_rcvn(0, 0, 1) / 128;
1278 rdcmd2rcven
= (isbR32m(DDRPHY
, B1LATCTL0
) >> 8) & 0x1F;
1279 val
= rdcmd2rcven
+ rcven
+ 6;
1280 isbM32m(DDRPHY
, B1LATCTL0
, val
<< 16, (BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
));
1282 val
= rdcmd2rcven
+ rcven
- 1;
1283 isbM32m(DDRPHY
, B01LATCTL1
, val
<< 16, (BIT20
|BIT19
|BIT18
|BIT17
|BIT16
));
1285 val
= rdcmd2rcven
+ rcven
+ 5;
1286 isbM32m(DDRPHY
, B1ONDURCTL
, val
<< 8, (BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
));
1288 DPF(D_INFO
, "AFTER\n");
1289 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0LATCTL0
));
1290 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B01LATCTL1
));
1291 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0ONDURCTL
));
1293 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1LATCTL0
));
1294 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1ONDURCTL
));
1296 DPF(D_INFO
, "\nPress a key\n");
// FIFO pointer reset: enable (BIT8=0) then disable (BIT8=1) -- the
// "fiforst" step the tuning notes above call for.
1300 isbM32m(DDRPHY
, B01PTRCTL1
, 0, BIT8
); // 0 is enabled
1302 isbM32m(DDRPHY
, B01PTRCTL1
, BIT8
, BIT8
); // 1 is disabled
1310 // Check memory executing write/read/verify of many data patterns
1311 // at the specified address. Bits in the result indicate failure
1312 // on specific byte lane.
//
// Runs the HTE (hardware training engine) write-stress pattern test at
// 'address' via WriteStressBitLanesHTE(); the returned value is a bit mask
// of failing byte lanes (0 == all lanes pass). If the caller requested HTE
// reconfiguration (mrc_params->hte_setup != 0), the flag is consumed here
// and select_hte() is called before the test.
//
// NOTE(review): the numbering gaps (1314 -> 1318, 1330 -> 1334) show that
// the 'address' parameter line, the 'result' declaration, 'first_run = 1;',
// braces and the final 'return result;' were lost in extraction; the text
// below is preserved verbatim.
1313 static uint32_t check_bls_ex(
1314 MRCParams_t
*mrc_params
,
1318 uint8_t first_run
= 0;
1320 if (mrc_params
->hte_setup
)
1322 mrc_params
->hte_setup
= 0;
1325 select_hte(mrc_params
);
1328 result
= WriteStressBitLanesHTE(mrc_params
, address
, first_run
);
1330 DPF(D_TRN
, "check_bls_ex result is %x\n", result
);
1334 // Check memory executing simple write/read/verify at
1335 // the specified address. Bits in the result indicate failure
1336 // on specific byte lane.
//
// Runs the HTE basic write/read/verify test (WRITE_TRAIN mode) at 'address'
// via BasicWriteReadHTE(); the returned value is a bit mask of failing byte
// lanes (0 == all lanes pass). Consumes the mrc_params->hte_setup flag the
// same way check_bls_ex() does, calling select_hte() first when set.
//
// NOTE(review): the numbering gaps (1338 -> 1341, 1354 -> 1359) show that
// the 'address' parameter line, 'first_run = 1;', braces and the final
// 'return result;' were lost in extraction; the text below is verbatim.
1337 static uint32_t check_rw_coarse(
1338 MRCParams_t
*mrc_params
,
1341 uint32_t result
= 0;
1342 uint8_t first_run
= 0;
1344 if (mrc_params
->hte_setup
)
1346 mrc_params
->hte_setup
= 0;
1349 select_hte(mrc_params
);
1352 result
= BasicWriteReadHTE(mrc_params
, address
, first_run
, WRITE_TRAIN
);
1354 DPF(D_TRN
, "check_rw_coarse result is %x\n", result
);
1359 // POST_CODE[major] == 0x06
1361 // This function will perform the Write Levelling algorithm (align WCLK and WDQS).
1362 // This algorithm will act on each rank in each channel separately.
//
// Write levelling, per enabled channel and rank:
//   Phase 1 (fine): put the DRAM into MR1 write-levelling mode (MRS1 value
//     0x0082), disable MCU full-time ODT (DTR4.ODTDIS), seed WDQS from the
//     trained WCLK, and find_rising_edge() to align WDQS to the clock; then
//     restore DTR4 and MR1 (mrc_params->mrs1 saved by jedec_init()).
//   Phase 2 (coarse): add one full clock to WDQS and write/read-verify via
//     check_rw_coarse(); any failing byte lane is pulled back one full clock
//     and the test repeats until all lanes pass. Throughout, WDQ tracks
//     WDQS - 1/4 CLK. R2R_SHARING builds average the result across ranks.
//
// NOTE(review): the embedded original numbering has gaps (braces, #if lines
// and a "do {" opener were lost in extraction); code text is kept verbatim.
//
// Parameters:
//   mrc_params - in/out MRC context (channel/rank enables, channel_width,
//                mrs1 are read; hte_setup is set to request HTE reconfig).
1363 static void wr_level(
1364 MRCParams_t
*mrc_params
)
1366 uint8_t channel_i
; // channel counter
1367 uint8_t rank_i
; // rank counter
1368 uint8_t bl_i
; // byte lane counter
1369 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1372 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1374 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1375 #endif // BACKUP_WDQS
1376 #endif // R2R_SHARING
1380 bool all_edges_found
; // determines stop condition for CRS_WR_LVL
1381 uint32_t delay
[NUM_BYTE_LANES
]; // absolute PI value to be programmed on the byte lane
1382 // static makes it so the data is loaded in the heap once by shadow(), where
1383 // non-static copies the data onto the stack every time this function is called.
1385 uint32_t address
; // address to be checked during COARSE_WR_LVL
1388 #endif // BACKUP_WDQS
1393 post_code(0x06, 0x00);
1396 // need to set "final_delay[][]" elements to "0"
1397 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1398 #endif // R2R_SHARING
1399 // loop through each enabled channel
1400 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1402 if (mrc_params
->channel_enables
& (1 << channel_i
))
1404 // perform WRITE LEVELING algorithm on a per rank basis
1405 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1407 if (mrc_params
->rank_enables
& (1 << rank_i
))
1409 // POST_CODE here indicates the current rank and channel being calibrated
1410 post_code(0x06, (0x10 + ((channel_i
<< 4) | rank_i
)));
// Seed WDQS/WDQ from the platform's static table; WDQ = WDQS - 1/4 CLK.
1413 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1415 set_wdqs(channel_i
, rank_i
, bl_i
, ddr_wdqs
[PLATFORM_ID
]);
1416 set_wdq(channel_i
, rank_i
, bl_i
, (ddr_wdqs
[PLATFORM_ID
] - QRTR_CLK
));
1420 { // Begin product specific code
1422 // perform a single PRECHARGE_ALL command to make DRAM state machine go to IDLE state
1423 dram_init_command(DCMD_PREA(rank_i
));
1425 // enable Write Levelling Mode (EMRS1 w/ Write Levelling Mode Enable)
1426 dram_init_command(DCMD_MRS1(rank_i
,0x0082));
1428 // set ODT DRAM Full Time Termination disable in MCU
1429 dtr4
.raw
= dtr4save
.raw
= isbR32m(MCU
, DTR4
);
1430 dtr4
.field
.ODTDIS
= 1;
1431 isbW32m(MCU
, DTR4
, dtr4
.raw
);
1433 for (bl_i
= 0; bl_i
< ((NUM_BYTE_LANES
/ bl_divisor
) / 2); bl_i
++)
1435 isbM32m(DDRPHY
, DQCTL
+ (DDRIODQ_BL_OFFSET
* bl_i
) + (DDRIODQ_CH_OFFSET
* channel_i
),
1436 (BIT28
| (0x1 << 8) | (0x1 << 6) | (0x1 << 4) | (0x1 << 2)),
1437 (BIT28
| (BIT9
|BIT8
) | (BIT7
|BIT6
) | (BIT5
|BIT4
) | (BIT3
|BIT2
))); // Enable Sandy Bridge Mode (WDQ Tri-State) & Ensure 5 WDQS pulses during Write Leveling
1440 isbM32m(DDRPHY
, CCDDR3RESETCTL
+ (DDRIOCCC_CH_OFFSET
* channel_i
), (BIT16
), (BIT16
)); // Write Leveling Mode enabled in IO
1441 } // End product specific code
1442 // Initialise the starting delay to WCLK
1443 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1445 { // Begin product specific code
1448 delay
[bl_i
] = get_wclk(channel_i
, rank_i
);
1449 } // End product specific code
1450 set_wdqs(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1452 // now find the rising edge
1453 find_rising_edge(mrc_params
, delay
, channel_i
, rank_i
, false);
1454 { // Begin product specific code
1455 // disable Write Levelling Mode
1456 isbM32m(DDRPHY
, CCDDR3RESETCTL
+ (DDRIOCCC_CH_OFFSET
* channel_i
), (0), (BIT16
)); // Write Leveling Mode disabled in IO
1458 for (bl_i
= 0; bl_i
< ((NUM_BYTE_LANES
/ bl_divisor
) / 2); bl_i
++)
1460 isbM32m(DDRPHY
, DQCTL
+ (DDRIODQ_BL_OFFSET
* bl_i
) + (DDRIODQ_CH_OFFSET
* channel_i
),
1461 ((0x1 << 8) | (0x1 << 6) | (0x1 << 4) | (0x1 << 2)),
1462 (BIT28
| (BIT9
|BIT8
) | (BIT7
|BIT6
) | (BIT5
|BIT4
) | (BIT3
|BIT2
))); // Disable Sandy Bridge Mode & Ensure 4 WDQS pulses during normal operation
1465 // restore original DTR4
1466 isbW32m(MCU
, DTR4
, dtr4save
.raw
);
1468 // restore original value (Write Levelling Mode Disable)
1469 dram_init_command(DCMD_MRS1(rank_i
, mrc_params
->mrs1
));
1471 // perform a single PRECHARGE_ALL command to make DRAM state machine go to IDLE state
1472 dram_init_command(DCMD_PREA(rank_i
));
1473 } // End product specific code
1475 post_code(0x06, (0x30 + ((channel_i
<< 4) | rank_i
)));
1477 // COARSE WRITE LEVEL:
1478 // check that we're on the correct clock edge
1480 // hte reconfiguration request
1481 mrc_params
->hte_setup
= 1;
1483 // start CRS_WR_LVL with WDQS = WDQS + 128 PI
1484 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1486 delay
[bl_i
] = get_wdqs(channel_i
, rank_i
, bl_i
) + FULL_CLK
;
1487 set_wdqs(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1488 // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
1489 set_wdq(channel_i
, rank_i
, bl_i
, (delay
[bl_i
] - QRTR_CLK
));
1492 // get an address in the targeted channel/rank
1493 address
= get_addr(mrc_params
, channel_i
, rank_i
);
// Locals for the coarse do/while loop (its "do {" opener was lost in
// extraction; the loop is closed at "1521 } while").
1496 uint32_t coarse_result
= 0x00;
1497 uint32_t coarse_result_mask
= byte_lane_mask(mrc_params
);
1498 all_edges_found
= true; // assume pass
1501 // need restore memory to idle state as write can be in bad sync
1502 dram_init_command (DCMD_PREA(rank_i
));
1505 mrc_params
->hte_setup
= 1;
1506 coarse_result
= check_rw_coarse(mrc_params
, address
);
1508 // check for failures and margin the byte lane back 128 PI (1 CLK)
1509 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1511 if (coarse_result
& (coarse_result_mask
<< bl_i
))
1513 all_edges_found
= false;
1514 delay
[bl_i
] -= FULL_CLK
;
1515 set_wdqs(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1516 // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
1517 set_wdq(channel_i
, rank_i
, bl_i
, (delay
[bl_i
] - QRTR_CLK
));
1521 } while (!all_edges_found
);
1524 // increment "num_ranks_enabled"
1525 num_ranks_enabled
++;
1526 // accumulate "final_delay[][]" values from "delay[]" values for rolling average
1527 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1529 final_delay
[channel_i
][bl_i
] += delay
[bl_i
];
1530 set_wdqs(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
1531 // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
1532 set_wdq(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
) - QRTR_CLK
);
1534 #endif // R2R_SHARING
1535 #endif // BACKUP_WDQS
1537 } // if rank is enabled
1539 } // if channel is enabled
1547 // POST_CODE[major] == 0x07
1549 // This function will perform the READ TRAINING Algorithm on all channels/ranks/byte_lanes simultaneously to minimize execution time.
1550 // The idea here is to train the VREF and RDQS (and eventually RDQ) values to achieve maximum READ margins.
1551 // The algorithm will first determine the X coordinate (RDQS setting).
1552 // This is done by collapsing the VREF eye until we find a minimum required RDQS eye for VREF_MIN and VREF_MAX.
1553 // Then we take the averages of the RDQS eye at VREF_MIN and VREF_MAX, then average those; this will be the final X coordinate.
1554 // The algorithm will then determine the Y coordinate (VREF setting).
1555 // This is done by collapsing the RDQS eye until we find a minimum required VREF eye for RDQS_MIN and RDQS_MAX.
1556 // Then we take the averages of the VREF eye at RDQS_MIN and RDQS_MAX, then average those; this will be the final Y coordinate.
1557 // NOTE: this algorithm assumes the eye curves have a one-to-one relationship, meaning for each X the curve has only one Y and vice-a-versa.
1558 static void rd_train(
1559 MRCParams_t
*mrc_params
)
1562 #define MIN_RDQS_EYE 10 // in PI Codes
1563 #define MIN_VREF_EYE 10 // in VREF Codes
1564 #define RDQS_STEP 1 // how many RDQS codes to jump while margining
1565 #define VREF_STEP 1 // how many VREF codes to jump while margining
1566 #define VREF_MIN (0x00) // offset into "vref_codes[]" for minimum allowed VREF setting
1567 #define VREF_MAX (0x3F) // offset into "vref_codes[]" for maximum allowed VREF setting
1568 #define RDQS_MIN (0x00) // minimum RDQS delay value
1569 #define RDQS_MAX (0x3F) // maximum RDQS delay value
1570 #define B 0 // BOTTOM VREF
1571 #define T 1 // TOP VREF
1572 #define L 0 // LEFT RDQS
1573 #define R 1 // RIGHT RDQS
1575 uint8_t channel_i
; // channel counter
1576 uint8_t rank_i
; // rank counter
1577 uint8_t bl_i
; // byte lane counter
1578 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1581 uint8_t side_x
; // tracks LEFT/RIGHT approach vectors
1582 uint8_t side_y
; // tracks BOTTOM/TOP approach vectors
1583 uint8_t x_coordinate
[2/*side_x*/][2/*side_y*/][NUM_CHANNELS
][NUM_RANKS
][NUM_BYTE_LANES
]; // X coordinate data (passing RDQS values) for approach vectors
1584 uint8_t y_coordinate
[2/*side_x*/][2/*side_y*/][NUM_CHANNELS
][NUM_BYTE_LANES
]; // Y coordinate data (passing VREF values) for approach vectors
1585 uint8_t x_center
[NUM_CHANNELS
][NUM_RANKS
][NUM_BYTE_LANES
]; // centered X (RDQS)
1586 uint8_t y_center
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // centered Y (VREF)
1587 uint32_t address
; // target address for "check_bls_ex()"
1588 uint32_t result
; // result of "check_bls_ex()"
1589 uint32_t bl_mask
; // byte lane mask for "result" checking
1591 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1592 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1593 #endif // R2R_SHARING
1594 #endif // BACKUP_RDQS
1596 post_code(0x07, 0x00);
1601 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++)
1603 if (mrc_params
->channel_enables
& (1<<channel_i
))
1605 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++)
1607 if (mrc_params
->rank_enables
& (1<<rank_i
))
1609 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1611 set_rdqs(channel_i
, rank_i
, bl_i
, ddr_rdqs
[PLATFORM_ID
]);
1613 } // if rank is enabled
1615 } // if channel is enabled
1618 // initialise x/y_coordinate arrays
1619 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1621 if (mrc_params
->channel_enables
& (1 << channel_i
))
1623 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1625 if (mrc_params
->rank_enables
& (1 << rank_i
))
1627 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1630 x_coordinate
[L
][B
][channel_i
][rank_i
][bl_i
] = RDQS_MIN
;
1631 x_coordinate
[R
][B
][channel_i
][rank_i
][bl_i
] = RDQS_MAX
;
1632 x_coordinate
[L
][T
][channel_i
][rank_i
][bl_i
] = RDQS_MIN
;
1633 x_coordinate
[R
][T
][channel_i
][rank_i
][bl_i
] = RDQS_MAX
;
1635 y_coordinate
[L
][B
][channel_i
][bl_i
] = VREF_MIN
;
1636 y_coordinate
[R
][B
][channel_i
][bl_i
] = VREF_MIN
;
1637 y_coordinate
[L
][T
][channel_i
][bl_i
] = VREF_MAX
;
1638 y_coordinate
[R
][T
][channel_i
][bl_i
] = VREF_MAX
;
1640 } // if rank is enabled
1642 } // if channel is enabled
1645 // initialise other variables
1646 bl_mask
= byte_lane_mask(mrc_params
);
1647 address
= get_addr(mrc_params
, 0, 0);
1650 // need to set "final_delay[][]" elements to "0"
1651 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1652 #endif // R2R_SHARING
1654 // look for passing coordinates
1655 for (side_y
= B
; side_y
<= T
; side_y
++)
1657 for (side_x
= L
; side_x
<= R
; side_x
++)
1660 post_code(0x07, (0x10 + (side_y
* 2) + (side_x
)));
1662 // find passing values
1663 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1665 if (mrc_params
->channel_enables
& (0x1 << channel_i
))
1667 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1670 if (mrc_params
->rank_enables
& (0x1 << rank_i
))
1672 // set x/y_coordinate search starting settings
1673 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1675 set_rdqs(channel_i
, rank_i
, bl_i
, x_coordinate
[side_x
][side_y
][channel_i
][rank_i
][bl_i
]);
1676 set_vref(channel_i
, bl_i
, y_coordinate
[side_x
][side_y
][channel_i
][bl_i
]);
1678 // get an address in the target channel/rank
1679 address
= get_addr(mrc_params
, channel_i
, rank_i
);
1681 // request HTE reconfiguration
1682 mrc_params
->hte_setup
= 1;
1684 // test the settings
1688 // result[07:00] == failing byte lane (MAX 8)
1689 result
= check_bls_ex( mrc_params
, address
);
1691 // check for failures
1694 // at least 1 byte lane failed
1695 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1697 if (result
& (bl_mask
<< bl_i
))
1699 // adjust the RDQS values accordingly
1702 x_coordinate
[L
][side_y
][channel_i
][rank_i
][bl_i
] += RDQS_STEP
;
1706 x_coordinate
[R
][side_y
][channel_i
][rank_i
][bl_i
] -= RDQS_STEP
;
1708 // check that we haven't closed the RDQS_EYE too much
1709 if ((x_coordinate
[L
][side_y
][channel_i
][rank_i
][bl_i
] > (RDQS_MAX
- MIN_RDQS_EYE
)) ||
1710 (x_coordinate
[R
][side_y
][channel_i
][rank_i
][bl_i
] < (RDQS_MIN
+ MIN_RDQS_EYE
))
1712 (x_coordinate
[L
][side_y
][channel_i
][rank_i
][bl_i
]
1713 == x_coordinate
[R
][side_y
][channel_i
][rank_i
][bl_i
]))
1715 // not enough RDQS margin available at this VREF
1716 // update VREF values accordingly
1719 y_coordinate
[side_x
][B
][channel_i
][bl_i
] += VREF_STEP
;
1723 y_coordinate
[side_x
][T
][channel_i
][bl_i
] -= VREF_STEP
;
1725 // check that we haven't closed the VREF_EYE too much
1726 if ((y_coordinate
[side_x
][B
][channel_i
][bl_i
] > (VREF_MAX
- MIN_VREF_EYE
)) ||
1727 (y_coordinate
[side_x
][T
][channel_i
][bl_i
] < (VREF_MIN
+ MIN_VREF_EYE
)) ||
1728 (y_coordinate
[side_x
][B
][channel_i
][bl_i
] == y_coordinate
[side_x
][T
][channel_i
][bl_i
]))
1730 // VREF_EYE collapsed below MIN_VREF_EYE
1731 training_message(channel_i
, rank_i
, bl_i
);
1732 post_code(0xEE, (0x70 + (side_y
* 2) + (side_x
)));
1736 // update the VREF setting
1737 set_vref(channel_i
, bl_i
, y_coordinate
[side_x
][side_y
][channel_i
][bl_i
]);
1738 // reset the X coordinate to begin the search at the new VREF
1739 x_coordinate
[side_x
][side_y
][channel_i
][rank_i
][bl_i
] =
1740 (side_x
== L
) ? (RDQS_MIN
) : (RDQS_MAX
);
1743 // update the RDQS setting
1744 set_rdqs(channel_i
, rank_i
, bl_i
, x_coordinate
[side_x
][side_y
][channel_i
][rank_i
][bl_i
]);
1747 } // at least 1 byte lane failed
1748 } while (result
& 0xFF);
1749 } // if rank is enabled
1751 } // if channel is enabled
1756 post_code(0x07, 0x20);
1758 // find final RDQS (X coordinate) & final VREF (Y coordinate)
1759 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1761 if (mrc_params
->channel_enables
& (1 << channel_i
))
1763 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1765 if (mrc_params
->rank_enables
& (1 << rank_i
))
1767 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1773 DPF(D_INFO
, "RDQS T/B eye rank%d lane%d : %d-%d %d-%d\n", rank_i
, bl_i
,
1774 x_coordinate
[L
][T
][channel_i
][rank_i
][bl_i
],
1775 x_coordinate
[R
][T
][channel_i
][rank_i
][bl_i
],
1776 x_coordinate
[L
][B
][channel_i
][rank_i
][bl_i
],
1777 x_coordinate
[R
][B
][channel_i
][rank_i
][bl_i
]);
1779 tempD1
= (x_coordinate
[R
][T
][channel_i
][rank_i
][bl_i
] + x_coordinate
[L
][T
][channel_i
][rank_i
][bl_i
]) / 2; // average the TOP side LEFT & RIGHT values
1780 tempD2
= (x_coordinate
[R
][B
][channel_i
][rank_i
][bl_i
] + x_coordinate
[L
][B
][channel_i
][rank_i
][bl_i
]) / 2; // average the BOTTOM side LEFT & RIGHT values
1781 x_center
[channel_i
][rank_i
][bl_i
] = (uint8_t) ((tempD1
+ tempD2
) / 2); // average the above averages
1784 DPF(D_INFO
, "VREF R/L eye lane%d : %d-%d %d-%d\n", bl_i
,
1785 y_coordinate
[R
][B
][channel_i
][bl_i
],
1786 y_coordinate
[R
][T
][channel_i
][bl_i
],
1787 y_coordinate
[L
][B
][channel_i
][bl_i
],
1788 y_coordinate
[L
][T
][channel_i
][bl_i
]);
1790 tempD1
= (y_coordinate
[R
][T
][channel_i
][bl_i
] + y_coordinate
[R
][B
][channel_i
][bl_i
]) / 2; // average the RIGHT side TOP & BOTTOM values
1791 tempD2
= (y_coordinate
[L
][T
][channel_i
][bl_i
] + y_coordinate
[L
][B
][channel_i
][bl_i
]) / 2; // average the LEFT side TOP & BOTTOM values
1792 y_center
[channel_i
][bl_i
] = (uint8_t) ((tempD1
+ tempD2
) / 2); // average the above averages
1794 } // if rank is enabled
1796 } // if channel is enabled
1800 // perform an eye check
1801 for (side_y
=B
; side_y
<=T
; side_y
++)
1803 for (side_x
=L
; side_x
<=R
; side_x
++)
1806 post_code(0x07, (0x30 + (side_y
* 2) + (side_x
)));
1808 // update the settings for the eye check
1809 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++)
1811 if (mrc_params
->channel_enables
& (1<<channel_i
))
1813 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++)
1815 if (mrc_params
->rank_enables
& (1<<rank_i
))
1817 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1821 set_rdqs(channel_i
, rank_i
, bl_i
, (x_center
[channel_i
][rank_i
][bl_i
] - (MIN_RDQS_EYE
/ 2)));
1825 set_rdqs(channel_i
, rank_i
, bl_i
, (x_center
[channel_i
][rank_i
][bl_i
] + (MIN_RDQS_EYE
/ 2)));
1829 set_vref(channel_i
, bl_i
, (y_center
[channel_i
][bl_i
] - (MIN_VREF_EYE
/ 2)));
1833 set_vref(channel_i
, bl_i
, (y_center
[channel_i
][bl_i
] + (MIN_VREF_EYE
/ 2)));
1836 } // if rank is enabled
1838 } // if channel is enabled
1841 // request HTE reconfiguration
1842 mrc_params
->hte_setup
= 1;
1845 if (check_bls_ex( mrc_params
, address
) & 0xFF)
1847 // one or more byte lanes failed
1848 post_code(0xEE, (0x74 + (side_x
* 2) + (side_y
)));
1852 #endif // RX_EYE_CHECK
1854 post_code(0x07, 0x40);
1856 // set final placements
1857 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1859 if (mrc_params
->channel_enables
& (1 << channel_i
))
1861 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1863 if (mrc_params
->rank_enables
& (1 << rank_i
))
1866 // increment "num_ranks_enabled"
1867 num_ranks_enabled
++;
1868 #endif // R2R_SHARING
1869 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1873 final_delay
[channel_i
][bl_i
] += x_center
[channel_i
][rank_i
][bl_i
];
1874 set_rdqs(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
1876 set_rdqs(channel_i
, rank_i
, bl_i
, x_center
[channel_i
][rank_i
][bl_i
]);
1877 #endif // R2R_SHARING
1879 set_vref(channel_i
, bl_i
, y_center
[channel_i
][bl_i
]);
1881 } // if rank is enabled
1883 } // if channel is enabled
1885 #endif // BACKUP_RDQS
1891 // POST_CODE[major] == 0x08
1893 // This function will perform the WRITE TRAINING Algorithm on all channels/ranks/byte_lanes simultaneously to minimize execution time.
1894 // The idea here is to train the WDQ timings to achieve maximum WRITE margins.
1895 // The algorithm will start with WDQ at the current WDQ setting (tracks WDQS in WR_LVL) +/- 32 PIs (+/- 1/4 CLK) and collapse the eye until all data patterns pass.
1896 // This is because WDQS will be aligned to WCLK by the Write Leveling algorithm and WDQ will only ever have a 1/2 CLK window of validity.
1897 static void wr_train(
1898 MRCParams_t
*mrc_params
)
1901 #define WDQ_STEP 1 // how many WDQ codes to jump while margining
1902 #define L 0 // LEFT side loop value definition
1903 #define R 1 // RIGHT side loop value definition
1905 uint8_t channel_i
; // channel counter
1906 uint8_t rank_i
; // rank counter
1907 uint8_t bl_i
; // byte lane counter
1908 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1911 uint8_t side_i
; // LEFT/RIGHT side indicator (0=L, 1=R)
1912 uint32_t tempD
; // temporary DWORD
1913 uint32_t delay
[2/*side_i*/][NUM_CHANNELS
][NUM_RANKS
][NUM_BYTE_LANES
]; // 2 arrays, for L & R side passing delays
1914 uint32_t address
; // target address for "check_bls_ex()"
1915 uint32_t result
; // result of "check_bls_ex()"
1916 uint32_t bl_mask
; // byte lane mask for "result" checking
1918 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1919 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1920 #endif // R2R_SHARING
1921 #endif // BACKUP_WDQ
1924 post_code(0x08, 0x00);
1929 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++)
1931 if (mrc_params
->channel_enables
& (1<<channel_i
))
1933 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++)
1935 if (mrc_params
->rank_enables
& (1<<rank_i
))
1937 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1939 set_wdq(channel_i
, rank_i
, bl_i
, ddr_wdq
[PLATFORM_ID
]);
1941 } // if rank is enabled
1943 } // if channel is enabled
1946 // initialise "delay"
1947 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1949 if (mrc_params
->channel_enables
& (1 << channel_i
))
1951 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1953 if (mrc_params
->rank_enables
& (1 << rank_i
))
1955 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1957 // want to start with WDQ = (WDQS - QRTR_CLK) +/- QRTR_CLK
1958 tempD
= get_wdqs(channel_i
, rank_i
, bl_i
) - QRTR_CLK
;
1959 delay
[L
][channel_i
][rank_i
][bl_i
] = tempD
- QRTR_CLK
;
1960 delay
[R
][channel_i
][rank_i
][bl_i
] = tempD
+ QRTR_CLK
;
1962 } // if rank is enabled
1964 } // if channel is enabled
1967 // initialise other variables
1968 bl_mask
= byte_lane_mask(mrc_params
);
1969 address
= get_addr(mrc_params
, 0, 0);
1972 // need to set "final_delay[][]" elements to "0"
1973 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1974 #endif // R2R_SHARING
1976 // start algorithm on the LEFT side and train each channel/bl until no failures are observed, then repeat for the RIGHT side.
1977 for (side_i
= L
; side_i
<= R
; side_i
++)
1979 post_code(0x08, (0x10 + (side_i
)));
1981 // set starting values
1982 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1984 if (mrc_params
->channel_enables
& (1 << channel_i
))
1986 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1988 if (mrc_params
->rank_enables
& (1 << rank_i
))
1990 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1992 set_wdq(channel_i
, rank_i
, bl_i
, delay
[side_i
][channel_i
][rank_i
][bl_i
]);
1994 } // if rank is enabled
1996 } // if channel is enabled
1999 // find passing values
2000 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
2002 if (mrc_params
->channel_enables
& (0x1 << channel_i
))
2004 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
2006 if (mrc_params
->rank_enables
& (0x1 << rank_i
))
2008 // get an address in the target channel/rank
2009 address
= get_addr(mrc_params
, channel_i
, rank_i
);
2011 // request HTE reconfiguration
2012 mrc_params
->hte_setup
= 1;
2014 // check the settings
2019 // need restore memory to idle state as write can be in bad sync
2020 dram_init_command (DCMD_PREA(rank_i
));
2023 // result[07:00] == failing byte lane (MAX 8)
2024 result
= check_bls_ex( mrc_params
, address
);
2025 // check for failures
2028 // at least 1 byte lane failed
2029 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
2031 if (result
& (bl_mask
<< bl_i
))
2035 delay
[L
][channel_i
][rank_i
][bl_i
] += WDQ_STEP
;
2039 delay
[R
][channel_i
][rank_i
][bl_i
] -= WDQ_STEP
;
2041 // check for algorithm failure
2042 if (delay
[L
][channel_i
][rank_i
][bl_i
] != delay
[R
][channel_i
][rank_i
][bl_i
])
2044 // margin available, update delay setting
2045 set_wdq(channel_i
, rank_i
, bl_i
, delay
[side_i
][channel_i
][rank_i
][bl_i
]);
2049 // no margin available, notify the user and halt
2050 training_message(channel_i
, rank_i
, bl_i
);
2051 post_code(0xEE, (0x80 + side_i
));
2055 } // at least 1 byte lane failed
2056 } while (result
& 0xFF); // stop when all byte lanes pass
2057 } // if rank is enabled
2059 } // if channel is enabled
2063 // program WDQ to the middle of passing window
2064 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
2066 if (mrc_params
->channel_enables
& (1 << channel_i
))
2068 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
2070 if (mrc_params
->rank_enables
& (1 << rank_i
))
2073 // increment "num_ranks_enabled"
2074 num_ranks_enabled
++;
2075 #endif // R2R_SHARING
2076 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
2079 DPF(D_INFO
, "WDQ eye rank%d lane%d : %d-%d\n", rank_i
, bl_i
,
2080 delay
[L
][channel_i
][rank_i
][bl_i
],
2081 delay
[R
][channel_i
][rank_i
][bl_i
]);
2083 tempD
= (delay
[R
][channel_i
][rank_i
][bl_i
] + delay
[L
][channel_i
][rank_i
][bl_i
]) / 2;
2086 final_delay
[channel_i
][bl_i
] += tempD
;
2087 set_wdq(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
2089 set_wdq(channel_i
, rank_i
, bl_i
, tempD
);
2090 #endif // R2R_SHARING
2093 } // if rank is enabled
2095 } // if channel is enabled
2097 #endif // BACKUP_WDQ
2102 // Wrapper for jedec initialisation routine
2103 static void perform_jedec_init(
2104 MRCParams_t
*mrc_params
)
2106 jedec_init(mrc_params
, 0);
2109 // Configure DDRPHY for Auto-Refresh, Periodic Compensations,
2110 // Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down
2111 static void set_auto_refresh(
2112 MRCParams_t
*mrc_params
)
2117 uint32_t bl_divisor
= /*(mrc_params->channel_width==x16)?2:*/1;
2122 // enable Auto-Refresh, Periodic Compensations, Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down
2123 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
2125 if (mrc_params
->channel_enables
& (1 << channel_i
))
2127 // Enable Periodic RCOMPS
2128 isbM32m(DDRPHY
, CMPCTRL
, (BIT1
), (BIT1
));
2131 // Enable Dynamic DiffAmp & Set Read ODT Value
2132 switch (mrc_params
->rd_odt_value
)
2134 case 0: tempD
= 0x3F; break; // OFF
2135 default: tempD
= 0x00; break; // Auto
2136 } // rd_odt_value switch
2138 for (bl_i
=0; bl_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_i
++)
2140 isbM32m(DDRPHY
, (B0OVRCTL
+ (bl_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)),
2141 ((0x00<<16)|(tempD
<<10)),
2142 ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
))); // Override: DIFFAMP, ODT
2144 isbM32m(DDRPHY
, (B1OVRCTL
+ (bl_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)),
2145 ((0x00<<16)|(tempD
<<10)),
2146 ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
)));// Override: DIFFAMP, ODT
2149 // Issue ZQCS command
2150 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
2152 if (mrc_params
->rank_enables
& (1 << rank_i
))
2154 dram_init_command(DCMD_ZQCS(rank_i
));
2155 } // if rank_i enabled
2158 } // if channel_i enabled
2167 // Depending on configuration enables ECC support.
2168 // Available memory size is decresed, and updated with 0s
2169 // in order to clear error status. Address mode 2 forced.
2170 static void ecc_enable(
2171 MRCParams_t
*mrc_params
)
2177 if (mrc_params
->ecc_enables
== 0) return;
2181 // Configuration required in ECC mode
2182 Drp
.raw
= isbR32m(MCU
, DRP
);
2183 Drp
.field
.addressMap
= 2;
2184 Drp
.field
.split64
= 1;
2185 isbW32m(MCU
, DRP
, Drp
.raw
);
2187 // Disable new request bypass
2188 Dsch
.raw
= isbR32m(MCU
, DSCH
);
2189 Dsch
.field
.NEWBYPDIS
= 1;
2190 isbW32m(MCU
, DSCH
, Dsch
.raw
);
2194 Ctr
.field
.SBEEN
= 1;
2195 Ctr
.field
.DBEEN
= 1;
2196 Ctr
.field
.ENCBGEN
= 1;
2197 isbW32m(MCU
, DECCCTRL
, Ctr
.raw
);
2200 // Read back to be sure writing took place
2201 Ctr
.raw
= isbR32m(MCU
, DECCCTRL
);
2204 // Assume 8 bank memory, one bank is gone for ECC
2205 mrc_params
->mem_size
-= mrc_params
->mem_size
/ 8;
2207 // For S3 resume memory content has to be preserved
2208 if (mrc_params
->boot_mode
!= bmS3
)
2210 select_hte(mrc_params
);
2211 HteMemInit(mrc_params
, MrcMemInit
, MrcHaltHteEngineOnError
);
2212 select_memory_manager(mrc_params
);
2219 // Lock MCU registers at the end of initialisation sequence.
2220 static void lock_registers(
2221 MRCParams_t
*mrc_params
)
2227 Dco
.raw
= isbR32m(MCU
, DCO
);
2228 Dco
.field
.PMIDIS
= 0; //0 - PRI enabled
2229 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
2230 Dco
.field
.DRPLOCK
= 1;
2231 Dco
.field
.REUTLOCK
= 1;
2232 isbW32m(MCU
, DCO
, Dco
.raw
);
2240 // cache write back invalidate
// NOTE(review): the function body (the inline-asm variants selected by the
// SIM/GCC guard below and its #else branch) is missing from this extraction;
// only the signature and the first preprocessor guard survive -- confirm
// against upstream meminit.c before editing.
2241 static void asm_wbinvd(void)
2243 #if defined (SIM) || defined (GCC)
// cache invalidate (companion to the write-back-invalidate helper)
// NOTE(review): the inline-asm body is missing from this extraction; only
// the signature and first preprocessor guard survive -- confirm upstream.
2253 static void asm_invd(void)
2255 #if defined (SIM) || defined (GCC)
// Debug helper: CPU-reads the first `limit` bytes of DRAM one 32-bit word
// at a time and dumps the data via DPF, starting a new output line (with
// an address prefix) every 16 bytes.
// NOTE(review): the assignment of `limit` (original lines 2268-2271) and
// any cache-invalidate call are missing from this extraction -- confirm
// against upstream meminit.c.
2265 static void cpu_read(void)
2267 uint32_t adr
, dat
, limit
;
2272 for (adr
= 0; adr
< limit
; adr
+= 4)
2274 dat
= *(uint32_t*) adr
;
// every 16 bytes, begin a new output line prefixed with the address
2275 if ((adr
& 0x0F) == 0)
2277 DPF(D_INFO
, "\n%x : ", adr
);
2279 DPF(D_INFO
, "%x ", dat
);
2283 DPF(D_INFO
, "CPU read done\n");
// Debug helper: CPU-writes the first `limit` bytes of DRAM with the
// pattern 0xDEAD0000 + address, one 32-bit word at a time.
// NOTE(review): the assignment of `limit` (original lines 2290-2291) and
// any cache write-back call are missing from this extraction -- confirm
// against upstream meminit.c.
2287 static void cpu_write(void)
2289 uint32_t adr
, limit
;
2292 for (adr
= 0; adr
< limit
; adr
+= 4)
2294 *(uint32_t*) adr
= 0xDEAD0000 + adr
;
2299 DPF(D_INFO
, "CPU write done\n");
// Debug helper: CPU-driven memory test over [1MB, 256MB) in five passes,
// each writing then reading back a different pattern:
//   step 0: the address itself, step 1/2: walking one / walking zero
//   (bit selected by address bits [6:2]), step 3/4: 0x5555AAAA / 0xAAAA5555.
// Write and read phases are timed with read_tsc().
// NOTE(review): several lines are missing from this extraction -- the
// declaration of my_tsc, any cache flush/invalidate calls between phases,
// the mismatch guard around the "%x vs. %x@%x" print, the accumulation
// into `result`, and the function epilogue.  Confirm against upstream
// meminit.c before editing.
2303 static void cpu_memory_test(
2304 MRCParams_t
*mrc_params
)
2306 uint32_t result
= 0;
2307 uint32_t val
, dat
, adr
, adr0
, step
, limit
;
// test window: start at 1MB, end at 256MB
2314 adr0
= 1 * 1024 * 1024;
2315 limit
= 256 * 1024 * 1024;
2317 for (step
= 0; step
<= 4; step
++)
2319 DPF(D_INFO
, "Mem test step %d starting from %xh\n", step
, adr0
);
// write phase (timed)
2321 my_tsc
= read_tsc();
2322 for (adr
= adr0
; adr
< limit
; adr
+= sizeof(uint32_t))
// select the pattern for this step
2324 if (step
== 0) dat
= adr
;
2325 else if (step
== 1) dat
= (1 << ((adr
>> 2) & 0x1f));
2326 else if (step
== 2) dat
= ~(1 << ((adr
>> 2) & 0x1f));
2327 else if (step
== 3) dat
= 0x5555AAAA;
2328 else if (step
== 4) dat
= 0xAAAA5555;
2330 *(uint32_t*) adr
= dat
;
2332 DPF(D_INFO
, "Write time %llXh\n", read_tsc() - my_tsc
);
// read-back/verify phase (timed)
2334 my_tsc
= read_tsc();
2335 for (adr
= adr0
; adr
< limit
; adr
+= sizeof(uint32_t))
// recompute the expected pattern for this step
2337 if (step
== 0) dat
= adr
;
2338 else if (step
== 1) dat
= (1 << ((adr
>> 2) & 0x1f));
2339 else if (step
== 2) dat
= ~(1 << ((adr
>> 2) & 0x1f));
2340 else if (step
== 3) dat
= 0x5555AAAA;
2341 else if (step
== 4) dat
= 0xAAAA5555;
2343 val
= *(uint32_t*) adr
;
// report expected-vs-actual on mismatch (guard dropped by extraction)
2347 DPF(D_INFO
, "%x vs. %x@%x\n", dat
, val
, adr
);
2351 DPF(D_INFO
, "Read time %llXh\n", read_tsc() - my_tsc
);
2354 DPF( D_INFO
, "Memory test result %x\n", result
);
2360 // Execute memory test, if error dtected it is
2361 // indicated in mrc_params->status.
2362 static void memory_test(
2363 MRCParams_t
*mrc_params
)
2365 uint32_t result
= 0;
2369 select_hte(mrc_params
);
2370 result
= HteMemInit(mrc_params
, MrcMemTest
, MrcHaltHteEngineOnError
);
2371 select_memory_manager(mrc_params
);
2373 DPF(D_INFO
, "Memory test result %x\n", result
);
2374 mrc_params
->status
= ((result
== 0) ? MRC_SUCCESS
: MRC_E_MEMTEST
);
2379 // Force same timings as with backup settings
2380 static void static_timings(
2381 MRCParams_t
*mrc_params
)
2386 for (ch
= 0; ch
< NUM_CHANNELS
; ch
++)
2388 for (rk
= 0; rk
< NUM_RANKS
; rk
++)
2390 for (bl
= 0; bl
< NUM_BYTE_LANES
; bl
++)
2392 set_rcvn(ch
, rk
, bl
, 498); // RCVN
2393 set_rdqs(ch
, rk
, bl
, 24); // RDQS
2394 set_wdqs(ch
, rk
, bl
, 292); // WDQS
2395 set_wdq( ch
, rk
, bl
, 260); // WDQ
2398 set_vref(ch
, bl
, 32); // VREF (RANK0 only)
2401 set_wctl(ch
, rk
, 217); // WCTL
2403 set_wcmd(ch
, 220); // WCMD
2410 // Initialise system memory.
2413 MRCParams_t
*mrc_params
)
2415 static const MemInit_t init
[] =
2417 { 0x0101, bmCold
|bmFast
|bmWarm
|bmS3
, clear_self_refresh
}, //0
2418 { 0x0200, bmCold
|bmFast
|bmWarm
|bmS3
, prog_ddr_timing_control
}, //1 initialise the MCU
2419 { 0x0103, bmCold
|bmFast
, prog_decode_before_jedec
}, //2
2420 { 0x0104, bmCold
|bmFast
, perform_ddr_reset
}, //3
2421 { 0x0300, bmCold
|bmFast
|bmS3
, ddrphy_init
}, //4 initialise the DDRPHY
2422 { 0x0400, bmCold
|bmFast
, perform_jedec_init
}, //5 perform JEDEC initialisation of DRAMs
2423 { 0x0105, bmCold
|bmFast
, set_ddr_init_complete
}, //6
2424 { 0x0106, bmFast
|bmWarm
|bmS3
, restore_timings
}, //7
2425 { 0x0106, bmCold
, default_timings
}, //8
2426 { 0x0500, bmCold
, rcvn_cal
}, //9 perform RCVN_CAL algorithm
2427 { 0x0600, bmCold
, wr_level
}, //10 perform WR_LEVEL algorithm
2428 { 0x0120, bmCold
, prog_page_ctrl
}, //11
2429 { 0x0700, bmCold
, rd_train
}, //12 perform RD_TRAIN algorithm
2430 { 0x0800, bmCold
, wr_train
}, //13 perform WR_TRAIN algorithm
2431 { 0x010B, bmCold
, store_timings
}, //14
2432 { 0x010C, bmCold
|bmFast
|bmWarm
|bmS3
, enable_scrambling
}, //15
2433 { 0x010D, bmCold
|bmFast
|bmWarm
|bmS3
, prog_ddr_control
}, //16
2434 { 0x010E, bmCold
|bmFast
|bmWarm
|bmS3
, prog_dra_drb
}, //17
2435 { 0x010F, bmWarm
|bmS3
, perform_wake
}, //18
2436 { 0x0110, bmCold
|bmFast
|bmWarm
|bmS3
, change_refresh_period
}, //19
2437 { 0x0111, bmCold
|bmFast
|bmWarm
|bmS3
, set_auto_refresh
}, //20
2438 { 0x0112, bmCold
|bmFast
|bmWarm
|bmS3
, ecc_enable
}, //21
2439 { 0x0113, bmCold
|bmFast
, memory_test
}, //22
2440 { 0x0114, bmCold
|bmFast
|bmWarm
|bmS3
, lock_registers
} //23 set init done
2447 DPF(D_INFO
, "Meminit build %s %s\n", __DATE__
, __TIME__
);
2450 post_code(0x01, 0x00);
2452 if (mrc_params
->boot_mode
!= bmCold
)
2454 if (mrc_params
->ddr_speed
!= mrc_params
->timings
.ddr_speed
)
2456 // full training required as frequency changed
2457 mrc_params
->boot_mode
= bmCold
;
2461 for (i
= 0; i
< MCOUNT(init
); i
++)
2466 if (mrc_params
->menu_after_mrc
&& i
> 14)
2472 DPF(D_INFO
, "-- c - continue --\n");
2473 DPF(D_INFO
, "-- j - move to jedec init --\n");
2474 DPF(D_INFO
, "-- m - memory test --\n");
2475 DPF(D_INFO
, "-- r - cpu read --\n");
2476 DPF(D_INFO
, "-- w - cpu write --\n");
2477 DPF(D_INFO
, "-- b - hte base test --\n");
2478 DPF(D_INFO
, "-- g - hte extended test --\n");
2485 case 'j': //move to jedec init
2492 uint32_t n
, res
, cnt
=0;
2494 for(n
=0; mgetch()==0; n
++)
2496 if( ch
== 'M' || n
% 256 == 0)
2498 DPF(D_INFO
, "n=%d e=%d\n", n
, cnt
);
2505 memory_test(mrc_params
);
2506 res
|= mrc_params
->status
;
2509 mrc_params
->hte_setup
= 1;
2510 res
|= check_bls_ex(mrc_params
, 0x00000000);
2511 res
|= check_bls_ex(mrc_params
, 0x00000000);
2512 res
|= check_bls_ex(mrc_params
, 0x00000000);
2513 res
|= check_bls_ex(mrc_params
, 0x00000000);
2515 if( mrc_params
->rank_enables
& 2)
2517 mrc_params
->hte_setup
= 1;
2518 res
|= check_bls_ex(mrc_params
, 0x40000000);
2519 res
|= check_bls_ex(mrc_params
, 0x40000000);
2520 res
|= check_bls_ex(mrc_params
, 0x40000000);
2521 res
|= check_bls_ex(mrc_params
, 0x40000000);
2526 DPF(D_INFO
, "###########\n");
2528 DPF(D_INFO
, "# Error count %d\n", ++cnt
);
2530 DPF(D_INFO
, "###########\n");
2535 select_memory_manager(mrc_params
);
2539 memory_test(mrc_params
);
2542 cpu_memory_test(mrc_params
);
2547 if (ch
<= '9') DpfPrintMask
^= (ch
- '0') << 3;
2548 DPF(D_INFO
, "Log mask %x\n", DpfPrintMask
);
2551 print_timings(mrc_params
);
2554 rd_train(mrc_params
);
2557 wr_train(mrc_params
);
2570 select_hte(mrc_params
);
2571 mrc_params
->hte_setup
= 1;
2572 result
= check_bls_ex(mrc_params
, 0);
2573 DPF(D_INFO
, "Extended test result %x\n", result
);
2574 select_memory_manager(mrc_params
);
2580 select_hte(mrc_params
);
2581 mrc_params
->hte_setup
= 1;
2582 result
= check_rw_coarse(mrc_params
, 0);
2583 DPF(D_INFO
, "Base test result %x\n", result
);
2584 select_memory_manager(mrc_params
);
2588 select_hte(mrc_params
);
2589 HteMemOp(0x2340, 1, 1);
2590 select_memory_manager(mrc_params
);
2597 DPF( D_INFO
, "===>> Start suspend\n");
2598 isbR32m(MCU
, DSTAT
);
2600 DPMC0reg
.raw
= isbR32m(MCU
, DPMC0
);
2601 DPMC0reg
.field
.DYNSREN
= 0;
2602 DPMC0reg
.field
.powerModeOpCode
= 0x05; // Disable Master DLL
2603 isbW32m(MCU
, DPMC0
, DPMC0reg
.raw
);
2605 // Should be off for negative test case verification
2607 Wr32(MMIO
, PCIADDR(0,0,0,SB_PACKET_REG
),
2608 (uint32_t)SB_COMMAND(SB_SUSPEND_CMND_OPCODE
, MCU
, 0));
2611 DPF( D_INFO
, "press key\n");
2613 DPF( D_INFO
, "===>> Start resume\n");
2614 isbR32m(MCU
, DSTAT
);
2616 mrc_params
->boot_mode
= bmS3
;
2625 if (mrc_params
->boot_mode
& init
[i
].boot_path
)
2627 uint8_t major
= init
[i
].post_code
>> 8 & 0xFF;
2628 uint8_t minor
= init
[i
].post_code
>> 0 & 0xFF;
2629 post_code(major
, minor
);
2631 my_tsc
= read_tsc();
2632 init
[i
].init_fn(mrc_params
);
2633 DPF(D_TIME
, "Execution time %llX", read_tsc() - my_tsc
);
2637 // display the timings
2638 print_timings(mrc_params
);
2641 post_code(0x01, 0xFF);