1 /************************************************************************
3 * Copyright (c) 2013-2015 Intel Corporation.
5 * This program and the accompanying materials
6 * are licensed and made available under the terms and conditions of the BSD License
7 * which accompanies this distribution. The full text of the license may be found at
8 * http://opensource.org/licenses/bsd-license.php
10 * THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
13 * This file contains all of the Cat Mountain Memory Reference Code (MRC).
15 * These functions are generic and should work for any Cat Mountain config.
17 * MRC requires two data structures to be passed in which are initialised by "PreMemInit()".
19 * The basic flow is as follows:
20 * 01) Check for supported DDR speed configuration
21 * 02) Set up MEMORY_MANAGER buffer as pass-through (POR)
22 * 03) Set Channel Interleaving Mode and Channel Stride to the most aggressive setting possible
23 * 04) Set up the MCU logic
24 * 05) Set up the DDR_PHY logic
25 * 06) Initialise the DRAMs (JEDEC)
26 * 07) Perform the Receive Enable Calibration algorithm
27 * 08) Perform the Write Leveling algorithm
28 * 09) Perform the Read Training algorithm (includes internal Vref)
29 * 10) Perform the Write Training algorithm
30 * 11) Set Channel Interleaving Mode and Channel Stride to the desired settings
32 * Dunit configuration based on Valleyview MRC.
34 ***************************************************************************/
37 #include "memory_options.h"
40 #include "meminit_utils.h"
// Override ODT to off state if requested.
// NOTE: this macro expands `mrc_params`, so it may only be used inside a
// function that has `MRCParams_t *mrc_params` in scope (see perform_ddr_reset).
#define DRMC_DEFAULT (mrc_params->rd_odt_value==0?BIT12:0)
// tRFC values (in picoseconds) per density
// NOTE(review): the five-element initializer list (one entry per DRAM
// density) was lost in this extract; restore the values from the original
// source before building.
const uint32_t tRFC[5] =
// tCK clock period in picoseconds per speed index 800, 1066, 1333
// NOTE(review): the three-element initializer list was lost in this
// extract; restore the values from the original source before building.
const uint32_t tCK[3] =
// Select static timings specific to simulation environment
// Select static timings specific to ClantonPeek platform
// NOTE(review): the `#ifdef SIM` / `#define PLATFORM_ID` lines that the two
// comments above annotate were lost in this extract, as were the initializer
// lists of every table below (each table is indexed by PLATFORM_ID).
// Restore them from the original source before building.

// Static write-clock PI setting per platform.
const uint16_t ddr_wclk[] =

// Static write-control PI setting per platform.
const uint16_t ddr_wctl[] =

// Static write-command PI setting per platform.
const uint16_t ddr_wcmd[] =

// Static receive-enable delay per platform.
const uint16_t ddr_rcvn[] =

// Static write DQS delay per platform.
const uint16_t ddr_wdqs[] =

// Static read DQS delay per platform.
const uint8_t ddr_rdqs[] =

// Static write DQ delay per platform.
const uint16_t ddr_wdq[] =
108 // Select MEMORY_MANAGER as the source for PRI interface
109 static void select_memory_manager(
110 MRCParams_t
*mrc_params
)
116 Dco
.raw
= isbR32m(MCU
, DCO
);
117 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
118 isbW32m(MCU
, DCO
, Dco
.raw
);
123 // Select HTE as the source for PRI interface
125 MRCParams_t
*mrc_params
)
131 Dco
.raw
= isbR32m(MCU
, DCO
);
132 Dco
.field
.PMICTL
= 1; //1 - PRI owned by HTE
133 isbW32m(MCU
, DCO
, Dco
.raw
);
// Send DRAM command, data should be formatted
// using DCMD_Xxxx macro or emrsXCommand structure.
// NOTE(review): the parameter list and the entire function body were lost in
// this extract; restore them from the original source before building.
static void dram_init_command(
146 // Send DRAM wake command using special MCU side-band WAKE opcode
147 static void dram_wake_command(
152 Wr32(MMIO
, PCIADDR(0,0,0,SB_PACKET_REG
),
153 (uint32_t) SB_COMMAND(SB_WAKE_CMND_OPCODE
, MCU
, 0));
158 // Stop self refresh driven by MCU
159 static void clear_self_refresh(
160 MRCParams_t
*mrc_params
)
164 // clear the PMSTS Channel Self Refresh bits
165 isbM32m(MCU
, PMSTS
, BIT0
, BIT0
);
170 // Configure MCU before jedec init sequence
171 static void prog_decode_before_jedec(
172 MRCParams_t
*mrc_params
)
182 // Disable power saving features
183 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
184 Dpmc0
.field
.CLKGTDIS
= 1;
185 Dpmc0
.field
.DISPWRDN
= 1;
186 Dpmc0
.field
.DYNSREN
= 0;
187 Dpmc0
.field
.PCLSTO
= 0;
188 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
190 // Disable out of order transactions
191 Dsch
.raw
= isbR32m(MCU
, DSCH
);
192 Dsch
.field
.OOODIS
= 1;
193 Dsch
.field
.NEWBYPDIS
= 1;
194 isbW32m(MCU
, DSCH
, Dsch
.raw
);
196 // Disable issuing the REF command
197 Drfc
.raw
= isbR32m(MCU
, DRFC
);
198 Drfc
.field
.tREFI
= 0;
199 isbW32m(MCU
, DRFC
, Drfc
.raw
);
201 // Disable ZQ calibration short
202 Dcal
.raw
= isbR32m(MCU
, DCAL
);
203 Dcal
.field
.ZQCINT
= 0;
204 Dcal
.field
.SRXZQCL
= 0;
205 isbW32m(MCU
, DCAL
, Dcal
.raw
);
207 // Training performed in address mode 0, rank population has limited impact, however
208 // simulator complains if enabled non-existing rank.
210 if (mrc_params
->rank_enables
& 1)
211 Drp
.field
.rank0Enabled
= 1;
212 if (mrc_params
->rank_enables
& 2)
213 Drp
.field
.rank1Enabled
= 1;
214 isbW32m(MCU
, DRP
, Drp
.raw
);
219 // After Cold Reset, BIOS should set COLDWAKE bit to 1 before
220 // sending the WAKE message to the Dunit.
221 // For Standby Exit, or any other mode in which the DRAM is in
222 // SR, this bit must be set to 0.
223 static void perform_ddr_reset(
224 MRCParams_t
*mrc_params
)
228 // Set COLDWAKE bit before sending the WAKE message
229 isbM32m(MCU
, DRMC
, BIT16
, BIT16
);
231 // Send wake command to DUNIT (MUST be done before JEDEC)
235 isbW32m(MCU
, DRMC
, DRMC_DEFAULT
);
240 // Dunit Initialisation Complete.
241 // Indicates that initialisation of the Dunit has completed.
242 // Memory accesses are permitted and maintenance operation
243 // begins. Until this bit is set to a 1, the memory controller will
244 // not accept DRAM requests from the MEMORY_MANAGER or HTE.
245 static void set_ddr_init_complete(
246 MRCParams_t
*mrc_params
)
252 Dco
.raw
= isbR32m(MCU
, DCO
);
253 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
254 Dco
.field
.IC
= 1; //1 - initialisation complete
255 isbW32m(MCU
, DCO
, Dco
.raw
);
260 static void prog_page_ctrl(
261 MRCParams_t
*mrc_params
)
267 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
269 Dpmc0
.field
.PCLSTO
= 0x4;
270 Dpmc0
.field
.PREAPWDEN
= 1;
272 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
275 // Configure MCU Power Management Control Register
276 // and Scheduler Control Register.
277 static void prog_ddr_control(
278 MRCParams_t
*mrc_params
)
285 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
286 Dsch
.raw
= isbR32m(MCU
, DSCH
);
288 Dpmc0
.field
.DISPWRDN
= mrc_params
->power_down_disable
;
289 Dpmc0
.field
.CLKGTDIS
= 0;
290 Dpmc0
.field
.PCLSTO
= 4;
291 Dpmc0
.field
.PREAPWDEN
= 1;
293 Dsch
.field
.OOODIS
= 0;
294 Dsch
.field
.OOOST3DIS
= 0;
295 Dsch
.field
.NEWBYPDIS
= 0;
297 isbW32m(MCU
, DSCH
, Dsch
.raw
);
298 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
300 // CMDTRIST = 2h - CMD/ADDR are tristated when no valid command
301 isbM32m(MCU
, DPMC1
, 2 << 4, BIT5
|BIT4
);
306 // After training complete configure MCU Rank Population Register
307 // specifying: ranks enabled, device width, density, address mode.
308 static void prog_dra_drb(
309 MRCParams_t
*mrc_params
)
316 Dco
.raw
= isbR32m(MCU
, DCO
);
318 isbW32m(MCU
, DCO
, Dco
.raw
);
321 if (mrc_params
->rank_enables
& 1)
322 Drp
.field
.rank0Enabled
= 1;
323 if (mrc_params
->rank_enables
& 2)
324 Drp
.field
.rank1Enabled
= 1;
325 if (mrc_params
->dram_width
== x16
)
327 Drp
.field
.dimm0DevWidth
= 1;
328 Drp
.field
.dimm1DevWidth
= 1;
330 // Density encoding in DRAMParams_t 0=512Mb, 1=Gb, 2=2Gb, 3=4Gb
331 // has to be mapped RANKDENSx encoding (0=1Gb)
332 Drp
.field
.dimm0DevDensity
= mrc_params
->params
.DENSITY
- 1;
333 Drp
.field
.dimm1DevDensity
= mrc_params
->params
.DENSITY
- 1;
335 // Address mode can be overwritten if ECC enabled
336 Drp
.field
.addressMap
= mrc_params
->address_mode
;
338 isbW32m(MCU
, DRP
, Drp
.raw
);
340 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
341 Dco
.field
.IC
= 1; //1 - initialisation complete
342 isbW32m(MCU
, DCO
, Dco
.raw
);
347 // Configure refresh rate and short ZQ calibration interval.
348 // Activate dynamic self refresh.
349 static void change_refresh_period(
350 MRCParams_t
*mrc_params
)
358 Drfc
.raw
= isbR32m(MCU
, DRFC
);
359 Drfc
.field
.tREFI
= mrc_params
->refresh_rate
;
360 Drfc
.field
.REFDBTCLR
= 1;
361 isbW32m(MCU
, DRFC
, Drfc
.raw
);
363 Dcal
.raw
= isbR32m(MCU
, DCAL
);
364 Dcal
.field
.ZQCINT
= 3; // 63ms
365 isbW32m(MCU
, DCAL
, Dcal
.raw
);
367 Dpmc0
.raw
= isbR32m(MCU
, DPMC0
);
368 Dpmc0
.field
.ENPHYCLKGATE
= 1;
369 Dpmc0
.field
.DYNSREN
= 1;
370 isbW32m(MCU
, DPMC0
, Dpmc0
.raw
);
375 // Send DRAM wake command
376 static void perform_wake(
377 MRCParams_t
*mrc_params
)
386 // prog_ddr_timing_control (aka mcu_init):
387 // POST_CODE[major] == 0x02
389 // It will initialise timing registers in the MCU (DTR0..DTR4).
390 static void prog_ddr_timing_control(
391 MRCParams_t
*mrc_params
)
394 uint8_t TRP
, TRCD
, TRAS
, TWR
, TWTR
, TRRD
, TRTP
, TFAW
;
406 post_code(0x02, 0x00);
408 Dtr0
.raw
= isbR32m(MCU
, DTR0
);
409 Dtr1
.raw
= isbR32m(MCU
, DTR1
);
410 Dtr2
.raw
= isbR32m(MCU
, DTR2
);
411 Dtr3
.raw
= isbR32m(MCU
, DTR3
);
412 Dtr4
.raw
= isbR32m(MCU
, DTR4
);
414 TCK
= tCK
[mrc_params
->ddr_speed
]; // Clock in picoseconds
415 TCL
= mrc_params
->params
.tCL
; // CAS latency in clocks
416 TRP
= TCL
; // Per CAT MRC
417 TRCD
= TCL
; // Per CAT MRC
418 TRAS
= MCEIL(mrc_params
->params
.tRAS
, TCK
);
419 TWR
= MCEIL(15000, TCK
); // Per JEDEC: tWR=15000ps DDR2/3 from 800-1600
421 TWTR
= MCEIL(mrc_params
->params
.tWTR
, TCK
);
422 TRRD
= MCEIL(mrc_params
->params
.tRRD
, TCK
);
423 TRTP
= 4; // Valid for 800 and 1066, use 5 for 1333
424 TFAW
= MCEIL(mrc_params
->params
.tFAW
, TCK
);
426 WL
= 5 + mrc_params
->ddr_speed
;
428 Dtr0
.field
.dramFrequency
= mrc_params
->ddr_speed
;
430 Dtr0
.field
.tCL
= TCL
- 5; //Convert from TCL (DRAM clocks) to VLV indx
431 Dtr0
.field
.tRP
= TRP
- 5; //5 bit DRAM Clock
432 Dtr0
.field
.tRCD
= TRCD
- 5; //5 bit DRAM Clock
434 Dtr1
.field
.tWCL
= WL
- 3; //Convert from WL (DRAM clocks) to VLV indx
435 Dtr1
.field
.tWTP
= WL
+ 4 + TWR
- 14; //Change to tWTP
436 Dtr1
.field
.tRTP
= MMAX(TRTP
, 4) - 3; //4 bit DRAM Clock
437 Dtr1
.field
.tRRD
= TRRD
- 4; //4 bit DRAM Clock
438 Dtr1
.field
.tCMD
= 1; //2N
439 Dtr1
.field
.tRAS
= TRAS
- 14; //6 bit DRAM Clock
441 Dtr1
.field
.tFAW
= ((TFAW
+ 1) >> 1) - 5; //4 bit DRAM Clock
442 Dtr1
.field
.tCCD
= 0; //Set 4 Clock CAS to CAS delay (multi-burst)
443 Dtr2
.field
.tRRDR
= 1;
444 Dtr2
.field
.tWWDR
= 2;
445 Dtr2
.field
.tRWDR
= 2;
446 Dtr3
.field
.tWRDR
= 2;
447 Dtr3
.field
.tWRDD
= 2;
449 if (mrc_params
->ddr_speed
== DDRFREQ_800
)
451 // Extended RW delay (+1)
452 Dtr3
.field
.tRWSR
= TCL
- 5 + 1;
454 else if(mrc_params
->ddr_speed
== DDRFREQ_1066
)
456 // Extended RW delay (+1)
457 Dtr3
.field
.tRWSR
= TCL
- 5 + 1;
460 Dtr3
.field
.tWRSR
= 4 + WL
+ TWTR
- 11;
462 if (mrc_params
->ddr_speed
== DDRFREQ_800
)
464 Dtr3
.field
.tXP
= MMAX(0, 1 - Dtr1
.field
.tCMD
);
468 Dtr3
.field
.tXP
= MMAX(0, 2 - Dtr1
.field
.tCMD
);
471 Dtr4
.field
.WRODTSTRT
= Dtr1
.field
.tCMD
;
472 Dtr4
.field
.WRODTSTOP
= Dtr1
.field
.tCMD
;
473 Dtr4
.field
.RDODTSTRT
= Dtr1
.field
.tCMD
+ Dtr0
.field
.tCL
- Dtr1
.field
.tWCL
+ 2; //Convert from WL (DRAM clocks) to VLV indx
474 Dtr4
.field
.RDODTSTOP
= Dtr1
.field
.tCMD
+ Dtr0
.field
.tCL
- Dtr1
.field
.tWCL
+ 2;
475 Dtr4
.field
.TRGSTRDIS
= 0;
476 Dtr4
.field
.ODTDIS
= 0;
478 isbW32m(MCU
, DTR0
, Dtr0
.raw
);
479 isbW32m(MCU
, DTR1
, Dtr1
.raw
);
480 isbW32m(MCU
, DTR2
, Dtr2
.raw
);
481 isbW32m(MCU
, DTR3
, Dtr3
.raw
);
482 isbW32m(MCU
, DTR4
, Dtr4
.raw
);
488 // POST_CODE[major] == 0x03
490 // This function performs some initialisation on the DDRIO unit.
491 // This function is dependent on BOARD_ID, DDR_SPEED, and CHANNEL_ENABLES.
492 static void ddrphy_init(MRCParams_t
*mrc_params
)
494 uint32_t tempD
; // temporary DWORD
495 uint8_t channel_i
; // channel counter
496 uint8_t rank_i
; // rank counter
497 uint8_t bl_grp_i
; // byte lane group counter (2 BLs per module)
499 uint8_t bl_divisor
= /*(mrc_params->channel_width==x16)?2:*/1; // byte lane divisor
500 uint8_t speed
= mrc_params
->ddr_speed
& (BIT1
|BIT0
); // For DDR3 --> 0 == 800, 1 == 1066, 2 == 1333
506 tCAS
= mrc_params
->params
.tCL
;
507 tCWL
= 5 + mrc_params
->ddr_speed
;
509 // ddrphy_init starts
510 post_code(0x03, 0x00);
513 // Make sure IOBUFACT is deasserted before initialising the DDR PHY.
515 // Make sure WRPTRENABLE is deasserted before initialising the DDR PHY.
516 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
517 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
518 // Deassert DDRPHY Initialisation Complete
519 isbM32m(DDRPHY
, (CMDPMCONFIG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ~BIT20
, BIT20
); // SPID_INIT_COMPLETE=0
521 isbM32m(DDRPHY
, (CMDCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ~BIT2
, BIT2
); // IOBUFACTRST_N=0
523 isbM32m(DDRPHY
, (CMDPTRREG
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ~BIT0
, BIT0
); // WRPTRENABLE=0
524 } // if channel enabled
528 isbM32m(DDRPHY
, MASTERRSTN
, 0, BIT0
); // PHYRSTN=0
530 // Initialise DQ01,DQ23,CMD,CLK-CTL,COMP modules
532 post_code(0x03, 0x10);
533 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
534 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
537 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
538 isbM32m(DDRPHY
, (DQOBSCKEBBCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((bl_grp_i
) ? (0x00) : (BIT22
)), (BIT22
)); // Analog MUX select - IO2xCLKSEL
541 switch (mrc_params
->rd_odt_value
) {
542 case 1: tempD
= 0x3; break; // 60 ohm
543 case 2: tempD
= 0x3; break; // 120 ohm
544 case 3: tempD
= 0x3; break; // 180 ohm
545 default: tempD
= 0x3; break; // 120 ohm
547 isbM32m(DDRPHY
, (B0RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (tempD
<<5), (BIT6
|BIT5
)); // ODT strength
548 isbM32m(DDRPHY
, (B1RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (tempD
<<5), (BIT6
|BIT5
)); // ODT strength
549 // Dynamic ODT/DIFFAMP
550 tempD
= (((tCAS
)<<24)|((tCAS
)<<16)|((tCAS
)<<8)|((tCAS
)<<0));
552 case 0: tempD
-= 0x01010101; break; // 800
553 case 1: tempD
-= 0x02020202; break; // 1066
554 case 2: tempD
-= 0x03030303; break; // 1333
555 case 3: tempD
-= 0x04040404; break; // 1600
557 isbM32m(DDRPHY
, (B01LATCTL1
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Launch Time: ODT, DIFFAMP, ODT, DIFFAMP
560 case 0: tempD
= ((0x06<<16)|(0x07<<8)); break; // 800
561 case 1: tempD
= ((0x07<<16)|(0x08<<8)); break; // 1066
562 case 2: tempD
= ((0x09<<16)|(0x0A<<8)); break; // 1333
563 case 3: tempD
= ((0x0A<<16)|(0x0B<<8)); break; // 1600
565 isbM32m(DDRPHY
, (B0ONDURCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
))); // On Duration: ODT, DIFFAMP
566 isbM32m(DDRPHY
, (B1ONDURCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
))); // On Duration: ODT, DIFFAMP
568 switch (mrc_params
->rd_odt_value
) {
569 case 0: tempD
= ((0x3F<<16)|(0x3f<<10)); break; // override DIFFAMP=on, ODT=off
570 default: tempD
= ((0x3F<<16)|(0x2A<<10)); break; // override DIFFAMP=on, ODT=on
572 isbM32m(DDRPHY
, (B0OVRCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
))); // Override: DIFFAMP, ODT
573 isbM32m(DDRPHY
, (B1OVRCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), tempD
, ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
))); // Override: DIFFAMP, ODT
576 // 1xCLK Domain Timings: tEDP,RCVEN,WDQS (PO)
577 isbM32m(DDRPHY
, (B0LATCTL0
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (((tCAS
+7)<<16)|((tCAS
-4)<<8)|((tCWL
-2)<<0)), ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // 1xCLK: tEDP, RCVEN, WDQS
578 isbM32m(DDRPHY
, (B1LATCTL0
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (((tCAS
+7)<<16)|((tCAS
-4)<<8)|((tCWL
-2)<<0)), ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // 1xCLK: tEDP, RCVEN, WDQS
581 isbM32m(DDRPHY
, (B0RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x0<<7)|(0x0<<0)), (BIT7
|BIT0
)); // AFE Bypass, RCVEN DIFFAMP
582 isbM32m(DDRPHY
, (B1RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x0<<7)|(0x0<<0)), (BIT7
|BIT0
)); // AFE Bypass, RCVEN DIFFAMP
584 isbM32m(DDRPHY
, (DQCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT16
), (BIT16
)); // 0 means driving DQ during DQS-preamble
585 isbM32m(DDRPHY
, (B01PTRCTL1
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT8
), (BIT8
)); // WR_LVL mode disable
587 isbM32m(DDRPHY
, (B0VREFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x03<<2)|(0x0<<1)|(0x0<<0)), ((BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
)|BIT1
|BIT0
)); // Internal Vref Code, Enable#, Ext_or_Int (1=Ext)
588 isbM32m(DDRPHY
, (B1VREFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((0x03<<2)|(0x0<<1)|(0x0<<0)), ((BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
)|BIT1
|BIT0
)); // Internal Vref Code, Enable#, Ext_or_Int (1=Ext)
589 isbM32m(DDRPHY
, (B0RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (0), (BIT4
)); // Per-Bit De-Skew Enable
590 isbM32m(DDRPHY
, (B1RXIOBUFCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (0), (BIT4
)); // Per-Bit De-Skew Enable
593 isbM32m(DDRPHY
, (CMDOBSCKEBBCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), 0, (BIT23
));
595 // Enable tristate control of cmd/address bus
596 isbM32m(DDRPHY
, (CMDCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), 0, (BIT1
|BIT0
));
599 isbM32m(DDRPHY
, (CMDRCOMPODT
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x03<<5)|(0x03<<0)), ((BIT9
|BIT8
|BIT7
|BIT6
|BIT5
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
)));
601 // CMDPM* registers must be programmed in this order...
602 isbM32m(DDRPHY
, (CMDPMDLYREG4
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFFFU
<<16)|(0xFFFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
|BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
|BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Turn On Delays: SFR (regulator), MPLL
603 isbM32m(DDRPHY
, (CMDPMDLYREG3
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFU
<<28)|(0xFFF<<16)|(0xF<<12)|(0x616<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
)|(BIT27
|BIT26
|BIT25
|BIT24
|BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
|BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Delays: ASSERT_IOBUFACT_to_ALLON0_for_PM_MSG_3, VREG (MDLL) Turn On, ALLON0_to_DEASSERT_IOBUFACT_for_PM_MSG_gt0, MDLL Turn On
604 isbM32m(DDRPHY
, (CMDPMDLYREG2
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFU
<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // MPLL Divider Reset Delays
605 isbM32m(DDRPHY
, (CMDPMDLYREG1
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFU
<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Turn Off Delays: VREG, Staggered MDLL, MDLL, PI
606 isbM32m(DDRPHY
, (CMDPMDLYREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xFFU
<<24)|(0xFF<<16)|(0xFF<<8)|(0xFF<<0)), ((BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT23
|BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // Turn On Delays: MPLL, Staggered MDLL, PI, IOBUFACT
607 isbM32m(DDRPHY
, (CMDPMCONFIG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x6<<8)|BIT6
|(0x4<<0)), (BIT31
|BIT30
|BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
|BIT23
|BIT22
|BIT21
|(BIT11
|BIT10
|BIT9
|BIT8
)|BIT6
|(BIT3
|BIT2
|BIT1
|BIT0
))); // Allow PUnit signals
608 isbM32m(DDRPHY
, (CMDMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x3<<4)|(0x7<<0)), ((BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // DLL_VREG Bias Trim, VREF Tuning for DLL_VREG
610 isbM32m(DDRPHY
, (CCOBSCKEBBCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), 0, (BIT24
)); // CLKEBB
611 isbM32m(DDRPHY
, (CCCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x0<<16)|(0x0<<12)|(0x0<<8)|(0xF<<4)|BIT0
), ((BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|BIT0
)); // Buffer Enable: CS,CKE,ODT,CLK
612 isbM32m(DDRPHY
, (CCRCOMPODT
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x03<<8)|(0x03<<0)), ((BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT RCOMP
613 isbM32m(DDRPHY
, (CCMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x3<<4)|(0x7<<0)), ((BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // DLL_VREG Bias Trim, VREF Tuning for DLL_VREG
615 // COMP (RON channel specific)
616 // - DQ/DQS/DM RON: 32 Ohm
617 // - CTRL/CMD RON: 27 Ohm
619 isbM32m(DDRPHY
, (DQVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x08<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
620 isbM32m(DDRPHY
, (CMDVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0C<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
621 isbM32m(DDRPHY
, (CLKVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0F<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
622 isbM32m(DDRPHY
, (DQSVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x08<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
623 isbM32m(DDRPHY
, (CTLVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0C<<24)|(0x03<<16)), ((BIT29
|BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP Vref PU/PD
625 // DQS Swapped Input Enable
626 isbM32m(DDRPHY
, (COMPEN1CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT19
|BIT17
), ((BIT31
|BIT30
)|BIT19
|BIT17
|(BIT15
|BIT14
)));
628 // ODT VREF = 1.5 x 274/360+274 = 0.65V (code of ~50)
629 isbM32m(DDRPHY
, (DQVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x32<<8)|(0x03<<0)), ((BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT Vref PU/PD
630 isbM32m(DDRPHY
, (DQSVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x32<<8)|(0x03<<0)), ((BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT Vref PU/PD
631 isbM32m(DDRPHY
, (CLKVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x0E<<8)|(0x05<<0)), ((BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODT Vref PU/PD
633 // Slew rate settings are frequency specific, numbers below are for 800Mhz (speed == 0)
634 // - DQ/DQS/DM/CLK SR: 4V/ns,
635 // - CTRL/CMD SR: 1.5V/ns
636 tempD
= (0x0E<<16)|(0x0E<<12)|(0x08<<8)|(0x0B<<4)|(0x0B<<0);
637 isbM32m(DDRPHY
, (DLYSELCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (tempD
), ((BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // DCOMP Delay Select: CTL,CMD,CLK,DQS,DQ
638 isbM32m(DDRPHY
, (TCOVREFCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x05<<16)|(0x05<<8)|(0x05<<0)), ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // TCO Vref CLK,DQS,DQ
639 isbM32m(DDRPHY
, (CCBUFODTCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ((0x03<<8)|(0x03<<0)), ((BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT4
|BIT3
|BIT2
|BIT1
|BIT0
))); // ODTCOMP CMD/CTL PU/PD
640 isbM32m(DDRPHY
, (COMPEN0CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (0), ((BIT31
|BIT30
)|BIT8
)); // COMP
644 isbM32m(DDRPHY
, (DQDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
645 isbM32m(DDRPHY
, (DQDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
646 isbM32m(DDRPHY
, (DQDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
647 isbM32m(DDRPHY
, (DQDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
648 isbM32m(DDRPHY
, (DQODTPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PU
649 isbM32m(DDRPHY
, (DQODTPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PD
650 isbM32m(DDRPHY
, (DQTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PU
651 isbM32m(DDRPHY
, (DQTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PD
652 // DQS COMP Overrides
653 isbM32m(DDRPHY
, (DQSDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
654 isbM32m(DDRPHY
, (DQSDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
655 isbM32m(DDRPHY
, (DQSDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
656 isbM32m(DDRPHY
, (DQSDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x10<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
657 isbM32m(DDRPHY
, (DQSODTPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PU
658 isbM32m(DDRPHY
, (DQSODTPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PD
659 isbM32m(DDRPHY
, (DQSTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PU
660 isbM32m(DDRPHY
, (DQSTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PD
661 // CLK COMP Overrides
662 isbM32m(DDRPHY
, (CLKDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0C<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
663 isbM32m(DDRPHY
, (CLKDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0C<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
664 isbM32m(DDRPHY
, (CLKDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x07<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
665 isbM32m(DDRPHY
, (CLKDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x07<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
666 isbM32m(DDRPHY
, (CLKODTPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PU
667 isbM32m(DDRPHY
, (CLKODTPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0B<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODTCOMP PD
668 isbM32m(DDRPHY
, (CLKTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PU
669 isbM32m(DDRPHY
, (CLKTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
), (BIT31
)); // TCOCOMP PD
670 // CMD COMP Overrides
671 isbM32m(DDRPHY
, (CMDDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
672 isbM32m(DDRPHY
, (CMDDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
673 isbM32m(DDRPHY
, (CMDDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
674 isbM32m(DDRPHY
, (CMDDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
675 // CTL COMP Overrides
676 isbM32m(DDRPHY
, (CTLDRVPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PU
677 isbM32m(DDRPHY
, (CTLDRVPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0D<<16)), (BIT31
|(BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // RCOMP PD
678 isbM32m(DDRPHY
, (CTLDLYPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PU
679 isbM32m(DDRPHY
, (CTLDLYPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x0A<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // DCOMP PD
681 // DQ TCOCOMP Overrides
682 isbM32m(DDRPHY
, (DQTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PU
683 isbM32m(DDRPHY
, (DQTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PD
684 // DQS TCOCOMP Overrides
685 isbM32m(DDRPHY
, (DQSTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PU
686 isbM32m(DDRPHY
, (DQSTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PD
687 // CLK TCOCOMP Overrides
688 isbM32m(DDRPHY
, (CLKTCOPUCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PU
689 isbM32m(DDRPHY
, (CLKTCOPDCTLCH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), (BIT31
|(0x1F<<16)), (BIT31
|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // TCOCOMP PD
690 #endif // BACKUP_COMPS
691 // program STATIC delays
693 set_wcmd(channel_i
, ddr_wcmd
[PLATFORM_ID
]);
695 set_wcmd(channel_i
, ddr_wclk
[PLATFORM_ID
] + HALF_CLK
);
696 #endif // BACKUP_WCMD
697 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++) {
698 if (mrc_params
->rank_enables
& (1<<rank_i
)) {
699 set_wclk(channel_i
, rank_i
, ddr_wclk
[PLATFORM_ID
]);
701 set_wctl(channel_i
, rank_i
, ddr_wctl
[PLATFORM_ID
]);
703 set_wctl(channel_i
, rank_i
, ddr_wclk
[PLATFORM_ID
] + HALF_CLK
);
704 #endif // BACKUP_WCTL
709 // COMP (non channel specific)
710 //isbM32m(DDRPHY, (), (), ());
711 isbM32m(DDRPHY
, (DQANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
712 isbM32m(DDRPHY
, (DQANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
713 isbM32m(DDRPHY
, (CMDANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
714 isbM32m(DDRPHY
, (CMDANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
715 isbM32m(DDRPHY
, (CLKANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
716 isbM32m(DDRPHY
, (CLKANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
717 isbM32m(DDRPHY
, (DQSANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
718 isbM32m(DDRPHY
, (DQSANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
719 isbM32m(DDRPHY
, (CTLANADRVPUCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PU Enable
720 isbM32m(DDRPHY
, (CTLANADRVPDCTL
), (BIT30
), (BIT30
)); // RCOMP: Dither PD Enable
721 isbM32m(DDRPHY
, (DQANAODTPUCTL
), (BIT30
), (BIT30
)); // ODT: Dither PU Enable
722 isbM32m(DDRPHY
, (DQANAODTPDCTL
), (BIT30
), (BIT30
)); // ODT: Dither PD Enable
723 isbM32m(DDRPHY
, (CLKANAODTPUCTL
), (BIT30
), (BIT30
)); // ODT: Dither PU Enable
724 isbM32m(DDRPHY
, (CLKANAODTPDCTL
), (BIT30
), (BIT30
)); // ODT: Dither PD Enable
725 isbM32m(DDRPHY
, (DQSANAODTPUCTL
), (BIT30
), (BIT30
)); // ODT: Dither PU Enable
726 isbM32m(DDRPHY
, (DQSANAODTPDCTL
), (BIT30
), (BIT30
)); // ODT: Dither PD Enable
727 isbM32m(DDRPHY
, (DQANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
728 isbM32m(DDRPHY
, (DQANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
729 isbM32m(DDRPHY
, (CMDANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
730 isbM32m(DDRPHY
, (CMDANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
731 isbM32m(DDRPHY
, (CLKANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
732 isbM32m(DDRPHY
, (CLKANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
733 isbM32m(DDRPHY
, (DQSANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
734 isbM32m(DDRPHY
, (DQSANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
735 isbM32m(DDRPHY
, (CTLANADLYPUCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PU Enable
736 isbM32m(DDRPHY
, (CTLANADLYPDCTL
), (BIT30
), (BIT30
)); // DCOMP: Dither PD Enable
737 isbM32m(DDRPHY
, (DQANATCOPUCTL
), (BIT30
), (BIT30
)); // TCO: Dither PU Enable
738 isbM32m(DDRPHY
, (DQANATCOPDCTL
), (BIT30
), (BIT30
)); // TCO: Dither PD Enable
739 isbM32m(DDRPHY
, (CLKANATCOPUCTL
), (BIT30
), (BIT30
)); // TCO: Dither PU Enable
740 isbM32m(DDRPHY
, (CLKANATCOPDCTL
), (BIT30
), (BIT30
)); // TCO: Dither PD Enable
741 isbM32m(DDRPHY
, (DQSANATCOPUCTL
), (BIT30
), (BIT30
)); // TCO: Dither PU Enable
742 isbM32m(DDRPHY
, (DQSANATCOPDCTL
), (BIT30
), (BIT30
)); // TCO: Dither PD Enable
743 isbM32m(DDRPHY
, (TCOCNTCTRL
), (0x1<<0), (BIT1
|BIT0
)); // TCOCOMP: Pulse Count
744 isbM32m(DDRPHY
, (CHNLBUFSTATIC
), ((0x03<<24)|(0x03<<16)), ((BIT28
|BIT27
|BIT26
|BIT25
|BIT24
)|(BIT20
|BIT19
|BIT18
|BIT17
|BIT16
))); // ODT: CMD/CTL PD/PU
745 isbM32m(DDRPHY
, (MSCNTR
), (0x64<<0), (BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
|BIT1
|BIT0
)); // Set 1us counter
746 isbM32m(DDRPHY
, (LATCH1CTL
), (0x1<<28), (BIT30
|BIT29
|BIT28
)); // ???
748 // Release PHY from reset
749 isbM32m(DDRPHY
, MASTERRSTN
, BIT0
, BIT0
); // PHYRSTN=1
752 post_code(0x03, 0x11);
753 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
754 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
756 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
757 isbM32m(DDRPHY
, (DQMDLLCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT13
), (BIT13
)); // Enable VREG
761 isbM32m(DDRPHY
, (ECCMDLLCTL
), (BIT13
), (BIT13
)); // Enable VREG
764 isbM32m(DDRPHY
, (CMDMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT13
), (BIT13
)); // Enable VREG
767 isbM32m(DDRPHY
, (CCMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT13
), (BIT13
)); // Enable VREG
773 post_code(0x03, 0x12);
775 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
776 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
778 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
779 isbM32m(DDRPHY
, (DQMDLLCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT17
), (BIT17
)); // Enable MCDLL
783 isbM32m(DDRPHY
, (ECCMDLLCTL
), (BIT17
), (BIT17
)); // Enable MCDLL
786 isbM32m(DDRPHY
, (CMDMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT18
), (BIT18
)); // Enable MCDLL
789 isbM32m(DDRPHY
, (CCMDLLCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (BIT18
), (BIT18
)); // Enable MCDLL
795 post_code(0x03, 0x13);
797 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
798 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
800 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
801 #ifdef FORCE_16BIT_DDRIO
802 tempD
= ((bl_grp_i
) && (mrc_params
->channel_width
== x16
)) ? ((0x1<<12)|(0x1<<8)|(0xF<<4)|(0xF<<0)) : ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
804 tempD
= ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
806 isbM32m(DDRPHY
, (DQDLLTXCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (tempD
), ((BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // Enable TXDLL
808 isbM32m(DDRPHY
, (DQDLLRXCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT3
|BIT2
|BIT1
|BIT0
), (BIT3
|BIT2
|BIT1
|BIT0
)); // Enable RXDLL
810 isbM32m(DDRPHY
, (B0OVRCTL
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), (BIT3
|BIT2
|BIT1
|BIT0
), (BIT3
|BIT2
|BIT1
|BIT0
)); // Enable RXDLL Overrides BL0
814 tempD
= ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0));
815 isbM32m(DDRPHY
, (ECCDLLTXCTL
), (tempD
), ((BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // Enable TXDLL
819 isbM32m(DDRPHY
, (CMDDLLTXCTL
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0xF<<12)|(0xF<<8)|(0xF<<4)|(0xF<<0)), ((BIT15
|BIT14
|BIT13
|BIT12
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
)|(BIT3
|BIT2
|BIT1
|BIT0
))); // Enable TXDLL
826 post_code(0x03, 0x14);
827 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++) {
828 if (mrc_params
->channel_enables
& (1<<channel_i
)) {
829 // Host To Memory Clock Alignment (HMC) for 800/1066
830 for (bl_grp_i
=0; bl_grp_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_grp_i
++) {
831 isbM32m(DDRPHY
, (DQCLKALIGNREG2
+ (bl_grp_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), ((bl_grp_i
)?(0x3):(0x1)), (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
833 isbM32m(DDRPHY
, (ECCCLKALIGNREG2
+ (channel_i
* DDRIODQ_CH_OFFSET
)), 0x2, (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
834 isbM32m(DDRPHY
, (CMDCLKALIGNREG2
+ (channel_i
* DDRIODQ_CH_OFFSET
)), 0x0, (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
835 isbM32m(DDRPHY
, (CCCLKALIGNREG2
+ (channel_i
* DDRIODQ_CH_OFFSET
)), 0x2, (BIT3
|BIT2
|BIT1
|BIT0
)); // CLK_ALIGN_MOD_ID
836 isbM32m(DDRPHY
, (CMDCLKALIGNREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), (0x2<<4), (BIT5
|BIT4
)); // CLK_ALIGN_MODE
837 isbM32m(DDRPHY
, (CMDCLKALIGNREG1
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x18<<16)|(0x10<<8)|(0x8<<2)|(0x1<<0)), ((BIT22
|BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT14
|BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
|BIT3
|BIT2
)|(BIT1
|BIT0
))); // NUM_SAMPLES, MAX_SAMPLES, MACRO_PI_STEP, MICRO_PI_STEP
838 isbM32m(DDRPHY
, (CMDCLKALIGNREG2
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), ((0x10<<16)|(0x4<<8)|(0x2<<4)), ((BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT11
|BIT10
|BIT9
|BIT8
)|(BIT7
|BIT6
|BIT5
|BIT4
))); // ???, TOTAL_NUM_MODULES, FIRST_U_PARTITION
840 isbM32m(DDRPHY
, (CMDCLKALIGNREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT24
, BIT24
); // START_CLK_ALIGN=1
841 while (isbR32m(DDRPHY
, (CMDCLKALIGNREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
))) & BIT24
); // wait for START_CLK_ALIGN=0
844 // Set RD/WR Pointer Seperation & COUNTEN & FIFOPTREN
845 isbM32m(DDRPHY
, (CMDPTRREG
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT0
, BIT0
); // WRPTRENABLE=1
849 // comp is not working on simulator
852 isbM32m(DDRPHY
, (COMPEN0CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), BIT5
, BIT5
); // enable bypass for CLK buffer (PO)
853 isbM32m(DDRPHY
, (CMPCTRL
), (BIT0
), (BIT0
)); // Initial COMP Enable
854 while (isbR32m(DDRPHY
, (CMPCTRL
)) & BIT0
); // wait for Initial COMP Enable = 0
855 isbM32m(DDRPHY
, (COMPEN0CH0
+ (channel_i
* DDRCOMP_CH_OFFSET
)), ~BIT5
, BIT5
); // disable bypass for CLK buffer (PO)
860 isbM32m(DDRPHY
, (CMDCFGREG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT2
, BIT2
); // IOBUFACTRST_N=1
862 // DDRPHY initialisation complete
863 isbM32m(DDRPHY
, (CMDPMCONFIG0
+ (channel_i
* DDRIOCCC_CH_OFFSET
)), BIT20
, BIT20
); // SPID_INIT_COMPLETE=1
871 // jedec_init (aka PerformJedecInit):
872 // This function performs JEDEC initialisation on all enabled channels.
873 static void jedec_init(
874 MRCParams_t
*mrc_params
,
877 uint8_t TWR
, WL
, Rank
;
882 DramInitDDR3MRS0 mrs0Command
;
883 DramInitDDR3EMR1 emrs1Command
;
884 DramInitDDR3EMR2 emrs2Command
;
885 DramInitDDR3EMR3 emrs3Command
;
892 post_code(0x04, 0x00);
895 // Assert RESET# for 200us
896 isbM32m(DDRPHY
, CCDDR3RESETCTL
, BIT1
, (BIT8
|BIT1
)); // DDR3_RESET_SET=0, DDR3_RESET_RESET=1
898 // Don't waste time during simulation
903 isbM32m(DDRPHY
, CCDDR3RESETCTL
, BIT8
, (BIT8
|BIT1
)); // DDR3_RESET_SET=1, DDR3_RESET_RESET=0
905 DTR0reg
.raw
= isbR32m(MCU
, DTR0
);
907 // Set CKEVAL for populated ranks
908 // then send NOP to each rank (#4550197)
913 DRPbuffer
= isbR32m(MCU
, DRP
);
915 DRMCbuffer
= isbR32m(MCU
, DRMC
);
916 DRMCbuffer
&= 0xFFFFFFFC;
917 DRMCbuffer
|= (BIT4
| DRPbuffer
);
919 isbW32m(MCU
, DRMC
, DRMCbuffer
);
921 for (Rank
= 0; Rank
< NUM_RANKS
; Rank
++)
923 // Skip to next populated rank
924 if ((mrc_params
->rank_enables
& (1 << Rank
)) == 0)
929 dram_init_command(DCMD_NOP(Rank
));
932 isbW32m(MCU
, DRMC
, DRMC_DEFAULT
);
936 // BIT[15:11] --> Always "0"
937 // BIT[10:09] --> Rtt_WR: want "Dynamic ODT Off" (0)
938 // BIT[08] --> Always "0"
939 // BIT[07] --> SRT: use sr_temp_range
940 // BIT[06] --> ASR: want "Manual SR Reference" (0)
941 // BIT[05:03] --> CWL: use oem_tCWL
942 // BIT[02:00] --> PASR: want "Full Array" (0)
943 emrs2Command
.raw
= 0;
944 emrs2Command
.field
.bankAddress
= 2;
946 WL
= 5 + mrc_params
->ddr_speed
;
947 emrs2Command
.field
.CWL
= WL
- 5;
948 emrs2Command
.field
.SRT
= mrc_params
->sr_temp_range
;
951 // BIT[15:03] --> Always "0"
952 // BIT[02] --> MPR: want "Normal Operation" (0)
953 // BIT[01:00] --> MPR_Loc: want "Predefined Pattern" (0)
954 emrs3Command
.raw
= 0;
955 emrs3Command
.field
.bankAddress
= 3;
958 // BIT[15:13] --> Always "0"
959 // BIT[12:12] --> Qoff: want "Output Buffer Enabled" (0)
960 // BIT[11:11] --> TDQS: want "Disabled" (0)
961 // BIT[10:10] --> Always "0"
962 // BIT[09,06,02] --> Rtt_nom: use rtt_nom_value
963 // BIT[08] --> Always "0"
964 // BIT[07] --> WR_LVL: want "Disabled" (0)
965 // BIT[05,01] --> DIC: use ron_value
966 // BIT[04:03] --> AL: additive latency want "0" (0)
967 // BIT[00] --> DLL: want "Enable" (0)
969 // (BIT5|BIT1) set Ron value
970 // 00 --> RZQ/6 (40ohm)
971 // 01 --> RZQ/7 (34ohm)
974 // (BIT9|BIT6|BIT2) set Rtt_nom value
976 // 001 --> RZQ/4 ( 60ohm)
977 // 010 --> RZQ/2 (120ohm)
978 // 011 --> RZQ/6 ( 40ohm)
980 emrs1Command
.raw
= 0;
981 emrs1Command
.field
.bankAddress
= 1;
982 emrs1Command
.field
.dllEnabled
= 0; // 0 = Enable , 1 = Disable
984 if (mrc_params
->ron_value
== 0)
986 emrs1Command
.field
.DIC0
= DDR3_EMRS1_DIC_34
;
990 emrs1Command
.field
.DIC0
= DDR3_EMRS1_DIC_40
;
994 if (mrc_params
->rtt_nom_value
== 0)
996 emrs1Command
.raw
|= (DDR3_EMRS1_RTTNOM_40
<< 6);
998 else if (mrc_params
->rtt_nom_value
== 1)
1000 emrs1Command
.raw
|= (DDR3_EMRS1_RTTNOM_60
<< 6);
1002 else if (mrc_params
->rtt_nom_value
== 2)
1004 emrs1Command
.raw
|= (DDR3_EMRS1_RTTNOM_120
<< 6);
1007 // save MRS1 value (excluding control fields)
1008 mrc_params
->mrs1
= emrs1Command
.raw
>> 6;
1011 // BIT[15:13] --> Always "0"
1012 // BIT[12] --> PPD: for Quark (1)
1013 // BIT[11:09] --> WR: use oem_tWR
1014 // BIT[08] --> DLL: want "Reset" (1, self clearing)
1015 // BIT[07] --> MODE: want "Normal" (0)
1016 // BIT[06:04,02] --> CL: use oem_tCAS
1017 // BIT[03] --> RD_BURST_TYPE: want "Interleave" (1)
1018 // BIT[01:00] --> BL: want "8 Fixed" (0)
1029 // BIT[02:02] "0" if oem_tCAS <= 11 (1866?)
1030 // BIT[06:04] use oem_tCAS-4
1031 mrs0Command
.raw
= 0;
1032 mrs0Command
.field
.bankAddress
= 0;
1033 mrs0Command
.field
.dllReset
= 1;
1034 mrs0Command
.field
.BL
= 0;
1035 mrs0Command
.field
.PPD
= 1;
1036 mrs0Command
.field
.casLatency
= DTR0reg
.field
.tCL
+ 1;
1038 TCK
= tCK
[mrc_params
->ddr_speed
];
1039 TWR
= MCEIL(15000, TCK
); // Per JEDEC: tWR=15000ps DDR2/3 from 800-1600
1040 mrs0Command
.field
.writeRecovery
= TWR
- 4;
1042 for (Rank
= 0; Rank
< NUM_RANKS
; Rank
++)
1044 // Skip to next populated rank
1045 if ((mrc_params
->rank_enables
& (1 << Rank
)) == 0)
1050 emrs2Command
.field
.rankSelect
= Rank
;
1051 dram_init_command(emrs2Command
.raw
);
1053 emrs3Command
.field
.rankSelect
= Rank
;
1054 dram_init_command(emrs3Command
.raw
);
1056 emrs1Command
.field
.rankSelect
= Rank
;
1057 dram_init_command(emrs1Command
.raw
);
1059 mrs0Command
.field
.rankSelect
= Rank
;
1060 dram_init_command(mrs0Command
.raw
);
1062 dram_init_command(DCMD_ZQCL(Rank
));
1070 // POST_CODE[major] == 0x05
1072 // This function will perform our RCVEN Calibration Algorithm.
1073 // We will only use the 2xCLK domain timings to perform RCVEN Calibration.
1074 // All byte lanes will be calibrated "simultaneously" per channel per rank.
1075 static void rcvn_cal(
1076 MRCParams_t
*mrc_params
)
1078 uint8_t channel_i
; // channel counter
1079 uint8_t rank_i
; // rank counter
1080 uint8_t bl_i
; // byte lane counter
1081 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1084 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1086 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1087 #endif // BACKUP_RCVN
1088 #endif // R2R_SHARING
1092 uint32_t tempD
; // temporary DWORD
1093 uint32_t delay
[NUM_BYTE_LANES
]; // absolute PI value to be programmed on the byte lane
1096 #endif // BACKUP_RCVN
1100 post_code(0x05, 0x00);
1103 // need separate burst to sample DQS preamble
1104 dtr1
.raw
= dtr1save
.raw
= isbR32m(MCU
, DTR1
);
1105 dtr1
.field
.tCCD
= 1;
1106 isbW32m(MCU
, DTR1
, dtr1
.raw
);
1110 // need to set "final_delay[][]" elements to "0"
1111 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1112 #endif // R2R_SHARING
1114 // loop through each enabled channel
1115 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1117 if (mrc_params
->channel_enables
& (1 << channel_i
))
1119 // perform RCVEN Calibration on a per rank basis
1120 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1122 if (mrc_params
->rank_enables
& (1 << rank_i
))
1124 // POST_CODE here indicates the current channel and rank being calibrated
1125 post_code(0x05, (0x10 + ((channel_i
<< 4) | rank_i
)));
1128 // set hard-coded timing values
1129 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1131 set_rcvn(channel_i
, rank_i
, bl_i
, ddr_rcvn
[PLATFORM_ID
]);
1135 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
+= 2)
1137 isbM32m(DDRPHY
, (B01PTRCTL1
+ ((bl_i
>> 1) * DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), 0,
1138 BIT8
); // 0 is enabled
1140 // initialise the starting delay to 128 PI (tCAS +1 CLK)
1141 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1144 // Original value was late at the end of DQS sequence
1145 delay
[bl_i
] = 3 * FULL_CLK
;
1147 delay
[bl_i
] = (4 + 1) * FULL_CLK
; // 1x CLK domain timing is tCAS-4
1150 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1153 // now find the rising edge
1154 find_rising_edge(mrc_params
, delay
, channel_i
, rank_i
, true);
1155 // Now increase delay by 32 PI (1/4 CLK) to place in center of high pulse.
1156 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1158 delay
[bl_i
] += QRTR_CLK
;
1159 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1161 // Now decrement delay by 128 PI (1 CLK) until we sample a "0"
1165 tempD
= sample_dqs(mrc_params
, channel_i
, rank_i
, true);
1166 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1168 if (tempD
& (1 << bl_i
))
1170 if (delay
[bl_i
] >= FULL_CLK
)
1172 delay
[bl_i
] -= FULL_CLK
;
1173 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1178 training_message(channel_i
, rank_i
, bl_i
);
1179 post_code(0xEE, 0x50);
1183 } while (tempD
& 0xFF);
1186 // increment "num_ranks_enabled"
1187 num_ranks_enabled
++;
1188 // Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble.
1189 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1191 delay
[bl_i
] += QRTR_CLK
;
1192 // add "delay[]" values to "final_delay[][]" for rolling average
1193 final_delay
[channel_i
][bl_i
] += delay
[bl_i
];
1194 // set timing based on rolling average values
1195 set_rcvn(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
1198 // Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble.
1199 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1201 delay
[bl_i
] += QRTR_CLK
;
1202 set_rcvn(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1205 #endif // R2R_SHARING
1208 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
+= 2)
1210 isbM32m(DDRPHY
, (B01PTRCTL1
+ ((bl_i
>> 1) * DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)), BIT8
,
1211 BIT8
); // 1 is disabled
1214 #endif // BACKUP_RCVN
1216 } // if rank is enabled
1218 } // if channel is enabled
1223 isbW32m(MCU
, DTR1
, dtr1save
.raw
);
1227 if (mrc_params
->tune_rcvn
)
1229 uint32_t rcven
, val
;
1230 uint32_t rdcmd2rcven
;
1233 Formulas for RDCMD2DATAVALID & DIFFAMP dynamic timings
1235 1. Set after RCVEN training
1237 //Tune RDCMD2DATAVALID
1240 MAX OF 2 RANKS : round up (rdcmd2rcven (rcven 1x) + 2x x 2 + PI/128) + 5
1242 //rdcmd2rcven x80/84[12:8]
1243 //rcven 2x x70[23:20] & [11:8]
1245 //Tune DIFFAMP Timings
1247 //diffampen launch x88[20:16] & [4:0] -- B01LATCTL1
1248 MIN OF 2 RANKS : round down (rcven 1x + 2x x 2 + PI/128) - 1
1250 //diffampen length x8C/x90 [13:8] -- B0ONDURCTL B1ONDURCTL
1251 MAX OF 2 RANKS : roundup (rcven 1x + 2x x 2 + PI/128) + 5
1254 2. need to do a fiforst after settings these values
1257 DPF(D_INFO
, "BEFORE\n");
1258 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0LATCTL0
));
1259 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B01LATCTL1
));
1260 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0ONDURCTL
));
1262 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1LATCTL0
));
1263 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1ONDURCTL
));
1265 rcven
= get_rcvn(0, 0, 0) / 128;
1266 rdcmd2rcven
= (isbR32m(DDRPHY
, B0LATCTL0
) >> 8) & 0x1F;
1267 val
= rdcmd2rcven
+ rcven
+ 6;
1268 isbM32m(DDRPHY
, B0LATCTL0
, val
<< 16, (BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
));
1270 val
= rdcmd2rcven
+ rcven
- 1;
1271 isbM32m(DDRPHY
, B01LATCTL1
, val
<< 0, (BIT4
|BIT3
|BIT2
|BIT1
|BIT0
));
1273 val
= rdcmd2rcven
+ rcven
+ 5;
1274 isbM32m(DDRPHY
, B0ONDURCTL
, val
<< 8, (BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
));
1276 rcven
= get_rcvn(0, 0, 1) / 128;
1277 rdcmd2rcven
= (isbR32m(DDRPHY
, B1LATCTL0
) >> 8) & 0x1F;
1278 val
= rdcmd2rcven
+ rcven
+ 6;
1279 isbM32m(DDRPHY
, B1LATCTL0
, val
<< 16, (BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
));
1281 val
= rdcmd2rcven
+ rcven
- 1;
1282 isbM32m(DDRPHY
, B01LATCTL1
, val
<< 16, (BIT20
|BIT19
|BIT18
|BIT17
|BIT16
));
1284 val
= rdcmd2rcven
+ rcven
+ 5;
1285 isbM32m(DDRPHY
, B1ONDURCTL
, val
<< 8, (BIT13
|BIT12
|BIT11
|BIT10
|BIT9
|BIT8
));
1287 DPF(D_INFO
, "AFTER\n");
1288 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0LATCTL0
));
1289 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B01LATCTL1
));
1290 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B0ONDURCTL
));
1292 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1LATCTL0
));
1293 DPF(D_INFO
, "### %x\n", isbR32m(DDRPHY
, B1ONDURCTL
));
1295 DPF(D_INFO
, "\nPress a key\n");
1299 isbM32m(DDRPHY
, B01PTRCTL1
, 0, BIT8
); // 0 is enabled
1301 isbM32m(DDRPHY
, B01PTRCTL1
, BIT8
, BIT8
); // 1 is disabled
1309 // Check memory executing write/read/verify of many data patterns
1310 // at the specified address. Bits in the result indicate failure
1311 // on specific byte lane.
1312 static uint32_t check_bls_ex(
1313 MRCParams_t
*mrc_params
,
1317 uint8_t first_run
= 0;
1319 if (mrc_params
->hte_setup
)
1321 mrc_params
->hte_setup
= 0;
1324 select_hte(mrc_params
);
1327 result
= WriteStressBitLanesHTE(mrc_params
, address
, first_run
);
1329 DPF(D_TRN
, "check_bls_ex result is %x\n", result
);
1333 // Check memory executing simple write/read/verify at
1334 // the specified address. Bits in the result indicate failure
1335 // on specific byte lane.
1336 static uint32_t check_rw_coarse(
1337 MRCParams_t
*mrc_params
,
1340 uint32_t result
= 0;
1341 uint8_t first_run
= 0;
1343 if (mrc_params
->hte_setup
)
1345 mrc_params
->hte_setup
= 0;
1348 select_hte(mrc_params
);
1351 result
= BasicWriteReadHTE(mrc_params
, address
, first_run
, WRITE_TRAIN
);
1353 DPF(D_TRN
, "check_rw_coarse result is %x\n", result
);
1358 // POST_CODE[major] == 0x06
1360 // This function will perform the Write Levelling algorithm (align WCLK and WDQS).
1361 // This algorithm will act on each rank in each channel separately.
1362 static void wr_level(
1363 MRCParams_t
*mrc_params
)
1365 uint8_t channel_i
; // channel counter
1366 uint8_t rank_i
; // rank counter
1367 uint8_t bl_i
; // byte lane counter
1368 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1371 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1373 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1374 #endif // BACKUP_WDQS
1375 #endif // R2R_SHARING
1379 bool all_edges_found
; // determines stop condition for CRS_WR_LVL
1380 uint32_t delay
[NUM_BYTE_LANES
]; // absolute PI value to be programmed on the byte lane
1381 // static makes it so the data is loaded in the heap once by shadow(), where
1382 // non-static copies the data onto the stack every time this function is called.
1384 uint32_t address
; // address to be checked during COARSE_WR_LVL
1387 #endif // BACKUP_WDQS
1392 post_code(0x06, 0x00);
1395 // need to set "final_delay[][]" elements to "0"
1396 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1397 #endif // R2R_SHARING
1398 // loop through each enabled channel
1399 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1401 if (mrc_params
->channel_enables
& (1 << channel_i
))
1403 // perform WRITE LEVELING algorithm on a per rank basis
1404 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1406 if (mrc_params
->rank_enables
& (1 << rank_i
))
1408 // POST_CODE here indicates the current rank and channel being calibrated
1409 post_code(0x06, (0x10 + ((channel_i
<< 4) | rank_i
)));
1412 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1414 set_wdqs(channel_i
, rank_i
, bl_i
, ddr_wdqs
[PLATFORM_ID
]);
1415 set_wdq(channel_i
, rank_i
, bl_i
, (ddr_wdqs
[PLATFORM_ID
] - QRTR_CLK
));
1419 { // Begin product specific code
1421 // perform a single PRECHARGE_ALL command to make DRAM state machine go to IDLE state
1422 dram_init_command(DCMD_PREA(rank_i
));
1424 // enable Write Levelling Mode (EMRS1 w/ Write Levelling Mode Enable)
1425 dram_init_command(DCMD_MRS1(rank_i
,0x0082));
1427 // set ODT DRAM Full Time Termination disable in MCU
1428 dtr4
.raw
= dtr4save
.raw
= isbR32m(MCU
, DTR4
);
1429 dtr4
.field
.ODTDIS
= 1;
1430 isbW32m(MCU
, DTR4
, dtr4
.raw
);
1432 for (bl_i
= 0; bl_i
< ((NUM_BYTE_LANES
/ bl_divisor
) / 2); bl_i
++)
1434 isbM32m(DDRPHY
, DQCTL
+ (DDRIODQ_BL_OFFSET
* bl_i
) + (DDRIODQ_CH_OFFSET
* channel_i
),
1435 (BIT28
| (0x1 << 8) | (0x1 << 6) | (0x1 << 4) | (0x1 << 2)),
1436 (BIT28
| (BIT9
|BIT8
) | (BIT7
|BIT6
) | (BIT5
|BIT4
) | (BIT3
|BIT2
))); // Enable Sandy Bridge Mode (WDQ Tri-State) & Ensure 5 WDQS pulses during Write Leveling
1439 isbM32m(DDRPHY
, CCDDR3RESETCTL
+ (DDRIOCCC_CH_OFFSET
* channel_i
), (BIT16
), (BIT16
)); // Write Leveling Mode enabled in IO
1440 } // End product specific code
1441 // Initialise the starting delay to WCLK
1442 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1444 { // Begin product specific code
1447 delay
[bl_i
] = get_wclk(channel_i
, rank_i
);
1448 } // End product specific code
1449 set_wdqs(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1451 // now find the rising edge
1452 find_rising_edge(mrc_params
, delay
, channel_i
, rank_i
, false);
1453 { // Begin product specific code
1454 // disable Write Levelling Mode
1455 isbM32m(DDRPHY
, CCDDR3RESETCTL
+ (DDRIOCCC_CH_OFFSET
* channel_i
), (0), (BIT16
)); // Write Leveling Mode disabled in IO
1457 for (bl_i
= 0; bl_i
< ((NUM_BYTE_LANES
/ bl_divisor
) / 2); bl_i
++)
1459 isbM32m(DDRPHY
, DQCTL
+ (DDRIODQ_BL_OFFSET
* bl_i
) + (DDRIODQ_CH_OFFSET
* channel_i
),
1460 ((0x1 << 8) | (0x1 << 6) | (0x1 << 4) | (0x1 << 2)),
1461 (BIT28
| (BIT9
|BIT8
) | (BIT7
|BIT6
) | (BIT5
|BIT4
) | (BIT3
|BIT2
))); // Disable Sandy Bridge Mode & Ensure 4 WDQS pulses during normal operation
1464 // restore original DTR4
1465 isbW32m(MCU
, DTR4
, dtr4save
.raw
);
1467 // restore original value (Write Levelling Mode Disable)
1468 dram_init_command(DCMD_MRS1(rank_i
, mrc_params
->mrs1
));
1470 // perform a single PRECHARGE_ALL command to make DRAM state machine go to IDLE state
1471 dram_init_command(DCMD_PREA(rank_i
));
1472 } // End product specific code
1474 post_code(0x06, (0x30 + ((channel_i
<< 4) | rank_i
)));
1476 // COARSE WRITE LEVEL:
1477 // check that we're on the correct clock edge
1479 // hte reconfiguration request
1480 mrc_params
->hte_setup
= 1;
1482 // start CRS_WR_LVL with WDQS = WDQS + 128 PI
1483 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1485 delay
[bl_i
] = get_wdqs(channel_i
, rank_i
, bl_i
) + FULL_CLK
;
1486 set_wdqs(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1487 // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
1488 set_wdq(channel_i
, rank_i
, bl_i
, (delay
[bl_i
] - QRTR_CLK
));
1491 // get an address in the targeted channel/rank
1492 address
= get_addr(mrc_params
, channel_i
, rank_i
);
1495 uint32_t coarse_result
= 0x00;
1496 uint32_t coarse_result_mask
= byte_lane_mask(mrc_params
);
1497 all_edges_found
= true; // assume pass
1500 // need restore memory to idle state as write can be in bad sync
1501 dram_init_command (DCMD_PREA(rank_i
));
1504 mrc_params
->hte_setup
= 1;
1505 coarse_result
= check_rw_coarse(mrc_params
, address
);
1507 // check for failures and margin the byte lane back 128 PI (1 CLK)
1508 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1510 if (coarse_result
& (coarse_result_mask
<< bl_i
))
1512 all_edges_found
= false;
1513 delay
[bl_i
] -= FULL_CLK
;
1514 set_wdqs(channel_i
, rank_i
, bl_i
, delay
[bl_i
]);
1515 // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
1516 set_wdq(channel_i
, rank_i
, bl_i
, (delay
[bl_i
] - QRTR_CLK
));
1520 } while (!all_edges_found
);
1523 // increment "num_ranks_enabled"
1524 num_ranks_enabled
++;
1525 // accumulate "final_delay[][]" values from "delay[]" values for rolling average
1526 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1528 final_delay
[channel_i
][bl_i
] += delay
[bl_i
];
1529 set_wdqs(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
1530 // program WDQ timings based on WDQS (WDQ = WDQS - 32 PI)
1531 set_wdq(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
) - QRTR_CLK
);
1533 #endif // R2R_SHARING
1534 #endif // BACKUP_WDQS
1536 } // if rank is enabled
1538 } // if channel is enabled
1546 // POST_CODE[major] == 0x07
1548 // This function will perform the READ TRAINING Algorithm on all channels/ranks/byte_lanes simultaneously to minimize execution time.
1549 // The idea here is to train the VREF and RDQS (and eventually RDQ) values to achieve maximum READ margins.
1550 // The algorithm will first determine the X coordinate (RDQS setting).
1551 // This is done by collapsing the VREF eye until we find a minimum required RDQS eye for VREF_MIN and VREF_MAX.
1552 // Then we take the averages of the RDQS eye at VREF_MIN and VREF_MAX, then average those; this will be the final X coordinate.
1553 // The algorithm will then determine the Y coordinate (VREF setting).
1554 // This is done by collapsing the RDQS eye until we find a minimum required VREF eye for RDQS_MIN and RDQS_MAX.
1555 // Then we take the averages of the VREF eye at RDQS_MIN and RDQS_MAX, then average those; this will be the final Y coordinate.
1556 // NOTE: this algorithm assumes the eye curves have a one-to-one relationship, meaning for each X the curve has only one Y and vice-a-versa.
1557 static void rd_train(
1558 MRCParams_t
*mrc_params
)
1561 #define MIN_RDQS_EYE 10 // in PI Codes
1562 #define MIN_VREF_EYE 10 // in VREF Codes
1563 #define RDQS_STEP 1 // how many RDQS codes to jump while margining
1564 #define VREF_STEP 1 // how many VREF codes to jump while margining
1565 #define VREF_MIN (0x00) // offset into "vref_codes[]" for minimum allowed VREF setting
1566 #define VREF_MAX (0x3F) // offset into "vref_codes[]" for maximum allowed VREF setting
1567 #define RDQS_MIN (0x00) // minimum RDQS delay value
1568 #define RDQS_MAX (0x3F) // maximum RDQS delay value
1569 #define B 0 // BOTTOM VREF
1570 #define T 1 // TOP VREF
1571 #define L 0 // LEFT RDQS
1572 #define R 1 // RIGHT RDQS
1574 uint8_t channel_i
; // channel counter
1575 uint8_t rank_i
; // rank counter
1576 uint8_t bl_i
; // byte lane counter
1577 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1580 uint8_t side_x
; // tracks LEFT/RIGHT approach vectors
1581 uint8_t side_y
; // tracks BOTTOM/TOP approach vectors
1582 uint8_t x_coordinate
[2/*side_x*/][2/*side_y*/][NUM_CHANNELS
][NUM_RANKS
][NUM_BYTE_LANES
]; // X coordinate data (passing RDQS values) for approach vectors
1583 uint8_t y_coordinate
[2/*side_x*/][2/*side_y*/][NUM_CHANNELS
][NUM_BYTE_LANES
]; // Y coordinate data (passing VREF values) for approach vectors
1584 uint8_t x_center
[NUM_CHANNELS
][NUM_RANKS
][NUM_BYTE_LANES
]; // centered X (RDQS)
1585 uint8_t y_center
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // centered Y (VREF)
1586 uint32_t address
; // target address for "check_bls_ex()"
1587 uint32_t result
; // result of "check_bls_ex()"
1588 uint32_t bl_mask
; // byte lane mask for "result" checking
1590 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1591 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1592 #endif // R2R_SHARING
1593 #endif // BACKUP_RDQS
1595 post_code(0x07, 0x00);
1600 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++)
1602 if (mrc_params
->channel_enables
& (1<<channel_i
))
1604 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++)
1606 if (mrc_params
->rank_enables
& (1<<rank_i
))
1608 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1610 set_rdqs(channel_i
, rank_i
, bl_i
, ddr_rdqs
[PLATFORM_ID
]);
1612 } // if rank is enabled
1614 } // if channel is enabled
1617 // initialise x/y_coordinate arrays
1618 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1620 if (mrc_params
->channel_enables
& (1 << channel_i
))
1622 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1624 if (mrc_params
->rank_enables
& (1 << rank_i
))
1626 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1629 x_coordinate
[L
][B
][channel_i
][rank_i
][bl_i
] = RDQS_MIN
;
1630 x_coordinate
[R
][B
][channel_i
][rank_i
][bl_i
] = RDQS_MAX
;
1631 x_coordinate
[L
][T
][channel_i
][rank_i
][bl_i
] = RDQS_MIN
;
1632 x_coordinate
[R
][T
][channel_i
][rank_i
][bl_i
] = RDQS_MAX
;
1634 y_coordinate
[L
][B
][channel_i
][bl_i
] = VREF_MIN
;
1635 y_coordinate
[R
][B
][channel_i
][bl_i
] = VREF_MIN
;
1636 y_coordinate
[L
][T
][channel_i
][bl_i
] = VREF_MAX
;
1637 y_coordinate
[R
][T
][channel_i
][bl_i
] = VREF_MAX
;
1639 } // if rank is enabled
1641 } // if channel is enabled
1644 // initialise other variables
1645 bl_mask
= byte_lane_mask(mrc_params
);
1646 address
= get_addr(mrc_params
, 0, 0);
1649 // need to set "final_delay[][]" elements to "0"
1650 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1651 #endif // R2R_SHARING
1653 // look for passing coordinates
1654 for (side_y
= B
; side_y
<= T
; side_y
++)
1656 for (side_x
= L
; side_x
<= R
; side_x
++)
1659 post_code(0x07, (0x10 + (side_y
* 2) + (side_x
)));
1661 // find passing values
1662 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1664 if (mrc_params
->channel_enables
& (0x1 << channel_i
))
1666 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1669 if (mrc_params
->rank_enables
& (0x1 << rank_i
))
1671 // set x/y_coordinate search starting settings
1672 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1674 set_rdqs(channel_i
, rank_i
, bl_i
, x_coordinate
[side_x
][side_y
][channel_i
][rank_i
][bl_i
]);
1675 set_vref(channel_i
, bl_i
, y_coordinate
[side_x
][side_y
][channel_i
][bl_i
]);
1677 // get an address in the target channel/rank
1678 address
= get_addr(mrc_params
, channel_i
, rank_i
);
1680 // request HTE reconfiguration
1681 mrc_params
->hte_setup
= 1;
1683 // test the settings
1687 // result[07:00] == failing byte lane (MAX 8)
1688 result
= check_bls_ex( mrc_params
, address
);
1690 // check for failures
1693 // at least 1 byte lane failed
1694 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1696 if (result
& (bl_mask
<< bl_i
))
1698 // adjust the RDQS values accordingly
1701 x_coordinate
[L
][side_y
][channel_i
][rank_i
][bl_i
] += RDQS_STEP
;
1705 x_coordinate
[R
][side_y
][channel_i
][rank_i
][bl_i
] -= RDQS_STEP
;
1707 // check that we haven't closed the RDQS_EYE too much
1708 if ((x_coordinate
[L
][side_y
][channel_i
][rank_i
][bl_i
] > (RDQS_MAX
- MIN_RDQS_EYE
)) ||
1709 (x_coordinate
[R
][side_y
][channel_i
][rank_i
][bl_i
] < (RDQS_MIN
+ MIN_RDQS_EYE
))
1711 (x_coordinate
[L
][side_y
][channel_i
][rank_i
][bl_i
]
1712 == x_coordinate
[R
][side_y
][channel_i
][rank_i
][bl_i
]))
1714 // not enough RDQS margin available at this VREF
1715 // update VREF values accordingly
1718 y_coordinate
[side_x
][B
][channel_i
][bl_i
] += VREF_STEP
;
1722 y_coordinate
[side_x
][T
][channel_i
][bl_i
] -= VREF_STEP
;
1724 // check that we haven't closed the VREF_EYE too much
1725 if ((y_coordinate
[side_x
][B
][channel_i
][bl_i
] > (VREF_MAX
- MIN_VREF_EYE
)) ||
1726 (y_coordinate
[side_x
][T
][channel_i
][bl_i
] < (VREF_MIN
+ MIN_VREF_EYE
)) ||
1727 (y_coordinate
[side_x
][B
][channel_i
][bl_i
] == y_coordinate
[side_x
][T
][channel_i
][bl_i
]))
1729 // VREF_EYE collapsed below MIN_VREF_EYE
1730 training_message(channel_i
, rank_i
, bl_i
);
1731 post_code(0xEE, (0x70 + (side_y
* 2) + (side_x
)));
1735 // update the VREF setting
1736 set_vref(channel_i
, bl_i
, y_coordinate
[side_x
][side_y
][channel_i
][bl_i
]);
1737 // reset the X coordinate to begin the search at the new VREF
1738 x_coordinate
[side_x
][side_y
][channel_i
][rank_i
][bl_i
] =
1739 (side_x
== L
) ? (RDQS_MIN
) : (RDQS_MAX
);
1742 // update the RDQS setting
1743 set_rdqs(channel_i
, rank_i
, bl_i
, x_coordinate
[side_x
][side_y
][channel_i
][rank_i
][bl_i
]);
1746 } // at least 1 byte lane failed
1747 } while (result
& 0xFF);
1748 } // if rank is enabled
1750 } // if channel is enabled
1755 post_code(0x07, 0x20);
1757 // find final RDQS (X coordinate) & final VREF (Y coordinate)
1758 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1760 if (mrc_params
->channel_enables
& (1 << channel_i
))
1762 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1764 if (mrc_params
->rank_enables
& (1 << rank_i
))
1766 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1772 DPF(D_INFO
, "RDQS T/B eye rank%d lane%d : %d-%d %d-%d\n", rank_i
, bl_i
,
1773 x_coordinate
[L
][T
][channel_i
][rank_i
][bl_i
],
1774 x_coordinate
[R
][T
][channel_i
][rank_i
][bl_i
],
1775 x_coordinate
[L
][B
][channel_i
][rank_i
][bl_i
],
1776 x_coordinate
[R
][B
][channel_i
][rank_i
][bl_i
]);
1778 tempD1
= (x_coordinate
[R
][T
][channel_i
][rank_i
][bl_i
] + x_coordinate
[L
][T
][channel_i
][rank_i
][bl_i
]) / 2; // average the TOP side LEFT & RIGHT values
1779 tempD2
= (x_coordinate
[R
][B
][channel_i
][rank_i
][bl_i
] + x_coordinate
[L
][B
][channel_i
][rank_i
][bl_i
]) / 2; // average the BOTTOM side LEFT & RIGHT values
1780 x_center
[channel_i
][rank_i
][bl_i
] = (uint8_t) ((tempD1
+ tempD2
) / 2); // average the above averages
1783 DPF(D_INFO
, "VREF R/L eye lane%d : %d-%d %d-%d\n", bl_i
,
1784 y_coordinate
[R
][B
][channel_i
][bl_i
],
1785 y_coordinate
[R
][T
][channel_i
][bl_i
],
1786 y_coordinate
[L
][B
][channel_i
][bl_i
],
1787 y_coordinate
[L
][T
][channel_i
][bl_i
]);
1789 tempD1
= (y_coordinate
[R
][T
][channel_i
][bl_i
] + y_coordinate
[R
][B
][channel_i
][bl_i
]) / 2; // average the RIGHT side TOP & BOTTOM values
1790 tempD2
= (y_coordinate
[L
][T
][channel_i
][bl_i
] + y_coordinate
[L
][B
][channel_i
][bl_i
]) / 2; // average the LEFT side TOP & BOTTOM values
1791 y_center
[channel_i
][bl_i
] = (uint8_t) ((tempD1
+ tempD2
) / 2); // average the above averages
1793 } // if rank is enabled
1795 } // if channel is enabled
1799 // perform an eye check
1800 for (side_y
=B
; side_y
<=T
; side_y
++)
1802 for (side_x
=L
; side_x
<=R
; side_x
++)
1805 post_code(0x07, (0x30 + (side_y
* 2) + (side_x
)));
1807 // update the settings for the eye check
1808 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++)
1810 if (mrc_params
->channel_enables
& (1<<channel_i
))
1812 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++)
1814 if (mrc_params
->rank_enables
& (1<<rank_i
))
1816 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1820 set_rdqs(channel_i
, rank_i
, bl_i
, (x_center
[channel_i
][rank_i
][bl_i
] - (MIN_RDQS_EYE
/ 2)));
1824 set_rdqs(channel_i
, rank_i
, bl_i
, (x_center
[channel_i
][rank_i
][bl_i
] + (MIN_RDQS_EYE
/ 2)));
1828 set_vref(channel_i
, bl_i
, (y_center
[channel_i
][bl_i
] - (MIN_VREF_EYE
/ 2)));
1832 set_vref(channel_i
, bl_i
, (y_center
[channel_i
][bl_i
] + (MIN_VREF_EYE
/ 2)));
1835 } // if rank is enabled
1837 } // if channel is enabled
1840 // request HTE reconfiguration
1841 mrc_params
->hte_setup
= 1;
1844 if (check_bls_ex( mrc_params
, address
) & 0xFF)
1846 // one or more byte lanes failed
1847 post_code(0xEE, (0x74 + (side_x
* 2) + (side_y
)));
1851 #endif // RX_EYE_CHECK
1853 post_code(0x07, 0x40);
1855 // set final placements
1856 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1858 if (mrc_params
->channel_enables
& (1 << channel_i
))
1860 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1862 if (mrc_params
->rank_enables
& (1 << rank_i
))
1865 // increment "num_ranks_enabled"
1866 num_ranks_enabled
++;
1867 #endif // R2R_SHARING
1868 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1872 final_delay
[channel_i
][bl_i
] += x_center
[channel_i
][rank_i
][bl_i
];
1873 set_rdqs(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
1875 set_rdqs(channel_i
, rank_i
, bl_i
, x_center
[channel_i
][rank_i
][bl_i
]);
1876 #endif // R2R_SHARING
1878 set_vref(channel_i
, bl_i
, y_center
[channel_i
][bl_i
]);
1880 } // if rank is enabled
1882 } // if channel is enabled
1884 #endif // BACKUP_RDQS
1890 // POST_CODE[major] == 0x08
1892 // This function will perform the WRITE TRAINING Algorithm on all channels/ranks/byte_lanes simultaneously to minimize execution time.
1893 // The idea here is to train the WDQ timings to achieve maximum WRITE margins.
1894 // The algorithm will start with WDQ at the current WDQ setting (tracks WDQS in WR_LVL) +/- 32 PIs (+/- 1/4 CLK) and collapse the eye until all data patterns pass.
1895 // This is because WDQS will be aligned to WCLK by the Write Leveling algorithm and WDQ will only ever have a 1/2 CLK window of validity.
// NOTE(review): the extraction of this file has dropped brace-only lines,
// "do {" lines and several "#ifdef"/"#else" preprocessor lines (the embedded
// original line numbers are not contiguous). The tokens below are preserved
// byte-for-byte; verify the control/preprocessor structure against the
// original meminit.c before modifying this function.
1896 static void wr_train(
1897 MRCParams_t
*mrc_params
)
1900 #define WDQ_STEP 1 // how many WDQ codes to jump while margining
1901 #define L 0 // LEFT side loop value definition
1902 #define R 1 // RIGHT side loop value definition
1904 uint8_t channel_i
; // channel counter
1905 uint8_t rank_i
; // rank counter
1906 uint8_t bl_i
; // byte lane counter
1907 uint8_t bl_divisor
= (mrc_params
->channel_width
== x16
) ? 2 : 1; // byte lane divisor
1910 uint8_t side_i
; // LEFT/RIGHT side indicator (0=L, 1=R)
1911 uint32_t tempD
; // temporary DWORD
1912 uint32_t delay
[2/*side_i*/][NUM_CHANNELS
][NUM_RANKS
][NUM_BYTE_LANES
]; // 2 arrays, for L & R side passing delays
1913 uint32_t address
; // target address for "check_bls_ex()"
1914 uint32_t result
; // result of "check_bls_ex()"
1915 uint32_t bl_mask
; // byte lane mask for "result" checking
1917 uint32_t final_delay
[NUM_CHANNELS
][NUM_BYTE_LANES
]; // used to find placement for rank2rank sharing configs
1918 uint32_t num_ranks_enabled
= 0; // used to find placement for rank2rank sharing configs
1919 #endif // R2R_SHARING
1920 #endif // BACKUP_WDQ
1923 post_code(0x08, 0x00);
// Presumably the BACKUP_WDQ path: program a static, platform-default WDQ
// value on every enabled channel/rank/byte lane — confirm against the
// missing "#ifdef BACKUP_WDQ" directive in the original source.
1928 for (channel_i
=0; channel_i
<NUM_CHANNELS
; channel_i
++)
1930 if (mrc_params
->channel_enables
& (1<<channel_i
))
1932 for (rank_i
=0; rank_i
<NUM_RANKS
; rank_i
++)
1934 if (mrc_params
->rank_enables
& (1<<rank_i
))
1936 for (bl_i
=0; bl_i
<(NUM_BYTE_LANES
/bl_divisor
); bl_i
++)
1938 set_wdq(channel_i
, rank_i
, bl_i
, ddr_wdq
[PLATFORM_ID
]);
1940 } // if rank is enabled
1942 } // if channel is enabled
1945 // initialise "delay"
// Seed the left/right search edges at (WDQS - QRTR_CLK) +/- QRTR_CLK for
// every enabled channel/rank/byte lane.
1946 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1948 if (mrc_params
->channel_enables
& (1 << channel_i
))
1950 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1952 if (mrc_params
->rank_enables
& (1 << rank_i
))
1954 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1956 // want to start with WDQ = (WDQS - QRTR_CLK) +/- QRTR_CLK
1957 tempD
= get_wdqs(channel_i
, rank_i
, bl_i
) - QRTR_CLK
;
1958 delay
[L
][channel_i
][rank_i
][bl_i
] = tempD
- QRTR_CLK
;
1959 delay
[R
][channel_i
][rank_i
][bl_i
] = tempD
+ QRTR_CLK
;
1961 } // if rank is enabled
1963 } // if channel is enabled
1966 // initialise other variables
1967 bl_mask
= byte_lane_mask(mrc_params
);
1968 address
= get_addr(mrc_params
, 0, 0);
1971 // need to set "final_delay[][]" elements to "0"
1972 memset((void *) (final_delay
), 0x00, (size_t) sizeof(final_delay
));
1973 #endif // R2R_SHARING
1975 // start algorithm on the LEFT side and train each channel/bl until no failures are observed, then repeat for the RIGHT side.
1976 for (side_i
= L
; side_i
<= R
; side_i
++)
1978 post_code(0x08, (0x10 + (side_i
)));
1980 // set starting values
1981 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
1983 if (mrc_params
->channel_enables
& (1 << channel_i
))
1985 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
1987 if (mrc_params
->rank_enables
& (1 << rank_i
))
1989 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
1991 set_wdq(channel_i
, rank_i
, bl_i
, delay
[side_i
][channel_i
][rank_i
][bl_i
]);
1993 } // if rank is enabled
1995 } // if channel is enabled
1998 // find passing values
// Inner margining loop: the original "do {" line that pairs with the
// "} while (result & 0xFF)" below was dropped by extraction.
1999 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
2001 if (mrc_params
->channel_enables
& (0x1 << channel_i
))
2003 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
2005 if (mrc_params
->rank_enables
& (0x1 << rank_i
))
2007 // get an address in the target channel/rank
2008 address
= get_addr(mrc_params
, channel_i
, rank_i
);
2010 // request HTE reconfiguration
2011 mrc_params
->hte_setup
= 1;
2013 // check the settings
2018 // need restore memory to idle state as write can be in bad sync
2019 dram_init_command (DCMD_PREA(rank_i
));
2022 // result[07:00] == failing byte lane (MAX 8)
2023 result
= check_bls_ex( mrc_params
, address
);
2024 // check for failures
2027 // at least 1 byte lane failed
2028 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
2030 if (result
& (bl_mask
<< bl_i
))
// Collapse the eye: move the failing edge inward by one WDQ_STEP.
// The original "if (side_i == L) ... else ..." lines around these two
// updates are not visible — confirm against the original source.
2034 delay
[L
][channel_i
][rank_i
][bl_i
] += WDQ_STEP
;
2038 delay
[R
][channel_i
][rank_i
][bl_i
] -= WDQ_STEP
;
2040 // check for algorithm failure
2041 if (delay
[L
][channel_i
][rank_i
][bl_i
] != delay
[R
][channel_i
][rank_i
][bl_i
])
2043 // margin available, update delay setting
2044 set_wdq(channel_i
, rank_i
, bl_i
, delay
[side_i
][channel_i
][rank_i
][bl_i
]);
2048 // no margin available, notify the user and halt
2049 training_message(channel_i
, rank_i
, bl_i
);
2050 post_code(0xEE, (0x80 + side_i
));
2054 } // at least 1 byte lane failed
2055 } while (result
& 0xFF); // stop when all byte lanes pass
2056 } // if rank is enabled
2058 } // if channel is enabled
2062 // program WDQ to the middle of passing window
2063 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
2065 if (mrc_params
->channel_enables
& (1 << channel_i
))
2067 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
2069 if (mrc_params
->rank_enables
& (1 << rank_i
))
2072 // increment "num_ranks_enabled"
2073 num_ranks_enabled
++;
2074 #endif // R2R_SHARING
2075 for (bl_i
= 0; bl_i
< (NUM_BYTE_LANES
/ bl_divisor
); bl_i
++)
2078 DPF(D_INFO
, "WDQ eye rank%d lane%d : %d-%d\n", rank_i
, bl_i
,
2079 delay
[L
][channel_i
][rank_i
][bl_i
],
2080 delay
[R
][channel_i
][rank_i
][bl_i
]);
// Midpoint of the left/right passing edges becomes the final WDQ.
2082 tempD
= (delay
[R
][channel_i
][rank_i
][bl_i
] + delay
[L
][channel_i
][rank_i
][bl_i
]) / 2;
// R2R_SHARING path: accumulate per-rank midpoints and program the
// running average so all ranks share one placement.
2085 final_delay
[channel_i
][bl_i
] += tempD
;
2086 set_wdq(channel_i
, rank_i
, bl_i
, ((final_delay
[channel_i
][bl_i
]) / num_ranks_enabled
));
2088 set_wdq(channel_i
, rank_i
, bl_i
, tempD
);
2089 #endif // R2R_SHARING
2092 } // if rank is enabled
2094 } // if channel is enabled
2096 #endif // BACKUP_WDQ
2101 // Wrapper for jedec initialisation routine
2102 static void perform_jedec_init(
2103 MRCParams_t
*mrc_params
)
2105 jedec_init(mrc_params
, 0);
2108 // Configure DDRPHY for Auto-Refresh, Periodic Compensations,
2109 // Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down
// NOTE(review): the declarations of channel_i/rank_i/bl_i/tempD (original
// lines between 2111 and 2121) were dropped by extraction; the tokens below
// are preserved byte-for-byte.
2110 static void set_auto_refresh(
2111 MRCParams_t
*mrc_params
)
2116 uint32_t bl_divisor
= /*(mrc_params->channel_width==x16)?2:*/1;
2121 // enable Auto-Refresh, Periodic Compensations, Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down
2122 for (channel_i
= 0; channel_i
< NUM_CHANNELS
; channel_i
++)
2124 if (mrc_params
->channel_enables
& (1 << channel_i
))
2126 // Enable Periodic RCOMPS
2127 isbM32m(DDRPHY
, CMPCTRL
, (BIT1
), (BIT1
));
2130 // Enable Dynamic DiffAmp & Set Read ODT Value
// tempD becomes the ODT override field value: 0x3F disables (OFF),
// 0x00 leaves it automatic.
2131 switch (mrc_params
->rd_odt_value
)
2133 case 0: tempD
= 0x3F; break; // OFF
2134 default: tempD
= 0x00; break; // Auto
2135 } // rd_odt_value switch
// Each iteration programs a byte-lane pair (B0/B1) override register.
2137 for (bl_i
=0; bl_i
<((NUM_BYTE_LANES
/bl_divisor
)/2); bl_i
++)
2139 isbM32m(DDRPHY
, (B0OVRCTL
+ (bl_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)),
2140 ((0x00<<16)|(tempD
<<10)),
2141 ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
))); // Override: DIFFAMP, ODT
2143 isbM32m(DDRPHY
, (B1OVRCTL
+ (bl_i
* DDRIODQ_BL_OFFSET
) + (channel_i
* DDRIODQ_CH_OFFSET
)),
2144 ((0x00<<16)|(tempD
<<10)),
2145 ((BIT21
|BIT20
|BIT19
|BIT18
|BIT17
|BIT16
)|(BIT15
|BIT14
|BIT13
|BIT12
|BIT11
|BIT10
)));// Override: DIFFAMP, ODT
2148 // Issue ZQCS command
// ZQ calibration short command, sent once per enabled rank.
2149 for (rank_i
= 0; rank_i
< NUM_RANKS
; rank_i
++)
2151 if (mrc_params
->rank_enables
& (1 << rank_i
))
2153 dram_init_command(DCMD_ZQCS(rank_i
));
2154 } // if rank_i enabled
2157 } // if channel_i enabled
2166 // Depending on configuration enables ECC support.
2167 // Available memory size is decreased, and updated with 0s
2168 // in order to clear error status. Address mode 2 forced.
// NOTE(review): the declarations of the Drp/Dsch/Ctr register unions
// (original lines ~2171-2175 and ~2191-2192) were dropped by extraction.
2169 static void ecc_enable(
2170 MRCParams_t
*mrc_params
)
// Nothing to do unless ECC was requested in the input parameters.
2176 if (mrc_params
->ecc_enables
== 0) return;
2180 // Configuration required in ECC mode
2181 Drp
.raw
= isbR32m(MCU
, DRP
);
2182 Drp
.field
.addressMap
= 2;
2183 Drp
.field
.split64
= 1;
2184 isbW32m(MCU
, DRP
, Drp
.raw
);
2186 // Disable new request bypass
2187 Dsch
.raw
= isbR32m(MCU
, DSCH
);
2188 Dsch
.field
.NEWBYPDIS
= 1;
2189 isbW32m(MCU
, DSCH
, Dsch
.raw
);
// Enable single-bit and double-bit error detection plus check-bit generation.
2193 Ctr
.field
.SBEEN
= 1;
2194 Ctr
.field
.DBEEN
= 1;
2195 Ctr
.field
.ENCBGEN
= 1;
2196 isbW32m(MCU
, DECCCTRL
, Ctr
.raw
);
2199 // Read back to be sure writing took place
2200 Ctr
.raw
= isbR32m(MCU
, DECCCTRL
);
2203 // Assume 8 bank memory, one bank is gone for ECC
2204 mrc_params
->mem_size
-= mrc_params
->mem_size
/ 8;
2206 // For S3 resume memory content has to be preserved
// On non-S3 paths, zero-fill memory via the HTE so ECC check bits and
// error status start clean.
2207 if (mrc_params
->boot_mode
!= bmS3
)
2209 select_hte(mrc_params
);
2210 HteMemInit(mrc_params
, MrcMemInit
, MrcHaltHteEngineOnError
);
2211 select_memory_manager(mrc_params
);
2218 // Lock MCU registers at the end of initialisation sequence.
// NOTE(review): the declaration of the Dco register union (original lines
// ~2221-2225) was dropped by extraction; tokens below preserved byte-for-byte.
2219 static void lock_registers(
2220 MRCParams_t
*mrc_params
)
// Read-modify-write DCO: hand PRI control to the MEMORY_MANAGER and set
// the DRP/REUT lock bits so training registers cannot be changed later.
2226 Dco
.raw
= isbR32m(MCU
, DCO
);
2227 Dco
.field
.PMIDIS
= 0; //0 - PRI enabled
2228 Dco
.field
.PMICTL
= 0; //0 - PRI owned by MEMORY_MANAGER
2229 Dco
.field
.DRPLOCK
= 1;
2230 Dco
.field
.REUTLOCK
= 1;
2231 isbW32m(MCU
, DCO
, Dco
.raw
);
2239 // cache write back invalidate
// NOTE(review): the inline-assembly body (original lines ~2243-2250) was
// dropped by extraction; only the SIM/GCC guard is visible here.
2240 static void asm_wbinvd(void)
2242 #if defined (SIM) || defined (GCC)
// Cache invalidate (no write-back). NOTE(review): the inline-assembly body
// (original lines ~2255-2262) was dropped by extraction.
2252 static void asm_invd(void)
2254 #if defined (SIM) || defined (GCC)
// Debug helper: read memory from address 0 up to "limit" with the CPU and
// dump it 4 words per line via DPF.
// NOTE(review): the initialisation of "limit" (original lines ~2267-2270)
// was dropped by extraction — as shown, "limit" would be read uninitialised;
// verify against the original source.
2264 static void cpu_read(void)
2266 uint32_t adr
, dat
, limit
;
2271 for (adr
= 0; adr
< limit
; adr
+= 4)
2273 dat
= *(uint32_t*) adr
;
// Start a new output line (with the address) every 16 bytes.
2274 if ((adr
& 0x0F) == 0)
2276 DPF(D_INFO
, "\n%x : ", adr
);
2278 DPF(D_INFO
, "%x ", dat
);
2282 DPF(D_INFO
, "CPU read done\n");
// Debug helper: fill memory from address 0 up to "limit" with the CPU,
// writing the recognisable pattern 0xDEAD0000 + address.
// NOTE(review): the initialisation of "limit" (original lines ~2289-2290)
// was dropped by extraction; verify against the original source.
2286 static void cpu_write(void)
2288 uint32_t adr
, limit
;
2291 for (adr
= 0; adr
< limit
; adr
+= 4)
2293 *(uint32_t*) adr
= 0xDEAD0000 + adr
;
2298 DPF(D_INFO
, "CPU write done\n");
// CPU-driven memory test: five write/read-back passes (address pattern,
// walking one, walking zero, 0x5555AAAA, 0xAAAA5555) over [1MB, 256MB),
// timing each pass with the TSC and reporting mismatches via DPF.
// NOTE(review): the declaration of "my_tsc", the mismatch branch that
// updates "result", and the use of "mrc_params" (original gaps around
// lines 2307-2312, 2343-2349, 2354-2357) were dropped by extraction;
// verify against the original source.
2302 static void cpu_memory_test(
2303 MRCParams_t
*mrc_params
)
2305 uint32_t result
= 0;
2306 uint32_t val
, dat
, adr
, adr0
, step
, limit
;
2313 adr0
= 1 * 1024 * 1024;
2314 limit
= 256 * 1024 * 1024;
2316 for (step
= 0; step
<= 4; step
++)
2318 DPF(D_INFO
, "Mem test step %d starting from %xh\n", step
, adr0
);
// Write pass: generate the per-step pattern and store it.
2320 my_tsc
= read_tsc();
2321 for (adr
= adr0
; adr
< limit
; adr
+= sizeof(uint32_t))
2323 if (step
== 0) dat
= adr
;
2324 else if (step
== 1) dat
= (1 << ((adr
>> 2) & 0x1f));
2325 else if (step
== 2) dat
= ~(1 << ((adr
>> 2) & 0x1f));
2326 else if (step
== 3) dat
= 0x5555AAAA;
2327 else if (step
== 4) dat
= 0xAAAA5555;
2329 *(uint32_t*) adr
= dat
;
2331 DPF(D_INFO
, "Write time %llXh\n", read_tsc() - my_tsc
);
// Read pass: regenerate the same pattern and compare the read-back value.
2333 my_tsc
= read_tsc();
2334 for (adr
= adr0
; adr
< limit
; adr
+= sizeof(uint32_t))
2336 if (step
== 0) dat
= adr
;
2337 else if (step
== 1) dat
= (1 << ((adr
>> 2) & 0x1f));
2338 else if (step
== 2) dat
= ~(1 << ((adr
>> 2) & 0x1f));
2339 else if (step
== 3) dat
= 0x5555AAAA;
2340 else if (step
== 4) dat
= 0xAAAA5555;
2342 val
= *(uint32_t*) adr
;
2346 DPF(D_INFO
, "%x vs. %x@%x\n", dat
, val
, adr
);
2350 DPF(D_INFO
, "Read time %llXh\n", read_tsc() - my_tsc
);
2353 DPF( D_INFO
, "Memory test result %x\n", result
);
2359 // Execute memory test, if error detected it is
2360 // indicated in mrc_params->status.
// Runs the HTE-based memory test (switching the memory path to the HTE and
// back), logs the raw result, and maps it to MRC_SUCCESS / MRC_E_MEMTEST.
2361 static void memory_test(
2362 MRCParams_t
*mrc_params
)
2364 uint32_t result
= 0;
2368 select_hte(mrc_params
);
2369 result
= HteMemInit(mrc_params
, MrcMemTest
, MrcHaltHteEngineOnError
);
2370 select_memory_manager(mrc_params
);
2372 DPF(D_INFO
, "Memory test result %x\n", result
);
2373 mrc_params
->status
= ((result
== 0) ? MRC_SUCCESS
: MRC_E_MEMTEST
);
2378 // Force same timings as with backup settings
// Programs fixed RCVN/RDQS/WDQS/WDQ/VREF/WCTL/WCMD values for every
// channel/rank/byte lane instead of running the training algorithms.
// NOTE(review): the declarations of ch/rk/bl (original lines ~2381-2384)
// were dropped by extraction; tokens below preserved byte-for-byte.
2379 static void static_timings(
2380 MRCParams_t
*mrc_params
)
2385 for (ch
= 0; ch
< NUM_CHANNELS
; ch
++)
2387 for (rk
= 0; rk
< NUM_RANKS
; rk
++)
2389 for (bl
= 0; bl
< NUM_BYTE_LANES
; bl
++)
2391 set_rcvn(ch
, rk
, bl
, 498); // RCVN
2392 set_rdqs(ch
, rk
, bl
, 24); // RDQS
2393 set_wdqs(ch
, rk
, bl
, 292); // WDQS
2394 set_wdq( ch
, rk
, bl
, 260); // WDQ
2397 set_vref(ch
, bl
, 32); // VREF (RANK0 only)
2400 set_wctl(ch
, rk
, 217); // WCTL
2402 set_wcmd(ch
, 220); // WCMD
2409 // Initialise system memory.
2412 MRCParams_t
*mrc_params
)
2414 static const MemInit_t init
[] =
2416 { 0x0101, bmCold
|bmFast
|bmWarm
|bmS3
, clear_self_refresh
}, //0
2417 { 0x0200, bmCold
|bmFast
|bmWarm
|bmS3
, prog_ddr_timing_control
}, //1 initialise the MCU
2418 { 0x0103, bmCold
|bmFast
, prog_decode_before_jedec
}, //2
2419 { 0x0104, bmCold
|bmFast
, perform_ddr_reset
}, //3
2420 { 0x0300, bmCold
|bmFast
|bmS3
, ddrphy_init
}, //4 initialise the DDRPHY
2421 { 0x0400, bmCold
|bmFast
, perform_jedec_init
}, //5 perform JEDEC initialisation of DRAMs
2422 { 0x0105, bmCold
|bmFast
, set_ddr_init_complete
}, //6
2423 { 0x0106, bmFast
|bmWarm
|bmS3
, restore_timings
}, //7
2424 { 0x0106, bmCold
, default_timings
}, //8
2425 { 0x0500, bmCold
, rcvn_cal
}, //9 perform RCVN_CAL algorithm
2426 { 0x0600, bmCold
, wr_level
}, //10 perform WR_LEVEL algorithm
2427 { 0x0120, bmCold
, prog_page_ctrl
}, //11
2428 { 0x0700, bmCold
, rd_train
}, //12 perform RD_TRAIN algorithm
2429 { 0x0800, bmCold
, wr_train
}, //13 perform WR_TRAIN algorithm
2430 { 0x010B, bmCold
, store_timings
}, //14
2431 { 0x010C, bmCold
|bmFast
|bmWarm
|bmS3
, enable_scrambling
}, //15
2432 { 0x010D, bmCold
|bmFast
|bmWarm
|bmS3
, prog_ddr_control
}, //16
2433 { 0x010E, bmCold
|bmFast
|bmWarm
|bmS3
, prog_dra_drb
}, //17
2434 { 0x010F, bmWarm
|bmS3
, perform_wake
}, //18
2435 { 0x0110, bmCold
|bmFast
|bmWarm
|bmS3
, change_refresh_period
}, //19
2436 { 0x0111, bmCold
|bmFast
|bmWarm
|bmS3
, set_auto_refresh
}, //20
2437 { 0x0112, bmCold
|bmFast
|bmWarm
|bmS3
, ecc_enable
}, //21
2438 { 0x0113, bmCold
|bmFast
, memory_test
}, //22
2439 { 0x0114, bmCold
|bmFast
|bmWarm
|bmS3
, lock_registers
} //23 set init done
2446 DPF(D_INFO
, "Meminit build %s %s\n", __DATE__
, __TIME__
);
2449 post_code(0x01, 0x00);
2451 if (mrc_params
->boot_mode
!= bmCold
)
2453 if (mrc_params
->ddr_speed
!= mrc_params
->timings
.ddr_speed
)
2455 // full training required as frequency changed
2456 mrc_params
->boot_mode
= bmCold
;
2460 for (i
= 0; i
< MCOUNT(init
); i
++)
2465 if (mrc_params
->menu_after_mrc
&& i
> 14)
2471 DPF(D_INFO
, "-- c - continue --\n");
2472 DPF(D_INFO
, "-- j - move to jedec init --\n");
2473 DPF(D_INFO
, "-- m - memory test --\n");
2474 DPF(D_INFO
, "-- r - cpu read --\n");
2475 DPF(D_INFO
, "-- w - cpu write --\n");
2476 DPF(D_INFO
, "-- b - hte base test --\n");
2477 DPF(D_INFO
, "-- g - hte extended test --\n");
2484 case 'j': //move to jedec init
2491 uint32_t n
, res
, cnt
=0;
2493 for(n
=0; mgetch()==0; n
++)
2495 if( ch
== 'M' || n
% 256 == 0)
2497 DPF(D_INFO
, "n=%d e=%d\n", n
, cnt
);
2504 memory_test(mrc_params
);
2505 res
|= mrc_params
->status
;
2508 mrc_params
->hte_setup
= 1;
2509 res
|= check_bls_ex(mrc_params
, 0x00000000);
2510 res
|= check_bls_ex(mrc_params
, 0x00000000);
2511 res
|= check_bls_ex(mrc_params
, 0x00000000);
2512 res
|= check_bls_ex(mrc_params
, 0x00000000);
2514 if( mrc_params
->rank_enables
& 2)
2516 mrc_params
->hte_setup
= 1;
2517 res
|= check_bls_ex(mrc_params
, 0x40000000);
2518 res
|= check_bls_ex(mrc_params
, 0x40000000);
2519 res
|= check_bls_ex(mrc_params
, 0x40000000);
2520 res
|= check_bls_ex(mrc_params
, 0x40000000);
2525 DPF(D_INFO
, "###########\n");
2527 DPF(D_INFO
, "# Error count %d\n", ++cnt
);
2529 DPF(D_INFO
, "###########\n");
2534 select_memory_manager(mrc_params
);
2538 memory_test(mrc_params
);
2541 cpu_memory_test(mrc_params
);
2546 if (ch
<= '9') DpfPrintMask
^= (ch
- '0') << 3;
2547 DPF(D_INFO
, "Log mask %x\n", DpfPrintMask
);
2550 print_timings(mrc_params
);
2553 rd_train(mrc_params
);
2556 wr_train(mrc_params
);
2569 select_hte(mrc_params
);
2570 mrc_params
->hte_setup
= 1;
2571 result
= check_bls_ex(mrc_params
, 0);
2572 DPF(D_INFO
, "Extended test result %x\n", result
);
2573 select_memory_manager(mrc_params
);
2579 select_hte(mrc_params
);
2580 mrc_params
->hte_setup
= 1;
2581 result
= check_rw_coarse(mrc_params
, 0);
2582 DPF(D_INFO
, "Base test result %x\n", result
);
2583 select_memory_manager(mrc_params
);
2587 select_hte(mrc_params
);
2588 HteMemOp(0x2340, 1, 1);
2589 select_memory_manager(mrc_params
);
2596 DPF( D_INFO
, "===>> Start suspend\n");
2597 isbR32m(MCU
, DSTAT
);
2599 DPMC0reg
.raw
= isbR32m(MCU
, DPMC0
);
2600 DPMC0reg
.field
.DYNSREN
= 0;
2601 DPMC0reg
.field
.powerModeOpCode
= 0x05; // Disable Master DLL
2602 isbW32m(MCU
, DPMC0
, DPMC0reg
.raw
);
2604 // Should be off for negative test case verification
2606 Wr32(MMIO
, PCIADDR(0,0,0,SB_PACKET_REG
),
2607 (uint32_t)SB_COMMAND(SB_SUSPEND_CMND_OPCODE
, MCU
, 0));
2610 DPF( D_INFO
, "press key\n");
2612 DPF( D_INFO
, "===>> Start resume\n");
2613 isbR32m(MCU
, DSTAT
);
2615 mrc_params
->boot_mode
= bmS3
;
2624 if (mrc_params
->boot_mode
& init
[i
].boot_path
)
2626 uint8_t major
= init
[i
].post_code
>> 8 & 0xFF;
2627 uint8_t minor
= init
[i
].post_code
>> 0 & 0xFF;
2628 post_code(major
, minor
);
2630 my_tsc
= read_tsc();
2631 init
[i
].init_fn(mrc_params
);
2632 DPF(D_TIME
, "Execution time %llX", read_tsc() - my_tsc
);
2636 // display the timings
2637 print_timings(mrc_params
);
2640 post_code(0x01, 0xFF);