1 /*
2  * Copyright (C) Marvell International Ltd. and its affiliates
3  *
4  * SPDX-License-Identifier:     GPL-2.0
5  */
6
7 #include <common.h>
8 #include <i2c.h>
9 #include <spl.h>
10 #include <asm/io.h>
11 #include <asm/arch/cpu.h>
12 #include <asm/arch/soc.h>
13
14 #include "ddr3_init.h"
15
16 #if defined(MV88F78X60)
17 #include "ddr3_axp_config.h"
18 #elif defined(MV88F67XX)
19 #include "ddr3_a370_config.h"
20 #endif
21
22 #if defined(MV88F672X)
23 #include "ddr3_a375_config.h"
24 #endif
25
26 #ifdef DUNIT_SPD
27
28 /* DIMM SPD offsets */
29 #define SPD_DEV_TYPE_BYTE               2
30
31 #define SPD_MODULE_TYPE_BYTE            3
32 #define SPD_MODULE_MASK                 0xf
33 #define SPD_MODULE_TYPE_RDIMM           1
34 #define SPD_MODULE_TYPE_UDIMM           2
35
36 #define SPD_DEV_DENSITY_BYTE            4
37 #define SPD_DEV_DENSITY_MASK            0xf
38
39 #define SPD_ROW_NUM_BYTE                5
40 #define SPD_ROW_NUM_MIN                 12
41 #define SPD_ROW_NUM_OFF                 3
42 #define SPD_ROW_NUM_MASK                (7 << SPD_ROW_NUM_OFF)
43
44 #define SPD_COL_NUM_BYTE                5
45 #define SPD_COL_NUM_MIN                 9
46 #define SPD_COL_NUM_OFF                 0
47 #define SPD_COL_NUM_MASK                (7 << SPD_COL_NUM_OFF)
48
49 #define SPD_MODULE_ORG_BYTE             7
50 #define SPD_MODULE_SDRAM_DEV_WIDTH_OFF  0
51 #define SPD_MODULE_SDRAM_DEV_WIDTH_MASK (7 << SPD_MODULE_SDRAM_DEV_WIDTH_OFF)
52 #define SPD_MODULE_BANK_NUM_MIN         1
53 #define SPD_MODULE_BANK_NUM_OFF         3
54 #define SPD_MODULE_BANK_NUM_MASK        (7 << SPD_MODULE_BANK_NUM_OFF)
55
56 #define SPD_BUS_WIDTH_BYTE              8
57 #define SPD_BUS_WIDTH_OFF               0
58 #define SPD_BUS_WIDTH_MASK              (7 << SPD_BUS_WIDTH_OFF)
59 #define SPD_BUS_ECC_OFF                 3
60 #define SPD_BUS_ECC_MASK                (3 << SPD_BUS_ECC_OFF)
61
62 #define SPD_MTB_DIVIDEND_BYTE           10
63 #define SPD_MTB_DIVISOR_BYTE            11
64 #define SPD_TCK_BYTE                    12
65 #define SPD_SUP_CAS_LAT_LSB_BYTE        14
66 #define SPD_SUP_CAS_LAT_MSB_BYTE        15
67 #define SPD_TAA_BYTE                    16
68 #define SPD_TWR_BYTE                    17
69 #define SPD_TRCD_BYTE                   18
70 #define SPD_TRRD_BYTE                   19
71 #define SPD_TRP_BYTE                    20
72
73 #define SPD_TRAS_MSB_BYTE               21
74 #define SPD_TRAS_MSB_MASK               0xf
75
76 #define SPD_TRC_MSB_BYTE                21
77 #define SPD_TRC_MSB_MASK                0xf0
78
79 #define SPD_TRAS_LSB_BYTE               22
80 #define SPD_TRC_LSB_BYTE                23
81 #define SPD_TRFC_LSB_BYTE               24
82 #define SPD_TRFC_MSB_BYTE               25
83 #define SPD_TWTR_BYTE                   26
84 #define SPD_TRTP_BYTE                   27
85
86 #define SPD_TFAW_MSB_BYTE               28
87 #define SPD_TFAW_MSB_MASK               0xf
88
89 #define SPD_TFAW_LSB_BYTE               29
90 #define SPD_OPT_FEATURES_BYTE           30
91 #define SPD_THERMAL_REFRESH_OPT_BYTE    31
92
93 #define SPD_ADDR_MAP_BYTE               63
94 #define SPD_ADDR_MAP_MIRROR_OFFS        0
95
96 #define SPD_RDIMM_RC_BYTE               69
97 #define SPD_RDIMM_RC_NIBBLE_MASK        0xF
98 #define SPD_RDIMM_RC_NUM                16
99
100 /* Dimm Memory Type values */
101 #define SPD_MEM_TYPE_SDRAM              0x4
102 #define SPD_MEM_TYPE_DDR1               0x7
103 #define SPD_MEM_TYPE_DDR2               0x8
104 #define SPD_MEM_TYPE_DDR3               0xB
105
106 #define DIMM_MODULE_MANU_OFFS           64
107 #define DIMM_MODULE_MANU_SIZE           8
108 #define DIMM_MODULE_VEN_OFFS            73
109 #define DIMM_MODULE_VEN_SIZE            25
110 #define DIMM_MODULE_ID_OFFS             99
111 #define DIMM_MODULE_ID_SIZE             18
112
113 /* enumeration for voltage levels. */
114 enum dimm_volt_if {
115         TTL_5V_TOLERANT,
116         LVTTL,
117         HSTL_1_5V,
118         SSTL_3_3V,
119         SSTL_2_5V,
120         VOLTAGE_UNKNOWN,
121 };
122
123 /* enumeration for SDRAM CAS latencies. */
124 enum dimm_sdram_cas {
125         SD_CL_1 = 1,
126         SD_CL_2,
127         SD_CL_3,
128         SD_CL_4,
129         SD_CL_5,
130         SD_CL_6,
131         SD_CL_7,
132         SD_FAULT
133 };
134
135 /* enumeration for memory types */
136 enum memory_type {
137         MEM_TYPE_SDRAM,
138         MEM_TYPE_DDR1,
139         MEM_TYPE_DDR2,
140         MEM_TYPE_DDR3
141 };
142
143 /* DIMM information structure */
144 typedef struct dimm_info {
145         /* DIMM dimensions */
146         u32 num_of_module_ranks;
147         u32 data_width;
148         u32 rank_capacity;
149         u32 num_of_devices;
150
151         u32 sdram_width;
152         u32 num_of_banks_on_each_device;
153         u32 sdram_capacity;
154
155         u32 num_of_row_addr;
156         u32 num_of_col_addr;
157
158         u32 addr_mirroring;
159
160         u32 err_check_type;                     /* ECC , PARITY.. */
161         u32 type_info;                          /* DDR2 only */
162
163         /* DIMM timing parameters */
164         u32 supported_cas_latencies;
165         u32 refresh_interval;
166         u32 min_cycle_time;
167         u32 min_row_precharge_time;
168         u32 min_row_active_to_row_active;
169         u32 min_ras_to_cas_delay;
170         u32 min_write_recovery_time;            /* DDR3/2 only */
171         u32 min_write_to_read_cmd_delay;        /* DDR3/2 only */
172         u32 min_read_to_prech_cmd_delay;        /* DDR3/2 only */
173         u32 min_active_to_precharge;
174         u32 min_refresh_recovery;               /* DDR3/2 only */
175         u32 min_cas_lat_time;
176         u32 min_four_active_win_delay;
177         u8 dimm_rc[SPD_RDIMM_RC_NUM];
178
179         /* DIMM vendor ID */
180         u32 vendor;
181 } MV_DIMM_INFO;
182
183 static int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info,
184                              u32 dimm);
185 static u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val);
186 static u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val);
187 static int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width);
188 static u32 ddr3_div(u32 val, u32 divider, u32 sub);
189
190 extern u8 spd_data[SPD_SIZE];
191 extern u32 odt_config[ODT_OPT];
192 extern u16 odt_static[ODT_OPT][MAX_CS];
193 extern u16 odt_dynamic[ODT_OPT][MAX_CS];
194
195 #if !(defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710))
196 /*
197  * Name:     ddr3_get_dimm_num - Find the number of DIMMs and their addresses
198  * Desc:     Scan the SPD EEPROM address range for DDR3 DIMMs.
199  * Args:     dimm_addr - array filled with the detected DIMM addresses
200  * Notes:
201  * Returns:  Number of DIMMs detected.
202  */
203 static u32 ddr3_get_dimm_num(u32 *dimm_addr)
204 {
205         u32 dimm_cur_addr;
206         u8 data[3];
207         u32 dimm_num = 0;
208         int ret;
209
210         /* Read the dimm eeprom */
211         for (dimm_cur_addr = MAX_DIMM_ADDR; dimm_cur_addr > MIN_DIMM_ADDR;
212              dimm_cur_addr--) {
213                 data[SPD_DEV_TYPE_BYTE] = 0;
214
215                 /* Far-End DIMM must be connected */
216                 if ((dimm_num == 0) && (dimm_cur_addr < FAR_END_DIMM_ADDR))
217                         return 0;
218
219                 ret = i2c_read(dimm_cur_addr, 0, 1, (uchar *)data, 3);
220                 if (!ret) {
221                         if (data[SPD_DEV_TYPE_BYTE] == SPD_MEM_TYPE_DDR3) {
222                                 dimm_addr[dimm_num] = dimm_cur_addr;
223                                 dimm_num++;
224                         }
225                 }
226         }
227
228         return dimm_num;
229 }
230 #endif
231
232 /*
233  * Name:     ddr3_spd_init - Get the SPD parameters.
234  * Desc:     Read the DIMM SPD parameters into the given info structure.
235  * Args:     dimm_addr - DIMM SPD EEPROM address (0 = use pre-loaded spd_data)
236  *           info - DIMM information structure to fill
237  * Notes:
238  * Returns:  MV_OK if the DIMM parameters could be read, error code otherwise.
239  */
240 int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width)
241 {
242         u32 tmp;
243         u32 time_base;
244         int ret;
245         __maybe_unused u32 rc;
246         __maybe_unused u8 vendor_high, vendor_low;
247
248         if (dimm_addr != 0) {
249                 memset(spd_data, 0, SPD_SIZE * sizeof(u8));
250
251                 ret = i2c_read(dimm_addr, 0, 1, (uchar *)spd_data, SPD_SIZE);
252                 if (ret)
253                         return MV_DDR3_TRAINING_ERR_TWSI_FAIL;
254         }
255
256         /* Check if DDR3 */
257         if (spd_data[SPD_DEV_TYPE_BYTE] != SPD_MEM_TYPE_DDR3)
258                 return MV_DDR3_TRAINING_ERR_TWSI_BAD_TYPE;
259
260         /* Error Check Type */
261         /* No byte for error check in DDR3 SPD, use DDR2 convention */
262         info->err_check_type = 0;
263
264         /* Check if ECC */
265         if ((spd_data[SPD_BUS_WIDTH_BYTE] & 0x18) >> 3)
266                 info->err_check_type = 1;
267
268         DEBUG_INIT_FULL_C("DRAM err_check_type ", info->err_check_type, 1);
269         switch (spd_data[SPD_MODULE_TYPE_BYTE]) {
270         case 1:
271                 /* support RDIMM */
272                 info->type_info = SPD_MODULE_TYPE_RDIMM;
273                 break;
274         case 2:
275                 /* support UDIMM */
276                 info->type_info = SPD_MODULE_TYPE_UDIMM;
277                 break;
278         case 11:                /* LRDIMM currently not supported */
279         default:
280                 info->type_info = (spd_data[SPD_MODULE_TYPE_BYTE]);
281                 break;
282         }
283
284         /* Size Calculations: */
285
286         /* Number Of Row Addresses - 12/13/14/15/16 */
287         info->num_of_row_addr =
288                 (spd_data[SPD_ROW_NUM_BYTE] & SPD_ROW_NUM_MASK) >>
289                 SPD_ROW_NUM_OFF;
290         info->num_of_row_addr += SPD_ROW_NUM_MIN;
291         DEBUG_INIT_FULL_C("DRAM num_of_row_addr ", info->num_of_row_addr, 2);
292
293         /* Number Of Column Addresses - 9/10/11/12 */
294         info->num_of_col_addr =
295                 (spd_data[SPD_COL_NUM_BYTE] & SPD_COL_NUM_MASK) >>
296                 SPD_COL_NUM_OFF;
297         info->num_of_col_addr += SPD_COL_NUM_MIN;
298         DEBUG_INIT_FULL_C("DRAM num_of_col_addr ", info->num_of_col_addr, 1);
299
300         /* Number Of Ranks = number of CS on Dimm - 1/2/3/4 Ranks */
301         info->num_of_module_ranks =
302                 (spd_data[SPD_MODULE_ORG_BYTE] & SPD_MODULE_BANK_NUM_MASK) >>
303                 SPD_MODULE_BANK_NUM_OFF;
304         info->num_of_module_ranks += SPD_MODULE_BANK_NUM_MIN;
305         DEBUG_INIT_FULL_C("DRAM numOfModuleBanks ", info->num_of_module_ranks,
306                           1);
307
308         /* Data Width - 8/16/32/64 bits */
309         info->data_width =
310                 1 << (3 + (spd_data[SPD_BUS_WIDTH_BYTE] & SPD_BUS_WIDTH_MASK));
311         DEBUG_INIT_FULL_C("DRAM data_width ", info->data_width, 1);
312
313         /* Number Of Banks On Each Device - 8/16/32/64 banks */
314         info->num_of_banks_on_each_device =
315                 1 << (3 + ((spd_data[SPD_DEV_DENSITY_BYTE] >> 4) & 0x7));
316         DEBUG_INIT_FULL_C("DRAM num_of_banks_on_each_device ",
317                           info->num_of_banks_on_each_device, 1);
318
319         /* Total SDRAM capacity - 256Mb/512Mb/1Gb/2Gb/4Gb/8Gb/16Gb - MegaBits */
320         info->sdram_capacity =
321                 spd_data[SPD_DEV_DENSITY_BYTE] & SPD_DEV_DENSITY_MASK;
322
323         /* Sdram Width - 4/8/16/32 bits */
324         info->sdram_width = 1 << (2 + (spd_data[SPD_MODULE_ORG_BYTE] &
325                                        SPD_MODULE_SDRAM_DEV_WIDTH_MASK));
326         DEBUG_INIT_FULL_C("DRAM sdram_width ", info->sdram_width, 1);
327
328         /* CS (rank) capacity */
329         /*
330          * DDR3 rank capacity = (device density / 8) *
331          * (module width / device width)
332          */
333         /* JEDEC SPD DDR3, page 7: density is kept in Mbit - 2048 = 2 Gbit */
334         if (dimm_width == 32) {
335                 info->rank_capacity =
336                         ((1 << info->sdram_capacity) * 256 *
337                          (info->data_width / info->sdram_width)) << 16;
338                 /* 32-bit bus width: CS size = CS size / 2 */
339         } else {
340                 info->rank_capacity =
341                         ((1 << info->sdram_capacity) * 256 *
342                          (info->data_width / info->sdram_width) * 0x2) << 16;
343                 /* Mbit -> bytes: * 2^20 / 8 = * 2 << 16 */
344         }
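        /*
         * For example, a 64-bit module built from 2 Gbit x8 devices gives
         * ((1 << 2) * 256) Mbit * (64 / 8) * 2 << 16 = 0x40000000, i.e.
         * 1 GB per rank, so rank_capacity appears to be kept in bytes.
         */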
345         DEBUG_INIT_FULL_C("DRAM rank_capacity[31] ", info->rank_capacity, 1);
346
347         /* Number of devices, including the error-correction device */
348         info->num_of_devices =
349                 ((info->data_width / info->sdram_width) *
350                  info->num_of_module_ranks) + info->err_check_type;
351         DEBUG_INIT_FULL_C("DRAM num_of_devices  ", info->num_of_devices, 1);
352
353         /* Address Mapping from Edge connector to DRAM - mirroring option */
354         info->addr_mirroring =
355                 spd_data[SPD_ADDR_MAP_BYTE] & (1 << SPD_ADDR_MAP_MIRROR_OFFS);
356
357         /* Timings - All in ps */
358
359         time_base = (1000 * spd_data[SPD_MTB_DIVIDEND_BYTE]) /
360                 spd_data[SPD_MTB_DIVISOR_BYTE];
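        /*
         * For example, the common JEDEC medium timebase of 1/8 ns
         * (dividend = 1, divisor = 8) gives time_base = 125, so the SPD
         * byte values below scale in 125 ps units.
         */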
361
362         /* Minimum SDRAM cycle time (tCK min) */
363         info->min_cycle_time = spd_data[SPD_TCK_BYTE] * time_base;
364         DEBUG_INIT_FULL_C("DRAM tCKmin ", info->min_cycle_time, 1);
365
366         /* Refresh Interval */
367         /* No byte for refresh interval in DDR3 SPD, use DDR2 convention */
368         /*
369          * JEDEC values: 7.8 us for 0 <= Tcase <= 85C, 3.9 us for
370          * 85C < Tcase <= 95C
371          */
372         info->refresh_interval = 7800000;       /* Set to 7.8uSec */
373         DEBUG_INIT_FULL_C("DRAM refresh_interval ", info->refresh_interval, 1);
374
375         /* Supported CAS latencies - DDR3: */
376
377         /*
378          * Byte 14 (LSB):
379          *   bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0
380          *   CL11 | CL10 | CL9  | CL8  | CL7  | CL6  | CL5  | CL4
381          *
382          * Byte 15 (MSB):
383          *   bit15| bit14| bit13| bit12| bit11| bit10| bit9 | bit8
384          *   TBD  | CL18 | CL17 | CL16 | CL15 | CL14 | CL13 | CL12
385          *
386          */
387
388         /* DDR3 SPD uses two bytes for CAS latency support */
389         info->supported_cas_latencies =
390                 (spd_data[SPD_SUP_CAS_LAT_MSB_BYTE] << 8) |
391                 spd_data[SPD_SUP_CAS_LAT_LSB_BYTE];
392         DEBUG_INIT_FULL_C("DRAM supported_cas_latencies ",
393                           info->supported_cas_latencies, 1);
394
395         /* Minimum CAS latency time (tAA min) */
396         info->min_cas_lat_time = (spd_data[SPD_TAA_BYTE] * time_base);
397         /*
398          * This field divided by the cycle time gives the CAS latency
399          * to configure
400          */
401
402         /*
403          * For DDR3 and DDR2 includes Write Recovery Time field.
404          * Other SDRAM ignore
405          */
406         info->min_write_recovery_time = spd_data[SPD_TWR_BYTE] * time_base;
407         DEBUG_INIT_FULL_C("DRAM min_write_recovery_time ",
408                           info->min_write_recovery_time, 1);
409
410         /* Minimum RAS to CAS delay (tRCD) */
411         info->min_ras_to_cas_delay = spd_data[SPD_TRCD_BYTE] * time_base;
412         DEBUG_INIT_FULL_C("DRAM min_ras_to_cas_delay ",
413                           info->min_ras_to_cas_delay, 1);
414
415         /* Minimum Row Active to Row Active Time */
416         info->min_row_active_to_row_active =
417             spd_data[SPD_TRRD_BYTE] * time_base;
418         DEBUG_INIT_FULL_C("DRAM min_row_active_to_row_active ",
419                           info->min_row_active_to_row_active, 1);
420
421         /* Minimum Row Precharge Delay Time */
422         info->min_row_precharge_time = spd_data[SPD_TRP_BYTE] * time_base;
423         DEBUG_INIT_FULL_C("DRAM min_row_precharge_time ",
424                           info->min_row_precharge_time, 1);
425
426         /* Minimum Active to Precharge Delay Time - tRAS   ps */
427         info->min_active_to_precharge =
428                 (spd_data[SPD_TRAS_MSB_BYTE] & SPD_TRAS_MSB_MASK) << 8;
429         info->min_active_to_precharge |= spd_data[SPD_TRAS_LSB_BYTE];
430         info->min_active_to_precharge *= time_base;
431         DEBUG_INIT_FULL_C("DRAM min_active_to_precharge ",
432                           info->min_active_to_precharge, 1);
433
434         /* Minimum Refresh Recovery Delay Time - tRFC  ps */
435         info->min_refresh_recovery = spd_data[SPD_TRFC_MSB_BYTE] << 8;
436         info->min_refresh_recovery |= spd_data[SPD_TRFC_LSB_BYTE];
437         info->min_refresh_recovery *= time_base;
438         DEBUG_INIT_FULL_C("DRAM min_refresh_recovery ",
439                           info->min_refresh_recovery, 1);
440
441         /*
442          * For DDR3 and DDR2 includes Internal Write To Read Command Delay
443          * field.
444          */
445         info->min_write_to_read_cmd_delay = spd_data[SPD_TWTR_BYTE] * time_base;
446         DEBUG_INIT_FULL_C("DRAM min_write_to_read_cmd_delay ",
447                           info->min_write_to_read_cmd_delay, 1);
448
449         /*
450          * For DDR3 and DDR2 includes Internal Read To Precharge Command Delay
451          * field.
452          */
453         info->min_read_to_prech_cmd_delay = spd_data[SPD_TRTP_BYTE] * time_base;
454         DEBUG_INIT_FULL_C("DRAM min_read_to_prech_cmd_delay ",
455                           info->min_read_to_prech_cmd_delay, 1);
456
457         /*
458          * For DDR3 includes Minimum Activate to Activate/Refresh Command
459          * field
460          */
461         tmp = ((spd_data[SPD_TFAW_MSB_BYTE] & SPD_TFAW_MSB_MASK) << 8) |
462                 spd_data[SPD_TFAW_LSB_BYTE];
463         info->min_four_active_win_delay = tmp * time_base;
464         DEBUG_INIT_FULL_C("DRAM min_four_active_win_delay ",
465                           info->min_four_active_win_delay, 1);
466
467 #if defined(MV88F78X60) || defined(MV88F672X)
468         /* Registered DIMM support */
469         if (info->type_info == SPD_MODULE_TYPE_RDIMM) {
470                 for (rc = 2; rc < 6; rc += 2) {
471                         tmp = spd_data[SPD_RDIMM_RC_BYTE + rc / 2];
472                         info->dimm_rc[rc] =
473                                 spd_data[SPD_RDIMM_RC_BYTE + rc / 2] &
474                                 SPD_RDIMM_RC_NIBBLE_MASK;
475                         info->dimm_rc[rc + 1] =
476                                 (spd_data[SPD_RDIMM_RC_BYTE + rc / 2] >> 4) &
477                                 SPD_RDIMM_RC_NIBBLE_MASK;
478                 }
479
480                 vendor_low = spd_data[66];
481                 vendor_high = spd_data[65];
482                 info->vendor = (vendor_high << 8) + vendor_low;
483                 DEBUG_INIT_C("DDR3 Training Sequence - Registered DIMM vendor ID 0x",
484                              info->vendor, 4);
485
486                 info->dimm_rc[0] = RDIMM_RC0;
487                 info->dimm_rc[1] = RDIMM_RC1;
488                 info->dimm_rc[2] = RDIMM_RC2;
489                 info->dimm_rc[8] = RDIMM_RC8;
490                 info->dimm_rc[9] = RDIMM_RC9;
491                 info->dimm_rc[10] = RDIMM_RC10;
492                 info->dimm_rc[11] = RDIMM_RC11;
493         }
494 #endif
495
496         return MV_OK;
497 }
498
499 /*
500  * Name:     ddr3_spd_sum_init - Merge the SPD parameters of multiple DIMMs.
501  * Desc:     Fold one DIMM's parameters into the worst-case summary info.
502  * Args:     info - DIMM information structure of the current DIMM.
503  *           sum_info - summary structure holding the merged values.
504  *           dimm - index of the current DIMM (0 initializes sum_info).
505  * Returns:  MV_OK on success, error code if the DIMMs do not match.
506  */
507 int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info, u32 dimm)
508 {
509         if (dimm == 0) {
510                 memcpy(sum_info, info, sizeof(MV_DIMM_INFO));
511                 return MV_OK;
512         }
513         if (sum_info->type_info != info->type_info) {
514                 DEBUG_INIT_S("DDR3 Dimm Compare - DIMM type does not match - FAIL\n");
515                 return MV_DDR3_TRAINING_ERR_DIMM_TYPE_NO_MATCH;
516         }
517         if (sum_info->err_check_type > info->err_check_type) {
518                 sum_info->err_check_type = info->err_check_type;
519                 DEBUG_INIT_S("DDR3 Dimm Compare - ECC does not match. ECC is disabled\n");
520         }
521         if (sum_info->data_width != info->data_width) {
522                 DEBUG_INIT_S("DDR3 Dimm Compare - DRAM bus width does not match - FAIL\n");
523                 return MV_DDR3_TRAINING_ERR_BUS_WIDTH_NOT_MATCH;
524         }
525         if (sum_info->min_cycle_time < info->min_cycle_time)
526                 sum_info->min_cycle_time = info->min_cycle_time;
527         if (sum_info->refresh_interval < info->refresh_interval)
528                 sum_info->refresh_interval = info->refresh_interval;
529         sum_info->supported_cas_latencies &= info->supported_cas_latencies;
530         if (sum_info->min_cas_lat_time < info->min_cas_lat_time)
531                 sum_info->min_cas_lat_time = info->min_cas_lat_time;
532         if (sum_info->min_write_recovery_time < info->min_write_recovery_time)
533                 sum_info->min_write_recovery_time =
534                     info->min_write_recovery_time;
535         if (sum_info->min_ras_to_cas_delay < info->min_ras_to_cas_delay)
536                 sum_info->min_ras_to_cas_delay = info->min_ras_to_cas_delay;
537         if (sum_info->min_row_active_to_row_active <
538             info->min_row_active_to_row_active)
539                 sum_info->min_row_active_to_row_active =
540                     info->min_row_active_to_row_active;
541         if (sum_info->min_row_precharge_time < info->min_row_precharge_time)
542                 sum_info->min_row_precharge_time = info->min_row_precharge_time;
543         if (sum_info->min_active_to_precharge < info->min_active_to_precharge)
544                 sum_info->min_active_to_precharge =
545                     info->min_active_to_precharge;
546         if (sum_info->min_refresh_recovery < info->min_refresh_recovery)
547                 sum_info->min_refresh_recovery = info->min_refresh_recovery;
548         if (sum_info->min_write_to_read_cmd_delay <
549             info->min_write_to_read_cmd_delay)
550                 sum_info->min_write_to_read_cmd_delay =
551                     info->min_write_to_read_cmd_delay;
552         if (sum_info->min_read_to_prech_cmd_delay <
553             info->min_read_to_prech_cmd_delay)
554                 sum_info->min_read_to_prech_cmd_delay =
555                     info->min_read_to_prech_cmd_delay;
556         if (sum_info->min_four_active_win_delay <
557             info->min_four_active_win_delay)
558                 sum_info->min_four_active_win_delay =
559                     info->min_four_active_win_delay;
564
565         return MV_OK;
566 }
567
568 /*
569  * Name:     ddr3_dunit_setup
570  * Desc:     Set the controller with the timing values.
571  * Args:     ecc_ena - user ECC setup; hclk_time - HCLK period in ps
572  * Notes:
573  * Returns:  MV_OK on success, error code otherwise.
574  */
575 int ddr3_dunit_setup(u32 ecc_ena, u32 hclk_time, u32 *ddr_width)
576 {
577         u32 reg, tmp, cwl;
578         u32 ddr_clk_time;
579         MV_DIMM_INFO dimm_info[2];
580         MV_DIMM_INFO sum_info;
581         u32 stat_val, spd_val;
582         u32 cs, cl, cs_num, cs_ena;
583         u32 dimm_num = 0;
584         int status;
585         u32 rc;
586         __maybe_unused u32 dimm_cnt, cs_count, dimm;
587         __maybe_unused u32 dimm_addr[2] = { 0, 0 };
588
589 #if defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710)
590         /* Armada 370 - SPD is not available on DIMM */
591         /*
592          * Set the MC registers according to static SPD values -
593          * these must be set manually
594          */
595         /*
596          * We only have one optional DIMM for the DB and we already got the
597          * SPD matching values
598          */
599         status = ddr3_spd_init(&dimm_info[0], 0, *ddr_width);
600         if (MV_OK != status)
601                 return status;
602
603         dimm_num = 1;
604         /* Use JP8 to enable multiCS support for Armada 370 DB */
605         if (!ddr3_check_config(EEPROM_MODULE_ADDR, CONFIG_MULTI_CS))
606                 dimm_info[0].num_of_module_ranks = 1;
607         status = ddr3_spd_sum_init(&dimm_info[0], &sum_info, 0);
608         if (MV_OK != status)
609                 return status;
610 #else
611         /* Dynamic D-Unit Setup - Read SPD values */
612 #ifdef DUNIT_SPD
613         dimm_num = ddr3_get_dimm_num(dimm_addr);
614         if (dimm_num == 0) {
615 #ifdef MIXED_DIMM_STATIC
616                 DEBUG_INIT_S("DDR3 Training Sequence - No DIMMs detected\n");
617 #else
618                 DEBUG_INIT_S("DDR3 Training Sequence - FAILED (Wrong DIMMs Setup)\n");
619                 return MV_DDR3_TRAINING_ERR_BAD_DIMM_SETUP;
620 #endif
621         } else {
622                 DEBUG_INIT_C("DDR3 Training Sequence - Number of DIMMs detected: ",
623                              dimm_num, 1);
624         }
625
626         for (dimm = 0; dimm < dimm_num; dimm++) {
627                 status = ddr3_spd_init(&dimm_info[dimm], dimm_addr[dimm],
628                                        *ddr_width);
629                 if (MV_OK != status)
630                         return status;
631                 status = ddr3_spd_sum_init(&dimm_info[dimm], &sum_info, dimm);
632                 if (MV_OK != status)
633                         return status;
634         }
635 #endif
636 #endif
637
638         /* Set number of enabled CS */
639         cs_num = 0;
640 #ifdef DUNIT_STATIC
641         cs_num = ddr3_get_cs_num_from_reg();
642 #endif
643 #ifdef DUNIT_SPD
644         for (dimm = 0; dimm < dimm_num; dimm++)
645                 cs_num += dimm_info[dimm].num_of_module_ranks;
646 #endif
647         if (cs_num > MAX_CS) {
648                 DEBUG_INIT_C("DDR3 Training Sequence - Number of CS exceed limit -  ",
649                              MAX_CS, 1);
650                 return MV_DDR3_TRAINING_ERR_MAX_CS_LIMIT;
651         }
652
653         /* Set bitmap of enabled CS */
654         cs_ena = 0;
655 #ifdef DUNIT_STATIC
656         cs_ena = ddr3_get_cs_ena_from_reg();
657 #endif
658 #ifdef DUNIT_SPD
659         dimm = 0;
660
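        /*
         * For example, assuming DIMM_CS_BITMAP allows both slots, a single
         * dual-rank DIMM populates CS0/CS1 and yields cs_ena = 0x3; two
         * dual-rank DIMMs yield cs_ena = 0xf.
         */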
661         if (dimm_num) {
662                 for (cs = 0; cs < MAX_CS; cs += 2) {
663                         if (((1 << cs) & DIMM_CS_BITMAP) &&
664                             !(cs_ena & (1 << cs))) {
665                                 if (dimm_info[dimm].num_of_module_ranks == 1)
666                                         cs_ena |= (0x1 << cs);
667                                 else if (dimm_info[dimm].num_of_module_ranks == 2)
668                                         cs_ena |= (0x3 << cs);
669                                 else if (dimm_info[dimm].num_of_module_ranks == 3)
670                                         cs_ena |= (0x7 << cs);
671                                 else if (dimm_info[dimm].num_of_module_ranks == 4)
672                                         cs_ena |= (0xF << cs);
673
674                                 dimm++;
675                                 if (dimm == dimm_num)
676                                         break;
677                         }
678                 }
679         }
680 #endif
681
682         if (cs_ena > 0xF) {
683                 DEBUG_INIT_C("DDR3 Training Sequence - Number of enabled CS exceed limit -  ",
684                              MAX_CS, 1);
685                 return MV_DDR3_TRAINING_ERR_MAX_ENA_CS_LIMIT;
686         }
687
688         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Number of CS = ", cs_num, 1);
689
690         /* Check Ratio - '1' - 2:1, '0' - 1:1 */
691         if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
692                 ddr_clk_time = hclk_time / 2;
693         else
694                 ddr_clk_time = hclk_time;
695
696 #ifdef DUNIT_STATIC
697         /* Get target CL value from set register */
698         reg = (reg_read(REG_DDR3_MR0_ADDR) >> 2);
699         reg = ((((reg >> 1) & 0xE)) | (reg & 0x1)) & 0xF;
700
701         cl = ddr3_get_max_val(ddr3_div(sum_info.min_cas_lat_time,
702                                        ddr_clk_time, 0),
703                               dimm_num, ddr3_valid_cl_to_cl(reg));
704 #else
705         cl = ddr3_div(sum_info.min_cas_lat_time, ddr_clk_time, 0);
706 #endif
707         if (cl < 5)
708                 cl = 5;
709
710         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Cas Latency = ", cl, 1);
711
712         /* {0x00001400} -   DDR SDRAM Configuration Register */
713         reg = 0x73004000;
714         stat_val = ddr3_get_static_mc_value(
715                 REG_SDRAM_CONFIG_ADDR, REG_SDRAM_CONFIG_ECC_OFFS, 0x1, 0, 0);
716         if (ecc_ena && ddr3_get_min_val(sum_info.err_check_type, dimm_num,
717                                         stat_val)) {
718                 reg |= (1 << REG_SDRAM_CONFIG_ECC_OFFS);
719                 reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
720                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Enabled\n");
721         } else {
722                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Disabled\n");
723         }
724
725         if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
726 #ifdef DUNIT_STATIC
727                 DEBUG_INIT_S("DDR3 Training Sequence - FAIL - Illegal R-DIMM setup\n");
728                 return MV_DDR3_TRAINING_ERR_BAD_R_DIMM_SETUP;
729 #endif
730                 reg |= (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS);
731                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - R-DIMM\n");
732         } else {
733                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - U-DIMM\n");
734         }
735
736 #ifndef MV88F67XX
737 #ifdef DUNIT_STATIC
738         if (ddr3_get_min_val(sum_info.data_width, dimm_num, BUS_WIDTH) == 64) {
739 #else
740         if (*ddr_width == 64) {
741 #endif
742                 reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
743                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 64Bits\n");
744         } else {
745                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
746         }
747 #else
748         DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
749 #endif
750
751 #if defined(MV88F672X)
752         if (*ddr_width == 32) {
753                 reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
754                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
755         } else {
756                 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
757         }
758 #endif
759         stat_val = ddr3_get_static_mc_value(REG_SDRAM_CONFIG_ADDR, 0,
760                                                REG_SDRAM_CONFIG_RFRS_MASK, 0, 0);
761         tmp = ddr3_get_min_val(sum_info.refresh_interval / hclk_time,
762                                dimm_num, stat_val);
763
764 #ifdef TREFI_USER_EN
765         tmp = min(TREFI_USER / hclk_time, tmp);
766 #endif
767
768         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - RefreshInterval/Hclk = ", tmp, 4);
769         reg |= tmp;
770
771         if (cl != 3)
772                 reg |= (1 << 16);       /*  If 2:1 need to set P2DWr */
773
774 #if defined(MV88F672X)
775         reg |= (1 << 27);       /* PhyRfRST = Disable */
776 #endif
777         reg_write(REG_SDRAM_CONFIG_ADDR, reg);
778
779         /*{0x00001404}  -   DDR SDRAM Configuration Register */
780         reg = 0x3630B800;
781 #ifdef DUNIT_SPD
782         reg |= (DRAM_2T << REG_DUNIT_CTRL_LOW_2T_OFFS);
783 #endif
784         reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
785
786         /* {0x00001408}  -   DDR SDRAM Timing (Low) Register */
787         reg = 0x0;
788
789         /* tRAS - (0:3,20) */
790         spd_val = ddr3_div(sum_info.min_active_to_precharge,
791                             ddr_clk_time, 1);
792         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
793                                             0, 0xF, 16, 0x10);
794         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
795         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRAS-1 = ", tmp, 1);
796         reg |= (tmp & 0xF);
797         reg |= ((tmp & 0x10) << 16);    /* to bit 20 */
798
799         /* tRCD - (4:7) */
800         spd_val = ddr3_div(sum_info.min_ras_to_cas_delay, ddr_clk_time, 1);
801         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
802                                             4, 0xF, 0, 0);
803         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
804         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRCD-1 = ", tmp, 1);
805         reg |= ((tmp & 0xF) << 4);
806
807         /* tRP - (8:11) */
808         spd_val = ddr3_div(sum_info.min_row_precharge_time, ddr_clk_time, 1);
809         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
810                                             8, 0xF, 0, 0);
811         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
812         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRP-1 = ", tmp, 1);
813         reg |= ((tmp & 0xF) << 8);
814
815         /* tWR - (12:15) */
816         spd_val = ddr3_div(sum_info.min_write_recovery_time, ddr_clk_time, 1);
817         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
818                                             12, 0xF, 0, 0);
819         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
820         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWR-1 = ", tmp, 1);
821         reg |= ((tmp & 0xF) << 12);
822
823         /* tWTR - (16:19) */
824         spd_val = ddr3_div(sum_info.min_write_to_read_cmd_delay, ddr_clk_time, 1);
825         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
826                                             16, 0xF, 0, 0);
827         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
828         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWTR-1 = ", tmp, 1);
829         reg |= ((tmp & 0xF) << 16);
830
831         /* tRRD - (24:27) */
832         spd_val = ddr3_div(sum_info.min_row_active_to_row_active, ddr_clk_time, 1);
833         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
834                                             24, 0xF, 0, 0);
835         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
836         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRRD-1 = ", tmp, 1);
837         reg |= ((tmp & 0xF) << 24);
838
839         /* tRTP - (28:31) */
840         spd_val = ddr3_div(sum_info.min_read_to_prech_cmd_delay, ddr_clk_time, 1);
841         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
842                                             28, 0xF, 0, 0);
843         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
844         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRTP-1 = ", tmp, 1);
845         reg |= ((tmp & 0xF) << 28);
846
847         if (cl < 7)
848                 reg = 0x33137663;
849
850         reg_write(REG_SDRAM_TIMING_LOW_ADDR, reg);
851
852         /*{0x0000140C}  -   DDR SDRAM Timing (High) Register */
853         /* Add cycles to R2R W2W */
854         reg = 0x39F8FF80;
855
856         /* tRFC - (0:6,16:18) */
857         spd_val = ddr3_div(sum_info.min_refresh_recovery, ddr_clk_time, 1);
858         stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_HIGH_ADDR,
859                                             0, 0x7F, 9, 0x380);
860         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
861         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRFC-1 = ", tmp, 1);
862         reg |= (tmp & 0x7F);
863         reg |= ((tmp & 0x380) << 9);    /* to bit 16 */
864         reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
865
866         /*{0x00001410}  -   DDR SDRAM Address Control Register */
867         reg = 0x000F0000;
868
869         /* tFAW - (24:28)  */
870 #if (defined(MV88F78X60) || defined(MV88F672X))
871         tmp = sum_info.min_four_active_win_delay;
872         spd_val = ddr3_div(tmp, ddr_clk_time, 0);
873         stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
874                                             24, 0x3F, 0, 0);
875         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
876         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW = ", tmp, 1);
877         reg |= ((tmp & 0x3F) << 24);
878 #else
879         tmp = sum_info.min_four_active_win_delay -
880                 4 * (sum_info.min_row_active_to_row_active);
881         spd_val = ddr3_div(tmp, ddr_clk_time, 0);
882         stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
883                                             24, 0x1F, 0, 0);
884         tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
885         DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW-4*tRRD = ", tmp, 1);
886         reg |= ((tmp & 0x1F) << 24);
887 #endif
888
889         /* SDRAM device capacity */
890 #ifdef DUNIT_STATIC
891         reg |= (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) & 0xF0FFFF);
892 #endif
893
894 #ifdef DUNIT_SPD
895         cs_count = 0;
896         dimm_cnt = 0;
897         for (cs = 0; cs < MAX_CS; cs++) {
898                 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
899                         if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
900                                 dimm_cnt++;
901                                 cs_count = 0;
902                         }
903                         cs_count++;
904                         if (dimm_info[dimm_cnt].sdram_capacity < 0x3) {
905                                 reg |= ((dimm_info[dimm_cnt].sdram_capacity + 1) <<
906                                         (REG_SDRAM_ADDRESS_SIZE_OFFS +
907                                          (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
908                         } else if (dimm_info[dimm_cnt].sdram_capacity > 0x3) {
909                                 reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x3) <<
910                                         (REG_SDRAM_ADDRESS_SIZE_OFFS +
911                                          (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
912                                 reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x4) <<
913                                         (REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs));
914                         }
915                 }
916         }
917
918         /* SDRAM device structure */
919         cs_count = 0;
920         dimm_cnt = 0;
921         for (cs = 0; cs < MAX_CS; cs++) {
922                 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
923                         if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
924                                 dimm_cnt++;
925                                 cs_count = 0;
926                         }
927                         cs_count++;
928                         if (dimm_info[dimm_cnt].sdram_width == 16)
929                                 reg |= (1 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs));
930                 }
931         }
932 #endif
933         reg_write(REG_SDRAM_ADDRESS_CTRL_ADDR, reg);
934
935         /*{0x00001418}  -   DDR SDRAM Operation Register */
936         reg = 0xF00;
937         for (cs = 0; cs < MAX_CS; cs++) {
938                 if (cs_ena & (1 << cs))
939                         reg &= ~(1 << (cs + REG_SDRAM_OPERATION_CS_OFFS));
940         }
941         reg_write(REG_SDRAM_OPERATION_ADDR, reg);
942
943         /*{0x00001420}  -   DDR SDRAM Extended Mode Register */
944         reg = 0x00000004;
945         reg_write(REG_SDRAM_EXT_MODE_ADDR, reg);
946
947         /*{0x00001424}  -   DDR Controller Control (High) Register */
948 #if (defined(MV88F78X60) || defined(MV88F672X))
949         reg = 0x0000D3FF;
950 #else
951         reg = 0x0100D1FF;
952 #endif
953         reg_write(REG_DDR_CONT_HIGH_ADDR, reg);
954
955         /*{0x0000142C}  -   DDR3 Timing Register */
956         reg = 0x014C2F38;
957 #if defined(MV88F78X60) || defined(MV88F672X)
958         reg = 0x1FEC2F38;
959 #endif
960         reg_write(0x142C, reg);
961
962         /*{0x00001484}  - MBus CPU Block Register */
963 #ifdef MV88F67XX
964         if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
965                 reg_write(REG_MBUS_CPU_BLOCK_ADDR, 0x0000E907);
966 #endif
967
968         /*
969          * In case of mixed DIMM and on-board devices, setup parameters
970          * are taken statically
971          */
972         /*{0x00001494}  -   DDR SDRAM ODT Control (Low) Register */
973         reg = odt_config[cs_ena];
974         reg_write(REG_SDRAM_ODT_CTRL_LOW_ADDR, reg);
975
976         /*{0x00001498}  -   DDR SDRAM ODT Control (High) Register */
977         reg = 0x00000000;
978         reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);
979
980         /*{0x0000149C}  -   DDR Dunit ODT Control Register */
981         reg = cs_ena;
982         reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);
983
984         /*{0x000014A0}  -   DDR Dunit ODT Control Register */
985 #if defined(MV88F672X)
986         reg = 0x000006A9;
987         reg_write(REG_DRAM_FIFO_CTRL_ADDR, reg);
988 #endif
989
990         /*{0x000014C0}  -   DRAM Address and Control Driving Strength */
991         reg_write(REG_DRAM_ADDR_CTRL_DRIVE_STRENGTH_ADDR, 0x192435e9);
992
993         /*{0x000014C4}  -   DRAM Data and DQS Driving Strength */
994         reg_write(REG_DRAM_DATA_DQS_DRIVE_STRENGTH_ADDR, 0xB2C35E9);
995
996 #if (defined(MV88F78X60) || defined(MV88F672X))
997         /*{0x000014CC}  -   DRAM Main Pads Calibration Machine Control Register */
998         reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
999         reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg | (1 << 0));
1000 #endif
1001
1002 #if defined(MV88F672X)
1003         /* DRAM Main Pads Calibration Machine Control Register */
1004         /* 0x14CC[4:3] - CalUpdateControl = IntOnly */
1005         reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
1006         reg &= 0xFFFFFFE7;
1007         reg |= (1 << 3);
1008         reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg);
1009 #endif
1010
1011 #ifdef DUNIT_SPD
1012         cs_count = 0;
1013         dimm_cnt = 0;
1014         for (cs = 0; cs < MAX_CS; cs++) {
1015                 if ((1 << cs) & DIMM_CS_BITMAP) {
1016                         if ((1 << cs) & cs_ena) {
1017                                 if (dimm_info[dimm_cnt].num_of_module_ranks ==
1018                                     cs_count) {
1019                                         dimm_cnt++;
1020                                         cs_count = 0;
1021                                 }
1022                                 cs_count++;
1023                                 reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8),
1024                                           dimm_info[dimm_cnt].rank_capacity - 1);
1025                         } else {
1026                                 reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8), 0);
1027                         }
1028                 }
1029         }
1030 #endif
1031
1032         /*{0x00020184}  -   Close FastPath - 2G */
1033         reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, 0);
1034
1035         /*{0x00001538}  -    Read Data Sample Delays Register */
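        /*
         * Each enabled CS gets the CAS latency as its read sample delay;
         * the read-ready delay below adds two extra cycles on top of it.
         */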
1036         reg = 0;
1037         for (cs = 0; cs < MAX_CS; cs++) {
1038                 if (cs_ena & (1 << cs))
1039                         reg |= (cl << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1040         }
1041
1042         reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
1043         DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Sample Delays = ", reg,
1044                           1);
1045
1046         /*{0x0000153C}  -   Read Data Ready Delay Register */
1047         reg = 0;
1048         for (cs = 0; cs < MAX_CS; cs++) {
1049                 if (cs_ena & (1 << cs)) {
1050                         reg |= ((cl + 2) <<
1051                                 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
1052                 }
1053         }
1054         reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
1055         DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Ready Delays = ", reg, 1);
1056
1057         /* Set MR registers */
1058         /* MR0 */
1059         reg = 0x00000600;
1060         tmp = ddr3_cl_to_valid_cl(cl);
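        /* DDR3 MR0: CAS latency code bit 0 goes to A2, bits 3:1 to A6:A4 */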
1061         reg |= ((tmp & 0x1) << 2);
1062         reg |= ((tmp & 0xE) << 3);      /* to bit 4 */
1063         for (cs = 0; cs < MAX_CS; cs++) {
1064                 if (cs_ena & (1 << cs)) {
1065                         reg_write(REG_DDR3_MR0_CS_ADDR +
1066                                   (cs << MR_CS_ADDR_OFFS), reg);
1067                 }
1068         }
1069
1070         /* MR1 */
1071         reg = 0x00000044 & REG_DDR3_MR1_ODT_MASK;
1072         if (cs_num > 1)
1073                 reg = 0x00000046 & REG_DDR3_MR1_ODT_MASK;
1074
1075         for (cs = 0; cs < MAX_CS; cs++) {
1076                 if (cs_ena & (1 << cs)) {
1077                         reg |= odt_static[cs_ena][cs];
1078                         reg_write(REG_DDR3_MR1_CS_ADDR +
1079                                   (cs << MR_CS_ADDR_OFFS), reg);
1080                 }
1081         }
1082
1083         /* MR2 */
1084         if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
1085                 tmp = hclk_time / 2;
1086         else
1087                 tmp = hclk_time;
1088
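        /*
         * Map the DDR clock period (in ps) to the CAS write latency; the
         * thresholds roughly follow the JEDEC DDR3 speed bins (e.g. tCK >=
         * 2500 ps corresponds to DDR3-800 and CWL = 5).
         */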
1089         if (tmp >= 2500)
1090                 cwl = 5;        /* CWL = 5 */
1091         else if (tmp >= 1875 && tmp < 2500)
1092                 cwl = 6;        /* CWL = 6 */
1093         else if (tmp >= 1500 && tmp < 1875)
1094                 cwl = 7;        /* CWL = 7 */
1095         else if (tmp >= 1250 && tmp < 1500)
1096                 cwl = 8;        /* CWL = 8 */
1097         else if (tmp >= 1070 && tmp < 1250)
1098                 cwl = 9;        /* CWL = 9 */
1099         else if (tmp >= 935 && tmp < 1070)
1100                 cwl = 10;       /* CWL = 10 */
1101         else if (tmp >= 833 && tmp < 935)
1102                 cwl = 11;       /* CWL = 11 */
1103         else if (tmp >= 750 && tmp < 833)
1104                 cwl = 12;       /* CWL = 12 */
1105         else {
1106                 cwl = 12;       /* CWL = 12 */
1107                 printf("Unsupported hclk period %d ps\n", tmp);
1108         }
1109
1110         reg = ((cwl - 5) << REG_DDR3_MR2_CWL_OFFS);
1111
1112         for (cs = 0; cs < MAX_CS; cs++) {
1113                 if (cs_ena & (1 << cs)) {
1114                         reg &= REG_DDR3_MR2_ODT_MASK;
1115                         reg |= odt_dynamic[cs_ena][cs];
1116                         reg_write(REG_DDR3_MR2_CS_ADDR +
1117                                   (cs << MR_CS_ADDR_OFFS), reg);
1118                 }
1119         }
1120
1121         /* MR3 */
1122         reg = 0x00000000;
1123         for (cs = 0; cs < MAX_CS; cs++) {
1124                 if (cs_ena & (1 << cs)) {
1125                         reg_write(REG_DDR3_MR3_CS_ADDR +
1126                                   (cs << MR_CS_ADDR_OFFS), reg);
1127                 }
1128         }
1129
1130         /* {0x00001428}  -   DDR ODT Timing (Low) Register */
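        /* The ODT assertion windows below are derived from the CL/CWL values set above */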
1131         reg = 0;
1132         reg |= (((cl - cwl + 1) & 0xF) << 4);
1133         reg |= (((cl - cwl + 6) & 0xF) << 8);
1134         reg |= ((((cl - cwl + 6) >> 4) & 0x1) << 21);
1135         reg |= (((cl - 1) & 0xF) << 12);
1136         reg |= (((cl + 6) & 0x1F) << 16);
1137         reg_write(REG_ODT_TIME_LOW_ADDR, reg);
1138
1139         /* {0x0000147C}  -   DDR ODT Timing (High) Register */
1140         reg = 0x00000071;
1141         reg |= ((cwl - 1) << 8);
1142         reg |= ((cwl + 5) << 12);
1143         reg_write(REG_ODT_TIME_HIGH_ADDR, reg);
1144
1145 #ifdef DUNIT_SPD
1146         /*{0x000015E0} - DDR3 Rank Control Register */
1147         reg = cs_ena;
1148         cs_count = 0;
1149         dimm_cnt = 0;
1150         for (cs = 0; cs < MAX_CS; cs++) {
1151                 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
1152                         if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
1153                                 dimm_cnt++;
1154                                 cs_count = 0;
1155                         }
1156                         cs_count++;
1157
1158                         if (dimm_info[dimm_cnt].addr_mirroring &&
1159                             (cs == 1 || cs == 3) &&
1160                             (sum_info.type_info != SPD_MODULE_TYPE_RDIMM)) {
1161                                 reg |= (1 << (REG_DDR3_RANK_CTRL_MIRROR_OFFS + cs));
1162                                 DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Setting Address Mirroring for CS = ",
1163                                                   cs, 1);
1164                         }
1165                 }
1166         }
1167         reg_write(REG_DDR3_RANK_CTRL_ADDR, reg);
1168 #endif
1169
1170         /*{0xD00015E4}  -   ZQDS Configuration Register */
1171         reg = 0x00203c18;
1172         reg_write(REG_ZQC_CONF_ADDR, reg);
1173
1174         /* {0x00015EC}  -   DDR PHY */
1175 #if defined(MV88F78X60)
1176         reg = 0xF800AAA5;
1177         if (mv_ctrl_rev_get() == MV_78XX0_B0_REV)
1178                 reg = 0xF800A225;
1179 #else
1180         reg = 0xDE000025;
1181 #if defined(MV88F672X)
1182         reg = 0xF800A225;
1183 #endif
1184 #endif
1185         reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1186
1187 #if (defined(MV88F78X60) || defined(MV88F672X))
1188         /* Registered DIMM support - supported only in AXP A0 devices */
1189         /* Currently supported for SPD detection only */
1190         /*
1191          * Flow is according to the Registered DIMM chapter in the
1192          * Functional Spec
1193          */
1194         if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
1195                 DEBUG_INIT_S("DDR3 Training Sequence - Registered DIMM detected\n");
1196
1197                 /* Set commands parity completion */
1198                 reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
1199                 reg &= ~REG_REGISTERED_DRAM_CTRL_PARITY_MASK;
1200                 reg |= 0x8;
1201                 reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
1202
1203                 /* De-assert M_RESETn and assert M_CKE */
1204                 reg_write(REG_SDRAM_INIT_CTRL_ADDR,
1205                           1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1206                 do {
1207                         reg = (reg_read(REG_SDRAM_INIT_CTRL_ADDR)) &
1208                                 (1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1209                 } while (reg);
1210
1211                 for (rc = 0; rc < SPD_RDIMM_RC_NUM; rc++) {
1212                         if (rc != 6 && rc != 7) {
1213                                 /* Set CWA Command */
1214                                 reg = (REG_SDRAM_OPERATION_CMD_CWA &
1215                                        ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
1216                                 reg |= ((dimm_info[0].dimm_rc[rc] &
1217                                          REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1218                                         REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1219                                 reg |= rc << REG_SDRAM_OPERATION_CWA_RC_OFFS;
1220                                 /* Configure - Set Delay - tSTAB/tMRD */
1221                                 if (rc == 2 || rc == 10)
1222                                         reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
1223                                 /* 0x1418 - SDRAM Operation Register */
1224                                 reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1225
1226                                 /*
1227                                  * Poll the "cmd" field in the SDRAM OP
1228                                  * register for 0x0
1229                                  */
1230                                 do {
1231                                         reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
1232                                                 (REG_SDRAM_OPERATION_CMD_MASK);
1233                                 } while (reg);
1234                         }
1235                 }
1236         }
1237 #endif
1238
1239         return MV_OK;
1240 }
1241
1242 /*
243  * Name:     ddr3_div - integer division helper
244  * Desc:     Divide val by divider, rounding up, then subtract sub.
245  * Args:     val - the value
246  *           divider - the divider
247  *           sub - subtraction value
248  * Notes:
249  * Returns:  ceil(val / divider) - sub
1250  */
1251 u32 ddr3_div(u32 val, u32 divider, u32 sub)
1252 {
1253         return val / divider + (val % divider > 0 ? 1 : 0) - sub;
1254 }
1255
1256 /*
1257  * Name:     ddr3_get_max_val
258  * Desc:     Select between the SPD-derived value and the static value.
259  * Args:     spd_val, dimm_num, static_val
260  * Notes:    With DUNIT_STATIC and dimm_num > 0 the larger value is used.
261  * Returns:  The selected value.
1262  */
1263 u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val)
1264 {
1265 #ifdef DUNIT_STATIC
1266         if (dimm_num > 0) {
1267                 if (spd_val >= static_val)
1268                         return spd_val;
1269                 else
1270                         return static_val;
1271         } else {
1272                 return static_val;
1273         }
1274 #else
1275         return spd_val;
1276 #endif
1277 }
1278
1279 /*
1280  * Name:     ddr3_get_min_val
281  * Desc:     Select between the SPD-derived value and the static value.
282  * Args:     spd_val, dimm_num, static_val
283  * Notes:    With DUNIT_STATIC and dimm_num > 0 the smaller value is used.
284  * Returns:  The selected value.
1285  */
1286 u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val)
1287 {
1288 #ifdef DUNIT_STATIC
1289         if (dimm_num > 0) {
1290                 if (spd_val <= static_val)
1291                         return spd_val;
1292                 else
1293                         return static_val;
1294         } else
1295                 return static_val;
1296 #else
1297         return spd_val;
1298 #endif
1299 }
1300 #endif