drivers/ddr/marvell/a38x/ddr3_training_leveling.c
1 /*
2  * Copyright (C) Marvell International Ltd. and its affiliates
3  *
4  * SPDX-License-Identifier:     GPL-2.0
5  */
6
7 #include <common.h>
8 #include <spl.h>
9 #include <asm/io.h>
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
12
13 #include "ddr3_init.h"
14
15 #define WL_ITERATION_NUM                10
16 #define ONE_CLOCK_ERROR_SHIFT           2
17 #define ALIGN_ERROR_SHIFT               -2
18
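/* Per-pup byte-lane masks: entry N selects byte N of a 32-bit word. */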
19 static u32 pup_mask_table[] = {
20         0x000000ff,
21         0x0000ff00,
22         0x00ff0000,
23         0xff000000
24 };
25
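/* Write-leveling supplementary phase results, per interface and per bus (pup). */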
26 static struct write_supp_result wr_supp_res[MAX_INTERFACE_NUM][MAX_BUS_NUM];
27
28 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num);
29 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num);
30 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num);
31 static int ddr3_tip_wl_supp_align_err_shift(u32 dev_num, u32 if_id, u32 bus_id,
32                                             u32 bus_id_delta);
33 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
34                                               u32 bus_id, u32 offset,
35                                               u32 bus_id_delta);
36 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
37                                      u32 edge_offset, u32 bus_id_delta);
38 static int ddr3_tip_wl_supp_one_clk_err_shift(u32 dev_num, u32 if_id,
39                                               u32 bus_id, u32 bus_id_delta);
40
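/*
 * Count the chip-selects marked active in the topology map (bus 0 of
 * interface 0); the result is computed once and cached in a static.
 */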
41 u32 hws_ddr3_tip_max_cs_get(void)
42 {
43         u32 c_cs;
44         static u32 max_cs;
45         struct hws_topology_map *tm = ddr3_get_topology_map();
46
47         if (!max_cs) {
48                 for (c_cs = 0; c_cs < NUM_OF_CS; c_cs++) {
49                         VALIDATE_ACTIVE(tm->
50                                         interface_params[0].as_bus_params[0].
51                                         cs_bitmask, c_cs);
52                         max_cs++;
53                 }
54         }
55
56         return max_cs;
57 }
58
59 /*****************************************************************************
60 Dynamic read leveling
61 ******************************************************************************/
62 int ddr3_tip_dynamic_read_leveling(u32 dev_num, u32 freq)
63 {
64         u32 data, mask;
65         u32 max_cs = hws_ddr3_tip_max_cs_get();
66         u32 bus_num, if_id, cl_val;
67         enum hws_speed_bin speed_bin_index;
68         /* save current CS value */
69         u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
70         int is_any_pup_fail = 0;
71         u32 data_read[MAX_INTERFACE_NUM + 1] = { 0 };
72         u8 rl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM];
73         struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
74         u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
75         struct hws_topology_map *tm = ddr3_get_topology_map();
76
77         if (rl_version == 0) {
78                 /* OLD RL machine */
79                 data = 0x40;
80                 data |= (1 << 20);
81
82                 /* TBD multi CS */
83                 CHECK_STATUS(ddr3_tip_if_write(
84                                      dev_num, ACCESS_TYPE_MULTICAST,
85                                      PARAM_NOT_CARE, TRAINING_REG,
86                                      data, 0x11ffff));
87                 CHECK_STATUS(ddr3_tip_if_write(
88                                      dev_num, ACCESS_TYPE_MULTICAST,
89                                      PARAM_NOT_CARE,
90                                      TRAINING_PATTERN_BASE_ADDRESS_REG,
91                                      0, 0xfffffff8));
92                 CHECK_STATUS(ddr3_tip_if_write(
93                                      dev_num, ACCESS_TYPE_MULTICAST,
94                                      PARAM_NOT_CARE, TRAINING_REG,
95                                      (u32)(1 << 31), (u32)(1 << 31)));
96
97                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
98                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
99                         training_result[training_stage][if_id] = TEST_SUCCESS;
100                         if (ddr3_tip_if_polling
101                             (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
102                              (u32)(1 << 31), TRAINING_REG,
103                              MAX_POLLING_ITERATIONS) != MV_OK) {
104                                 DEBUG_LEVELING(
105                                         DEBUG_LEVEL_ERROR,
106                                         ("RL: DDR3 poll failed(1) IF %d\n",
107                                          if_id));
108                                 training_result[training_stage][if_id] =
109                                         TEST_FAILED;
110
111                                 if (debug_mode == 0)
112                                         return MV_FAIL;
113                         }
114                 }
115
116                 /* read read-leveling result */
117                 CHECK_STATUS(ddr3_tip_if_read
118                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
119                               TRAINING_REG, data_read, 1 << 30));
120                 /* exit read leveling mode */
121                 CHECK_STATUS(ddr3_tip_if_write
122                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
123                               TRAINING_SW_2_REG, 0x8, 0x9));
124                 CHECK_STATUS(ddr3_tip_if_write
125                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
126                               TRAINING_SW_1_REG, 1 << 16, 1 << 16));
127
128                 /* disable RL machine all Trn_CS[3:0] , [16:0] */
129
130                 CHECK_STATUS(ddr3_tip_if_write
131                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
132                               TRAINING_REG, 0, 0xf1ffff));
133
134                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
135                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
136                         if ((data_read[if_id] & (1 << 30)) == 0) {
137                                 DEBUG_LEVELING(
138                                         DEBUG_LEVEL_ERROR,
139                                         ("\n_read Leveling failed for IF %d\n",
140                                          if_id));
141                                 training_result[training_stage][if_id] =
142                                         TEST_FAILED;
143                                 if (debug_mode == 0)
144                                         return MV_FAIL;
145                         }
146                 }
147                 return MV_OK;
148         }
149
150         /* NEW RL machine */
151         for (effective_cs = 0; effective_cs < NUM_OF_CS; effective_cs++)
152                 for (bus_num = 0; bus_num < MAX_BUS_NUM; bus_num++)
153                         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++)
154                                 rl_values[effective_cs][bus_num][if_id] = 0;
155
156         for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
157                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
158                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
159                         training_result[training_stage][if_id] = TEST_SUCCESS;
160
161                         /* save current cs enable reg val */
162                         CHECK_STATUS(ddr3_tip_if_read
163                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
164                                       CS_ENABLE_REG, cs_enable_reg_val,
165                                       MASK_ALL_BITS));
166                         /* enable single cs */
167                         CHECK_STATUS(ddr3_tip_if_write
168                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
169                                       CS_ENABLE_REG, (1 << 3), (1 << 3)));
170                 }
171
172                 ddr3_tip_reset_fifo_ptr(dev_num);
173
174                 /*
175                  *     Phase 1: Load pattern (using ODPG)
176                  *
177                  * enter Read Leveling mode
178                  * only 27 bits are masked
179                  * assuming non multi-CS configuration
180                  * write to CS = 0 for the non multi CS configuration, note
181                  * that the results shall be read back to the required CS !!!
182                  */
183
184                 /* BUS count is 0 shifted 26 */
185                 CHECK_STATUS(ddr3_tip_if_write
186                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
187                               ODPG_DATA_CONTROL_REG, 0x3, 0x3));
188                 CHECK_STATUS(ddr3_tip_configure_odpg
189                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
190                               pattern_table[PATTERN_RL].num_of_phases_tx, 0,
191                               pattern_table[PATTERN_RL].num_of_phases_rx, 0, 0,
192                               effective_cs, STRESS_NONE, DURATION_SINGLE));
193
194                 /* load pattern to ODPG */
195                 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
196                                               PARAM_NOT_CARE, PATTERN_RL,
197                                               pattern_table[PATTERN_RL].
198                                               start_addr);
199
200                 /*
201                  *     Phase 2: ODPG to Read Leveling mode
202                  */
203
204                 /* General Training Opcode register */
205                 CHECK_STATUS(ddr3_tip_if_write
206                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
207                               ODPG_WRITE_READ_MODE_ENABLE_REG, 0,
208                               MASK_ALL_BITS));
209
210                 CHECK_STATUS(ddr3_tip_if_write
211                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
212                               ODPG_TRAINING_CONTROL_REG,
213                               (0x301b01 | effective_cs << 2), 0x3c3fef));
214
215                 /* Object1 opcode register 0 & 1 */
216                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
217                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
218                         speed_bin_index =
219                                 tm->interface_params[if_id].speed_bin_index;
220                         cl_val =
221                                 cas_latency_table[speed_bin_index].cl_val[freq];
222                         data = (cl_val << 17) | (0x3 << 25);
223                         mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
224                         CHECK_STATUS(ddr3_tip_if_write
225                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
226                                       ODPG_OBJ1_OPCODE_REG, data, mask));
227                 }
228
229                 /* Set iteration count to max value */
230                 CHECK_STATUS(ddr3_tip_if_write
231                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
232                               TRAINING_OPCODE_1_REG, 0xd00, 0xd00));
233
234                 /*
235                  *     Phase 3: Mask config
236                  */
237
238                 ddr3_tip_dynamic_read_leveling_seq(dev_num);
239
240                 /*
241                  *     Phase 4: Read Leveling execution
242                  */
243
244                 /* temporary jira dunit=14751 */
245                 CHECK_STATUS(ddr3_tip_if_write
246                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
247                               TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
248                 /* configure phy reset value */
249                 CHECK_STATUS(ddr3_tip_if_write
250                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
251                               TRAINING_DBG_3_REG, (0x7f << 24),
252                               (u32)(0xff << 24)));
253                 /* data pup rd reset enable  */
254                 CHECK_STATUS(ddr3_tip_if_write
255                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
256                               SDRAM_CONFIGURATION_REG, 0, (1 << 30)));
257                 /* data pup rd reset disable */
258                 CHECK_STATUS(ddr3_tip_if_write
259                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
260                               SDRAM_CONFIGURATION_REG, (1 << 30), (1 << 30)));
261                 /* training SW override & training RL mode */
262                 CHECK_STATUS(ddr3_tip_if_write
263                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
264                               TRAINING_SW_2_REG, 0x1, 0x9));
265                 /* training enable */
266                 CHECK_STATUS(ddr3_tip_if_write
267                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
268                               TRAINING_REG, (1 << 24) | (1 << 20),
269                               (1 << 24) | (1 << 20)));
270                 CHECK_STATUS(ddr3_tip_if_write
271                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
272                               TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));
273
274                 /********* trigger training *******************/
275                 /* Trigger, poll on status and disable ODPG */
276                 CHECK_STATUS(ddr3_tip_if_write
277                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
278                               ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
279                 CHECK_STATUS(ddr3_tip_if_write
280                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
281                               ODPG_TRAINING_STATUS_REG, 0x1, 0x1));
282
283                 /* check for training done + results pass */
284                 if (ddr3_tip_if_polling
285                     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x2, 0x2,
286                      ODPG_TRAINING_STATUS_REG,
287                      MAX_POLLING_ITERATIONS) != MV_OK) {
288                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
289                                        ("Training Done Failed\n"));
290                         return MV_FAIL;
291                 }
292
293                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
294                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
295                         CHECK_STATUS(ddr3_tip_if_read
296                                      (dev_num, ACCESS_TYPE_UNICAST,
297                                       if_id,
298                                       ODPG_TRAINING_TRIGGER_REG, data_read,
299                                       0x4));
300                         data = data_read[if_id];
301                         if (data != 0x0) {
302                                 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
303                                                ("Training Result Failed\n"));
304                         }
305                 }
306
307                 /*disable ODPG - Back to functional mode */
308                 CHECK_STATUS(ddr3_tip_if_write
309                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
310                               ODPG_ENABLE_REG, 0x1 << ODPG_DISABLE_OFFS,
311                               (0x1 << ODPG_DISABLE_OFFS)));
312                 if (ddr3_tip_if_polling
313                     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x0, 0x1,
314                      ODPG_ENABLE_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
315                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
316                                        ("ODPG disable failed "));
317                         return MV_FAIL;
318                 }
319                 CHECK_STATUS(ddr3_tip_if_write
320                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
321                               ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));
322
323                 /* double loop on bus, pup */
324                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
325                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
326                         /* check training done */
327                         is_any_pup_fail = 0;
328                         for (bus_num = 0;
329                              bus_num < tm->num_of_bus_per_interface;
330                              bus_num++) {
331                                 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
332                                 if (ddr3_tip_if_polling
333                                     (dev_num, ACCESS_TYPE_UNICAST,
334                                      if_id, (1 << 25), (1 << 25),
335                                      mask_results_pup_reg_map[bus_num],
336                                      MAX_POLLING_ITERATIONS) != MV_OK) {
337                                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
338                                                        ("\n_r_l: DDR3 poll failed(2) for bus %d",
339                                                         bus_num));
340                                         is_any_pup_fail = 1;
341                                 } else {
342                                         /* read result per pup */
343                                         CHECK_STATUS(ddr3_tip_if_read
344                                                      (dev_num,
345                                                       ACCESS_TYPE_UNICAST,
346                                                       if_id,
347                                                       mask_results_pup_reg_map
348                                                       [bus_num], data_read,
349                                                       0xff));
350                                         rl_values[effective_cs][bus_num]
351                                                 [if_id] = (u8)data_read[if_id];
352                                 }
353                         }
354
355                         if (is_any_pup_fail == 1) {
356                                 training_result[training_stage][if_id] =
357                                         TEST_FAILED;
358                                 if (debug_mode == 0)
359                                         return MV_FAIL;
360                         }
361                 }
362
363                 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));
364
365                 /*
366                  *     Phase 5: Exit Read Leveling
367                  */
368
369                 CHECK_STATUS(ddr3_tip_if_write
370                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
371                               TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
372                 CHECK_STATUS(ddr3_tip_if_write
373                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
374                               TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
375                 /* set ODPG to functional */
376                 CHECK_STATUS(ddr3_tip_if_write
377                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
378                               ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
379
380                 /*
381                  * Copy the result from the effective CS search to the
382                  * real Functional CS
383                  */
384                 /*ddr3_tip_write_cs_result(dev_num, RL_PHY_REG); */
385                 CHECK_STATUS(ddr3_tip_if_write
386                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
387                               ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
388         }
389
390         for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
391                 /* double loop on bus, pup */
392                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
393                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
394                         for (bus_num = 0;
395                              bus_num < tm->num_of_bus_per_interface;
396                              bus_num++) {
397                                 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
398                                 /* read result per pup from array */
399                                 data = rl_values[effective_cs][bus_num][if_id];
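                /* repack the RL result: keep bits [4:0] and move bits [7:5]
                 * up to [8:6], matching the PHY RL register layout
                 */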
400                                 data = (data & 0x1f) |
401                                         (((data & 0xe0) >> 5) << 6);
402                                 ddr3_tip_bus_write(dev_num,
403                                                    ACCESS_TYPE_UNICAST,
404                                                    if_id,
405                                                    ACCESS_TYPE_UNICAST,
406                                                    bus_num, DDR_PHY_DATA,
407                                                    RL_PHY_REG +
408                                                    ((effective_cs ==
409                                                      0) ? 0x0 : 0x4), data);
410                         }
411                 }
412         }
413         /* Set to 0 after each loop to avoid an illegal value being used */
414         effective_cs = 0;
415
416         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
417                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
418                 /* restore cs enable value */
419                 CHECK_STATUS(ddr3_tip_if_write
420                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
421                               CS_ENABLE_REG, cs_enable_reg_val[if_id],
422                               MASK_ALL_BITS));
423                 if (odt_config != 0) {
424                         CHECK_STATUS(ddr3_tip_write_additional_odt_setting
425                                      (dev_num, if_id));
426                 }
427         }
428
429         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
430                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
431                 if (training_result[training_stage][if_id] == TEST_FAILED)
432                         return MV_FAIL;
433         }
434
435         return MV_OK;
436 }
437
438 /*
439  * Legacy Dynamic write leveling
440  */
441 int ddr3_tip_legacy_dynamic_write_leveling(u32 dev_num)
442 {
443         u32 c_cs, if_id, cs_mask = 0;
444         u32 max_cs = hws_ddr3_tip_max_cs_get();
445         struct hws_topology_map *tm = ddr3_get_topology_map();
446
447         /*
448          * In TRAINING reg (0x15b0) write 0x80000008 | cs_mask:
449          * Trn_start
450          * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training
451          * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training
452          * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included in the DDR3 training
453          * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training
454          * Trn_auto_seq =  write leveling
455          */
456         for (c_cs = 0; c_cs < max_cs; c_cs++)
457                 cs_mask = cs_mask | 1 << (20 + c_cs);
458
459         for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
460                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
461                 CHECK_STATUS(ddr3_tip_if_write
462                              (dev_num, ACCESS_TYPE_MULTICAST, 0,
463                               TRAINING_REG, (0x80000008 | cs_mask),
464                               0xffffffff));
465                 mdelay(20);
466                 if (ddr3_tip_if_polling
467                     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
468                      (u32)0x80000000, TRAINING_REG,
469                      MAX_POLLING_ITERATIONS) != MV_OK) {
470                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
471                                        ("polling failed for Old WL result\n"));
472                         return MV_FAIL;
473                 }
474         }
475
476         return MV_OK;
477 }
478
479 /*
480  * Legacy Dynamic read leveling
481  */
482 int ddr3_tip_legacy_dynamic_read_leveling(u32 dev_num)
483 {
484         u32 c_cs, if_id, cs_mask = 0;
485         u32 max_cs = hws_ddr3_tip_max_cs_get();
486         struct hws_topology_map *tm = ddr3_get_topology_map();
487
488         /*
489          * In TRAINING reg (0x15b0) write 0x80000040 | cs_mask:
490          * Trn_start
491          * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training
492          * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training
493          * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included in the DDR3 training
494          * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training
495          * Trn_auto_seq =  Read Leveling using training pattern
496          */
497         for (c_cs = 0; c_cs < max_cs; c_cs++)
498                 cs_mask = cs_mask | 1 << (20 + c_cs);
499
500         CHECK_STATUS(ddr3_tip_if_write
501                      (dev_num, ACCESS_TYPE_MULTICAST, 0, TRAINING_REG,
502                       (0x80000040 | cs_mask), 0xffffffff));
503         mdelay(100);
504
505         for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
506                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
507                 if (ddr3_tip_if_polling
508                     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
509                      (u32)0x80000000, TRAINING_REG,
510                      MAX_POLLING_ITERATIONS) != MV_OK) {
511                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
512                                        ("polling failed for Old RL result\n"));
513                         return MV_FAIL;
514                 }
515         }
516
517         return MV_OK;
518 }
519
520 /*
521  * Dynamic per bit read leveling
522  */
523 int ddr3_tip_dynamic_per_bit_read_leveling(u32 dev_num, u32 freq)
524 {
525         u32 data, mask;
526         u32 bus_num, if_id, cl_val, bit_num;
527         u32 curr_numb, curr_min_delay;
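        /* ADLL offsets added to the saved phyreg3 value on successive passes */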
528         int adll_array[3] = { 0, -0xa, 0x14 };
529         u32 phyreg3_arr[MAX_INTERFACE_NUM][MAX_BUS_NUM];
530         enum hws_speed_bin speed_bin_index;
531         int is_any_pup_fail = 0;
532         int break_loop = 0;
533         u32 cs_enable_reg_val[MAX_INTERFACE_NUM]; /* save current CS value */
534         u32 data_read[MAX_INTERFACE_NUM];
535         int per_bit_rl_pup_status[MAX_INTERFACE_NUM][MAX_BUS_NUM];
536         u32 data2_write[MAX_INTERFACE_NUM][MAX_BUS_NUM];
537         struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
538         u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
539         struct hws_topology_map *tm = ddr3_get_topology_map();
540
541         for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
542                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
543                 for (bus_num = 0;
544                      bus_num <= tm->num_of_bus_per_interface; bus_num++) {
545                         VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
546                         per_bit_rl_pup_status[if_id][bus_num] = 0;
547                         data2_write[if_id][bus_num] = 0;
548                         /* read current value of phy register 0x3 */
549                         CHECK_STATUS(ddr3_tip_bus_read
550                                      (dev_num, if_id, ACCESS_TYPE_UNICAST,
551                                       bus_num, DDR_PHY_DATA,
552                                       READ_CENTRALIZATION_PHY_REG,
553                                       &phyreg3_arr[if_id][bus_num]));
554                 }
555         }
556
557         /* NEW RL machine */
558         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
559                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
560                 training_result[training_stage][if_id] = TEST_SUCCESS;
561
562                 /* save current cs enable reg val */
563                 CHECK_STATUS(ddr3_tip_if_read
564                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
565                               CS_ENABLE_REG, &cs_enable_reg_val[if_id],
566                               MASK_ALL_BITS));
567                 /* enable single cs */
568                 CHECK_STATUS(ddr3_tip_if_write
569                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
570                               CS_ENABLE_REG, (1 << 3), (1 << 3)));
571         }
572
573         ddr3_tip_reset_fifo_ptr(dev_num);
574         for (curr_numb = 0; curr_numb < 3; curr_numb++) {
575                 /*
576                  *     Phase 1: Load pattern (using ODPG)
577                  *
578                  * enter Read Leveling mode
579                  * only 27 bits are masked
580                  * assuming non multi-CS configuration
581                  * write to CS = 0 for the non multi CS configuration, note that
582                  * the results shall be read back to the required CS !!!
583                  */
584
585                 /* BUS count is 0 shifted 26 */
586                 CHECK_STATUS(ddr3_tip_if_write
587                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
588                               ODPG_DATA_CONTROL_REG, 0x3, 0x3));
589                 CHECK_STATUS(ddr3_tip_configure_odpg
590                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
591                               pattern_table[PATTERN_TEST].num_of_phases_tx, 0,
592                               pattern_table[PATTERN_TEST].num_of_phases_rx, 0,
593                               0, 0, STRESS_NONE, DURATION_SINGLE));
594
595                 /* load pattern to ODPG */
596                 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
597                                               PARAM_NOT_CARE, PATTERN_TEST,
598                                               pattern_table[PATTERN_TEST].
599                                               start_addr);
600
601                 /*
602                  *     Phase 2: ODPG to Read Leveling mode
603                  */
604
605                 /* General Training Opcode register */
606                 CHECK_STATUS(ddr3_tip_if_write
607                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
608                               ODPG_WRITE_READ_MODE_ENABLE_REG, 0,
609                               MASK_ALL_BITS));
610                 CHECK_STATUS(ddr3_tip_if_write
611                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
612                               ODPG_TRAINING_CONTROL_REG, 0x301b01, 0x3c3fef));
613
614                 /* Object1 opcode register 0 & 1 */
615                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
616                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
617                         speed_bin_index =
618                                 tm->interface_params[if_id].speed_bin_index;
619                         cl_val =
620                                 cas_latency_table[speed_bin_index].cl_val[freq];
621                         data = (cl_val << 17) | (0x3 << 25);
622                         mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
623                         CHECK_STATUS(ddr3_tip_if_write
624                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
625                                       ODPG_OBJ1_OPCODE_REG, data, mask));
626                 }
627
628                 /* Set iteration count to max value */
629                 CHECK_STATUS(ddr3_tip_if_write
630                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
631                               TRAINING_OPCODE_1_REG, 0xd00, 0xd00));
632
633                 /*
634                  *     Phase 3: Mask config
635                  */
636
637                 ddr3_tip_dynamic_per_bit_read_leveling_seq(dev_num);
638
639                 /*
640                  *     Phase 4: Read Leveling execution
641                  */
642
643                 /* temporary jira dunit=14751 */
644                 CHECK_STATUS(ddr3_tip_if_write
645                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
646                               TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
647                 /* configure phy reset value */
648                 CHECK_STATUS(ddr3_tip_if_write
649                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
650                               TRAINING_DBG_3_REG, (0x7f << 24),
651                               (u32)(0xff << 24)));
652                 /* data pup rd reset enable  */
653                 CHECK_STATUS(ddr3_tip_if_write
654                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
655                               SDRAM_CONFIGURATION_REG, 0, (1 << 30)));
656                 /* data pup rd reset disable */
657                 CHECK_STATUS(ddr3_tip_if_write
658                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
659                               SDRAM_CONFIGURATION_REG, (1 << 30), (1 << 30)));
660                 /* training SW override & training RL mode */
661                 CHECK_STATUS(ddr3_tip_if_write
662                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
663                               TRAINING_SW_2_REG, 0x1, 0x9));
664                 /* training enable */
665                 CHECK_STATUS(ddr3_tip_if_write
666                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
667                               TRAINING_REG, (1 << 24) | (1 << 20),
668                               (1 << 24) | (1 << 20)));
669                 CHECK_STATUS(ddr3_tip_if_write
670                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
671                               TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));
672
673                 /********* trigger training *******************/
674                 /* Trigger, poll on status and disable ODPG */
675                 CHECK_STATUS(ddr3_tip_if_write
676                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
677                               ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
678                 CHECK_STATUS(ddr3_tip_if_write
679                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
680                               ODPG_TRAINING_STATUS_REG, 0x1, 0x1));
681
682                 /*check for training done + results pass */
683                 if (ddr3_tip_if_polling
684                     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x2, 0x2,
685                      ODPG_TRAINING_STATUS_REG,
686                      MAX_POLLING_ITERATIONS) != MV_OK) {
687                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
688                                        ("Training Done Failed\n"));
689                         return MV_FAIL;
690                 }
691
692                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
693                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
694                         CHECK_STATUS(ddr3_tip_if_read
695                                      (dev_num, ACCESS_TYPE_UNICAST,
696                                       if_id,
697                                       ODPG_TRAINING_TRIGGER_REG, data_read,
698                                       0x4));
699                         data = data_read[if_id];
700                         if (data != 0x0) {
701                                 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
702                                                ("Training Result Failed\n"));
703                         }
704                 }
705
706                 /*disable ODPG - Back to functional mode */
707                 CHECK_STATUS(ddr3_tip_if_write
708                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
709                               ODPG_ENABLE_REG, 0x1 << ODPG_DISABLE_OFFS,
710                               (0x1 << ODPG_DISABLE_OFFS)));
711                 if (ddr3_tip_if_polling
712                     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x0, 0x1,
713                      ODPG_ENABLE_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
714                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
715                                        ("ODPG disable failed "));
716                         return MV_FAIL;
717                 }
718                 CHECK_STATUS(ddr3_tip_if_write
719                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
720                               ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));
721
722                 /* double loop on bus, pup */
723                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
724                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
725                         /* check training done */
726                         for (bus_num = 0;
727                              bus_num < tm->num_of_bus_per_interface;
728                              bus_num++) {
729                                 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
730
731                                 if (per_bit_rl_pup_status[if_id][bus_num]
732                                     == 0) {
733                                         curr_min_delay = 0;
734                                         for (bit_num = 0; bit_num < 8;
735                                              bit_num++) {
736                                                 if (ddr3_tip_if_polling
737                                                     (dev_num,
738                                                      ACCESS_TYPE_UNICAST,
739                                                      if_id, (1 << 25),
740                                                      (1 << 25),
741                                                      mask_results_dq_reg_map
742                                                      [bus_num * 8 + bit_num],
743                                                      MAX_POLLING_ITERATIONS) !=
744                                                     MV_OK) {
745                                                         DEBUG_LEVELING
746                                                                 (DEBUG_LEVEL_ERROR,
747                                                                  ("\n_r_l: DDR3 poll failed(2) for bus %d bit %d\n",
748                                                                   bus_num,
749                                                                   bit_num));
750                                                 } else {
751                                                         /* read result per pup */
752                                                         CHECK_STATUS
753                                                                 (ddr3_tip_if_read
754                                                                  (dev_num,
755                                                                   ACCESS_TYPE_UNICAST,
756                                                                   if_id,
757                                                                   mask_results_dq_reg_map
758                                                                   [bus_num * 8 +
759                                                                    bit_num],
760                                                                   data_read,
761                                                                   MASK_ALL_BITS));
762                                                         data =
763                                                                 (data_read
764                                                                  [if_id] &
765                                                                  0x1f) |
766                                                                 ((data_read
767                                                                   [if_id] &
768                                                                   0xe0) << 1);
769                                                         if (curr_min_delay == 0)
770                                                                 curr_min_delay =
771                                                                         data;
772                                                         else if (data <
773                                                                  curr_min_delay)
774                                                                 curr_min_delay =
775                                                                         data;
776                                                         if (data > data2_write[if_id][bus_num])
777                                                                 data2_write
778                                                                         [if_id]
779                                                                         [bus_num] =
780                                                                         data;
781                                                 }
782                                         }
783
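                                        /* pass when the per-bit delay spread
                                         * (max - min) is within
                                         * MAX_DQ_READ_LEVELING_DELAY
                                         */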
784                                         if (data2_write[if_id][bus_num] <=
785                                             (curr_min_delay +
786                                              MAX_DQ_READ_LEVELING_DELAY)) {
787                                                 per_bit_rl_pup_status[if_id]
788                                                         [bus_num] = 1;
789                                         }
790                                 }
791                         }
792                 }
793
794                 /* check whether a new phyreg3 value needs to be tried */
795                 if (curr_numb < 2) {
796                         /* if there is a DLL that has not been checked yet */
797                         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
798                              if_id++) {
799                                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
800                                 for (bus_num = 0;
801                                      bus_num < tm->num_of_bus_per_interface;
802                                      bus_num++) {
803                                         VALIDATE_ACTIVE(tm->bus_act_mask,
804                                                         bus_num);
805                                         if (per_bit_rl_pup_status[if_id]
806                                             [bus_num] != 1) {
807                                                 /* go to next ADLL value */
808                                                 CHECK_STATUS
809                                                         (ddr3_tip_bus_write
810                                                          (dev_num,
811                                                           ACCESS_TYPE_UNICAST,
812                                                           if_id,
813                                                           ACCESS_TYPE_UNICAST,
814                                                           bus_num, DDR_PHY_DATA,
815                                                           READ_CENTRALIZATION_PHY_REG,
816                                                           (phyreg3_arr[if_id]
817                                                            [bus_num] +
818                                                            adll_array[curr_numb])));
819                                                 break_loop = 1;
820                                                 break;
821                                         }
822                                 }
823                                 if (break_loop)
824                                         break;
825                         }
826                 }               /* if (curr_numb < 2) */
827                 if (!break_loop)
828                         break;
829         }               /* for ( curr_numb = 0; curr_numb <3; curr_numb++) */
830
831         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
832                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
833                 for (bus_num = 0; bus_num < tm->num_of_bus_per_interface;
834                      bus_num++) {
835                         VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
836                         if (per_bit_rl_pup_status[if_id][bus_num] == 1)
837                                 ddr3_tip_bus_write(dev_num,
838                                                    ACCESS_TYPE_UNICAST,
839                                                    if_id,
840                                                    ACCESS_TYPE_UNICAST,
841                                                    bus_num, DDR_PHY_DATA,
842                                                    RL_PHY_REG +
843                                                    CS_REG_VALUE(effective_cs),
844                                                    data2_write[if_id]
845                                                    [bus_num]);
846                         else
847                                 is_any_pup_fail = 1;
848                 }
849
850                 /* TBD flow does not support multi CS */
851                 /*
852                  * cs_bitmask = tm->interface_params[if_id].
853                  * as_bus_params[bus_num].cs_bitmask;
854                  */
855                 /* divide by 4 is used for retrieving the CS number */
856                 /*
857                  * TBD BC2 - what is the PHY address for other
858                  * CS ddr3_tip_write_cs_result() ???
859                  */
860                 /*
861                  * find what should be written to PHY
862                  * - max delay that is less than threshold
863                  */
864                 if (is_any_pup_fail == 1) {
865                         training_result[training_stage][if_id] = TEST_FAILED;
866                         if (debug_mode == 0)
867                                 return MV_FAIL;
868                 }
869         }
870         DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));
871
872         /*
873          *     Phase 5: Exit Read Leveling
874          */
875
876         CHECK_STATUS(ddr3_tip_if_write
877                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
878                       TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
879         CHECK_STATUS(ddr3_tip_if_write
880                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
881                       TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
882         /* set ODPG to functional */
883         CHECK_STATUS(ddr3_tip_if_write
884                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
885                       ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
886         /*
887          * Copy the result from the effective CS search to the real
888          * Functional CS
889          */
890         ddr3_tip_write_cs_result(dev_num, RL_PHY_REG);
891         CHECK_STATUS(ddr3_tip_if_write
892                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
893                       ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
894
895         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
896                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
897                 /* restore cs enable value */
898                 CHECK_STATUS(ddr3_tip_if_write
899                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
900                               CS_ENABLE_REG, cs_enable_reg_val[if_id],
901                               MASK_ALL_BITS));
902                 if (odt_config != 0) {
903                         CHECK_STATUS(ddr3_tip_write_additional_odt_setting
904                                      (dev_num, if_id));
905                 }
906         }
907
908         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
909                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
910                 if (training_result[training_stage][if_id] == TEST_FAILED)
911                         return MV_FAIL;
912         }
913
914         return MV_OK;
915 }
916
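/*
 * Build the (active low) CS mask used by the MRS commands for the given
 * effective CS, covering both per-interface and per-pup (mixed) CS mappings.
 */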
917 int ddr3_tip_calc_cs_mask(u32 dev_num, u32 if_id, u32 effective_cs,
918                           u32 *cs_mask)
919 {
920         u32 all_bus_cs = 0, same_bus_cs;
921         u32 bus_cnt;
922         struct hws_topology_map *tm = ddr3_get_topology_map();
923
924         *cs_mask = same_bus_cs = CS_BIT_MASK;
925
926         /*
927          * In some of the devices (such as BC2) the CS is per pup, and
928          * therefore mixed mode is valid, unlike other devices where the CS
929          * configuration is per interface.
930          * In order to detect this, we apply an 'Or' and an 'And' operation
931          * across the CS bitmasks of all the pups.
932          * If the two results are not the same, it is mixed mode, so all CS
933          * should be configured (when configuring the MRS).
934          */
935         for (bus_cnt = 0; bus_cnt < tm->num_of_bus_per_interface; bus_cnt++) {
936                 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
937
938                 all_bus_cs |= tm->interface_params[if_id].
939                         as_bus_params[bus_cnt].cs_bitmask;
940                 same_bus_cs &= tm->interface_params[if_id].
941                         as_bus_params[bus_cnt].cs_bitmask;
942
943                 /* cs enable is active low */
944                 *cs_mask &= ~tm->interface_params[if_id].
945                         as_bus_params[bus_cnt].cs_bitmask;
946         }
947
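        /* uniform CS mapping on all pups (not mixed mode): enable only
         * the effective CS (bits are active low)
         */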
948         if (all_bus_cs == same_bus_cs)
949                 *cs_mask = (*cs_mask | (~(1 << effective_cs))) & CS_BIT_MASK;
950
951         return MV_OK;
952 }
953
954 /*
955  * Dynamic write leveling
956  */
957 int ddr3_tip_dynamic_write_leveling(u32 dev_num)
958 {
959         u32 reg_data = 0, iter, if_id, bus_cnt;
960         u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
961         u32 cs_mask[MAX_INTERFACE_NUM];
962         u32 read_data_sample_delay_vals[MAX_INTERFACE_NUM] = { 0 };
963         u32 read_data_ready_delay_vals[MAX_INTERFACE_NUM] = { 0 };
964         /* 0 for failure */
965         u32 res_values[MAX_INTERFACE_NUM * MAX_BUS_NUM] = { 0 };
966         u32 test_res = 0;       /* 0 - success for all pup */
967         u32 data_read[MAX_INTERFACE_NUM];
968         u8 wl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM];
969         u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
970         u32 cs_mask0[MAX_INTERFACE_NUM] = { 0 };
971         u32 max_cs = hws_ddr3_tip_max_cs_get();
972         struct hws_topology_map *tm = ddr3_get_topology_map();
973
974         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
975                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
976
977                 training_result[training_stage][if_id] = TEST_SUCCESS;
978
979                 /* save Read Data Sample Delay */
980                 CHECK_STATUS(ddr3_tip_if_read
981                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
982                               READ_DATA_SAMPLE_DELAY,
983                               read_data_sample_delay_vals, MASK_ALL_BITS));
984                 /* save Read Data Ready Delay */
985                 CHECK_STATUS(ddr3_tip_if_read
986                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
987                               READ_DATA_READY_DELAY, read_data_ready_delay_vals,
988                               MASK_ALL_BITS));
989                 /* save current cs reg val */
990                 CHECK_STATUS(ddr3_tip_if_read
991                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
992                               CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));
993         }
994
995         /*
996          *     Phase 1: Set DRAM to Write Leveling mode
997          */
998
999         /* Issue 10 refresh commands to the DRAM on all CS */
1000         for (iter = 0; iter < WL_ITERATION_NUM; iter++) {
1001                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1002                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1003                         CHECK_STATUS(ddr3_tip_if_write
1004                                      (dev_num, ACCESS_TYPE_UNICAST,
1005                                       if_id, SDRAM_OPERATION_REG,
1006                                       (u32)((~(0xf) << 8) | 0x2), 0xf1f));
1007                 }
1008         }
1009         /* check controller back to normal */
1010         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1011                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1012                 if (ddr3_tip_if_polling
1013                     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
1014                      SDRAM_OPERATION_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
1015                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
1016                                        ("WL: DDR3 poll failed(3)"));
1017                 }
1018         }
1019
1020         for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
1021                 /*enable write leveling to all cs  - Q off , WL n */
1022                 /* calculate interface cs mask */
1023                 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MRS1_CMD,
1024                                                     0x1000, 0x1080));
1025
1026                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1027                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1028                         /* cs enable is active low */
1029                         ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
1030                                               &cs_mask[if_id]);
1031                 }
1032
1033                 /* Enable Output buffer to relevant CS - Q on , WL on */
1034                 CHECK_STATUS(ddr3_tip_write_mrs_cmd
1035                              (dev_num, cs_mask, MRS1_CMD, 0x80, 0x1080));
1036
1037                 /*enable odt for relevant CS */
1038                 CHECK_STATUS(ddr3_tip_if_write
1039                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1040                               0x1498, (0x3 << (effective_cs * 2)), 0xf));
1041
1042                 /*
1043                  *     Phase 2: Set training IP to write leveling mode
1044                  */
1045
1046                 CHECK_STATUS(ddr3_tip_dynamic_write_leveling_seq(dev_num));
1047
1048                 /*
1049                  *     Phase 3: Trigger training
1050                  */
1051
1052                 CHECK_STATUS(ddr3_tip_if_write
1053                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1054                               ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
1055
1056                 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
1057                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1058
1059                         /* training done */
1060                         if (ddr3_tip_if_polling
1061                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
1062                              (1 << 1), (1 << 1), ODPG_TRAINING_STATUS_REG,
1063                              MAX_POLLING_ITERATIONS) != MV_OK) {
1064                                 DEBUG_LEVELING(
1065                                         DEBUG_LEVEL_ERROR,
1066                                         ("WL: DDR3 poll (4) failed (Data: 0x%x)\n",
1067                                          reg_data));
1068                         }
1069 #if !defined(CONFIG_ARMADA_38X) /*Disabled. JIRA #1498 */
1070                         else {
1071                                 CHECK_STATUS(ddr3_tip_if_read
1072                                              (dev_num, ACCESS_TYPE_UNICAST,
1073                                               if_id,
1074                                               ODPG_TRAINING_TRIGGER_REG,
1075                                               &reg_data, (1 << 2)));
1076                                 if (reg_data != 0) {
1077                                         DEBUG_LEVELING(
1078                                                 DEBUG_LEVEL_ERROR,
1079                                                 ("WL: WL failed IF %d reg_data=0x%x\n",
1080                                                  if_id, reg_data));
1081                                 }
1082                         }
1083 #endif
1084                 }
1085
1086                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1087                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1088                         /* training done */
1089                         if (ddr3_tip_if_polling
1090                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
1091                              (1 << 1), (1 << 1), ODPG_TRAINING_STATUS_REG,
1092                              MAX_POLLING_ITERATIONS) != MV_OK) {
1093                                 DEBUG_LEVELING(
1094                                         DEBUG_LEVEL_ERROR,
1095                                         ("WL: DDR3 poll (4) failed (Data: 0x%x)\n",
1096                                          reg_data));
1097                         } else {
1098 #if !defined(CONFIG_ARMADA_38X) /*Disabled. JIRA #1498 */
1099                                 CHECK_STATUS(ddr3_tip_if_read
1100                                              (dev_num, ACCESS_TYPE_UNICAST,
1101                                               if_id,
1102                                               ODPG_TRAINING_STATUS_REG,
1103                                               data_read, (1 << 2)));
1104                                 reg_data = data_read[if_id];
1105                                 if (reg_data != 0) {
1106                                         DEBUG_LEVELING(
1107                                                 DEBUG_LEVEL_ERROR,
1108                                                 ("WL: WL failed IF %d reg_data=0x%x\n",
1109                                                  if_id, reg_data));
1110                                 }
1111 #endif
1112
1113                                 /* check for training completion per bus */
1114                                 for (bus_cnt = 0;
1115                                      bus_cnt < tm->num_of_bus_per_interface;
1116                                      bus_cnt++) {
1117                                         VALIDATE_ACTIVE(tm->bus_act_mask,
1118                                                         bus_cnt);
1119                                         /* training status */
1120                                         CHECK_STATUS(ddr3_tip_if_read
1121                                                      (dev_num,
1122                                                       ACCESS_TYPE_UNICAST,
1123                                                       if_id,
1124                                                       mask_results_pup_reg_map
1125                                                       [bus_cnt], data_read,
1126                                                       (1 << 25)));
1127                                         reg_data = data_read[if_id];
1128                                         DEBUG_LEVELING(
1129                                                 DEBUG_LEVEL_TRACE,
1130                                                 ("WL: IF %d BUS %d reg 0x%x\n",
1131                                                  if_id, bus_cnt, reg_data));
1132                                         if (reg_data == 0) {
1133                                                 res_values[
1134                                                         (if_id *
1135                                                          tm->num_of_bus_per_interface)
1136                                                         + bus_cnt] = 1;
1137                                         }
1138                                         CHECK_STATUS(ddr3_tip_if_read
1139                                                      (dev_num,
1140                                                       ACCESS_TYPE_UNICAST,
1141                                                       if_id,
1142                                                       mask_results_pup_reg_map
1143                                                       [bus_cnt], data_read,
1144                                                       0xff));
1145                                         /*
1146                                          * Save the read value that should be
1147                                          * written to the PHY register
1148                                          */
1149                                         wl_values[effective_cs]
1150                                                 [bus_cnt][if_id] =
1151                                                 (u8)data_read[if_id];
1152                                 }
1153                         }
1154                 }
1155
1156                 /*
1157                  *     Phase 4: Exit write leveling mode
1158                  */
1159
1160                 /* disable DQs toggling */
1161                 CHECK_STATUS(ddr3_tip_if_write
1162                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1163                               WR_LEVELING_DQS_PATTERN_REG, 0x0, 0x1));
1164
1165                 /* Update MRS 1 (WL off) */
1166                 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MRS1_CMD,
1167                                                     0x1000, 0x1080));
1168
1169                 /* Update MRS 1 (return to functional mode - Q on , WL off) */
1170                 CHECK_STATUS(ddr3_tip_write_mrs_cmd
1171                              (dev_num, cs_mask0, MRS1_CMD, 0x0, 0x1080));
1172
1173                 /* set phy to normal mode */
1174                 CHECK_STATUS(ddr3_tip_if_write
1175                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1176                               TRAINING_SW_2_REG, 0x5, 0x7));
1177
1178                 /* exit sw override mode  */
1179                 CHECK_STATUS(ddr3_tip_if_write
1180                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1181                               TRAINING_SW_2_REG, 0x4, 0x7));
1182         }
1183
1184         /*
1185          *     Phase 5: Load WL values to each PHY
1186          */
1187
1188         for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
1189                 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1190                         VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1191                         test_res = 0;
1192                         for (bus_cnt = 0;
1193                              bus_cnt < tm->num_of_bus_per_interface;
1194                              bus_cnt++) {
1195                                 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
1196                                 /* check if result == pass */
1197                                 if (res_values
1198                                     [(if_id *
1199                                       tm->num_of_bus_per_interface) +
1200                                      bus_cnt] == 0) {
1201                                         /*
1202                                          * read result control register
1203                                          * according to pup
1204                                          */
1205                                         reg_data =
1206                                                 wl_values[effective_cs][bus_cnt]
1207                                                 [if_id];
1208                                         /*
1209                                          * Write into write leveling register
1210                                          * ([4:0] ADLL, [8:6] Phase, [15:10]
1211                                          * (centralization) ADLL + 0x10)
1212                                          */
1213                                         reg_data =
1214                                                 (reg_data & 0x1f) |
1215                                                 (((reg_data & 0xe0) >> 5) << 6) |
1216                                                 (((reg_data & 0x1f) +
1217                                                   phy_reg1_val) << 10);
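                                        /*
                                         * For example, a raw result of 0x47
                                         * (ADLL 0x07 in bits [4:0], phase 2
                                         * in bits [7:5]) is repacked as
                                         * 0x07 | (2 << 6) |
                                         * ((0x07 + phy_reg1_val) << 10);
                                         * assuming phy_reg1_val is 0x10,
                                         * that gives 0x5c87.
                                         */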
1218                                         ddr3_tip_bus_write(
1219                                                 dev_num,
1220                                                 ACCESS_TYPE_UNICAST,
1221                                                 if_id,
1222                                                 ACCESS_TYPE_UNICAST,
1223                                                 bus_cnt,
1224                                                 DDR_PHY_DATA,
1225                                                 WL_PHY_REG +
1226                                                 effective_cs *
1227                                                 CS_REGISTER_ADDR_OFFSET,
1228                                                 reg_data);
1229                                 } else {
1230                                         test_res = 1;
1231                                         /*
1232                                          * read result control register
1233                                          * according to pup
1234                                          */
1235                                         CHECK_STATUS(ddr3_tip_if_read
1236                                                      (dev_num,
1237                                                       ACCESS_TYPE_UNICAST,
1238                                                       if_id,
1239                                                       mask_results_pup_reg_map
1240                                                       [bus_cnt], data_read,
1241                                                       0xff));
1242                                         reg_data = data_read[if_id];
1243                                         DEBUG_LEVELING(
1244                                                 DEBUG_LEVEL_ERROR,
1245                                                 ("WL: IF %d BUS %d failed, reg 0x%x\n",
1246                                                  if_id, bus_cnt, reg_data));
1247                                 }
1248                         }
1249
1250                         if (test_res != 0) {
1251                                 training_result[training_stage][if_id] =
1252                                         TEST_FAILED;
1253                         }
1254                 }
1255         }
1256         /* Reset to 0 after each loop so a stale value is not used later */
1257         effective_cs = 0;
1258
1259         /*
1260          * Copy the result from the effective CS search to the real
1261          * Functional CS
1262          */
1263         /* ddr3_tip_write_cs_result(dev_num, WL_PHY_REG); */
1264         /* restore saved values */
1265         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1266                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1267                 /* restore Read Data Sample Delay */
1268                 CHECK_STATUS(ddr3_tip_if_write
1269                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
1270                               READ_DATA_SAMPLE_DELAY,
1271                               read_data_sample_delay_vals[if_id],
1272                               MASK_ALL_BITS));
1273
1274                 /* restore Read Data Ready Delay */
1275                 CHECK_STATUS(ddr3_tip_if_write
1276                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
1277                               READ_DATA_READY_DELAY,
1278                               read_data_ready_delay_vals[if_id],
1279                               MASK_ALL_BITS));
1280
1281                 /* enable multi cs */
1282                 CHECK_STATUS(ddr3_tip_if_write
1283                              (dev_num, ACCESS_TYPE_UNICAST, if_id,
1284                               CS_ENABLE_REG, cs_enable_reg_val[if_id],
1285                               MASK_ALL_BITS));
1286         }
1287
1288         /* Disable modt0 for CS0 training - needs adjustment for multi CS */
1289         CHECK_STATUS(ddr3_tip_if_write
1290                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
1291                       0x0, 0xf));
1292
1293         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1294                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1295                 if (training_result[training_stage][if_id] == TEST_FAILED)
1296                         return MV_FAIL;
1297         }
1298
1299         return MV_OK;
1300 }
1301
1302 /*
1303  * Dynamic write leveling supplementary
1304  */
1305 int ddr3_tip_dynamic_write_leveling_supp(u32 dev_num)
1306 {
1307         int adll_offset;
1308         u32 if_id, bus_id, data, data_tmp;
1309         int is_if_fail = 0;
1310         struct hws_topology_map *tm = ddr3_get_topology_map();
1311
1312         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1313                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1314                 is_if_fail = 0;
1315
1316                 for (bus_id = 0; bus_id < GET_TOPOLOGY_NUM_OF_BUSES();
1317                      bus_id++) {
1318                         VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1319                         wr_supp_res[if_id][bus_id].is_pup_fail = 1;
1320                         CHECK_STATUS(ddr3_tip_bus_read
1321                                      (dev_num, if_id, ACCESS_TYPE_UNICAST,
1322                                       bus_id, DDR_PHY_DATA,
1323                                       WRITE_CENTRALIZATION_PHY_REG +
1324                                       effective_cs * CS_REGISTER_ADDR_OFFSET,
1325                                       &data));
1326                         DEBUG_LEVELING(
1327                                 DEBUG_LEVEL_TRACE,
1328                                 ("WL Supp: adll_offset=0 data delay = %d\n",
1329                                  data));
1330                         if (ddr3_tip_wl_supp_align_phase_shift
1331                             (dev_num, if_id, bus_id, 0, 0) == MV_OK) {
1332                                 DEBUG_LEVELING(
1333                                         DEBUG_LEVEL_TRACE,
1334                                         ("WL Supp: IF %d bus_id %d adll_offset=0 Success !\n",
1335                                          if_id, bus_id));
1336                                 continue;
1337                         }
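                        /*
                         * The phase-shift check failed at the current write
                         * centralization (ADLL) value; nudge the ADLL a few
                         * taps in each direction (+5, then -5) and retry
                         * before declaring this PUP as failed.
                         */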
1338
1339                         /* change adll */
1340                         adll_offset = 5;
1341                         CHECK_STATUS(ddr3_tip_bus_write
1342                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
1343                                       ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
1344                                       WRITE_CENTRALIZATION_PHY_REG +
1345                                       effective_cs * CS_REGISTER_ADDR_OFFSET,
1346                                       data + adll_offset));
1347                         CHECK_STATUS(ddr3_tip_bus_read
1348                                      (dev_num, if_id, ACCESS_TYPE_UNICAST,
1349                                       bus_id, DDR_PHY_DATA,
1350                                       WRITE_CENTRALIZATION_PHY_REG +
1351                                       effective_cs * CS_REGISTER_ADDR_OFFSET,
1352                                       &data_tmp));
1353                         DEBUG_LEVELING(
1354                                 DEBUG_LEVEL_TRACE,
1355                                 ("WL Supp: adll_offset= %d data delay = %d\n",
1356                                  adll_offset, data_tmp));
1357
1358                         if (ddr3_tip_wl_supp_align_phase_shift
1359                             (dev_num, if_id, bus_id, adll_offset, 0) == MV_OK) {
1360                                 DEBUG_LEVELING(
1361                                         DEBUG_LEVEL_TRACE,
1362                                         ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
1363                                          if_id, bus_id, adll_offset));
1364                                 continue;
1365                         }
1366
1367                         /* change adll */
1368                         adll_offset = -5;
1369                         CHECK_STATUS(ddr3_tip_bus_write
1370                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
1371                                       ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
1372                                       WRITE_CENTRALIZATION_PHY_REG +
1373                                       effective_cs * CS_REGISTER_ADDR_OFFSET,
1374                                       data + adll_offset));
1375                         CHECK_STATUS(ddr3_tip_bus_read
1376                                      (dev_num, if_id, ACCESS_TYPE_UNICAST,
1377                                       bus_id, DDR_PHY_DATA,
1378                                       WRITE_CENTRALIZATION_PHY_REG +
1379                                       effective_cs * CS_REGISTER_ADDR_OFFSET,
1380                                       &data_tmp));
1381                         DEBUG_LEVELING(
1382                                 DEBUG_LEVEL_TRACE,
1383                                 ("WL Supp: adll_offset= %d data delay = %d\n",
1384                                  adll_offset, data_tmp));
1385                         if (ddr3_tip_wl_supp_align_phase_shift
1386                             (dev_num, if_id, bus_id, adll_offset, 0) == MV_OK) {
1387                                 DEBUG_LEVELING(
1388                                         DEBUG_LEVEL_TRACE,
1389                                         ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
1390                                          if_id, bus_id, adll_offset));
1391                                 continue;
1392                         } else {
1393                                 DEBUG_LEVELING(
1394                                         DEBUG_LEVEL_ERROR,
1395                                         ("WL Supp: IF %d bus_id %d Failed !\n",
1396                                          if_id, bus_id));
1397                                 is_if_fail = 1;
1398                         }
1399                 }
1400                 DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1401                                ("WL Supp: IF %d bus_id %d is_if_fail %d\n",
1402                                 if_id, bus_id, is_if_fail));
1403
1404                 if (is_if_fail == 1) {
1405                         DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
1406                                        ("WL Supp: IF %d failed\n", if_id));
1407                         training_result[training_stage][if_id] = TEST_FAILED;
1408                 } else {
1409                         training_result[training_stage][if_id] = TEST_SUCCESS;
1410                 }
1411         }
1412
1413         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1414                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1415                 if (training_result[training_stage][if_id] == TEST_FAILED)
1416                         return MV_FAIL;
1417         }
1418
1419         return MV_OK;
1420 }
1421
1422 /*
1423  * Phase Shift
1424  */
1425 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
1426                                               u32 bus_id, u32 offset,
1427                                               u32 bus_id_delta)
1428 {
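        /*
         * Run the pattern compare first with no shift, then shifted by
         * ONE_CLOCK_ERROR_SHIFT and ALIGN_ERROR_SHIFT words; whichever case
         * matches decides the corrective action and is recorded in
         * wr_supp_res[].stage.
         */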
1429         wr_supp_res[if_id][bus_id].stage = PHASE_SHIFT;
1430         if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
1431                                       0, bus_id_delta) == MV_OK) {
1432                 wr_supp_res[if_id][bus_id].is_pup_fail = 0;
1433                 return MV_OK;
1434         } else if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
1435                                              ONE_CLOCK_ERROR_SHIFT,
1436                                              bus_id_delta) == MV_OK) {
1437                 /* 1 clock error */
1438                 wr_supp_res[if_id][bus_id].stage = CLOCK_SHIFT;
1439                 DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1440                                ("Supp: one clock error for if %d pup %d with offset %d - success\n",
1441                                 if_id, bus_id, offset));
1442                 ddr3_tip_wl_supp_one_clk_err_shift(dev_num, if_id, bus_id, 0);
1443                 wr_supp_res[if_id][bus_id].is_pup_fail = 0;
1444                 return MV_OK;
1445         } else if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
1446                                              ALIGN_ERROR_SHIFT,
1447                                              bus_id_delta) == MV_OK) {
1448                 /* align error */
1449                 DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1450                                ("Supp: align error for if %d pup %d with offset %d - success\n",
1451                                 if_id, bus_id, offset));
1452                 wr_supp_res[if_id][bus_id].stage = ALIGN_SHIFT;
1453                 ddr3_tip_wl_supp_align_err_shift(dev_num, if_id, bus_id, 0);
1454                 wr_supp_res[if_id][bus_id].is_pup_fail = 0;
1455                 return MV_OK;
1456         } else {
1457                 wr_supp_res[if_id][bus_id].is_pup_fail = 1;
1458                 return MV_FAIL;
1459         }
1460 }
1461
1462 /*
1463  * Compare Test
1464  */
1465 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
1466                                      u32 edge_offset, u32 bus_id_delta)
1467 {
1468         u32 num_of_succ_byte_compare, word_in_pattern, abs_offset, i;
1469         int word_offset; /* may go negative when ALIGN_ERROR_SHIFT is used */
1470         u32 read_pattern[TEST_PATTERN_LENGTH * 2];
1471         struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
1472         u32 pattern_test_pattern_table[8];
1473
1474         for (i = 0; i < 8; i++) {
1475                 pattern_test_pattern_table[i] =
1476                         pattern_table_get_word(dev_num, PATTERN_TEST, (u8)i);
1477         }
1478
1479         /* external write, then read back and compare */
1480         CHECK_STATUS(ddr3_tip_ext_write
1481                      (dev_num, if_id,
1482                       (pattern_table[PATTERN_TEST].start_addr +
1483                        ((SDRAM_CS_SIZE + 1) * effective_cs)), 1,
1484                       pattern_test_pattern_table));
1485
1486         CHECK_STATUS(ddr3_tip_reset_fifo_ptr(dev_num));
1487
1488         CHECK_STATUS(ddr3_tip_ext_read
1489                      (dev_num, if_id,
1490                       (pattern_table[PATTERN_TEST].start_addr +
1491                        ((SDRAM_CS_SIZE + 1) * effective_cs)), 1, read_pattern));
1492
1493         DEBUG_LEVELING(
1494                 DEBUG_LEVEL_TRACE,
1495                 ("XSB-compt: IF %d bus_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1496                  if_id, bus_id, read_pattern[0], read_pattern[1],
1497                  read_pattern[2], read_pattern[3], read_pattern[4],
1498                  read_pattern[5], read_pattern[6], read_pattern[7]));
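        /*
         * edge_offset shifts the index into the expected pattern so that
         * data that came back shifted by a whole number of words (see
         * ONE_CLOCK_ERROR_SHIFT and ALIGN_ERROR_SHIFT) is still recognized
         * as a systematic shift rather than a random mismatch.
         */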
1499
1500         /* compare byte per pup */
1501         num_of_succ_byte_compare = 0;
1502         for (word_in_pattern = start_xsb_offset;
1503              word_in_pattern < (TEST_PATTERN_LENGTH * 2); word_in_pattern++) {
1504                 word_offset = word_in_pattern + (int)edge_offset;
1505                 if ((word_offset > (TEST_PATTERN_LENGTH * 2 - 1)) ||
1506                     (word_offset < 0))
1507                         continue;
1508
1509                 if ((read_pattern[word_in_pattern] & pup_mask_table[bus_id]) ==
1510                     (pattern_test_pattern_table[word_offset] &
1511                      pup_mask_table[bus_id]))
1512                         num_of_succ_byte_compare++;
1513         }
1514
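        /*
         * Every word that still falls inside the pattern after the shift
         * must have matched; e.g. with edge_offset = 2 and
         * start_xsb_offset = 0 the required count is
         * (TEST_PATTERN_LENGTH * 2) - 2. Note that edge_offset may carry a
         * negative shift (ALIGN_ERROR_SHIFT) cast through u32, hence the
         * cast below.
         */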
1515         abs_offset = ((int)edge_offset > 0) ? edge_offset : -edge_offset;
1516         if (num_of_succ_byte_compare == ((TEST_PATTERN_LENGTH * 2) -
1517                                          abs_offset - start_xsb_offset)) {
1518                 DEBUG_LEVELING(
1519                         DEBUG_LEVEL_TRACE,
1520                         ("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Success\n",
1521                          if_id, bus_id, num_of_succ_byte_compare));
1522                 return MV_OK;
1523         } else {
1524                 DEBUG_LEVELING(
1525                         DEBUG_LEVEL_TRACE,
1526                         ("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Fail !\n",
1527                          if_id, bus_id, num_of_succ_byte_compare));
1528
1529                 DEBUG_LEVELING(
1530                         DEBUG_LEVEL_TRACE,
1531                         ("XSB-compt: expected 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1532                          pattern_test_pattern_table[0],
1533                          pattern_test_pattern_table[1],
1534                          pattern_test_pattern_table[2],
1535                          pattern_test_pattern_table[3],
1536                          pattern_test_pattern_table[4],
1537                          pattern_test_pattern_table[5],
1538                          pattern_test_pattern_table[6],
1539                          pattern_test_pattern_table[7]));
1540                 DEBUG_LEVELING(
1541                         DEBUG_LEVEL_TRACE,
1542                         ("XSB-compt: received 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1543                          read_pattern[0], read_pattern[1],
1544                          read_pattern[2], read_pattern[3],
1545                          read_pattern[4], read_pattern[5],
1546                          read_pattern[6], read_pattern[7]));
1547
1548                 DEBUG_LEVELING(
1549                         DEBUG_LEVEL_TRACE,
1550                         ("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Fail !\n",
1551                          if_id, bus_id, num_of_succ_byte_compare));
1552
1553                 return MV_FAIL;
1554         }
1555 }
1556
1557 /*
1558  * Clock error shift - moves the write leveling delay one clock cycle forward
1559  */
1560 static int ddr3_tip_wl_supp_one_clk_err_shift(u32 dev_num, u32 if_id,
1561                                               u32 bus_id, u32 bus_id_delta)
1562 {
1563         int phase, adll;
1564         u32 data;
1565         DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("One_clk_err_shift\n"));
1566
1567         CHECK_STATUS(ddr3_tip_bus_read
1568                      (dev_num, if_id, ACCESS_TYPE_UNICAST, bus_id,
1569                       DDR_PHY_DATA, WL_PHY_REG, &data));
1570         phase = ((data >> 6) & 0x7);
1571         adll = data & 0x1f;
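        /* WL PHY register layout: ADLL in bits [4:0], phase in bits [8:6] */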
1572         DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
1573                        ("One_clk_err_shift: IF %d bus_id %d phase %d adll %d\n",
1574                         if_id, bus_id, phase, adll));
1575
1576         if ((phase == 0) || (phase == 1)) {
1577                 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1578                              (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_id,
1579                               DDR_PHY_DATA, 0, (phase + 2), 0x1f));
1580         } else if (phase == 2) {
1581                 if (adll < 6) {
1582                         data = (3 << 6) + (0x1f);
1583                         CHECK_STATUS(ddr3_tip_bus_read_modify_write
1584                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
1585                                       bus_id, DDR_PHY_DATA, 0, data,
1586                                       (0x7 << 6 | 0x1f)));
1587                         data = 0x2f;
1588                         CHECK_STATUS(ddr3_tip_bus_read_modify_write
1589                                      (dev_num, ACCESS_TYPE_UNICAST, if_id,
1590                                       bus_id, DDR_PHY_DATA, 1, data, 0x3f));
1591                 }
1592         } else {
1593                 /* phase 3 or above - cannot shift one more clock */
1594                 return MV_FAIL;
1595         }
1596
1597         return MV_OK;
1598 }
1599
1600 /*
1601  * Align error shift
1602  */
1603 static int ddr3_tip_wl_supp_align_err_shift(u32 dev_num, u32 if_id,
1604                                             u32 bus_id, u32 bus_id_delta)
1605 {
1606         int phase, adll;
1607         u32 data;
1608
1609         /* Shift WL result 1 phase back */
1610         CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST,
1611                                        bus_id, DDR_PHY_DATA, WL_PHY_REG,
1612                                        &data));
1613         phase = ((data >> 6) & 0x7);
1614         adll = data & 0x1f;
1615         DEBUG_LEVELING(
1616                 DEBUG_LEVEL_TRACE,
1617                 ("Wl_supp_align_err_shift: IF %d bus_id %d phase %d adll %d\n",
1618                  if_id, bus_id, phase, adll));
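        /*
         * Correction strategy: phase 2 or 3 is simply pulled back by two
         * phase steps with the ADLL kept as is; phase 1 with an ADLL near
         * its maximum wraps to phase 0 / ADLL 0 and register 1 is adjusted;
         * any other combination cannot be shifted and is reported as a
         * failure.
         */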
1619
1620         if (phase < 2) {
1621                 if (adll > 0x1a) {
1622                         if (phase == 0)
1623                                 return MV_FAIL;
1624
1625                         if (phase == 1) {
1626                                 data = 0;
1627                                 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1628                                              (dev_num, ACCESS_TYPE_UNICAST,
1629                                               if_id, bus_id, DDR_PHY_DATA,
1630                                               0, data, (0x7 << 6 | 0x1f)));
1631                                 data = 0xf;
1632                                 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1633                                              (dev_num, ACCESS_TYPE_UNICAST,
1634                                               if_id, bus_id, DDR_PHY_DATA,
1635                                               1, data, 0x1f));
1636                                 return MV_OK;
1637                         }
1638                 } else {
1639                         return MV_FAIL;
1640                 }
1641         } else if ((phase == 2) || (phase == 3)) {
1642                 phase = phase - 2;
1643                 data = (phase << 6) + (adll & 0x1f);
1644                 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1645                              (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_id,
1646                               DDR_PHY_DATA, 0, data, (0x7 << 6 | 0x1f)));
1647                 return MV_OK;
1648         } else {
1649                 DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
1650                                ("Wl_supp_align_err_shift: unexpected phase\n"));
1651
1652                 return MV_FAIL;
1653         }
1654
1655         return MV_OK;
1656 }
1657
1658 /*
1659  * Dynamic write leveling sequence
1660  */
1661 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num)
1662 {
1663         u32 bus_id, dq_id;
1664         u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
1665         u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
1666         struct hws_topology_map *tm = ddr3_get_topology_map();
1667
1668         CHECK_STATUS(ddr3_tip_if_write
1669                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1670                       TRAINING_SW_2_REG, 0x1, 0x5));
1671         CHECK_STATUS(ddr3_tip_if_write
1672                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1673                       TRAINING_WRITE_LEVELING_REG, 0x50, 0xff));
1674         CHECK_STATUS(ddr3_tip_if_write
1675                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1676                       TRAINING_WRITE_LEVELING_REG, 0x5c, 0xff));
1677         CHECK_STATUS(ddr3_tip_if_write
1678                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1679                       ODPG_TRAINING_CONTROL_REG, 0x381b82, 0x3c3faf));
1680         CHECK_STATUS(ddr3_tip_if_write
1681                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1682                       ODPG_OBJ1_OPCODE_REG, (0x3 << 25), (0x3ffff << 9)));
1683         CHECK_STATUS(ddr3_tip_if_write
1684                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1685                       ODPG_OBJ1_ITER_CNT_REG, 0x80, 0xffff));
1686         CHECK_STATUS(ddr3_tip_if_write
1687                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1688                       ODPG_WRITE_LEVELING_DONE_CNTR_REG, 0x14, 0xff));
1689         CHECK_STATUS(ddr3_tip_if_write
1690                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1691                       TRAINING_WRITE_LEVELING_REG, 0xff5c, 0xffff));
1692
1693         /* mask PBS */
1694         for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1695                 CHECK_STATUS(ddr3_tip_if_write
1696                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1697                               mask_results_dq_reg_map[dq_id], 0x1 << 24,
1698                               0x1 << 24));
1699         }
1700
1701         /* Mask all results */
1702         for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1703                 CHECK_STATUS(ddr3_tip_if_write
1704                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1705                               mask_results_pup_reg_map[bus_id], 0x1 << 24,
1706                               0x1 << 24));
1707         }
1708
1709         /* Unmask only wanted */
1710         for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1711                 VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1712                 CHECK_STATUS(ddr3_tip_if_write
1713                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1714                               mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
1715         }
1716
1717         CHECK_STATUS(ddr3_tip_if_write
1718                      (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1719                       WR_LEVELING_DQS_PATTERN_REG, 0x1, 0x1));
1720
1721         return MV_OK;
1722 }
1723
1724 /*
1725  * Dynamic read leveling sequence
1726  */
1727 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num)
1728 {
1729         u32 bus_id, dq_id;
1730         u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
1731         u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
1732         struct hws_topology_map *tm = ddr3_get_topology_map();
1733
1734         /* mask PBS */
1735         for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1736                 CHECK_STATUS(ddr3_tip_if_write
1737                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1738                               mask_results_dq_reg_map[dq_id], 0x1 << 24,
1739                               0x1 << 24));
1740         }
1741
1742         /* Mask all results */
1743         for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1744                 CHECK_STATUS(ddr3_tip_if_write
1745                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1746                               mask_results_pup_reg_map[bus_id], 0x1 << 24,
1747                               0x1 << 24));
1748         }
1749
1750         /* Unmask only wanted */
1751         for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1752                 VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1753                 CHECK_STATUS(ddr3_tip_if_write
1754                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1755                               mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
1756         }
1757
1758         return MV_OK;
1759 }
1760
1761 /*
1762  * Dynamic per-bit read leveling sequence
1763  */
1764 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num)
1765 {
1766         u32 bus_id, dq_id;
1767         u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
1768         u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
1769         struct hws_topology_map *tm = ddr3_get_topology_map();
1770
1771         /* mask PBS */
1772         for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1773                 CHECK_STATUS(ddr3_tip_if_write
1774                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1775                               mask_results_dq_reg_map[dq_id], 0x1 << 24,
1776                               0x1 << 24));
1777         }
1778
1779         /* Mask all results */
1780         for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
1781                 CHECK_STATUS(ddr3_tip_if_write
1782                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1783                               mask_results_pup_reg_map[bus_id], 0x1 << 24,
1784                               0x1 << 24));
1785         }
1786
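        /*
         * Unlike the per-PUP sequences above, per-bit read leveling unmasks
         * the individual DQ result registers so that every data bit reports
         * its own pass/fail status.
         */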
1787         /* Unmask only wanted */
1788         for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
1789                 VALIDATE_ACTIVE(tm->bus_act_mask, dq_id / 8);
1790                 CHECK_STATUS(ddr3_tip_if_write
1791                              (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1792                               mask_results_dq_reg_map[dq_id], 0x0 << 24,
1793                               0x1 << 24));
1794         }
1795
1796         return MV_OK;
1797 }
1798
1799 /*
1800  * Print write leveling supplementary results
1801  */
1802 int ddr3_tip_print_wl_supp_result(u32 dev_num)
1803 {
1804         u32 bus_id = 0, if_id = 0;
1805         struct hws_topology_map *tm = ddr3_get_topology_map();
1806
1807         DEBUG_LEVELING(DEBUG_LEVEL_INFO,
1808                        ("I/F0 PUP0 Result [0 - success, 1 - fail] ...\n"));
1809
1810         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1811                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1812                 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
1813                      bus_id++) {
1814                         VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1815                         DEBUG_LEVELING(DEBUG_LEVEL_INFO,
1816                                        ("%d ,", wr_supp_res[if_id]
1817                                         [bus_id].is_pup_fail));
1818                 }
1819         }
1820         DEBUG_LEVELING(
1821                 DEBUG_LEVEL_INFO,
1822                 ("I/F0 PUP0 Stage [0 - phase_shift, 1 - clock_shift, 2 - align_shift] ...\n"));
1823
1824         for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1825                 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1826                 for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
1827                      bus_id++) {
1828                         VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
1829                         DEBUG_LEVELING(DEBUG_LEVEL_INFO,
1830                                        ("%d ,", wr_supp_res[if_id]
1831                                         [bus_id].stage));
1832                 }
1833         }
1834
1835         return MV_OK;
1836 }