]> git.kernelconcepts.de Git - karo-tx-uboot.git/blob - arch/arm/cpu/armv7/exynos/dmc_init_ddr3.c
Merge remote-tracking branch 'u-boot-samsung/master'
[karo-tx-uboot.git] / arch / arm / cpu / armv7 / exynos / dmc_init_ddr3.c
1 /*
2  * DDR3 mem setup file for board based on EXYNOS5
3  *
4  * Copyright (C) 2012 Samsung Electronics
5  *
6  * SPDX-License-Identifier:     GPL-2.0+
7  */
8
9 #include <common.h>
10 #include <config.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/cpu.h>
14 #include <asm/arch/dmc.h>
15 #include <asm/arch/power.h>
16 #include "common_setup.h"
17 #include "exynos5_setup.h"
18 #include "clock_init.h"
19
#define TIMEOUT_US              10000
#define NUM_BYTE_LANES          4
#define DEFAULT_DQS             8
/*
 * DEFAULT_DQS replicated into all four byte-lane fields of the read
 * offset register (one byte per lane).  This must use bitwise OR ('|'):
 * the previous logical OR ('||') collapsed the whole expression to 1
 * instead of 0x08080808, programming a bogus default read offset.
 */
#define DEFAULT_DQS_X4          ((DEFAULT_DQS << 24) | (DEFAULT_DQS << 16) \
                                | (DEFAULT_DQS << 8) | (DEFAULT_DQS << 0))
25
26 #ifdef CONFIG_EXYNOS5250
27 static void reset_phy_ctrl(void)
28 {
29         struct exynos5_clock *clk =
30                 (struct exynos5_clock *)samsung_get_base_clock();
31
32         writel(DDR3PHY_CTRL_PHY_RESET_OFF, &clk->lpddr3phy_ctrl);
33         writel(DDR3PHY_CTRL_PHY_RESET, &clk->lpddr3phy_ctrl);
34 }
35
/**
 * Initialise the DDR3 memory controller and PHYs (Exynos5250 variant).
 *
 * Programs driver strength, read latency/burst length, runs ZQ
 * calibration, starts DLL locking, configures DMC bank/timing registers,
 * issues the DRAM init command sequence and optionally runs hardware
 * read (gate) leveling.  The register write order below matters; do not
 * reorder.
 *
 * @param mem	memory timings to program (board/memory specific)
 * @param reset	non-zero to pulse the PHY reset before configuration
 * @return 0 on success, SETUP_ERR_ZQ_CALIBRATION_FAILURE if ZQ
 *	calibration fails, SETUP_ERR_RDLV_COMPLETE_TIMEOUT if read
 *	leveling does not complete in time
 */
int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
{
	unsigned int val;
	struct exynos5_phy_control *phy0_ctrl, *phy1_ctrl;
	struct exynos5_dmc *dmc;
	int i;

	/* PHY1 and (on this SoC) the second register bank sit DMC_OFFSET
	 * above the PHY0/DMC base addresses.
	 */
	phy0_ctrl = (struct exynos5_phy_control *)samsung_get_base_dmc_phy();
	phy1_ctrl = (struct exynos5_phy_control *)(samsung_get_base_dmc_phy()
							+ DMC_OFFSET);
	dmc = (struct exynos5_dmc *)samsung_get_base_dmc_ctrl();

	if (reset)
		reset_phy_ctrl();

	/* Set Impedance Output Driver for CK, CKE, CS and CA lines */
	val = (mem->impedance << CA_CK_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CKE_DRVR_DS_OFFSET) |
		(mem->impedance << CA_CS_DRVR_DS_OFFSET) |
		(mem->impedance << CA_ADR_DRVR_DS_OFFSET);
	writel(val, &phy0_ctrl->phy_con39);
	writel(val, &phy1_ctrl->phy_con39);

	/* Set Read Latency and Burst Length for PHY0 and PHY1 */
	val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
		(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
	writel(val, &phy0_ctrl->phy_con42);
	writel(val, &phy1_ctrl->phy_con42);

	/* ZQ Calibration (output impedance calibration for both PHYs) */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

	/* DQ Signal: program DQS pull-down configuration */
	writel(mem->phy0_pulld_dqs, &phy0_ctrl->phy_con14);
	writel(mem->phy1_pulld_dqs, &phy1_ctrl->phy_con14);

	/* Kick off DFI initialisation in the controller */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
		| (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),
		&dmc->concontrol);

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* DQS Signal */
	writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
	writel(mem->phy1_dqs, &phy1_ctrl->phy_con4);

	writel(mem->phy0_dq, &phy0_ctrl->phy_con6);
	writel(mem->phy1_dq, &phy1_ctrl->phy_con6);

	writel(mem->phy0_tFS, &phy0_ctrl->phy_con10);
	writel(mem->phy1_tFS, &phy1_ctrl->phy_con10);

	/* Program DLL control fields (start point, increment, ref, DLL on) */
	val = (mem->ctrl_start_point << PHY_CON12_CTRL_START_POINT_SHIFT) |
		(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
		(mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
		(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
	writel(val, &phy0_ctrl->phy_con12);
	writel(val, &phy1_ctrl->phy_con12);

	/* Start DLL locking (same value with the start bit added) */
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy0_ctrl->phy_con12);
	writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
	       &phy1_ctrl->phy_con12);

	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

	/* Rewrite concontrol without the dfi_init_start bit */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
	       &dmc->concontrol);

	/* Memory Channel Interleaving Size */
	writel(mem->iv_size, &dmc->ivcontrol);

	/* Chip configuration and base addresses for both chip selects */
	writel(mem->memconfig, &dmc->memconfig0);
	writel(mem->memconfig, &dmc->memconfig1);
	writel(mem->membaseconfig0, &dmc->membaseconfig0);
	writel(mem->membaseconfig1, &dmc->membaseconfig1);

	/* Precharge Configuration */
	writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
	       &dmc->prechconfig);

	/* Power Down mode Configuration */
	writel(mem->dpwrdn_cyc << PWRDNCONFIG_DPWRDN_CYC_SHIFT |
		mem->dsref_cyc << PWRDNCONFIG_DSREF_CYC_SHIFT,
		&dmc->pwrdnconfig);

	/* TimingRow, TimingData, TimingPower and Timingaref
	 * values as per Memory AC parameters
	 */
	writel(mem->timing_ref, &dmc->timingref);
	writel(mem->timing_row, &dmc->timingrow);
	writel(mem->timing_data, &dmc->timingdata);
	writel(mem->timing_power, &dmc->timingpower);

	/* Send PALL (precharge-all) command */
	dmc_config_prech(mem, &dmc->directcmd);

	/* Send NOP, MRS and ZQINIT commands (JEDEC DDR3 init sequence) */
	dmc_config_mrs(mem, &dmc->directcmd);

	if (mem->gate_leveling_enable) {
		/* Enable command issue on the PHYs */
		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Enable initial deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		/* Enable byte read leveling on top of command enable */
		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		val |= BYTE_RDLVL_EN;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Leveling variant of the CON12 value: force bit instead of
		 * dll_on (DLL is bypassed while leveling runs).
		 */
		val = (mem->ctrl_start_point <<
				PHY_CON12_CTRL_START_POINT_SHIFT) |
			(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
			(mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
			(mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
			(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		/* Enable gate leveling in addition to deskew */
		val = PHY_CON2_RESET_VAL;
		val |= INIT_DESKEW_EN;
		val |= RDLVL_GATE_EN;
		writel(val, &phy0_ctrl->phy_con2);
		writel(val, &phy1_ctrl->phy_con2);

		/* Add gate signal control */
		val = PHY_CON0_RESET_VAL;
		val |= P0_CMD_EN;
		val |= BYTE_RDLVL_EN;
		val |= CTRL_SHGATE;
		writel(val, &phy0_ctrl->phy_con0);
		writel(val, &phy1_ctrl->phy_con0);

		/* Clear gate duration adjust field */
		val = PHY_CON1_RESET_VAL;
		val &= ~(CTRL_GATEDURADJ_MASK);
		writel(val, &phy0_ctrl->phy_con1);
		writel(val, &phy1_ctrl->phy_con1);

		/* Start leveling and poll both channels for completion;
		 * roughly TIMEOUT_US iterations of sdelay(100) before we
		 * give up (sdelay is cycle-based, so this is not exact
		 * microseconds).
		 */
		writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);
		i = TIMEOUT_US;
		while ((readl(&dmc->phystatus) &
			(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
			(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
			/*
			 * TODO(waihong): Comment on how long this take to
			 * timeout
			 */
			sdelay(100);
			i--;
		}
		if (!i)
			return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
		writel(CTRL_RDLVL_GATE_DISABLE, &dmc->rdlvl_config);

		/* Drop the DQS pull-downs now leveling is done */
		writel(0, &phy0_ctrl->phy_con14);
		writel(0, &phy1_ctrl->phy_con14);

		/* Restore normal CON12 value with the DLL enabled again */
		val = (mem->ctrl_start_point <<
				PHY_CON12_CTRL_START_POINT_SHIFT) |
			(mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
			(mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
			(mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
			(mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
			(mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
		writel(val, &phy0_ctrl->phy_con12);
		writel(val, &phy1_ctrl->phy_con12);

		update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
	}

	/* Send PALL command */
	dmc_config_prech(mem, &dmc->directcmd);

	writel(mem->memcontrol, &dmc->memcontrol);

	/* Set DMC Concontrol and enable auto-refresh counter */
	writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
		| (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
	return 0;
}
225 #endif
226
227 #ifdef CONFIG_EXYNOS5420
/**
 * RAM address to use in the test.
 *
 * We'll use 4 words at this address and 4 at this address + 0x80 (Ares
 * interleaves channels every 128 bytes).  This will allow us to evaluate all of
 * the chips in a 1 chip per channel (2GB) system and half the chips in a 2
 * chip per channel (4GB) system.  We can't test the 2nd chip since we need to
 * do tests before the 2nd chip is enabled.  Looking at the 2nd chip isn't
 * critical because the 1st and 2nd chip have very similar timings (they'd
 * better have similar timings, since there's only a single adjustment that is
 * shared by both chips).
 */
const unsigned int test_addr = CONFIG_SYS_SDRAM_BASE;

/* Test pattern with which RAM will be tested: alternating-bit words so
 * every data line is exercised both high and low in each byte lane.
 */
static const unsigned int test_pattern[] = {
	0x5a5a5a5a,
	0xa5a5a5a5,
	0xf0f0f0f0,
	0x0f0f0f0f,
};
249
250 /**
251  * This function is a test vector for sw read leveling,
252  * it compares the read data with the written data.
253  *
254  * @param ch                    DMC channel number
255  * @param byte_lane             which DQS byte offset,
256  *                              possible values are 0,1,2,3
257  * @return                      TRUE if memory was good, FALSE if not.
258  */
259 static bool dmc_valid_window_test_vector(int ch, int byte_lane)
260 {
261         unsigned int read_data;
262         unsigned int mask;
263         int i;
264
265         mask = 0xFF << (8 * byte_lane);
266
267         for (i = 0; i < ARRAY_SIZE(test_pattern); i++) {
268                 read_data = readl(test_addr + i * 4 + ch * 0x80);
269                 if ((read_data & mask) != (test_pattern[i] & mask))
270                         return false;
271         }
272
273         return true;
274 }
275
/**
 * This function returns the current read offset value (raw PHY_CON4).
 *
 * The register holds one offset byte per byte lane, in the sign-bit
 * format produced by make_signed_byte() (see dmc_set_read_offset_value()
 * and test_shifts() for how the lanes are packed).
 *
 * @param phy_ctrl	pointer to the current phy controller
 */
static unsigned int dmc_get_read_offset_value(struct exynos5420_phy_control
					       *phy_ctrl)
{
	return readl(&phy_ctrl->phy_con4);
}
286
/**
 * This function performs resync, so that slave DLL is updated.
 *
 * Pulses the OFFSETR3 bit in PHY_CON10 (set, then clear) — presumably
 * the rising edge triggers the PHY to latch the offsets written to
 * PHY_CON4; confirm against the Exynos5420 PHY documentation.
 *
 * @param phy_ctrl	pointer to the current phy controller
 */
static void ddr_phy_set_do_resync(struct exynos5420_phy_control *phy_ctrl)
{
	setbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
	clrbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
}
297
/**
 * This function sets the read offset value register (PHY_CON4) to 'offset'.
 *
 * After writing the register we also call ddr_phy_set_do_resync() so the
 * slave DLL is updated with the new value.
 *
 * @param phy_ctrl	pointer to the current phy controller
 * @param offset	offset to read DQS (one sign-bit-format byte per lane)
 */
static void dmc_set_read_offset_value(struct exynos5420_phy_control *phy_ctrl,
				      unsigned int offset)
{
	writel(offset, &phy_ctrl->phy_con4);
	ddr_phy_set_do_resync(phy_ctrl);
}
312
/**
 * Convert a 2s complement byte to a byte with a sign bit.
 *
 * NOTE: you shouldn't use normal math on the number returned by this function.
 *   As an example, -10 = 0xf6.  After this function -10 = 0x8a.  If you wanted
 *   to do math and get the average of 10 and -10 (should be 0):
 *     0x8a + 0xa = 0x94 (-108)
 *     0x94 / 2   = 0xca (-54)
 *   ...and 0xca = sign bit plus 0x4a, or -74
 *
 * Also note that you lose the ability to represent -128 since there are two
 * representations of 0.
 *
 * @param b	The byte to convert in two's complement.
 * @return	The 7-bit value + sign bit.
 */
unsigned char make_signed_byte(signed char b)
{
	/* Negative: sign bit plus magnitude; non-negative: value as-is. */
	return (b < 0) ? (0x80 | -b) : b;
}
337
338 /**
339  * Test various shifts starting at 'start' and going to 'end'.
340  *
341  * For each byte lane, we'll walk through shift starting at 'start' and going
342  * to 'end' (inclusive).  When we are finally able to read the test pattern
343  * we'll store the value in the results array.
344  *
345  * @param phy_ctrl              pointer to the current phy controller
346  * @param ch                    channel number
347  * @param start                 the start shift.  -127 to 127
348  * @param end                   the end shift.  -127 to 127
349  * @param results               we'll store results for each byte lane.
350  */
351
352 void test_shifts(struct exynos5420_phy_control *phy_ctrl, int ch,
353                  int start, int end, int results[NUM_BYTE_LANES])
354 {
355         int incr = (start < end) ? 1 : -1;
356         int byte_lane;
357
358         for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
359                 int shift;
360
361                 dmc_set_read_offset_value(phy_ctrl, DEFAULT_DQS_X4);
362                 results[byte_lane] = DEFAULT_DQS;
363
364                 for (shift = start; shift != (end + incr); shift += incr) {
365                         unsigned int byte_offsetr;
366                         unsigned int offsetr;
367
368                         byte_offsetr = make_signed_byte(shift);
369
370                         offsetr = dmc_get_read_offset_value(phy_ctrl);
371                         offsetr &= ~(0xFF << (8 * byte_lane));
372                         offsetr |= (byte_offsetr << (8 * byte_lane));
373                         dmc_set_read_offset_value(phy_ctrl, offsetr);
374
375                         if (dmc_valid_window_test_vector(ch, byte_lane)) {
376                                 results[byte_lane] = shift;
377                                 break;
378                         }
379                 }
380         }
381 }
382
383 /**
384  * This function performs SW read leveling to compensate DQ-DQS skew at
385  * receiver it first finds the optimal read offset value on each DQS
386  * then applies the value to PHY.
387  *
388  * Read offset value has its min margin and max margin. If read offset
389  * value exceeds its min or max margin, read data will have corruption.
390  * To avoid this we are doing sw read leveling.
391  *
392  * SW read leveling is:
393  * 1> Finding offset value's left_limit and right_limit
394  * 2> and calculate its center value
395  * 3> finally programs that center value to PHY
396  * 4> then PHY gets its optimal offset value.
397  *
398  * @param phy_ctrl              pointer to the current phy controller
399  * @param ch                    channel number
400  * @param coarse_lock_val       The coarse lock value read from PHY_CON13.
401  *                              (0 - 0x7f)
402  */
403 static void software_find_read_offset(struct exynos5420_phy_control *phy_ctrl,
404                                       int ch, unsigned int coarse_lock_val)
405 {
406         unsigned int offsetr_cent;
407         int byte_lane;
408         int left_limit;
409         int right_limit;
410         int left[NUM_BYTE_LANES];
411         int right[NUM_BYTE_LANES];
412         int i;
413
414         /* Fill the memory with test patterns */
415         for (i = 0; i < ARRAY_SIZE(test_pattern); i++)
416                 writel(test_pattern[i], test_addr + i * 4 + ch * 0x80);
417
418         /* Figure out the limits we'll test with; keep -127 < limit < 127 */
419         left_limit = DEFAULT_DQS - coarse_lock_val;
420         right_limit = DEFAULT_DQS + coarse_lock_val;
421         if (right_limit > 127)
422                 right_limit = 127;
423
424         /* Fill in the location where reads were OK from left and right */
425         test_shifts(phy_ctrl, ch, left_limit, right_limit, left);
426         test_shifts(phy_ctrl, ch, right_limit, left_limit, right);
427
428         /* Make a final value by taking the center between the left and right */
429         offsetr_cent = 0;
430         for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
431                 int temp_center;
432                 unsigned int vmwc;
433
434                 temp_center = (left[byte_lane] + right[byte_lane]) / 2;
435                 vmwc = make_signed_byte(temp_center);
436                 offsetr_cent |= vmwc << (8 * byte_lane);
437         }
438         dmc_set_read_offset_value(phy_ctrl, offsetr_cent);
439 }
440
441 int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
442 {
443         struct exynos5420_clock *clk =
444                 (struct exynos5420_clock *)samsung_get_base_clock();
445         struct exynos5420_power *power =
446                 (struct exynos5420_power *)samsung_get_base_power();
447         struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
448         struct exynos5420_dmc *drex0, *drex1;
449         struct exynos5420_tzasc *tzasc0, *tzasc1;
450         struct exynos5_power *pmu;
451         uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;
452         uint32_t lock0_info, lock1_info;
453         int chip;
454         int i;
455
456         phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
457         phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
458                                                         + DMC_OFFSET);
459         drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
460         drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
461                                                         + DMC_OFFSET);
462         tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
463         tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()
464                                                         + DMC_OFFSET);
465         pmu = (struct exynos5_power *)EXYNOS5420_POWER_BASE;
466
467         /* Enable PAUSE for DREX */
468         setbits_le32(&clk->pause, ENABLE_BIT);
469
470         /* Enable BYPASS mode */
471         setbits_le32(&clk->bpll_con1, BYPASS_EN);
472
473         writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
474         do {
475                 val = readl(&clk->mux_stat_cdrex);
476                 val &= BPLL_SEL_MASK;
477         } while (val != FOUTBPLL);
478
479         clrbits_le32(&clk->bpll_con1, BYPASS_EN);
480
481         /* Specify the DDR memory type as DDR3 */
482         val = readl(&phy0_ctrl->phy_con0);
483         val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
484         val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
485         writel(val, &phy0_ctrl->phy_con0);
486
487         val = readl(&phy1_ctrl->phy_con0);
488         val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
489         val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
490         writel(val, &phy1_ctrl->phy_con0);
491
492         /* Set Read Latency and Burst Length for PHY0 and PHY1 */
493         val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
494                 (mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
495         writel(val, &phy0_ctrl->phy_con42);
496         writel(val, &phy1_ctrl->phy_con42);
497
498         val = readl(&phy0_ctrl->phy_con26);
499         val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
500         val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
501         writel(val, &phy0_ctrl->phy_con26);
502
503         val = readl(&phy1_ctrl->phy_con26);
504         val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
505         val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
506         writel(val, &phy1_ctrl->phy_con26);
507
508         /*
509          * Set Driver strength for CK, CKE, CS & CA to 0x7
510          * Set Driver strength for Data Slice 0~3 to 0x7
511          */
512         val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
513                 (0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
514         val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
515                 (0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
516         writel(val, &phy0_ctrl->phy_con39);
517         writel(val, &phy1_ctrl->phy_con39);
518
519         /* ZQ Calibration */
520         if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
521                           &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
522                 return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
523
524         clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
525         clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);
526
527         /* DQ Signal */
528         val = readl(&phy0_ctrl->phy_con14);
529         val |= mem->phy0_pulld_dqs;
530         writel(val, &phy0_ctrl->phy_con14);
531         val = readl(&phy1_ctrl->phy_con14);
532         val |= mem->phy1_pulld_dqs;
533         writel(val, &phy1_ctrl->phy_con14);
534
535         val = MEM_TERM_EN | PHY_TERM_EN;
536         writel(val, &drex0->phycontrol0);
537         writel(val, &drex1->phycontrol0);
538
539         writel(mem->concontrol |
540                 (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
541                 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
542                 &drex0->concontrol);
543         writel(mem->concontrol |
544                 (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
545                 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
546                 &drex1->concontrol);
547
548         do {
549                 val = readl(&drex0->phystatus);
550         } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
551         do {
552                 val = readl(&drex1->phystatus);
553         } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
554
555         clrbits_le32(&drex0->concontrol, DFI_INIT_START);
556         clrbits_le32(&drex1->concontrol, DFI_INIT_START);
557
558         update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
559         update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);
560
561         /*
562          * Set Base Address:
563          * 0x2000_0000 ~ 0x5FFF_FFFF
564          * 0x6000_0000 ~ 0x9FFF_FFFF
565          */
566         /* MEMBASECONFIG0 */
567         val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
568                 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
569         writel(val, &tzasc0->membaseconfig0);
570         writel(val, &tzasc1->membaseconfig0);
571
572         /* MEMBASECONFIG1 */
573         val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
574                 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
575         writel(val, &tzasc0->membaseconfig1);
576         writel(val, &tzasc1->membaseconfig1);
577
	/*
	 * Memory Channel Interleaving Size
	 * Ares Channel interleaving = 128 bytes
	 */
582         /* MEMCONFIG0/1 */
583         writel(mem->memconfig, &tzasc0->memconfig0);
584         writel(mem->memconfig, &tzasc1->memconfig0);
585         writel(mem->memconfig, &tzasc0->memconfig1);
586         writel(mem->memconfig, &tzasc1->memconfig1);
587
588         /* Precharge Configuration */
589         writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
590                &drex0->prechconfig0);
591         writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
592                &drex1->prechconfig0);
593
594         /*
595          * TimingRow, TimingData, TimingPower and Timingaref
596          * values as per Memory AC parameters
597          */
598         writel(mem->timing_ref, &drex0->timingref);
599         writel(mem->timing_ref, &drex1->timingref);
600         writel(mem->timing_row, &drex0->timingrow0);
601         writel(mem->timing_row, &drex1->timingrow0);
602         writel(mem->timing_data, &drex0->timingdata0);
603         writel(mem->timing_data, &drex1->timingdata0);
604         writel(mem->timing_power, &drex0->timingpower0);
605         writel(mem->timing_power, &drex1->timingpower0);
606
607         if (reset) {
608                 /*
609                  * Send NOP, MRS and ZQINIT commands
610                  * Sending MRS command will reset the DRAM. We should not be
611                  * reseting the DRAM after resume, this will lead to memory
612                  * corruption as DRAM content is lost after DRAM reset
613                  */
614                 dmc_config_mrs(mem, &drex0->directcmd);
615                 dmc_config_mrs(mem, &drex1->directcmd);
616         }
617
618         /*
619          * Get PHY_CON13 from both phys.  Gate CLKM around reading since
620          * PHY_CON13 is glitchy when CLKM is running.  We're paranoid and
621          * wait until we get a "fine lock", though a coarse lock is probably
622          * OK (we only use the coarse numbers below).  We try to gate the
623          * clock for as short a time as possible in case SDRAM is somehow
624          * sensitive.  sdelay(10) in the loop is arbitrary to make sure
625          * there is some time for PHY_CON13 to get updated.  In practice
626          * no delay appears to be needed.
627          */
628         val = readl(&clk->gate_bus_cdrex);
629         while (true) {
630                 writel(val & ~0x1, &clk->gate_bus_cdrex);
631                 lock0_info = readl(&phy0_ctrl->phy_con13);
632                 writel(val, &clk->gate_bus_cdrex);
633
634                 if ((lock0_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
635                         break;
636
637                 sdelay(10);
638         }
639         while (true) {
640                 writel(val & ~0x2, &clk->gate_bus_cdrex);
641                 lock1_info = readl(&phy1_ctrl->phy_con13);
642                 writel(val, &clk->gate_bus_cdrex);
643
644                 if ((lock1_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
645                         break;
646
647                 sdelay(10);
648         }
649
650         if (!reset) {
		/*
		 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
		 * pad retention, CKE goes high. This causes memory contents
		 * not to be retained during DRAM initialization. Therefore,
		 * there is a new control register(0x100431e8[28]) which lets us
		 * release pad retention and retain the memory content until the
		 * initialization is complete.
		 */
659                 writel(PAD_RETENTION_DRAM_COREBLK_VAL,
660                        &power->pad_retention_dram_coreblk_option);
661                 do {
662                         val = readl(&power->pad_retention_dram_status);
663                 } while (val != 0x1);
664
665                 /*
666                  * CKE PAD retention disables DRAM self-refresh mode.
667                  * Send auto refresh command for DRAM refresh.
668                  */
669                 for (i = 0; i < 128; i++) {
670                         for (chip = 0; chip < mem->chips_to_configure; chip++) {
671                                 writel(DIRECT_CMD_REFA |
672                                        (chip << DIRECT_CMD_CHIP_SHIFT),
673                                        &drex0->directcmd);
674                                 writel(DIRECT_CMD_REFA |
675                                        (chip << DIRECT_CMD_CHIP_SHIFT),
676                                        &drex1->directcmd);
677                         }
678                 }
679         }
680
681         if (mem->gate_leveling_enable) {
682                 writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
683                 writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);
684
685                 setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
686                 setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);
687
688                 val = PHY_CON2_RESET_VAL;
689                 val |= INIT_DESKEW_EN;
690                 writel(val, &phy0_ctrl->phy_con2);
691                 writel(val, &phy1_ctrl->phy_con2);
692
693                 val =  readl(&phy0_ctrl->phy_con1);
694                 val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
695                 writel(val, &phy0_ctrl->phy_con1);
696
697                 val =  readl(&phy1_ctrl->phy_con1);
698                 val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
699                 writel(val, &phy1_ctrl->phy_con1);
700
701                 n_lock_w_phy0 = (lock0_info & CTRL_LOCK_COARSE_MASK) >> 2;
702                 n_lock_r = readl(&phy0_ctrl->phy_con12);
703                 n_lock_r &= ~CTRL_DLL_ON;
704                 n_lock_r |= n_lock_w_phy0;
705                 writel(n_lock_r, &phy0_ctrl->phy_con12);
706
707                 n_lock_w_phy1 = (lock1_info & CTRL_LOCK_COARSE_MASK) >> 2;
708                 n_lock_r = readl(&phy1_ctrl->phy_con12);
709                 n_lock_r &= ~CTRL_DLL_ON;
710                 n_lock_r |= n_lock_w_phy1;
711                 writel(n_lock_r, &phy1_ctrl->phy_con12);
712
713                 val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
714                 for (chip = 0; chip < mem->chips_to_configure; chip++) {
715                         writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
716                                &drex0->directcmd);
717                         writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
718                                &drex1->directcmd);
719                 }
720
721                 setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
722                 setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);
723
724                 setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
725                 setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);
726
727                 val = readl(&phy0_ctrl->phy_con1);
728                 val &= ~(CTRL_GATEDURADJ_MASK);
729                 writel(val, &phy0_ctrl->phy_con1);
730
731                 val = readl(&phy1_ctrl->phy_con1);
732                 val &= ~(CTRL_GATEDURADJ_MASK);
733                 writel(val, &phy1_ctrl->phy_con1);
734
735                 writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
736                 i = TIMEOUT_US;
737                 while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
738                         RDLVL_COMPLETE_CHO) && (i > 0)) {
739                         /*
740                          * TODO(waihong): Comment on how long this take to
741                          * timeout
742                          */
743                         sdelay(100);
744                         i--;
745                 }
746                 if (!i)
747                         return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
748                 writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);
749
750                 writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
751                 i = TIMEOUT_US;
752                 while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
753                         RDLVL_COMPLETE_CHO) && (i > 0)) {
754                         /*
755                          * TODO(waihong): Comment on how long this take to
756                          * timeout
757                          */
758                         sdelay(100);
759                         i--;
760                 }
761                 if (!i)
762                         return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
763                 writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);
764
765                 writel(0, &phy0_ctrl->phy_con14);
766                 writel(0, &phy1_ctrl->phy_con14);
767
768                 val = (0x3 << DIRECT_CMD_BANK_SHIFT);
769                 for (chip = 0; chip < mem->chips_to_configure; chip++) {
770                         writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
771                                &drex0->directcmd);
772                         writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
773                                &drex1->directcmd);
774                 }
775
776                 /* Common Settings for Leveling */
777                 val = PHY_CON12_RESET_VAL;
778                 writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
779                 writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);
780
781                 setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
782                 setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);
783         }
784
785         /*
786          * Do software read leveling
787          *
788          * Do this before we turn on auto refresh since the auto refresh can
789          * be in conflict with the resync operation that's part of setting
790          * read leveling.
791          */
792         if (!reset) {
793                 /* restore calibrated value after resume */
794                 dmc_set_read_offset_value(phy0_ctrl, readl(&pmu->pmu_spare1));
795                 dmc_set_read_offset_value(phy1_ctrl, readl(&pmu->pmu_spare2));
796         } else {
797                 software_find_read_offset(phy0_ctrl, 0,
798                                           CTRL_LOCK_COARSE(lock0_info));
799                 software_find_read_offset(phy1_ctrl, 1,
800                                           CTRL_LOCK_COARSE(lock1_info));
801                 /* save calibrated value to restore after resume */
802                 writel(dmc_get_read_offset_value(phy0_ctrl), &pmu->pmu_spare1);
803                 writel(dmc_get_read_offset_value(phy1_ctrl), &pmu->pmu_spare2);
804         }
805
806         /* Send PALL command */
807         dmc_config_prech(mem, &drex0->directcmd);
808         dmc_config_prech(mem, &drex1->directcmd);
809
810         writel(mem->memcontrol, &drex0->memcontrol);
811         writel(mem->memcontrol, &drex1->memcontrol);
812
813         /*
814          * Set DMC Concontrol: Enable auto-refresh counter, provide
815          * read data fetch cycles and enable DREX auto set powerdown
816          * for input buffer of I/O in none read memory state.
817          */
818         writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
819                 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
820                 DMC_CONCONTROL_IO_PD_CON(0x2),
821                 &drex0->concontrol);
822         writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
823                 (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
824                 DMC_CONCONTROL_IO_PD_CON(0x2),
825                 &drex1->concontrol);
826
827         /*
828          * Enable Clock Gating Control for DMC
829          * this saves around 25 mw dmc power as compared to the power
830          * consumption without these bits enabled
831          */
832         setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
833         setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);
834
835         return 0;
836 }
837 #endif