author     Marek Vasut <marex@denx.de>    2015-07-17 02:07:12 +0200
committer  Marek Vasut <marex@denx.de>    2015-08-08 14:14:15 +0200
commit     722c96857044831f46c9bdf263a84ad6d5c87463 (patch)
tree       d2b958b0cf68dc064949e9b77a183b771ffa2a76
parent     51cea0b6c0ef4989ee666dc99665b55dd02891ee (diff)
ddr: altera: Trivial mem_calibrate() indent cleanup
Redo the mega-condition such that if the calibration is to be skipped, the positive branch of the condition does all the work and returns. The negative branch, which is in fact the default behavior, is then converted to code that is no longer conditional. This trims the indentation down by one level.

Signed-off-by: Marek Vasut <marex@denx.de>
-rw-r--r--    drivers/ddr/altera/sequencer.c    249
1 file changed, 129 insertions(+), 120 deletions(-)
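
The heart of the change is an early-return restructuring: the skip-calibration branch now finishes its work and returns, so the default calibration path no longer sits inside an else block and loses one level of indentation. Below is a minimal, compilable sketch of that pattern; apart from the CALIB_SKIP_ALL flag name, the identifiers (skip_path(), full_path(), mem_calibrate_sketch()) are hypothetical stand-ins for illustration, not the actual sequencer.c code.

/*
 * Sketch of the early-return pattern applied by this patch.
 * Names other than CALIB_SKIP_ALL are made up for the example.
 */
#include <stdio.h>

#define CALIB_SKIP_ALL	0x1	/* assumed single-bit flag for the sketch */

static void skip_path(void) { puts("instant-on settings"); }
static void full_path(void) { puts("full calibration"); }

static int mem_calibrate_sketch(unsigned int dyn_calib_steps)
{
	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) {
		/* Skip case: do its work here and return immediately. */
		skip_path();
		return 1;
	}

	/* Default behavior, no longer wrapped in an "else" block. */
	full_path();
	return 1;
}

int main(void)
{
	mem_calibrate_sketch(CALIB_SKIP_ALL);	/* prints "instant-on settings" */
	mem_calibrate_sketch(0);		/* prints "full calibration" */
	return 0;
}

The actual patch follows the same shape: the CALIB_SKIP_ALL branch writes the SCC manager update register and returns 1, and the large calibration loop that used to live in the else branch is re-indented one level to the left, which accounts for most of the 129 insertions and 120 deletions below.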
diff --git a/drivers/ddr/altera/sequencer.c b/drivers/ddr/altera/sequencer.c
index 874868d..77a1308 100644
--- a/drivers/ddr/altera/sequencer.c
+++ b/drivers/ddr/altera/sequencer.c
@@ -3295,152 +3295,161 @@ static uint32_t mem_calibrate(void)
scc_set_bypass_mode(i);
}
+ /* Calibration is skipped. */
if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) {
/*
* Set VFIFO and LFIFO to instant-on settings in skip
* calibration mode.
*/
mem_skip_calibrate();
- } else {
- for (i = 0; i < NUM_CALIB_REPEAT; i++) {
- /*
- * Zero all delay chain/phase settings for all
- * groups and all shadow register sets.
- */
- scc_mgr_zero_all();
-
- run_groups = ~param->skip_groups;
-
- for (write_group = 0, write_test_bgn = 0; write_group
- < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; write_group++,
- write_test_bgn += RW_MGR_MEM_DQ_PER_WRITE_DQS) {
- /* Initialized the group failure */
- group_failed = 0;
-
- current_run = run_groups & ((1 <<
- RW_MGR_NUM_DQS_PER_WRITE_GROUP) - 1);
- run_groups = run_groups >>
- RW_MGR_NUM_DQS_PER_WRITE_GROUP;
-
- if (current_run == 0)
- continue;
- writel(write_group, SDR_PHYGRP_SCCGRP_ADDRESS |
- SCC_MGR_GROUP_COUNTER_OFFSET);
- scc_mgr_zero_group(write_group, 0);
+ /*
+ * Do not remove this line as it makes sure all of our
+ * decisions have been applied.
+ */
+ writel(0, &sdr_scc_mgr->update);
+ return 1;
+ }
- for (read_group = write_group *
- RW_MGR_MEM_IF_READ_DQS_WIDTH /
- RW_MGR_MEM_IF_WRITE_DQS_WIDTH,
- read_test_bgn = 0;
- read_group < (write_group + 1) *
- RW_MGR_MEM_IF_READ_DQS_WIDTH /
- RW_MGR_MEM_IF_WRITE_DQS_WIDTH &&
- group_failed == 0;
- read_group++, read_test_bgn +=
- RW_MGR_MEM_DQ_PER_READ_DQS) {
- /* Calibrate the VFIFO */
- if (!((STATIC_CALIB_STEPS) &
- CALIB_SKIP_VFIFO)) {
- if (!rw_mgr_mem_calibrate_vfifo
- (read_group,
- read_test_bgn)) {
- group_failed = 1;
-
- if (!(gbl->
- phy_debug_mode_flags &
- PHY_DEBUG_SWEEP_ALL_GROUPS)) {
- return 0;
- }
+ /* Calibration is not skipped. */
+ for (i = 0; i < NUM_CALIB_REPEAT; i++) {
+ /*
+ * Zero all delay chain/phase settings for all
+ * groups and all shadow register sets.
+ */
+ scc_mgr_zero_all();
+
+ run_groups = ~param->skip_groups;
+
+ for (write_group = 0, write_test_bgn = 0; write_group
+ < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; write_group++,
+ write_test_bgn += RW_MGR_MEM_DQ_PER_WRITE_DQS) {
+ /* Initialized the group failure */
+ group_failed = 0;
+
+ current_run = run_groups & ((1 <<
+ RW_MGR_NUM_DQS_PER_WRITE_GROUP) - 1);
+ run_groups = run_groups >>
+ RW_MGR_NUM_DQS_PER_WRITE_GROUP;
+
+ if (current_run == 0)
+ continue;
+
+ writel(write_group, SDR_PHYGRP_SCCGRP_ADDRESS |
+ SCC_MGR_GROUP_COUNTER_OFFSET);
+ scc_mgr_zero_group(write_group, 0);
+
+ for (read_group = write_group *
+ RW_MGR_MEM_IF_READ_DQS_WIDTH /
+ RW_MGR_MEM_IF_WRITE_DQS_WIDTH,
+ read_test_bgn = 0;
+ read_group < (write_group + 1) *
+ RW_MGR_MEM_IF_READ_DQS_WIDTH /
+ RW_MGR_MEM_IF_WRITE_DQS_WIDTH &&
+ group_failed == 0;
+ read_group++, read_test_bgn +=
+ RW_MGR_MEM_DQ_PER_READ_DQS) {
+ /* Calibrate the VFIFO */
+ if (!((STATIC_CALIB_STEPS) &
+ CALIB_SKIP_VFIFO)) {
+ if (!rw_mgr_mem_calibrate_vfifo
+ (read_group,
+ read_test_bgn)) {
+ group_failed = 1;
+
+ if (!(gbl->
+ phy_debug_mode_flags &
+ PHY_DEBUG_SWEEP_ALL_GROUPS)) {
+ return 0;
}
}
}
+ }
- /* Calibrate the output side */
- if (group_failed == 0) {
- for (rank_bgn = 0, sr = 0; rank_bgn
- < RW_MGR_MEM_NUMBER_OF_RANKS;
- rank_bgn +=
- NUM_RANKS_PER_SHADOW_REG,
- ++sr) {
- sr_failed = 0;
- if (!((STATIC_CALIB_STEPS) &
- CALIB_SKIP_WRITES)) {
- if ((STATIC_CALIB_STEPS)
- & CALIB_SKIP_DELAY_SWEEPS) {
- /* not needed in quick mode! */
- } else {
- /*
- * Determine if this set of
- * ranks should be skipped
- * entirely.
- */
- if (!param->skip_shadow_regs[sr]) {
- if (!rw_mgr_mem_calibrate_writes
- (rank_bgn, write_group,
- write_test_bgn)) {
- sr_failed = 1;
- if (!(gbl->
- phy_debug_mode_flags &
- PHY_DEBUG_SWEEP_ALL_GROUPS)) {
- return 0;
- }
- }
+ /* Calibrate the output side */
+ if (group_failed == 0) {
+ for (rank_bgn = 0, sr = 0; rank_bgn
+ < RW_MGR_MEM_NUMBER_OF_RANKS;
+ rank_bgn +=
+ NUM_RANKS_PER_SHADOW_REG,
+ ++sr) {
+ sr_failed = 0;
+ if (!((STATIC_CALIB_STEPS) &
+ CALIB_SKIP_WRITES)) {
+ if ((STATIC_CALIB_STEPS)
+ & CALIB_SKIP_DELAY_SWEEPS) {
+ /* not needed in quick mode! */
+ } else {
+ /*
+ * Determine if this set of
+ * ranks should be skipped
+ * entirely.
+ */
+ if (!param->skip_shadow_regs[sr]) {
+ if (!rw_mgr_mem_calibrate_writes
+ (rank_bgn, write_group,
+ write_test_bgn)) {
+ sr_failed = 1;
+ if (!(gbl->
+ phy_debug_mode_flags &
+ PHY_DEBUG_SWEEP_ALL_GROUPS)) {
+ return 0;
+ }
}
}
}
- if (sr_failed != 0)
- group_failed = 1;
}
+ if (sr_failed != 0)
+ group_failed = 1;
}
+ }
- if (group_failed == 0) {
- for (read_group = write_group *
- RW_MGR_MEM_IF_READ_DQS_WIDTH /
- RW_MGR_MEM_IF_WRITE_DQS_WIDTH,
- read_test_bgn = 0;
- read_group < (write_group + 1)
- * RW_MGR_MEM_IF_READ_DQS_WIDTH
- / RW_MGR_MEM_IF_WRITE_DQS_WIDTH &&
- group_failed == 0;
- read_group++, read_test_bgn +=
- RW_MGR_MEM_DQ_PER_READ_DQS) {
- if (!((STATIC_CALIB_STEPS) &
- CALIB_SKIP_WRITES)) {
- if (!rw_mgr_mem_calibrate_vfifo_end
- (read_group, read_test_bgn)) {
- group_failed = 1;
-
- if (!(gbl->phy_debug_mode_flags
- & PHY_DEBUG_SWEEP_ALL_GROUPS)) {
- return 0;
- }
+ if (group_failed == 0) {
+ for (read_group = write_group *
+ RW_MGR_MEM_IF_READ_DQS_WIDTH /
+ RW_MGR_MEM_IF_WRITE_DQS_WIDTH,
+ read_test_bgn = 0;
+ read_group < (write_group + 1)
+ * RW_MGR_MEM_IF_READ_DQS_WIDTH
+ / RW_MGR_MEM_IF_WRITE_DQS_WIDTH &&
+ group_failed == 0;
+ read_group++, read_test_bgn +=
+ RW_MGR_MEM_DQ_PER_READ_DQS) {
+ if (!((STATIC_CALIB_STEPS) &
+ CALIB_SKIP_WRITES)) {
+ if (!rw_mgr_mem_calibrate_vfifo_end
+ (read_group, read_test_bgn)) {
+ group_failed = 1;
+
+ if (!(gbl->phy_debug_mode_flags
+ & PHY_DEBUG_SWEEP_ALL_GROUPS)) {
+ return 0;
}
}
}
}
-
- if (group_failed != 0)
- failing_groups++;
}
+ if (group_failed != 0)
+ failing_groups++;
+ }
+
+ /*
+ * USER If there are any failing groups then report
+ * the failure.
+ */
+ if (failing_groups != 0)
+ return 0;
+
+ /* Calibrate the LFIFO */
+ if (!((STATIC_CALIB_STEPS) & CALIB_SKIP_LFIFO)) {
/*
- * USER If there are any failing groups then report
- * the failure.
+ * If we're skipping groups as part of debug,
+ * don't calibrate LFIFO.
*/
- if (failing_groups != 0)
- return 0;
-
- /* Calibrate the LFIFO */
- if (!((STATIC_CALIB_STEPS) & CALIB_SKIP_LFIFO)) {
- /*
- * If we're skipping groups as part of debug,
- * don't calibrate LFIFO.
- */
- if (param->skip_groups == 0) {
- if (!rw_mgr_mem_calibrate_lfifo())
- return 0;
- }
+ if (param->skip_groups == 0) {
+ if (!rw_mgr_mem_calibrate_lfifo())
+ return 0;
}
}
}