    	pre_all_rec = 0;
    
    	ddr->timing_cfg_8 = (0
    			     | ((rwt_bg & 0xf) << 28)
    			     | ((wrt_bg & 0xf) << 24)
    			     | ((rrt_bg & 0xf) << 20)
    			     | ((wwt_bg & 0xf) << 16)
    			     | ((acttoact_bg & 0xf) << 12)
    			     | ((wrtord_bg & 0xf) << 8)
    			     | ((pre_all_rec & 0x1f) << 0)
    			    );
    
    	debug("FSLDDR: timing_cfg_8 = 0x%08x\n", ddr->timing_cfg_8);
    }
    
    static void set_timing_cfg_9(fsl_ddr_cfg_regs_t *ddr)
    {
    	ddr->timing_cfg_9 = 0;
    	debug("FSLDDR: timing_cfg_9 = 0x%08x\n", ddr->timing_cfg_9);
    }
    
    
    /*
     * This function must be called after set_ddr_sdram_cfg(), because it
     * reads back ddr->ddr_sdram_cfg to check whether accumulated ECC is
     * enabled.
     */
    
    static void set_ddr_dq_mapping(fsl_ddr_cfg_regs_t *ddr,
    			       const dimm_params_t *dimm_params)
    {
    
    	unsigned int acc_ecc_en = (ddr->ddr_sdram_cfg >> 2) & 0x1;
    
    	int i;
    
    	for (i = 0; i < CONFIG_DIMM_SLOTS_PER_CTLR; i++) {
    		if (dimm_params[i].n_ranks)
    			break;
    	}
    	if (i >= CONFIG_DIMM_SLOTS_PER_CTLR) {
    		puts("DDR error: no DIMM found!\n");
    		return;
    	}
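
    	/*
    	 * Illustrative note: each dq_mapping[] entry taken from the DIMM SPD
    	 * is a 6-bit value (hence the & 0x3F masks), and five entries are
    	 * packed into each 32-bit DQ_MAP register at bit offsets 26, 20, 14,
    	 * 8 and 2, as the shifts below show.
    	 */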
    
    	ddr->dq_map_0 = ((dimm_params[i].dq_mapping[0] & 0x3F) << 26) |
    			((dimm_params[i].dq_mapping[1] & 0x3F) << 20) |
    			((dimm_params[i].dq_mapping[2] & 0x3F) << 14) |
    			((dimm_params[i].dq_mapping[3] & 0x3F) << 8) |
    			((dimm_params[i].dq_mapping[4] & 0x3F) << 2);
    
    	ddr->dq_map_1 = ((dimm_params[i].dq_mapping[5] & 0x3F) << 26) |
    			((dimm_params[i].dq_mapping[6] & 0x3F) << 20) |
    			((dimm_params[i].dq_mapping[7] & 0x3F) << 14) |
    			((dimm_params[i].dq_mapping[10] & 0x3F) << 8) |
    			((dimm_params[i].dq_mapping[11] & 0x3F) << 2);
    
    	ddr->dq_map_2 = ((dimm_params[i].dq_mapping[12] & 0x3F) << 26) |
    			((dimm_params[i].dq_mapping[13] & 0x3F) << 20) |
    			((dimm_params[i].dq_mapping[14] & 0x3F) << 14) |
    			((dimm_params[i].dq_mapping[15] & 0x3F) << 8) |
    			((dimm_params[i].dq_mapping[16] & 0x3F) << 2);
    
    	/* dq_map for ECC[4:7] is set to 0 if accumulated ECC is enabled */
    	ddr->dq_map_3 = ((dimm_params[i].dq_mapping[17] & 0x3F) << 26) |
    			((dimm_params[i].dq_mapping[8] & 0x3F) << 20) |
    			(acc_ecc_en ? 0 :
    			 (dimm_params[i].dq_mapping[9] & 0x3F) << 14) |
    			dimm_params[i].dq_mapping_ors;
    
    
    	debug("FSLDDR: dq_map_0 = 0x%08x\n", ddr->dq_map_0);
    	debug("FSLDDR: dq_map_1 = 0x%08x\n", ddr->dq_map_1);
    	debug("FSLDDR: dq_map_2 = 0x%08x\n", ddr->dq_map_2);
    	debug("FSLDDR: dq_map_3 = 0x%08x\n", ddr->dq_map_3);
    }

    static void set_ddr_sdram_cfg_3(fsl_ddr_cfg_regs_t *ddr,
    			       const memctl_options_t *popts)
    {
    	int rd_pre;
    
    	rd_pre = popts->quad_rank_present ? 1 : 0;
    
    	ddr->ddr_sdram_cfg_3 = (rd_pre & 0x1) << 16;
    
    	debug("FSLDDR: ddr_sdram_cfg_3 = 0x%08x\n", ddr->ddr_sdram_cfg_3);
    }
    #endif	/* CONFIG_SYS_FSL_DDR4 */
    
    
    /* DDR ZQ Calibration Control (DDR_ZQ_CNTL) */
    
    static void set_ddr_zq_cntl(fsl_ddr_cfg_regs_t *ddr, unsigned int zq_en)
    {
    	unsigned int zqinit = 0;	/* POR ZQ Calibration Time (tZQinit) */
    	/* Normal Operation Full Calibration Time (tZQoper) */
    	unsigned int zqoper = 0;
    	/* Normal Operation Short Calibration Time (tZQCS) */
    	unsigned int zqcs = 0;
    
    #ifdef CONFIG_SYS_FSL_DDR4
    	unsigned int zqcs_init;
    #endif
    
    	if (zq_en) {
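    		/*
    		 * Illustrative note: the values chosen below appear to encode
    		 * clock counts as powers of two, i.e. a field value of n
    		 * selects 2^n clocks (10 -> 1024, 9 -> 512, 7 -> 128);
    		 * zqcs_init is in refresh sequences per its comment.
    		 */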
    
    #ifdef CONFIG_SYS_FSL_DDR4
    		zqinit = 10;	/* 1024 clocks */
    		zqoper = 9;	/* 512 clocks */
    		zqcs = 7;	/* 128 clocks */
    		zqcs_init = 5;	/* 1024 refresh sequences */
    #else
    		zqinit = 9;	/* 512 clocks */
    		zqoper = 8;	/* 256 clocks */
    		zqcs = 6;	/* 64 clocks */
    #endif
    	}

    	ddr->ddr_zq_cntl = (0
    			    | ((zq_en & 0x1) << 31)
    			    | ((zqinit & 0xF) << 24)
    			    | ((zqoper & 0xF) << 16)
    			    | ((zqcs & 0xF) << 8)
    
    #ifdef CONFIG_SYS_FSL_DDR4
    			    | ((zqcs_init & 0xF) << 0)
    #endif
    			   );

    	debug("FSLDDR: zq_cntl = 0x%08x\n", ddr->ddr_zq_cntl);
    
    }
    
    /* DDR Write Leveling Control (DDR_WRLVL_CNTL) */
    
    static void set_ddr_wrlvl_cntl(fsl_ddr_cfg_regs_t *ddr, unsigned int wrlvl_en,
    				const memctl_options_t *popts)
    {
    	/*
    	 * First DQS pulse rising edge after margining mode
    	 * is programmed (tWL_MRD)
    	 */
    	unsigned int wrlvl_mrd = 0;
    	/* ODT delay after margining mode is programmed (tWL_ODTEN) */
    	unsigned int wrlvl_odten = 0;
    	/* DQS/DQS_ delay after margining mode is programmed (tWL_DQSEN) */
    	unsigned int wrlvl_dqsen = 0;
    	/* WRLVL_SMPL: Write leveling sample time */
    	unsigned int wrlvl_smpl = 0;
    	/* WRLVL_WLR: Write leveling repetition time */
    	unsigned int wrlvl_wlr = 0;
    	/* WRLVL_START: Write leveling start time */
    	unsigned int wrlvl_start = 0;
    
    
    	/* Write leveling is recommended for DDR3 due to its fly-by topology */
    	if (wrlvl_en) {
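    		/*
    		 * Illustrative note: judging from the comments below, the
    		 * MRD/ODTEN/DQSEN fields appear to encode clock counts as
    		 * powers of two (0x6 -> 64 clocks, 0x7 -> 128, 0x5 -> 32);
    		 * this is an observation, not a register specification.
    		 */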
    		/* tWL_MRD min = 40 nCK, we set it 64 */
    		wrlvl_mrd = 0x6;
    		/* tWL_ODTEN 128 */
    		wrlvl_odten = 0x7;
    		/* tWL_DQSEN min = 25 nCK, we set it 32 */
    		wrlvl_dqsen = 0x5;
    		/*
    		 * The write leveling sample time needs to be at least 6
    		 * clocks higher than tWLO to allow enough time for
    		 * propagation delay and sampling of the prime data bits.
    		 */
    		wrlvl_smpl = 0xf;
    		/*
    		 * Write leveling repetition time
    		 * must be at least tWLO + 6 clocks; we set it to 64
    		 */
    		wrlvl_wlr = 0x6;
    
    		/*
    		 * Write leveling start time
    		 * The value used for DQS_ADJUST for the first sample
    		 * when write leveling is enabled. It probably needs to be
    		 * overridden per platform.
    		 */
    		wrlvl_start = 0x8;
    
    		/*
    		 * Override the write leveling sample and start time
    		 * according to specific board
    		 */
    		if (popts->wrlvl_override) {
    			wrlvl_smpl = popts->wrlvl_sample;
    			wrlvl_start = popts->wrlvl_start;
    		}
    	}

    	ddr->ddr_wrlvl_cntl = (0
    			       | ((wrlvl_en & 0x1) << 31)
    			       | ((wrlvl_mrd & 0x7) << 24)
    			       | ((wrlvl_odten & 0x7) << 20)
    			       | ((wrlvl_dqsen & 0x7) << 16)
    			       | ((wrlvl_smpl & 0xf) << 12)
    			       | ((wrlvl_wlr & 0x7) << 8)
    
    			       | ((wrlvl_start & 0x1F) << 0)
    			      );
    	debug("FSLDDR: wrlvl_cntl = 0x%08x\n", ddr->ddr_wrlvl_cntl);
    
    	ddr->ddr_wrlvl_cntl_2 = popts->wrlvl_ctl_2;
    	debug("FSLDDR: wrlvl_cntl_2 = 0x%08x\n", ddr->ddr_wrlvl_cntl_2);
    	ddr->ddr_wrlvl_cntl_3 = popts->wrlvl_ctl_3;
    	debug("FSLDDR: wrlvl_cntl_3 = 0x%08x\n", ddr->ddr_wrlvl_cntl_3);
    
    
    }
    
    /* DDR Self Refresh Counter (DDR_SR_CNTR) */
    
    static void set_ddr_sr_cntr(fsl_ddr_cfg_regs_t *ddr, unsigned int sr_it)
    {
    	/* Self Refresh Idle Threshold */
    	ddr->ddr_sr_cntr = (sr_it & 0xF) << 16;
    }
    
    
    static void set_ddr_eor(fsl_ddr_cfg_regs_t *ddr, const memctl_options_t *popts)
    {
    	if (popts->addr_hash) {
    		ddr->ddr_eor = 0x40000000;	/* address hash enable */
    		puts("Address hashing enabled.\n");
    	}
    }

    static void set_ddr_cdr1(fsl_ddr_cfg_regs_t *ddr, const memctl_options_t *popts)
    {
    	ddr->ddr_cdr1 = popts->ddr_cdr1;
    	debug("FSLDDR: ddr_cdr1 = 0x%08x\n", ddr->ddr_cdr1);
    }
    
    
    static void set_ddr_cdr2(fsl_ddr_cfg_regs_t *ddr, const memctl_options_t *popts)
    {
    	ddr->ddr_cdr2 = popts->ddr_cdr2;
    	debug("FSLDDR: ddr_cdr2 = 0x%08x\n", ddr->ddr_cdr2);
    }
    
    
    unsigned int
    check_fsl_memctl_config_regs(const fsl_ddr_cfg_regs_t *ddr)
    {
    	unsigned int res = 0;
    
    	/*
    	 * Check that DDR_SDRAM_CFG[RD_EN] and DDR_SDRAM_CFG[2T_EN] are
    	 * not set at the same time.
    	 */
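    	/*
    	 * Per the error message below, 0x10000000 corresponds to
    	 * DDR_SDRAM_CFG[RD_EN] and 0x00008000 to DDR_SDRAM_CFG[2T_EN].
    	 */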
    	if (ddr->ddr_sdram_cfg & 0x10000000
    	    && ddr->ddr_sdram_cfg & 0x00008000) {
    		printf("Error: DDR_SDRAM_CFG[RD_EN] and DDR_SDRAM_CFG[2T_EN] "
    				" should not be set at the same time.\n");
    		res++;
    	}
    
    	return res;
    }
    
    unsigned int
    compute_fsl_memctl_config_regs(const unsigned int ctrl_num,
    			       const memctl_options_t *popts,
    			       fsl_ddr_cfg_regs_t *ddr,
    			       const common_timing_params_t *common_dimm,
    			       const dimm_params_t *dimm_params,
    			       unsigned int dbw_cap_adj,
    			       unsigned int size_only)
    {
    	unsigned int i;
    	unsigned int cas_latency;
    	unsigned int additive_latency;
    	unsigned int sr_it;
    	unsigned int zq_en;
    	unsigned int wrlvl_en;
    	int cs_en = 1;
    
    	unsigned int ip_rev = 0;
    	unsigned int unq_mrs_en = 0;
    
    
    	memset(ddr, 0, sizeof(fsl_ddr_cfg_regs_t));
    
    	if (common_dimm == NULL) {
    		printf("Error: subset DIMM params struct null pointer\n");
    		return 1;
    	}
    
    	/*
    	 * Process overrides first.
    	 *
    	 * FIXME: somehow add derated caslat to this
    	 */
    	cas_latency = (popts->cas_latency_override)
    		? popts->cas_latency_override_value
    		: common_dimm->lowest_common_spd_caslat;
    
    
    	additive_latency = (popts->additive_latency_override)
    		? popts->additive_latency_override_value
    		: common_dimm->additive_latency;
    
    
    	sr_it = (popts->auto_self_refresh_en)
    		? popts->sr_it
    		: 0;
    
    	/* ZQ calibration */
    	zq_en = (popts->zq_en) ? 1 : 0;
    	/* write leveling */
    	wrlvl_en = (popts->wrlvl_en) ? 1 : 0;
    
    	/* Chip Select Memory Bounds (CSn_BNDS) */
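    	/*
    	 * Illustrative note: CSn_BNDS holds the start and end addresses with
    	 * the low 24 address bits dropped (hence the >> 24 and the 16-bit
    	 * SA/EA fields below). For example, a single 4 GiB rank based at
    	 * address 0 would give sa = 0x0000 and ea = 0x00ff.
    	 */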
    	for (i = 0; i < CONFIG_CHIP_SELECTS_PER_CTRL; i++) {
    
    		unsigned long long ea, sa;
    
    		unsigned int cs_per_dimm
    			= CONFIG_CHIP_SELECTS_PER_CTRL / CONFIG_DIMM_SLOTS_PER_CTLR;
    		unsigned int dimm_number
    			= i / cs_per_dimm;
    		unsigned long long rank_density
    			= dimm_params[dimm_number].rank_density >> dbw_cap_adj;

    		if (dimm_params[dimm_number].n_ranks == 0) {
    			debug("Skipping setup of CS%u "
    				"because n_ranks on DIMM %u is 0\n", i, dimm_number);
    			continue;
    		}

    		if (popts->memctl_interleaving) {
    
    			switch (popts->ba_intlv_ctl & FSL_DDR_CS0_CS1_CS2_CS3) {
    			case FSL_DDR_CS0_CS1_CS2_CS3:
    				break;
    			case FSL_DDR_CS0_CS1:
    			case FSL_DDR_CS0_CS1_AND_CS2_CS3:
    				if (i > 1)
    					cs_en = 0;
    				break;
    			case FSL_DDR_CS2_CS3:
    			default:
    				if (i > 0)
    					cs_en = 0;
    				break;
    			}
    			sa = common_dimm->base_address;
    			ea = sa + common_dimm->total_mem - 1;
    		} else if (!popts->memctl_interleaving) {
    
    			/*
    			 * If memory interleaving between controllers is NOT
    			 * enabled, the starting address for each memory
    			 * controller is distinct.  However, because rank
    			 * interleaving is enabled, the starting and ending
    			 * addresses of the total memory on that memory
    			 * controller needs to be programmed into its
    			 * respective CS0_BNDS.
    			 */
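    			/*
    			 * Illustrative example: with all four chip selects
    			 * interleaved and 8 GiB of total memory based at
    			 * address 0, each CSn_BNDS would cover 0x0000..0x01ff
    			 * after the >> 24 scaling, i.e. the whole 8 GiB.
    			 */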
    
    			switch (popts->ba_intlv_ctl & FSL_DDR_CS0_CS1_CS2_CS3) {
    			case FSL_DDR_CS0_CS1_CS2_CS3:
    				sa = common_dimm->base_address;
    				ea = sa + common_dimm->total_mem - 1;
    				break;
    			case FSL_DDR_CS0_CS1_AND_CS2_CS3:
    
    				if ((i >= 2) && (dimm_number == 0)) {
    					sa = dimm_params[dimm_number].base_address +
    					      2 * rank_density;
    					ea = sa + 2 * rank_density - 1;
    				} else {
    					sa = dimm_params[dimm_number].base_address;
    					ea = sa + 2 * rank_density - 1;
    				}
    				break;
    			case FSL_DDR_CS0_CS1:
    
    				if (dimm_params[dimm_number].n_ranks > (i % cs_per_dimm)) {
    					sa = dimm_params[dimm_number].base_address;
    					ea = sa + rank_density - 1;
    					if (i != 1)
    						sa += (i % cs_per_dimm) * rank_density;
    					ea += (i % cs_per_dimm) * rank_density;
    				} else {
    					sa = 0;
    					ea = 0;
    				}
    				break;
    			case FSL_DDR_CS2_CS3:
    
    				if (dimm_params[dimm_number].n_ranks > (i % cs_per_dimm)) {
    					sa = dimm_params[dimm_number].base_address;
    					ea = sa + rank_density - 1;
    					if (i != 3)
    						sa += (i % cs_per_dimm) * rank_density;
    					ea += (i % cs_per_dimm) * rank_density;
    				} else {
    					sa = 0;
    					ea = 0;
    				}
    				if (i == 2)
    					ea += (rank_density >> dbw_cap_adj);
    				break;
    			default:  /* No bank(chip-select) interleaving */
    
    				sa = dimm_params[dimm_number].base_address;
    				ea = sa + rank_density - 1;
    				if (dimm_params[dimm_number].n_ranks > (i % cs_per_dimm)) {
    					sa += (i % cs_per_dimm) * rank_density;
    					ea += (i % cs_per_dimm) * rank_density;
    				} else {
    					sa = 0;
    					ea = 0;
    				}
    				break;
    			}
    		}

    		sa >>= 24;
    		ea >>= 24;

    		if (cs_en) {
    			ddr->cs[i].bnds = (0
    				| ((sa & 0xffff) << 16) /* starting address */
    				| ((ea & 0xffff) << 0)	/* ending address */
    				);
    		} else {
    			/* setting bnds to 0xffffffff for inactive CS */
    			ddr->cs[i].bnds = 0xffffffff;
    		}

    		debug("FSLDDR: cs[%d]_bnds = 0x%08x\n", i, ddr->cs[i].bnds);
    
    		set_csn_config(dimm_number, i, ddr, popts, dimm_params);
    		set_csn_config_2(i, ddr);
    	}

    	/*
    	 * In the case we only need to compute the ddr sdram size, we only need
    	 * to set csn registers, so return from here.
    	 */
    	if (size_only)
    		return 0;
    
    
    	set_ddr_eor(ddr, popts);
    
    
    #if !defined(CONFIG_SYS_FSL_DDR1)
    	set_timing_cfg_0(ctrl_num, ddr, popts, dimm_params);
    #endif

    	set_timing_cfg_3(ctrl_num, ddr, popts, common_dimm, cas_latency,
    			 additive_latency);
    
    	set_timing_cfg_1(ctrl_num, ddr, popts, common_dimm, cas_latency);
    	set_timing_cfg_2(ctrl_num, ddr, popts, common_dimm,
    			 cas_latency, additive_latency);
    
    	set_ddr_cdr1(ddr, popts);
    
    	set_ddr_cdr2(ddr, popts);
    
    	set_ddr_sdram_cfg(ddr, popts, common_dimm);
    
    	ip_rev = fsl_ddr_get_version(ctrl_num);
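    	/*
    	 * Illustrative note: fsl_ddr_get_version() appears to return the
    	 * controller IP revision as 0xMMmmrr (e.g. 0x40400 for rev 4.4), so
    	 * the checks below enable unique MRS programming and the CSWL
    	 * override only on new enough controller revisions.
    	 */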
    
    	if (ip_rev > 0x40400)
    		unq_mrs_en = 1;
    
    	if ((ip_rev > 0x40700) && (popts->cswl_override != 0))
    
    		ddr->debug[18] = popts->cswl_override;
    
    
    	set_ddr_sdram_cfg_2(ctrl_num, ddr, popts, unq_mrs_en);
    	set_ddr_sdram_mode(ctrl_num, ddr, popts, common_dimm,
    			   cas_latency, additive_latency, unq_mrs_en);
    	set_ddr_sdram_mode_2(ctrl_num, ddr, popts, common_dimm, unq_mrs_en);
    
    #ifdef CONFIG_SYS_FSL_DDR4
    	set_ddr_sdram_mode_9(ddr, popts, common_dimm, unq_mrs_en);
    	set_ddr_sdram_mode_10(ctrl_num, ddr, popts, common_dimm, unq_mrs_en);
    #endif

    	set_ddr_sdram_interval(ctrl_num, ddr, popts, common_dimm);
    
    	set_ddr_data_init(ddr);
    	set_ddr_sdram_clk_cntl(ddr, popts);
    	set_ddr_init_addr(ddr);
    	set_ddr_init_ext_addr(ddr);
    
    	set_timing_cfg_4(ddr, popts);
    
    	set_timing_cfg_5(ddr, cas_latency);
    
    #ifdef CONFIG_SYS_FSL_DDR4
    	set_ddr_sdram_cfg_3(ddr, popts);
    	set_timing_cfg_6(ddr);
    
    	set_timing_cfg_7(ctrl_num, ddr, common_dimm);
    	set_timing_cfg_8(ctrl_num, ddr, popts, common_dimm, cas_latency);
    
    	set_timing_cfg_9(ddr);
    	set_ddr_dq_mapping(ddr, dimm_params);
    #endif
    
    	set_ddr_zq_cntl(ddr, zq_en);
    
    	set_ddr_wrlvl_cntl(ddr, wrlvl_en, popts);
    
    	set_ddr_sr_cntr(ddr, sr_it);
    
    	set_ddr_sdram_rcw(ddr, popts, common_dimm);
    
    #ifdef CONFIG_SYS_FSL_DDR_EMU
    	/* disable DDR training for emulator */
    	ddr->debug[2] = 0x00000400;
    
    	ddr->debug[4] = 0xff800800;
    	ddr->debug[5] = 0x08000800;
    	ddr->debug[6] = 0x08000800;
    	ddr->debug[7] = 0x08000800;
    	ddr->debug[8] = 0x08000800;
    
    #endif
    
    #ifdef CONFIG_SYS_FSL_ERRATUM_A004508
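    	/*
    	 * Illustrative note: with big-endian bit numbering (bit 0 being the
    	 * MSB of the 32-bit register), bit 22 corresponds to
    	 * 1 << (31 - 22) = 0x00000200, which matches the mask below.
    	 */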
    	if ((ip_rev >= 0x40000) && (ip_rev < 0x40400))
    		ddr->debug[2] |= 0x00000200;	/* set bit 22 */
    #endif
    
    
    	return check_fsl_memctl_config_regs(ddr);
    }