/*
* sdram_cfg[0] = 1 (ddr sdram logic enable)
* sdram_cfg[1] = 1 (self-refresh-enable)
* sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
* 010 DDR 1 SDRAM
* 011 DDR 2 SDRAM
*/
sdram_type = (mem_type == SPD_MEMTYPE_DDR) ? 2 : 3;
sdram_cfg_1 = (0
| (1 << 31) /* Enable */
| (1 << 30) /* Self refresh */
| (sdram_type << 24) /* SDRAM type */
);
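/*
 * Note: the bit positions in the comment above use big-endian
 * PowerPC numbering (bit 0 is the MSB of the 32-bit register),
 * which is why the shifts above are << 31, << 30 and << 24.
 */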
/*
* sdram_cfg[3] = RD_EN - registered DIMM enable
* A value of 0x26 indicates micron registered
* DIMMS (micron.com)
*/
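/* Use the SPD data from the populated slot (DIMM1 if DIMM2 is empty) */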
mod_attr = no_dimm2 ? spd1.mod_attr : spd2.mod_attr;
if (mem_type == SPD_MEMTYPE_DDR && mod_attr == 0x26) {
sdram_cfg_1 |= 0x10000000; /* RD_EN */
}
#if defined(CONFIG_DDR_ECC)
config = no_dimm2 ? spd1.config : spd2.config;
/*
* If the user wanted ECC (enabled via sdram_cfg[2])
*/
if (config == 0x02) {
ddr->err_sbe = 0x00ff0000;
ddr->err_int_en = 0x0000000d;
sdram_cfg_1 |= 0x20000000; /* ECC_EN */
}
#endif
/*
* Enable 2T timing by setting sdram_cfg[16].
*/
sdram_cfg_1 |= 0x8000; /* 2T_EN */
}
/*
* 200 painful micro-seconds must elapse between
* the DDR clock setup and the DDR config enable.
*/
udelay(200);
/*
* Go!
*/
ddr->sdram_cfg_1 = sdram_cfg_1;
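/* Make sure the sdram_cfg_1 write has reached the controller before continuing */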
asm volatile("sync;isync");
udelay(500);
debug("DDR: sdram_cfg = 0x%08x\n", ddr->sdram_cfg_1);
#if defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
d_init = 1;
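/* d_init << 4 is the sdram_cfg_2 D_INIT bit, which self-clears when data init completes */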
debug("DDR: memory initializing\n");
/*
* Poll until memory is initialized.
* 512 Meg at 400 might hit this 200 times or so.
*/
while ((ddr->sdram_cfg_2 & (d_init << 4)) != 0) {
udelay(1000);
}
debug("DDR: memory initialized\n\n");
#endif
debug("Enabled DDR Controller %d\n", ddr_num);
return 1;
}
}
long int
spd_sdram(void)
{
int memsize_ddr1_dimm1 = 0;
int memsize_ddr1_dimm2 = 0;
int memsize_ddr2_dimm1 = 0;
int memsize_ddr2_dimm2 = 0;
int memsize_total = 0;
int memsize_ddr1 = 0;
int memsize_ddr2 = 0;
unsigned int ddr1_enabled = 0;
unsigned int ddr2_enabled = 0;
unsigned int law_size_ddr1;
unsigned int law_size_ddr2;
volatile immap_t *immap = (immap_t *)CFG_IMMR;
volatile ccsr_local_mcm_t *mcm = &immap->im_local_mcm;
#ifdef CONFIG_DDR_INTERLEAVE
unsigned int law_size_interleaved;
volatile ccsr_ddr_t *ddr1 = &immap->im_ddr1;
volatile ccsr_ddr_t *ddr2 = &immap->im_ddr2;
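/*
 * Probe the CS0 DIMM on each controller - pass I2C address,
 * controller number, dimm number, and starting address.
 */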
memsize_ddr1_dimm1 = spd_init(SPD_EEPROM_ADDRESS1,
1, 1,
(unsigned int)memsize_total * 1024*1024);
memsize_total += memsize_ddr1_dimm1;
memsize_ddr2_dimm1 = spd_init(SPD_EEPROM_ADDRESS3,
2, 1,
(unsigned int)memsize_total * 1024*1024);
memsize_total += memsize_ddr2_dimm1;
if (memsize_ddr1_dimm1 != memsize_ddr2_dimm1) {
if (memsize_ddr1_dimm1 < memsize_ddr2_dimm1)
memsize_total -= memsize_ddr1_dimm1;
else
memsize_total -= memsize_ddr2_dimm1;
debug("Total memory available for interleaving 0x%08lx\n",
memsize_total * 1024 * 1024);
debug("Adjusting CS0_BNDS to account for unequal DIMM sizes in interleaved memory\n");
ddr1->cs0_bnds = ((memsize_total * 1024 * 1024) - 1) >> 24;
ddr2->cs0_bnds = ((memsize_total * 1024 * 1024) - 1) >> 24;
debug("DDR1: cs0_bnds = 0x%08x\n", ddr1->cs0_bnds);
debug("DDR2: cs0_bnds = 0x%08x\n", ddr2->cs0_bnds);
ddr1_enabled = enable_ddr(1);
ddr2_enabled = enable_ddr(2);
/*
* Both controllers need to be enabled for interleaving.
*/
if (ddr1_enabled && ddr2_enabled) {
law_size_interleaved = 19 + __ilog2(memsize_total);
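/*
 * LAWAR_SIZE encodes a window of 2^(size+1) bytes; memsize_total
 * is in megabytes, hence log2(memsize_total) + 20 - 1.
 */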
/*
* Set up LAWBAR for DDR 1 space.
*/
mcm->lawbar1 = ((CFG_DDR_SDRAM_BASE >> 12) & 0xfffff);
mcm->lawar1 = (LAWAR_EN
| LAWAR_TRGT_IF_DDR_INTERLEAVED
| (LAWAR_SIZE & law_size_interleaved));
debug("DDR: LAWBAR1=0x%08x\n", mcm->lawbar1);
debug("DDR: LAWAR1=0x%08x\n", mcm->lawar1);
debug("Interleaved memory size is 0x%08lx\n", memsize_total);
#ifdef CONFIG_DDR_INTERLEAVE
#if (CFG_PAGE_INTERLEAVING == 1)
printf("Page ");
#elif (CFG_BANK_INTERLEAVING == 1)
printf("Bank ");
#elif (CFG_SUPER_BANK_INTERLEAVING == 1)
printf("Super-bank ");
#else
printf("Cache-line ");
#endif
#endif
printf("Interleaved");
return memsize_total * 1024 * 1024;
} else {
printf("Interleaved memory not enabled - check CS0 DIMM slots for both controllers.\n");
return 0;
}
#else
/*
* Call spd_sdram() routine to init ddr1 - pass I2c address,
* controller number, dimm number, and starting address.
*/
memsize_ddr1_dimm1 = spd_init(SPD_EEPROM_ADDRESS1,
1, 1,
(unsigned int)memsize_total * 1024*1024);
memsize_total += memsize_ddr1_dimm1;
memsize_ddr1_dimm2 = spd_init(SPD_EEPROM_ADDRESS2,
1, 2,
(unsigned int)memsize_total * 1024*1024);
memsize_total += memsize_ddr1_dimm2;
/*
 * Enable the DDR controller - pass ddr controller number.
 */
ddr1_enabled = enable_ddr(1);
/* Keep track of memory to be addressed by DDR1 */
memsize_ddr1 = memsize_ddr1_dimm1 + memsize_ddr1_dimm2;
/*
 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.  Fnord.
 */
if (ddr1_enabled) {
law_size_ddr1 = 19 + __ilog2(memsize_ddr1);
/*
* Set up LAWBAR for DDR 1 space.
*/
mcm->lawbar1 = ((CFG_DDR_SDRAM_BASE >> 12) & 0xfffff);
mcm->lawar1 = (LAWAR_EN
| LAWAR_TRGT_IF_DDR1
| (LAWAR_SIZE & law_size_ddr1));
debug("DDR: LAWBAR1=0x%08x\n", mcm->lawbar1);
debug("DDR: LAWAR1=0x%08x\n", mcm->lawar1);
}
#if (CONFIG_NUM_DDR_CONTROLLERS > 1)
memsize_ddr2_dimm1 = spd_init(SPD_EEPROM_ADDRESS3,
2, 1,
(unsigned int)memsize_total * 1024*1024);
memsize_total += memsize_ddr2_dimm1;
memsize_ddr2_dimm2 = spd_init(SPD_EEPROM_ADDRESS4,
2, 2,
(unsigned int)memsize_total * 1024*1024);
memsize_total += memsize_ddr2_dimm2;
ddr2_enabled = enable_ddr(2);
/* Keep track of memory to be addressed by DDR2 */
memsize_ddr2 = memsize_ddr2_dimm1 + memsize_ddr2_dimm2;
if (ddr2_enabled) {
law_size_ddr2 = 19 + __ilog2(memsize_ddr2);
/*
* Set up LAWBAR for DDR 2 space.
*/
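/*
 * If DDR1 is also enabled, start the DDR2 window right after the
 * memory mapped by DDR1; otherwise start it at the DDR base address.
 */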
if (ddr1_enabled)
mcm->lawbar8 = (((memsize_ddr1 * 1024 * 1024) >> 12)
& 0xfffff);
else
mcm->lawbar8 = ((CFG_DDR_SDRAM_BASE >> 12) & 0xfffff);
mcm->lawar8 = (LAWAR_EN
| LAWAR_TRGT_IF_DDR2
| (LAWAR_SIZE & law_size_ddr2));
debug("\nDDR: LAWBAR8=0x%08x\n", mcm->lawbar8);
debug("DDR: LAWAR8=0x%08x\n", mcm->lawar8);
}
#endif /* CONFIG_NUM_DDR_CONTROLLERS > 1 */
debug("\nMemory sizes are DDR1 = 0x%08lx, DDR2 = 0x%08lx\n",
memsize_ddr1, memsize_ddr2);
/*
 * If neither DDR controller is enabled return 0.
 */
if (!ddr1_enabled && !ddr2_enabled)
return 0;
printf("Non-interleaved");
return memsize_total * 1024 * 1024;
#endif /* CONFIG_DDR_INTERLEAVE */
#endif /* CONFIG_SPD_EEPROM */
#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
/*
* Initialize all of memory for ECC, then enable errors.
*/
void
ddr_enable_ecc(unsigned int dram_size)
{
uint *p = 0;
uint i = 0;
volatile immap_t *immap = (immap_t *)CFG_IMMR;
volatile ccsr_ddr_t *ddr1 = &immap->im_ddr1;
dma_init();
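/*
 * Use the data cache to write the init pattern to the first 8K:
 * zero each 32-byte cache line (dcbz), fill it with
 * CONFIG_MEM_INIT_VALUE, then flush it out (dcbf) so the controller
 * writes valid ECC for that memory.
 */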
for (p = 0; p < (uint *)(8 * 1024); p++) {
if (((unsigned int)p & 0x1f) == 0) {
ppcDcbz((unsigned long) p);
}
*p = (unsigned int)CONFIG_MEM_INIT_VALUE;
if (((unsigned int)p & 0x1c) == 0x1c) {
ppcDcbf((unsigned long) p);
}
}
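/*
 * Double the initialized region with DMA: each transfer copies the
 * already-initialized low memory over the next region of equal size.
 */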
dma_xfer((uint *)0x002000, 0x002000, (uint *)0); /* 8K */
dma_xfer((uint *)0x004000, 0x004000, (uint *)0); /* 16K */
dma_xfer((uint *)0x008000, 0x008000, (uint *)0); /* 32K */
dma_xfer((uint *)0x010000, 0x010000, (uint *)0); /* 64K */
dma_xfer((uint *)0x020000, 0x020000, (uint *)0); /* 128k */
dma_xfer((uint *)0x040000, 0x040000, (uint *)0); /* 256k */
dma_xfer((uint *)0x080000, 0x080000, (uint *)0); /* 512k */
dma_xfer((uint *)0x100000, 0x100000, (uint *)0); /* 1M */
dma_xfer((uint *)0x200000, 0x200000, (uint *)0); /* 2M */
dma_xfer((uint *)0x400000, 0x400000, (uint *)0); /* 4M */
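/* Initialize the rest of memory in 8M chunks, copying from the first 8M */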
for (i = 1; i < dram_size / 0x800000; i++) {
dma_xfer((uint *)(0x800000*i), 0x800000, (uint *)0);
}
/*
* Enable errors for ECC.
*/
debug("DMA DDR: err_disable = 0x%08x\n", ddr1->err_disable);
ddr1->err_disable = 0x00000000;
debug("DMA DDR: err_disable = 0x%08x\n", ddr1->err_disable);
}
#endif /* CONFIG_DDR_ECC && ! CONFIG_ECC_INIT_VIA_DDRCONTROLLER */