Example #1
static void program_ddr0_03(unsigned long dimm_ranks[],
			    unsigned char const iic0_dimm_addr[],
			    unsigned long num_dimm_banks,
			    unsigned long sdram_freq,
			    unsigned long rows, unsigned long *cas_latency)
{
	unsigned long dimm_num;
	unsigned long cas_index;
	unsigned long cycle_2_0_clk;
	unsigned long cycle_3_0_clk;
	unsigned long cycle_4_0_clk;
	unsigned long cycle_5_0_clk;
	unsigned long max_2_0_tcyc_ps = 100;
	unsigned long max_3_0_tcyc_ps = 100;
	unsigned long max_4_0_tcyc_ps = 100;
	unsigned long max_5_0_tcyc_ps = 100;
	unsigned char cas_available = 0x3C;	/* value for DDR2 */
	u32 ddr0_03 = DDR0_03_BSTLEN_ENCODE(0x2) | DDR0_03_INITAREF_ENCODE(0x2);
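	/*
	 * SPD bytes 9, 23 and 25 hold the minimum cycle time at the highest,
	 * second highest and third highest supported CAS latency.
	 */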
	unsigned int const tcyc_addr[3] = { 9, 23, 25 };

	/*------------------------------------------------------------------
	 * Get the board configuration info.
	 *-----------------------------------------------------------------*/
	debug("sdram_freq = %d\n", sdram_freq);

	/*------------------------------------------------------------------
	 * Handle the timing.  We need to find the worst case timing of all
	 * the dimm modules installed.
	 *-----------------------------------------------------------------*/
	/* loop through all the DIMM slots on the board */
	for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
		/* If a dimm is installed in a particular slot ... */
		if (dimm_ranks[dimm_num]) {
			unsigned char const cas_bit =
			    spd_read(iic0_dimm_addr[dimm_num], 18);
			unsigned char cas_mask;

			cas_available &= cas_bit;
			for (cas_mask = 0x80; cas_mask; cas_mask >>= 1) {
				if (cas_bit & cas_mask)
					break;
			}
			debug("cas_bit (SPD byte 18) = %02X, cas_mask = %02X\n",
			      cas_bit, cas_mask);

			for (cas_index = 0; cas_index < 3;
			     cas_mask >>= 1, cas_index++) {
				unsigned long cycle_time_ps;

				if (!(cas_available & cas_mask)) {
					continue;
				}
				cycle_time_ps =
				    get_tcyc(spd_read(iic0_dimm_addr[dimm_num],
						      tcyc_addr[cas_index]));

				debug("cas_index = %d: cycle_time_ps = %d\n",
				      cas_index, cycle_time_ps);
				/*
				 * DDR2 devices use the following bitmask for CAS latency:
				 *  Bit   7    6    5    4    3    2    1    0
				 *       TBD  6.0  5.0  4.0  3.0  2.0  TBD  TBD
				 */
				switch (cas_mask) {
				case 0x20:
					max_5_0_tcyc_ps =
					    max(max_5_0_tcyc_ps, cycle_time_ps);
					break;
				case 0x10:
					max_4_0_tcyc_ps =
					    max(max_4_0_tcyc_ps, cycle_time_ps);
					break;
				case 0x08:
					max_3_0_tcyc_ps =
					    max(max_3_0_tcyc_ps, cycle_time_ps);
					break;
				case 0x04:
					max_2_0_tcyc_ps =
					    max(max_2_0_tcyc_ps, cycle_time_ps);
					break;
				}
			}
		}
	}
	debug("cas_available (bit map) = 0x%02X\n", cas_available);

	/*------------------------------------------------------------------
	 * Set the SDRAM mode, SDRAM_MMODE
	 *-----------------------------------------------------------------*/

	/*
	 * Convert the worst-case cycle time (ps) into the highest clock
	 * rate (Hz) each CAS latency supports; add 10 to absorb rounding
	 * from the integer division.
	 */
	cycle_2_0_clk = MULDIV64(ONE_BILLION, 1000, max_2_0_tcyc_ps) + 10;
	cycle_3_0_clk = MULDIV64(ONE_BILLION, 1000, max_3_0_tcyc_ps) + 10;
	cycle_4_0_clk = MULDIV64(ONE_BILLION, 1000, max_4_0_tcyc_ps) + 10;
	cycle_5_0_clk = MULDIV64(ONE_BILLION, 1000, max_5_0_tcyc_ps) + 10;
	debug("cycle_2_0_clk = %d\n", cycle_2_0_clk);
	debug("cycle_3_0_clk = %d\n", cycle_3_0_clk);
	debug("cycle_4_0_clk = %d\n", cycle_4_0_clk);
	debug("cycle_5_0_clk = %d\n", cycle_5_0_clk);

	if ((cas_available & 0x04) && (sdram_freq <= cycle_2_0_clk)) {
		*cas_latency = 2;
		ddr0_03 |= DDR0_03_CASLAT_ENCODE(0x2) |
		    DDR0_03_CASLAT_LIN_ENCODE(0x4);
	} else if ((cas_available & 0x08) && (sdram_freq <= cycle_3_0_clk)) {
		*cas_latency = 3;
		ddr0_03 |= DDR0_03_CASLAT_ENCODE(0x3) |
		    DDR0_03_CASLAT_LIN_ENCODE(0x6);
	} else if ((cas_available & 0x10) && (sdram_freq <= cycle_4_0_clk)) {
		*cas_latency = 4;
		ddr0_03 |= DDR0_03_CASLAT_ENCODE(0x4) |
		    DDR0_03_CASLAT_LIN_ENCODE(0x8);
	} else if ((cas_available & 0x20) && (sdram_freq <= cycle_5_0_clk)) {
		*cas_latency = 5;
		ddr0_03 |= DDR0_03_CASLAT_ENCODE(0x5) |
		    DDR0_03_CASLAT_LIN_ENCODE(0xA);
	} else {
		printf("ERROR: Cannot find a supported CAS latency with the "
		       "installed DIMMs.\n");
		printf("Only DDR2 DIMMs with CAS latencies of 2.0, 3.0, 4.0, "
		       "and 5.0 are supported.\n");
		printf("Make sure the PLB speed is within the supported range "
		       "of the DIMMs.\n");
		printf("sdram_freq=%ld cycle2=%ld cycle3=%ld cycle4=%ld "
		       "cycle5=%ld\n\n", sdram_freq, cycle_2_0_clk,
		       cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
		spd_ddr_init_hang();
	}
	debug("CAS latency = %d\n", *cas_latency);
	mtsdram(DDR0_03, ddr0_03);
}
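
The CAS-latency selection above reduces to a simple rule: SPD byte 18 advertises which DDR2 latencies the module supports, the worst-case minimum cycle time at each latency caps the clock it can run at, and the code takes the lowest latency the configured SDRAM clock still satisfies. The stand-alone sketch below shows that decision in isolation; pick_cas_latency(), its cycle-time table and the sample numbers are illustrative assumptions, not part of the U-Boot sources.

#include <stdio.h>

static int pick_cas_latency(unsigned char cas_bitmap,
			    const unsigned long min_tcyc_ps[4],
			    unsigned long clock_period_ps)
{
	/* DDR2 SPD byte 18: bit 2 = CL2, bit 3 = CL3, bit 4 = CL4, bit 5 = CL5 */
	int cl;

	for (cl = 2; cl <= 5; cl++) {
		if ((cas_bitmap & (1 << cl)) &&
		    clock_period_ps >= min_tcyc_ps[cl - 2])
			return cl;	/* lowest latency the clock can meet */
	}
	return -1;			/* no supported latency fits this clock */
}

int main(void)
{
	/*
	 * Hypothetical DIMM: advertises CL3..CL5 (bitmap 0x38) and needs at
	 * least 5000/3750/3000 ps of cycle time at CL3/CL4/CL5.
	 */
	const unsigned long min_tcyc_ps[4] = { 0, 5000, 3750, 3000 };
	int cl = pick_cas_latency(0x38, min_tcyc_ps, 3750);

	printf("chosen CAS latency: %d\n", cl);	/* prints 4 */
	return 0;
}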
Example #2
/*************************************************************************
 *
 * initdram -- 440EPx's DDR controller is a DENALI Core
 *
 ************************************************************************/
phys_size_t initdram (int board_type)
{
	unsigned char data[2];
	unsigned int chip, v, t, i, x, sz, ms, rr, rf, st;
	unsigned long speed;
	unsigned char mt, bl, cl, rk, tmp; /* memory type, burst length, cas latency, ranks */
	unsigned char map[3] = { SPD_TAC0, SPD_TAC1, SPD_TAC2 };
#ifdef DEBUG
	unsigned int j;
#endif

	chip = SPD_EEPROM_ADDRESS;

	debug("dram: about to probe chip 0x%x\n", chip);

	if(check_sane_spd(chip)){
		printf("dram: nonexistent or unusable spd eeprom\n");
		hang();
	}

#ifdef DEBUG
	debug("dram: raw");
	for(j = 0; j < 64; j++){
		if(i2c_read(chip, j, 1, data, 1) == 0){
			debug("%c%02x", (j % 16) ? ' ' : '\n', data[0]);
		}
	}
	debug("\n");
#endif

	/* rushed dump from spd to ddr controller, calculations hardwired from sequoia and generally iffy */

	if(i2c_read(chip, SPD_MEMORY_TYPE, 1, data, 1) != 0){
		return 0;
	}
	mt = data[0];

	switch(mt){
		case SPD_MEMORY_TYPE_DDR2_SDRAM :
			debug("dram: ddr2 memory\n");
			break;
		/* case SPD_MEMORY_TYPE_DDR_SDRAM  : */
		default :
			printf("dram: unsupported memory type 0x%x\n", mt);
			return 0;
	}

	speed = get_bus_freq(0);
	debug("dram: bus speed %luHz\n", speed);

	if(i2c_read(chip, SPD_REFRESH_RATE, 1, data, 1) != 0){
		return 0;
	}
	rr = decode_refresh(data[0]);
	debug("dram: refresh rate %ups\n", rr);

	xmtsdram(DDR0_02, DDR0_02_START_OFF);
	mfsdram(DDR0_02, ms);
	/* gives us the maximum dimensions the controller can do, used later */
	debug("dram: controller caps 0x%08x\n", ms);

	/* calibration values as recommended */
	xmtsdram(DDR0_00, DDR0_00_DLL_INCREMENT_ENCODE(0x19) | DDR0_00_DLL_START_POINT_ENCODE(0xa));

	/* set as required, possibly could set up interrupt masks */
	xmtsdram(DDR0_01, DDR0_01_PLB0_DB_CS_LOWER_ENCODE(0x1) | DDR0_01_PLB0_DB_CS_UPPER_ENCODE(0));

	v = 0;
	v |= DDR0_03_INITAREF_ENCODE(0x2); /* WARNING: no idea how many autorefresh commands needed during initialisation */
	if(i2c_read(chip, SPD_BURST_LENGTHS, 1, data, 1) != 0){
		return 0;
	}
	bl = ((mt != SPD_MEMORY_TYPE_DDR2_SDRAM) && (data[0] & SPD_BURST_LENGTH_8)) ? 8 : 4;
	debug("dram: burst length caps=0x%x, chosen=%d\n", data[0], bl);
	v |= DDR0_03_BSTLEN_ENCODE((bl == 8) ? 0x3 : 0x2);
	if(i2c_read(chip, SPD_CAS_LATENCIES, 1, data, 1) != 0){
		return 0;
	}
	cl = 0;
	tmp = data[0];
	debug("dram: latency choices 0x%x\n", tmp);
	/* could use a less aggressive mode by quitting for a lower x */
	for(i = 7, x = 0; (i >= 2) && (x < 3); i--){
		if(tmp & (0x1 << i)){
			debug("dram: can do cl=%d\n", i);
			if(i2c_read(chip, map[x] + 1, 1, data, 1) != 0){
				return 0;
			}
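			/* timing byte: upper nibble = whole ns, lower nibble = tenths of ns */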
			t = ps2cycles(speed, (((data[0] >> 4) & 0xf) * 100) + ((data[0] & 0xf) * 10), "minclock");
			if(t > 0){
				debug("dram: clock too fast for cl-%d\n", x);
				break;
			}
			cl = i;
			x++;
		}
	}