Diffstat (limited to 'drivers/edac')
-rw-r--r--	drivers/edac/amd64_edac.c	42
-rw-r--r--	drivers/edac/amd64_edac.h	19
2 files changed, 24 insertions, 37 deletions
diff --git a/drivers/edac/amd64_edac.c b/drivers/edac/amd64_edac.c
index 9fd3c2d9c2a7..079f5b8dd5c2 100644
--- a/drivers/edac/amd64_edac.c
+++ b/drivers/edac/amd64_edac.c
@@ -799,7 +799,7 @@ static u16 extract_syndrome(struct err_regs *err)
  */
 static enum edac_type amd64_determine_edac_cap(struct amd64_pvt *pvt)
 {
-	int bit;
+	u8 bit;
 	enum dev_type edac_cap = EDAC_FLAG_NONE;
 
 	bit = (boot_cpu_data.x86 > 0xf || pvt->ext_model >= K8_REV_F)
@@ -826,8 +826,9 @@ static void amd64_dump_dramcfg_low(u32 dclr, int chan)
 	debugf1(" PAR/ERR parity: %s\n",
 		(dclr & BIT(8)) ? "enabled" : "disabled");
 
-	debugf1(" DCT 128bit mode width: %s\n",
-		(dclr & BIT(11)) ? "128b" : "64b");
+	if (boot_cpu_data.x86 == 0x10)
+		debugf1(" DCT 128bit mode width: %s\n",
+			(dclr & BIT(11)) ? "128b" : "64b");
 
 	debugf1(" x4 logical DIMMs present: L0: %s L1: %s L2: %s L3: %s\n",
 		(dclr & BIT(12)) ? "yes" : "no",
@@ -939,7 +940,10 @@ static enum mem_type amd64_determine_memory_type(struct amd64_pvt *pvt, int cs)
 {
 	enum mem_type type;
 
-	if (boot_cpu_data.x86 >= 0x10 || pvt->ext_model >= K8_REV_F) {
+	/* F15h supports only DDR3 */
+	if (boot_cpu_data.x86 >= 0x15)
+		type = (pvt->dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3;
+	else if (boot_cpu_data.x86 == 0x10 || pvt->ext_model >= K8_REV_F) {
 		if (pvt->dchr0 & DDR3_MODE)
 			type = (pvt->dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3;
 		else
@@ -953,22 +957,10 @@ static enum mem_type amd64_determine_memory_type(struct amd64_pvt *pvt, int cs)
 	return type;
 }
 
-/*
- * Read the DRAM Configuration Low register. It differs between CG, D & E revs
- * and the later RevF memory controllers (DDR vs DDR2)
- *
- * Return:
- *	number of memory channels in operation
- * Pass back:
- *	contents of the DCL0_LOW register
- */
+/* Get the number of DCT channels the memory controller is using. */
 static int k8_early_channel_count(struct amd64_pvt *pvt)
 {
-	int flag, err = 0;
-
-	err = amd64_read_dct_pci_cfg(pvt, F10_DCLR_0, &pvt->dclr0);
-	if (err)
-		return err;
+	int flag;
 
 	if (pvt->ext_model >= K8_REV_F)
 		/* RevF (NPT) and later */
@@ -983,7 +975,7 @@ static int k8_early_channel_count(struct amd64_pvt *pvt)
 	return (flag) ? 2 : 1;
 }
 
-/* extract the ERROR ADDRESS for the K8 CPUs */
+/* Extract the ERROR ADDRESS for the K8 CPUs */
 static u64 k8_get_error_address(struct mem_ctl_info *mci,
 				struct err_regs *info)
 {
@@ -1486,7 +1478,7 @@ static void f10_map_sysaddr_to_csrow(struct mem_ctl_info *mci,
 
 /*
  * debug routine to display the memory sizes of all logical DIMMs and its
- * CSROWs as well
+ * CSROWs
  */
 static void amd64_debug_display_dimm_sizes(int ctrl, struct amd64_pvt *pvt)
 {
@@ -1960,12 +1952,12 @@ static void read_mc_regs(struct amd64_pvt *pvt)
 
 	amd64_read_pci_cfg(pvt->F3, F10_ONLINE_SPARE, &pvt->online_spare);
 
-	amd64_read_dct_pci_cfg(pvt, F10_DCLR_0, &pvt->dclr0);
-	amd64_read_dct_pci_cfg(pvt, F10_DCHR_0, &pvt->dchr0);
+	amd64_read_dct_pci_cfg(pvt, DCLR0, &pvt->dclr0);
+	amd64_read_dct_pci_cfg(pvt, DCHR0, &pvt->dchr0);
 
-	if (!dct_ganging_enabled(pvt)) {
-		amd64_read_dct_pci_cfg(pvt, F10_DCLR_1, &pvt->dclr1);
-		amd64_read_dct_pci_cfg(pvt, F10_DCHR_1, &pvt->dchr1);
+	if (!dct_ganging_enabled(pvt) && boot_cpu_data.x86 > 0xf) {
+		amd64_read_dct_pci_cfg(pvt, DCLR1, &pvt->dclr1);
+		amd64_read_dct_pci_cfg(pvt, DCHR1, &pvt->dchr1);
 	}
 
 	if (boot_cpu_data.x86 >= 0x10) {
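The amd64_determine_memory_type() hunk above now dispatches on CPU family before probing the DDR2/DDR3 mode bits of older parts. The following standalone sketch (not part of the patch; the K8_REV_F value, the trimmed enum, the function signature and main() are illustrative assumptions) mirrors that branch order in plain user-space C:

/*
 * Standalone sketch of the memory-type dispatch introduced above.  Bit
 * positions and branch order follow the hunk; everything else is
 * simplified so the example builds as ordinary C.
 */
#include <stdio.h>

#define BIT(n)		(1u << (n))
#define DDR3_MODE	BIT(8)	/* DCHR0: DCT operates in DDR3 mode */
#define K8_REV_F	4	/* assumed extended-model threshold */

enum mem_type { MEM_RDDR2, MEM_DDR2, MEM_RDDR3, MEM_DDR3, MEM_OTHER };

static enum mem_type determine_memory_type(unsigned int family,
					   unsigned int ext_model,
					   unsigned int dclr0,
					   unsigned int dchr0)
{
	/* F15h supports only DDR3 */
	if (family >= 0x15)
		return (dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3;

	/* F10h and RevF-or-later K8: DDR2 or DDR3 depending on DCHR0 */
	if (family == 0x10 || ext_model >= K8_REV_F) {
		if (dchr0 & DDR3_MODE)
			return (dclr0 & BIT(16)) ? MEM_DDR3 : MEM_RDDR3;
		return (dclr0 & BIT(16)) ? MEM_DDR2 : MEM_RDDR2;
	}

	return MEM_OTHER;	/* pre-RevF K8 path not shown in the hunk */
}

int main(void)
{
	/* F15h with DCLR0 bit 16 set selects MEM_DDR3 (vs. registered MEM_RDDR3) */
	printf("%s\n", determine_memory_type(0x15, 0, BIT(16), 0) == MEM_DDR3 ?
	       "MEM_DDR3" : "other");
	return 0;
}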
diff --git a/drivers/edac/amd64_edac.h b/drivers/edac/amd64_edac.h
index ba1818305943..7323f1b493ad 100644
--- a/drivers/edac/amd64_edac.h
+++ b/drivers/edac/amd64_edac.h
@@ -218,27 +218,23 @@
 
 #define DBAM_MAX_VALUE 11
 
-
-#define F10_DCLR_0 0x90
-#define F10_DCLR_1 0x190
+#define DCLR0 0x90
+#define DCLR1 0x190
 #define REVE_WIDTH_128 BIT(16)
 #define F10_WIDTH_128 BIT(11)
 
-
-#define F10_DCHR_0 0x94
-#define F10_DCHR_1 0x194
-
-#define F10_DCHR_FOUR_RANK_DIMM BIT(18)
+#define DCHR0 0x94
+#define DCHR1 0x194
 #define DDR3_MODE BIT(8)
-#define F10_DCHR_MblMode BIT(6)
-
 
 #define F10_DCTL_SEL_LOW 0x110
 #define dct_sel_baseaddr(pvt) ((pvt->dct_sel_low) & 0xFFFFF800)
 #define dct_sel_interleave_addr(pvt) (((pvt->dct_sel_low) >> 6) & 0x3)
 #define dct_high_range_enabled(pvt) (pvt->dct_sel_low & BIT(0))
 #define dct_interleave_enabled(pvt) (pvt->dct_sel_low & BIT(2))
-#define dct_ganging_enabled(pvt) (pvt->dct_sel_low & BIT(4))
+
+#define dct_ganging_enabled(pvt) ((boot_cpu_data.x86 == 0x10) && ((pvt)->dct_sel_low & BIT(4)))
+
 #define dct_data_intlv_enabled(pvt) (pvt->dct_sel_low & BIT(5))
 #define dct_dram_enabled(pvt) (pvt->dct_sel_low & BIT(8))
 #define dct_memory_cleared(pvt) (pvt->dct_sel_low & BIT(10))
@@ -262,7 +258,6 @@
 
 #define K8_NBSL 0x48
 
-
 /* Family F10h: Normalized Extended Error Codes */
 #define F10_NBSL_EXT_ERR_RES 0x0
 #define F10_NBSL_EXT_ERR_ECC 0x8
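The reworked dct_ganging_enabled() in the first header hunk above now evaluates to false on anything other than family 0x10, since ganged (128-bit) DCT mode exists only there. A small standalone illustration of that behaviour follows; the pvt_sketch struct and its x86 field are stand-ins (assumptions for this sketch) for the driver's struct amd64_pvt and boot_cpu_data:

/*
 * Standalone illustration of the family-gated dct_ganging_enabled()
 * macro.  Bit positions match the header; the pvt_sketch struct replaces
 * struct amd64_pvt/boot_cpu_data purely for this example.
 */
#include <stdio.h>

#define BIT(n)	(1u << (n))

struct pvt_sketch {
	unsigned int x86;		/* stand-in for boot_cpu_data.x86 */
	unsigned int dct_sel_low;	/* contents of F10_DCTL_SEL_LOW (0x110) */
};

/* Ganged (128-bit) DCT operation is only reported on family 0x10 */
#define dct_ganging_enabled(pvt) \
	(((pvt)->x86 == 0x10) && ((pvt)->dct_sel_low & BIT(4)))

int main(void)
{
	struct pvt_sketch f10 = { .x86 = 0x10, .dct_sel_low = BIT(4) };
	struct pvt_sketch f15 = { .x86 = 0x15, .dct_sel_low = BIT(4) };

	/* Same register contents, but only the F10h part reports ganged mode */
	printf("f10 ganged: %s\n", dct_ganging_enabled(&f10) ? "yes" : "no");
	printf("f15 ganged: %s\n", dct_ganging_enabled(&f15) ? "yes" : "no");
	return 0;
}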