author		Tom St Denis <tom.stdenis@amd.com>	2016-11-14 13:55:17 -0500
committer	Alex Deucher <alexander.deucher@amd.com>	2016-11-23 15:08:42 -0500
commit		b00861b98b4eed49460abc444f5455cfe52c40ea (patch)
tree		b59f47a79a78e7e3e16207e4e7cc2fb3352f4416
parent		99e3820a6b4c13047a8f1829c4c682bb5b11acd7 (diff)
drm/amd/amdgpu: port of DCE v6 to new headers (v3)
Port of the SI DCE v6 code over to the new AMDGPU headers. Tested on a
Tahiti with GNOME through various hot plug, rotation, resize, fullscreen,
and windowed scenarios, and with the staging drm/xf86-video-amdgpu driver.
(v2) Refactored to drop the formatting changes to si_enums.h and to
rename various defines.
(v3) Rebased on upstream.
Signed-off-by: Tom St Denis <tom.stdenis@amd.com>
Reviewed-by: Alex Deucher <alexander.deucher@amd.com>
Acked-by: Christian König <christian.koenig@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
-rw-r--r--	drivers/gpu/drm/amd/amdgpu/dce_v6_0.c			507
-rw-r--r--	drivers/gpu/drm/amd/amdgpu/si_enums.h			 78
-rw-r--r--	drivers/gpu/drm/amd/include/asic_reg/dce/dce_6_0_d.h	 12
3 files changed, 350 insertions, 247 deletions
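
The port is largely mechanical: register offsets and bit defines that dce_v6_0.c previously took from the hand-written si/si_reg.h and si/sid.h headers (EVERGREEN_*, NI_* and SI_* names) are replaced with the generated asic_reg headers, which expose mm<REGISTER> offsets plus <REGISTER>__<FIELD>_MASK and <REGISTER>__<FIELD>__SHIFT field macros. A minimal sketch of the renaming pattern, using register and field names taken from the diff below; the helper function itself is illustrative only and is not part of the patch:

	/* Illustrative only -- not part of the patch. */
	static bool crtc_master_enabled(struct amdgpu_device *adev, int crtc)
	{
		/* old style, hand-rolled defines from si/sid.h:
		 *
		 *   return RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) &
		 *          EVERGREEN_CRTC_MASTER_EN;
		 */

		/* new style, generated offset (mm*) and field mask from
		 * dce/dce_6_0_d.h and dce/dce_6_0_sh_mask.h:
		 */
		return RREG32(mmCRTC_CONTROL + crtc_offsets[crtc]) &
		       CRTC_CONTROL__CRTC_MASTER_EN_MASK;
	}

Where a field value has to be composed rather than tested, the generated headers pair each mask with a shift, so expressions like LATENCY_LOW_WATERMARK(x) become (x << DPG_PIPE_URGENCY_CONTROL__URGENCY_LOW_WATERMARK__SHIFT), as seen in the watermark hunk below.
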
diff --git a/drivers/gpu/drm/amd/amdgpu/dce_v6_0.c b/drivers/gpu/drm/amd/amdgpu/dce_v6_0.c
index 57423332bf75..1f322f7d74e7 100644
--- a/drivers/gpu/drm/amd/amdgpu/dce_v6_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/dce_v6_0.c
@@ -30,8 +30,19 @@
 #include "atombios_encoders.h"
 #include "amdgpu_pll.h"
 #include "amdgpu_connectors.h"
-#include "si/si_reg.h"
-#include "si/sid.h"
+
+#include "bif/bif_3_0_d.h"
+#include "bif/bif_3_0_sh_mask.h"
+#include "oss/oss_1_0_d.h"
+#include "oss/oss_1_0_sh_mask.h"
+#include "gca/gfx_6_0_d.h"
+#include "gca/gfx_6_0_sh_mask.h"
+#include "gmc/gmc_6_0_d.h"
+#include "gmc/gmc_6_0_sh_mask.h"
+#include "dce/dce_6_0_d.h"
+#include "dce/dce_6_0_sh_mask.h"
+#include "gca/gfx_7_2_enum.h"
+#include "si_enums.h"
 
 static void dce_v6_0_set_display_funcs(struct amdgpu_device *adev);
 static void dce_v6_0_set_irq_funcs(struct amdgpu_device *adev);
@@ -48,12 +59,12 @@ static const u32 crtc_offsets[6] =
 
 static const u32 hpd_offsets[] =
 {
-	DC_HPD1_INT_STATUS - DC_HPD1_INT_STATUS,
-	DC_HPD2_INT_STATUS - DC_HPD1_INT_STATUS,
-	DC_HPD3_INT_STATUS - DC_HPD1_INT_STATUS,
-	DC_HPD4_INT_STATUS - DC_HPD1_INT_STATUS,
-	DC_HPD5_INT_STATUS - DC_HPD1_INT_STATUS,
-	DC_HPD6_INT_STATUS - DC_HPD1_INT_STATUS,
+	mmDC_HPD1_INT_STATUS - mmDC_HPD1_INT_STATUS,
+	mmDC_HPD2_INT_STATUS - mmDC_HPD1_INT_STATUS,
+	mmDC_HPD3_INT_STATUS - mmDC_HPD1_INT_STATUS,
+	mmDC_HPD4_INT_STATUS - mmDC_HPD1_INT_STATUS,
+	mmDC_HPD5_INT_STATUS - mmDC_HPD1_INT_STATUS,
+	mmDC_HPD6_INT_STATUS - mmDC_HPD1_INT_STATUS,
 };
 
 static const uint32_t dig_offsets[] = {
@@ -73,32 +84,32 @@ static const struct {
 	uint32_t	hpd;
 
 } interrupt_status_offsets[6] = { {
-	.reg = DISP_INTERRUPT_STATUS,
+	.reg = mmDISP_INTERRUPT_STATUS,
 	.vblank = DISP_INTERRUPT_STATUS__LB_D1_VBLANK_INTERRUPT_MASK,
 	.vline = DISP_INTERRUPT_STATUS__LB_D1_VLINE_INTERRUPT_MASK,
 	.hpd = DISP_INTERRUPT_STATUS__DC_HPD1_INTERRUPT_MASK
 }, {
-	.reg = DISP_INTERRUPT_STATUS_CONTINUE,
+	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE,
 	.vblank = DISP_INTERRUPT_STATUS_CONTINUE__LB_D2_VBLANK_INTERRUPT_MASK,
 	.vline = DISP_INTERRUPT_STATUS_CONTINUE__LB_D2_VLINE_INTERRUPT_MASK,
 	.hpd = DISP_INTERRUPT_STATUS_CONTINUE__DC_HPD2_INTERRUPT_MASK
 }, {
-	.reg = DISP_INTERRUPT_STATUS_CONTINUE2,
+	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE2,
 	.vblank = DISP_INTERRUPT_STATUS_CONTINUE2__LB_D3_VBLANK_INTERRUPT_MASK,
 	.vline = DISP_INTERRUPT_STATUS_CONTINUE2__LB_D3_VLINE_INTERRUPT_MASK,
 	.hpd = DISP_INTERRUPT_STATUS_CONTINUE2__DC_HPD3_INTERRUPT_MASK
 }, {
-	.reg = DISP_INTERRUPT_STATUS_CONTINUE3,
+	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE3,
 	.vblank = DISP_INTERRUPT_STATUS_CONTINUE3__LB_D4_VBLANK_INTERRUPT_MASK,
 	.vline = DISP_INTERRUPT_STATUS_CONTINUE3__LB_D4_VLINE_INTERRUPT_MASK,
 	.hpd = DISP_INTERRUPT_STATUS_CONTINUE3__DC_HPD4_INTERRUPT_MASK
 }, {
-	.reg = DISP_INTERRUPT_STATUS_CONTINUE4,
+	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE4,
 	.vblank = DISP_INTERRUPT_STATUS_CONTINUE4__LB_D5_VBLANK_INTERRUPT_MASK,
 	.vline = DISP_INTERRUPT_STATUS_CONTINUE4__LB_D5_VLINE_INTERRUPT_MASK,
 	.hpd = DISP_INTERRUPT_STATUS_CONTINUE4__DC_HPD5_INTERRUPT_MASK
 }, {
-	.reg = DISP_INTERRUPT_STATUS_CONTINUE5,
+	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE5,
 	.vblank = DISP_INTERRUPT_STATUS_CONTINUE5__LB_D6_VBLANK_INTERRUPT_MASK,
 	.vline = DISP_INTERRUPT_STATUS_CONTINUE5__LB_D6_VLINE_INTERRUPT_MASK,
 	.hpd = DISP_INTERRUPT_STATUS_CONTINUE5__DC_HPD6_INTERRUPT_MASK
@@ -119,7 +130,7 @@ static void dce_v6_0_audio_endpt_wreg(struct amdgpu_device *adev,
 
 static bool dce_v6_0_is_in_vblank(struct amdgpu_device *adev, int crtc)
 {
-	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
+	if (RREG32(mmCRTC_STATUS + crtc_offsets[crtc]) & CRTC_STATUS__CRTC_V_BLANK_MASK)
 		return true;
 	else
 		return false;
@@ -129,8 +140,8 @@ static bool dce_v6_0_is_counter_moving(struct amdgpu_device *adev, int crtc)
 {
 	u32 pos1, pos2;
 
-	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
-	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
+	pos1 = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]);
+	pos2 = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]);
 
 	if (pos1 != pos2)
 		return true;
@@ -152,7 +163,7 @@ static void dce_v6_0_vblank_wait(struct amdgpu_device *adev, int crtc)
 	if (crtc >= adev->mode_info.num_crtc)
 		return;
 
-	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
+	if (!(RREG32(mmCRTC_CONTROL + crtc_offsets[crtc]) & CRTC_CONTROL__CRTC_MASTER_EN_MASK))
 		return;
 
 	/* depending on when we hit vblank, we may be close to active; if so,
@@ -180,7 +191,7 @@ static u32 dce_v6_0_vblank_get_counter(struct amdgpu_device *adev, int crtc)
 	if (crtc >= adev->mode_info.num_crtc)
 		return 0;
 	else
-		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
+		return RREG32(mmCRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
 }
 
 static void dce_v6_0_pageflip_interrupt_init(struct amdgpu_device *adev)
@@ -220,16 +231,16 @@ static void dce_v6_0_page_flip(struct amdgpu_device *adev,
 	struct amdgpu_crtc *amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
 
 	/* flip at hsync for async, default is vsync */
-	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, async ?
-	       EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
+	WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, async ?
+	       GRPH_FLIP_CONTROL__GRPH_SURFACE_UPDATE_H_RETRACE_EN_MASK : 0);
 	/* update the scanout addresses */
-	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
+	WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
 	       upper_32_bits(crtc_base));
-	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
+	WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
 	       (u32)crtc_base);
 
 	/* post the write */
-	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset);
+	RREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset);
 }
 
 static int dce_v6_0_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
@@ -237,8 +248,8 @@ static int dce_v6_0_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
 {
 	if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc))
 		return -EINVAL;
-	*vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END + crtc_offsets[crtc]);
-	*position = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
+	*vbl = RREG32(mmCRTC_V_BLANK_START_END + crtc_offsets[crtc]);
+	*position = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]);
 
 	return 0;
 
@@ -261,7 +272,7 @@ static bool dce_v6_0_hpd_sense(struct amdgpu_device *adev,
 	if (hpd >= adev->mode_info.num_hpd)
 		return connected;
 
-	if (RREG32(DC_HPD1_INT_STATUS + hpd_offsets[hpd]) & DC_HPDx_SENSE)
+	if (RREG32(mmDC_HPD1_INT_STATUS + hpd_offsets[hpd]) & DC_HPD1_INT_STATUS__DC_HPD1_SENSE_MASK)
 		connected = true;
 
 	return connected;
@@ -284,12 +295,12 @@ static void dce_v6_0_hpd_set_polarity(struct amdgpu_device *adev,
 	if (hpd >= adev->mode_info.num_hpd)
 		return;
 
-	tmp = RREG32(DC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
+	tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
 	if (connected)
-		tmp &= ~DC_HPDx_INT_POLARITY;
+		tmp &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK;
 	else
-		tmp |= DC_HPDx_INT_POLARITY;
-	WREG32(DC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp);
+		tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK;
+	WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp);
 }
 
 /**
@@ -312,9 +323,9 @@ static void dce_v6_0_hpd_init(struct amdgpu_device *adev)
 		if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd)
 			continue;
 
-		tmp = RREG32(DC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
-		tmp |= DC_HPDx_EN;
-		WREG32(DC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);
+		tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
+		tmp |= DC_HPD1_CONTROL__DC_HPD1_EN_MASK;
+		WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);
 
 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
@@ -323,9 +334,9 @@ static void dce_v6_0_hpd_init(struct amdgpu_device *adev)
 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
 			 * also avoid interrupt storms during dpms.
 			 */
-			tmp = RREG32(DC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
-			tmp &= ~DC_HPDx_INT_EN;
-			WREG32(DC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);
+			tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
+			tmp &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_EN_MASK;
+			WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);
 			continue;
 		}
 
@@ -355,9 +366,9 @@ static void dce_v6_0_hpd_fini(struct amdgpu_device *adev)
 		if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd)
 			continue;
 
-		tmp = RREG32(DC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
-		tmp &= ~DC_HPDx_EN;
-		WREG32(DC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], 0);
+		tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
+		tmp &= ~DC_HPD1_CONTROL__DC_HPD1_EN_MASK;
+		WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], 0);
 
 		amdgpu_irq_put(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd);
 	}
@@ -365,7 +376,7 @@ static void dce_v6_0_hpd_fini(struct amdgpu_device *adev)
 
 static u32 dce_v6_0_hpd_get_gpio_reg(struct amdgpu_device *adev)
 {
-	return SI_DC_GPIO_HPD_A;
+	return mmDC_GPIO_HPD_A;
 }
 
 static bool dce_v6_0_is_display_hung(struct amdgpu_device *adev)
@@ -380,7 +391,7 @@ static u32 evergreen_get_vblank_counter(struct amdgpu_device* adev, int crtc)
 	if (crtc >= adev->mode_info.num_crtc)
 		return 0;
 	else
-		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
+		return RREG32(mmCRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
 }
 
 static void dce_v6_0_stop_mc_access(struct amdgpu_device *adev,
@@ -389,25 +400,25 @@ static void dce_v6_0_stop_mc_access(struct amdgpu_device *adev,
 	u32 crtc_enabled, tmp, frame_count;
 	int i, j;
 
-	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
-	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
+	save->vga_render_control = RREG32(mmVGA_RENDER_CONTROL);
+	save->vga_hdp_control = RREG32(mmVGA_HDP_CONTROL);
 
 	/* disable VGA render */
-	WREG32(VGA_RENDER_CONTROL, 0);
+	WREG32(mmVGA_RENDER_CONTROL, 0);
 
 	/* blank the display controllers */
 	for (i = 0; i < adev->mode_info.num_crtc; i++) {
-		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
+		crtc_enabled = RREG32(mmCRTC_CONTROL + crtc_offsets[i]) & CRTC_CONTROL__CRTC_MASTER_EN_MASK;
 		if (crtc_enabled) {
 			save->crtc_enabled[i] = true;
-			tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
+			tmp = RREG32(mmCRTC_BLANK_CONTROL + crtc_offsets[i]);
 
-			if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
+			if (!(tmp & CRTC_BLANK_CONTROL__CRTC_BLANK_DATA_EN_MASK)) {
 				dce_v6_0_vblank_wait(adev, i);
-				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
-				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
-				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
-				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
+				WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 1);
+				tmp |= CRTC_BLANK_CONTROL__CRTC_BLANK_DATA_EN_MASK;
+				WREG32(mmCRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
+				WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 0);
 			}
 			/* wait for the next frame */
 			frame_count = evergreen_get_vblank_counter(adev, i);
@@ -418,11 +429,11 @@ static void dce_v6_0_stop_mc_access(struct amdgpu_device *adev,
 			}
 
 			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
-			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
-			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
-			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
-			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
-			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
+			WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 1);
+			tmp = RREG32(mmCRTC_CONTROL + crtc_offsets[i]);
+			tmp &= ~CRTC_CONTROL__CRTC_MASTER_EN_MASK;
+			WREG32(mmCRTC_CONTROL + crtc_offsets[i], tmp);
+			WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 0);
 			save->crtc_enabled[i] = false;
 			/* ***** */
 		} else {
@@ -439,41 +450,41 @@ static void dce_v6_0_resume_mc_access(struct amdgpu_device *adev,
 
 	/* update crtc base addresses */
 	for (i = 0; i < adev->mode_info.num_crtc; i++) {
-		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
+		WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
 		       upper_32_bits(adev->mc.vram_start));
-		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
+		WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
 		       upper_32_bits(adev->mc.vram_start));
-		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
+		WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
 		       (u32)adev->mc.vram_start);
-		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
+		WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
 		       (u32)adev->mc.vram_start);
 	}
 
-	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(adev->mc.vram_start));
-	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)adev->mc.vram_start);
+	WREG32(mmVGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(adev->mc.vram_start));
+	WREG32(mmVGA_MEMORY_BASE_ADDRESS, (u32)adev->mc.vram_start);
 
 	/* unlock regs and wait for update */
 	for (i = 0; i < adev->mode_info.num_crtc; i++) {
 		if (save->crtc_enabled[i]) {
-			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
+			tmp = RREG32(mmMASTER_UPDATE_MODE + crtc_offsets[i]);
 			if ((tmp & 0x7) != 3) {
 				tmp &= ~0x7;
 				tmp |= 0x3;
-				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
+				WREG32(mmMASTER_UPDATE_MODE + crtc_offsets[i], tmp);
 			}
-			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
-			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
-				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
-				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
+			tmp = RREG32(mmGRPH_UPDATE + crtc_offsets[i]);
+			if (tmp & GRPH_UPDATE__GRPH_UPDATE_LOCK_MASK) {
+				tmp &= ~GRPH_UPDATE__GRPH_UPDATE_LOCK_MASK;
+				WREG32(mmGRPH_UPDATE + crtc_offsets[i], tmp);
 			}
-			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
+			tmp = RREG32(mmMASTER_UPDATE_LOCK + crtc_offsets[i]);
 			if (tmp & 1) {
 				tmp &= ~1;
-				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
+				WREG32(mmMASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
 			}
 			for (j = 0; j < adev->usec_timeout; j++) {
-				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
-				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
+				tmp = RREG32(mmGRPH_UPDATE + crtc_offsets[i]);
+				if ((tmp & GRPH_UPDATE__GRPH_SURFACE_UPDATE_PENDING_MASK) == 0)
 					break;
 				udelay(1);
 			}
@@ -481,9 +492,9 @@ static void dce_v6_0_resume_mc_access(struct amdgpu_device *adev,
 	}
 
 	/* Unlock vga access */
-	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
+	WREG32(mmVGA_HDP_CONTROL, save->vga_hdp_control);
 	mdelay(1);
-	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
+	WREG32(mmVGA_RENDER_CONTROL, save->vga_render_control);
 
 }
 
@@ -491,8 +502,8 @@ static void dce_v6_0_set_vga_render_state(struct amdgpu_device *adev,
 				bool render)
 {
 	if (!render)
-		WREG32(R_000300_VGA_RENDER_CONTROL,
-		       RREG32(R_000300_VGA_RENDER_CONTROL) & C_000300_VGA_VSTATUS_CNTL);
+		WREG32(mmVGA_RENDER_CONTROL,
+		       RREG32(mmVGA_RENDER_CONTROL) & VGA_VSTATUS_CNTL);
 
 }
 
@@ -526,14 +537,14 @@ void dce_v6_0_disable_dce(struct amdgpu_device *adev)
 
 	/*Disable crtc*/
 	for (i = 0; i < dce_v6_0_get_num_crtc(adev); i++) {
-		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) &
-			EVERGREEN_CRTC_MASTER_EN;
+		crtc_enabled = RREG32(mmCRTC_CONTROL + crtc_offsets[i]) &
+			CRTC_CONTROL__CRTC_MASTER_EN_MASK;
 		if (crtc_enabled) {
-			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
-			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
-			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
-			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
-			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
+			WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 1);
+			tmp = RREG32(mmCRTC_CONTROL + crtc_offsets[i]);
+			tmp &= ~CRTC_CONTROL__CRTC_MASTER_EN_MASK;
+			WREG32(mmCRTC_CONTROL + crtc_offsets[i], tmp);
+			WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 0);
 		}
 	}
 }
@@ -569,19 +580,23 @@ static void dce_v6_0_program_fmt(struct drm_encoder *encoder)
 	case 6:
 		if (dither == AMDGPU_FMT_DITHER_ENABLE)
 			/* XXX sort out optimal dither settings */
-			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
-				FMT_SPATIAL_DITHER_EN);
+			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_FRAME_RANDOM_ENABLE_MASK |
+				FMT_BIT_DEPTH_CONTROL__FMT_HIGHPASS_RANDOM_ENABLE_MASK |
+				FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_EN_MASK);
 		else
-			tmp |= FMT_TRUNCATE_EN;
+			tmp |= FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_EN_MASK;
 		break;
 	case 8:
 		if (dither == AMDGPU_FMT_DITHER_ENABLE)
 			/* XXX sort out optimal dither settings */
-			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
-				FMT_RGB_RANDOM_ENABLE |
-				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
+			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_FRAME_RANDOM_ENABLE_MASK |
+				FMT_BIT_DEPTH_CONTROL__FMT_HIGHPASS_RANDOM_ENABLE_MASK |
+				FMT_BIT_DEPTH_CONTROL__FMT_RGB_RANDOM_ENABLE_MASK |
+				FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_EN_MASK |
+				FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_DEPTH_MASK);
 		else
-			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
+			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_EN_MASK |
+				FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_DEPTH_MASK);
 		break;
 	case 10:
 	default:
@@ -589,7 +604,7 @@ static void dce_v6_0_program_fmt(struct drm_encoder *encoder)
 		break;
 	}
 
-	WREG32(FMT_BIT_DEPTH_CONTROL + amdgpu_crtc->crtc_offset, tmp);
+	WREG32(mmFMT_BIT_DEPTH_CONTROL + amdgpu_crtc->crtc_offset, tmp);
 }
 
 /**
@@ -603,7 +618,7 @@ static void dce_v6_0_program_fmt(struct drm_encoder *encoder)
  */
 static u32 si_get_number_of_dram_channels(struct amdgpu_device *adev)
 {
-	u32 tmp = RREG32(MC_SHARED_CHMAP);
+	u32 tmp = RREG32(mmMC_SHARED_CHMAP);
 
 	switch ((tmp & MC_SHARED_CHMAP__NOOFCHAN_MASK) >> MC_SHARED_CHMAP__NOOFCHAN__SHIFT) {
 	case 0:
@@ -1100,28 +1115,28 @@ static void dce_v6_0_program_watermarks(struct amdgpu_device *adev,
 	}
 
 	/* select wm A */
-	arb_control3 = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset);
+	arb_control3 = RREG32(mmDPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset);
 	tmp = arb_control3;
 	tmp &= ~LATENCY_WATERMARK_MASK(3);
 	tmp |= LATENCY_WATERMARK_MASK(1);
-	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset, tmp);
-	WREG32(DPG_PIPE_LATENCY_CONTROL + amdgpu_crtc->crtc_offset,
-	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
-		LATENCY_HIGH_WATERMARK(line_time)));
+	WREG32(mmDPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset, tmp);
+	WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((latency_watermark_a << DPG_PIPE_URGENCY_CONTROL__URGENCY_LOW_WATERMARK__SHIFT) |
+		(line_time << DPG_PIPE_URGENCY_CONTROL__URGENCY_HIGH_WATERMARK__SHIFT)));
 	/* select wm B */
-	tmp = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset);
+	tmp = RREG32(mmDPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset);
 	tmp &= ~LATENCY_WATERMARK_MASK(3);
 	tmp |= LATENCY_WATERMARK_MASK(2);
-	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset, tmp);
-	WREG32(DPG_PIPE_LATENCY_CONTROL + amdgpu_crtc->crtc_offset,
-	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
-		LATENCY_HIGH_WATERMARK(line_time)));
+	WREG32(mmDPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset, tmp);
+	WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((latency_watermark_b << DPG_PIPE_URGENCY_CONTROL__URGENCY_LOW_WATERMARK__SHIFT) |
+		(line_time << DPG_PIPE_URGENCY_CONTROL__URGENCY_HIGH_WATERMARK__SHIFT)));
 	/* restore original selection */
-	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset, arb_control3);
+	WREG32(mmDPG_PIPE_ARBITRATION_CONTROL3 + amdgpu_crtc->crtc_offset, arb_control3);
 
 	/* write the priority marks */
-	WREG32(PRIORITY_A_CNT + amdgpu_crtc->crtc_offset, priority_a_cnt);
-	WREG32(PRIORITY_B_CNT + amdgpu_crtc->crtc_offset, priority_b_cnt);
+	WREG32(mmPRIORITY_A_CNT + amdgpu_crtc->crtc_offset, priority_a_cnt);
+	WREG32(mmPRIORITY_B_CNT + amdgpu_crtc->crtc_offset, priority_b_cnt);
 
 	/* save values for DPM */
 	amdgpu_crtc->line_time = line_time;
@@ -1139,7 +1154,7 @@ static u32 dce_v6_0_line_buffer_adjust(struct amdgpu_device *adev,
 	/*
 	 * Line Buffer Setup
 	 * There are 3 line buffers, each one shared by 2 display controllers.
-	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
+	 * mmDC_LB_MEMORY_SPLIT controls how that line buffer is shared between
 	 * the display controllers. The paritioning is done via one of four
 	 * preset allocations specified in bits 21:20:
 	 * 0 - half lb
@@ -1162,14 +1177,14 @@ static u32 dce_v6_0_line_buffer_adjust(struct amdgpu_device *adev,
 		buffer_alloc = 0;
 	}
 
-	WREG32(DC_LB_MEMORY_SPLIT + amdgpu_crtc->crtc_offset,
+	WREG32(mmDC_LB_MEMORY_SPLIT + amdgpu_crtc->crtc_offset,
 	       DC_LB_MEMORY_CONFIG(tmp));
 
-	WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
-	       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
+	WREG32(mmPIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
+	       (buffer_alloc << PIPE0_DMIF_BUFFER_CONTROL__DMIF_BUFFERS_ALLOCATED__SHIFT));
 	for (i = 0; i < adev->usec_timeout; i++) {
-		if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
-		    DMIF_BUFFERS_ALLOCATED_COMPLETED)
+		if (RREG32(mmPIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
+		    PIPE0_DMIF_BUFFER_CONTROL__DMIF_BUFFERS_ALLOCATION_COMPLETED_MASK)
 			break;
 		udelay(1);
 	}
@@ -1411,12 +1426,12 @@ static void dce_v6_0_afmt_fini(struct amdgpu_device *adev)
 
 static const u32 vga_control_regs[6] =
 {
-	AVIVO_D1VGA_CONTROL,
-	AVIVO_D2VGA_CONTROL,
-	EVERGREEN_D3VGA_CONTROL,
-	EVERGREEN_D4VGA_CONTROL,
-	EVERGREEN_D5VGA_CONTROL,
-	EVERGREEN_D6VGA_CONTROL,
+	mmD1VGA_CONTROL,
+	mmD2VGA_CONTROL,
+	mmD3VGA_CONTROL,
+	mmD4VGA_CONTROL,
+	mmD5VGA_CONTROL,
+	mmD6VGA_CONTROL,
 };
 
 static void dce_v6_0_vga_enable(struct drm_crtc *crtc, bool enable)
@@ -1436,7 +1451,7 @@ static void dce_v6_0_grph_enable(struct drm_crtc *crtc, bool enable)
 	struct drm_device *dev = crtc->dev;
 	struct amdgpu_device *adev = dev->dev_private;
 
-	WREG32(EVERGREEN_GRPH_ENABLE + amdgpu_crtc->crtc_offset, enable ? 1 : 0);
+	WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, enable ? 1 : 0);
 }
 
 static int dce_v6_0_crtc_do_set_base(struct drm_crtc *crtc,
@@ -1452,7 +1467,7 @@ static int dce_v6_0_crtc_do_set_base(struct drm_crtc *crtc,
 	struct amdgpu_bo *abo;
 	uint64_t fb_location, tiling_flags;
 	uint32_t fb_format, fb_pitch_pixels, pipe_config;
-	u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
+	u32 fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_NONE);
 	u32 viewport_w, viewport_h;
 	int r;
 	bool bypass_lut = false;
@@ -1495,64 +1510,64 @@ static int dce_v6_0_crtc_do_set_base(struct drm_crtc *crtc,
 
 	switch (target_fb->pixel_format) {
 	case DRM_FORMAT_C8:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_8BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_INDEXED));
 		break;
 	case DRM_FORMAT_XRGB4444:
 	case DRM_FORMAT_ARGB4444:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB4444));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_16BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_ARGB4444));
 #ifdef __BIG_ENDIAN
-		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
+		fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_8IN16);
 #endif
 		break;
 	case DRM_FORMAT_XRGB1555:
 	case DRM_FORMAT_ARGB1555:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_16BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_ARGB1555));
 #ifdef __BIG_ENDIAN
-		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
+		fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_8IN16);
 #endif
 		break;
 	case DRM_FORMAT_BGRX5551:
 	case DRM_FORMAT_BGRA5551:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA5551));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_16BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_BGRA5551));
 #ifdef __BIG_ENDIAN
-		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
+		fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_8IN16);
 #endif
 		break;
 	case DRM_FORMAT_RGB565:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_16BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_ARGB565));
 #ifdef __BIG_ENDIAN
-		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
+		fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_8IN16);
 #endif
 		break;
 	case DRM_FORMAT_XRGB8888:
 	case DRM_FORMAT_ARGB8888:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_32BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_ARGB8888));
#ifdef __BIG_ENDIAN
-		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
+		fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_8IN32);
 #endif
 		break;
 	case DRM_FORMAT_XRGB2101010:
 	case DRM_FORMAT_ARGB2101010:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB2101010));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_32BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_ARGB2101010));
 #ifdef __BIG_ENDIAN
-		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
+		fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_8IN32);
 #endif
 		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
 		bypass_lut = true;
 		break;
 	case DRM_FORMAT_BGRX1010102:
 	case DRM_FORMAT_BGRA1010102:
-		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
-			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA1010102));
+		fb_format = (GRPH_DEPTH(GRPH_DEPTH_32BPP) |
+			     GRPH_FORMAT(GRPH_FORMAT_BGRA1010102));
 #ifdef __BIG_ENDIAN
-		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
+		fb_swap = GRPH_ENDIAN_SWAP(GRPH_ENDIAN_8IN32);
 #endif
 		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
 		bypass_lut = true;
@@ -1572,75 +1587,75 @@ static int dce_v6_0_crtc_do_set_base(struct drm_crtc *crtc,
 		tile_split = AMDGPU_TILING_GET(tiling_flags, TILE_SPLIT);
 		num_banks = AMDGPU_TILING_GET(tiling_flags, NUM_BANKS);
 
-		fb_format |= EVERGREEN_GRPH_NUM_BANKS(num_banks);
-		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
-		fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
-		fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
-		fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
-		fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
+		fb_format |= GRPH_NUM_BANKS(num_banks);
+		fb_format |= GRPH_ARRAY_MODE(GRPH_ARRAY_2D_TILED_THIN1);
+		fb_format |= GRPH_TILE_SPLIT(tile_split);
+		fb_format |= GRPH_BANK_WIDTH(bankw);
+		fb_format |= GRPH_BANK_HEIGHT(bankh);
+		fb_format |= GRPH_MACRO_TILE_ASPECT(mtaspect);
 	} else if (AMDGPU_TILING_GET(tiling_flags, ARRAY_MODE) == ARRAY_1D_TILED_THIN1) {
-		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
+		fb_format |= GRPH_ARRAY_MODE(GRPH_ARRAY_1D_TILED_THIN1);
 	}
 
 	pipe_config = AMDGPU_TILING_GET(tiling_flags, PIPE_CONFIG);
-	fb_format |= SI_GRPH_PIPE_CONFIG(pipe_config);
+	fb_format |= GRPH_PIPE_CONFIG(pipe_config);
 
 	dce_v6_0_vga_enable(crtc, false);
 
 	/* Make sure surface address is updated at vertical blank rather than
 	 * horizontal blank
 	 */
-	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, 0);
 
-	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
+	WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
 	       upper_32_bits(fb_location));
-	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
+	WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
 	       upper_32_bits(fb_location));
-	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
-	       (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
-	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
-	       (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
-	WREG32(EVERGREEN_GRPH_CONTROL + amdgpu_crtc->crtc_offset, fb_format);
-	WREG32(EVERGREEN_GRPH_SWAP_CONTROL + amdgpu_crtc->crtc_offset, fb_swap);
+	WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
+	       (u32)fb_location & GRPH_PRIMARY_SURFACE_ADDRESS__GRPH_PRIMARY_SURFACE_ADDRESS_MASK);
+	WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
+	       (u32) fb_location & GRPH_PRIMARY_SURFACE_ADDRESS__GRPH_PRIMARY_SURFACE_ADDRESS_MASK);
+	WREG32(mmGRPH_CONTROL + amdgpu_crtc->crtc_offset, fb_format);
+	WREG32(mmGRPH_SWAP_CNTL + amdgpu_crtc->crtc_offset, fb_swap);
 
 	/*
 	 * The LUT only has 256 slots for indexing by a 8 bpc fb. Bypass the LUT
 	 * for > 8 bpc scanout to avoid truncation of fb indices to 8 msb's, to
 	 * retain the full precision throughout the pipeline.
 	 */
-	WREG32_P(EVERGREEN_GRPH_LUT_10BIT_BYPASS_CONTROL + amdgpu_crtc->crtc_offset,
-		 (bypass_lut ? EVERGREEN_LUT_10BIT_BYPASS_EN : 0),
-		 ~EVERGREEN_LUT_10BIT_BYPASS_EN);
+	WREG32_P(mmGRPH_LUT_10BIT_BYPASS + amdgpu_crtc->crtc_offset,
+		 (bypass_lut ? GRPH_LUT_10BIT_BYPASS__GRPH_LUT_10BIT_BYPASS_EN_MASK : 0),
+		 ~GRPH_LUT_10BIT_BYPASS__GRPH_LUT_10BIT_BYPASS_EN_MASK);
 
 	if (bypass_lut)
 		DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
 
-	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + amdgpu_crtc->crtc_offset, 0);
-	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + amdgpu_crtc->crtc_offset, 0);
-	WREG32(EVERGREEN_GRPH_X_START + amdgpu_crtc->crtc_offset, 0);
-	WREG32(EVERGREEN_GRPH_Y_START + amdgpu_crtc->crtc_offset, 0);
-	WREG32(EVERGREEN_GRPH_X_END + amdgpu_crtc->crtc_offset, target_fb->width);
-	WREG32(EVERGREEN_GRPH_Y_END + amdgpu_crtc->crtc_offset, target_fb->height);
+	WREG32(mmGRPH_SURFACE_OFFSET_X + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmGRPH_SURFACE_OFFSET_Y + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmGRPH_X_START + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmGRPH_Y_START + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmGRPH_X_END + amdgpu_crtc->crtc_offset, target_fb->width);
+	WREG32(mmGRPH_Y_END + amdgpu_crtc->crtc_offset, target_fb->height);
 
 	fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
-	WREG32(EVERGREEN_GRPH_PITCH + amdgpu_crtc->crtc_offset, fb_pitch_pixels);
+	WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, fb_pitch_pixels);
 
 	dce_v6_0_grph_enable(crtc, true);
 
-	WREG32(EVERGREEN_DESKTOP_HEIGHT + amdgpu_crtc->crtc_offset,
+	WREG32(mmDESKTOP_HEIGHT + amdgpu_crtc->crtc_offset,
 	       target_fb->height);
 	x &= ~3;
 	y &= ~1;
-	WREG32(EVERGREEN_VIEWPORT_START + amdgpu_crtc->crtc_offset,
+	WREG32(mmVIEWPORT_START + amdgpu_crtc->crtc_offset,
 	       (x << 16) | y);
 	viewport_w = crtc->mode.hdisplay;
 	viewport_h = (crtc->mode.vdisplay + 1) & ~1;
 
-	WREG32(EVERGREEN_VIEWPORT_SIZE + amdgpu_crtc->crtc_offset,
+	WREG32(mmVIEWPORT_SIZE + amdgpu_crtc->crtc_offset,
 	       (viewport_w << 16) | viewport_h);
 
 	/* set pageflip to happen anywhere in vblank interval */
-	WREG32(EVERGREEN_MASTER_UPDATE_MODE + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmMASTER_UPDATE_MODE + amdgpu_crtc->crtc_offset, 0);
 
 	if (!atomic && fb && fb != crtc->primary->fb) {
 		amdgpu_fb = to_amdgpu_framebuffer(fb);
@@ -1667,10 +1682,10 @@ static void dce_v6_0_set_interleave(struct drm_crtc *crtc,
 	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
 
 	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
-		WREG32(EVERGREEN_DATA_FORMAT + amdgpu_crtc->crtc_offset,
-		       EVERGREEN_INTERLEAVE_EN);
+		WREG32(mmDATA_FORMAT + amdgpu_crtc->crtc_offset,
+		       INTERLEAVE_EN);
 	else
-		WREG32(EVERGREEN_DATA_FORMAT + amdgpu_crtc->crtc_offset, 0);
+		WREG32(mmDATA_FORMAT + amdgpu_crtc->crtc_offset, 0);
 }
 
 static void dce_v6_0_crtc_load_lut(struct drm_crtc *crtc)
@@ -1683,54 +1698,52 @@ static void dce_v6_0_crtc_load_lut(struct drm_crtc *crtc)
 
 	DRM_DEBUG_KMS("%d\n", amdgpu_crtc->crtc_id);
 
-	WREG32(NI_INPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset,
-	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
-		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
-	WREG32(NI_PRESCALE_GRPH_CONTROL + amdgpu_crtc->crtc_offset,
-	       NI_GRPH_PRESCALE_BYPASS);
-	WREG32(NI_PRESCALE_OVL_CONTROL + amdgpu_crtc->crtc_offset,
-	       NI_OVL_PRESCALE_BYPASS);
-	WREG32(NI_INPUT_GAMMA_CONTROL + amdgpu_crtc->crtc_offset,
-	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
-		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));
-
-
+	WREG32(mmINPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((0 << INPUT_CSC_CONTROL__INPUT_CSC_GRPH_MODE__SHIFT) |
+		(0 << INPUT_CSC_CONTROL__INPUT_CSC_OVL_MODE__SHIFT)));
+	WREG32(mmPRESCALE_GRPH_CONTROL + amdgpu_crtc->crtc_offset,
+	       PRESCALE_GRPH_CONTROL__GRPH_PRESCALE_BYPASS_MASK);
+	WREG32(mmPRESCALE_OVL_CONTROL + amdgpu_crtc->crtc_offset,
+	       PRESCALE_OVL_CONTROL__OVL_PRESCALE_BYPASS_MASK);
+	WREG32(mmINPUT_GAMMA_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((0 << INPUT_GAMMA_CONTROL__GRPH_INPUT_GAMMA_MODE__SHIFT) |
+		(0 << INPUT_GAMMA_CONTROL__OVL_INPUT_GAMMA_MODE__SHIFT)));
 
-	WREG32(EVERGREEN_DC_LUT_CONTROL + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmDC_LUT_CONTROL + amdgpu_crtc->crtc_offset, 0);
 
-	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0);
-	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0);
-	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmDC_LUT_BLACK_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmDC_LUT_BLACK_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmDC_LUT_BLACK_OFFSET_RED + amdgpu_crtc->crtc_offset, 0);
 
-	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0xffff);
-	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0xffff);
-	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + amdgpu_crtc->crtc_offset, 0xffff);
+	WREG32(mmDC_LUT_WHITE_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0xffff);
+	WREG32(mmDC_LUT_WHITE_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0xffff);
+	WREG32(mmDC_LUT_WHITE_OFFSET_RED + amdgpu_crtc->crtc_offset, 0xffff);
 
-	WREG32(EVERGREEN_DC_LUT_RW_MODE + amdgpu_crtc->crtc_offset, 0);
-	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + amdgpu_crtc->crtc_offset, 0x00000007);
+	WREG32(mmDC_LUT_RW_MODE + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmDC_LUT_WRITE_EN_MASK + amdgpu_crtc->crtc_offset, 0x00000007);
 
-	WREG32(EVERGREEN_DC_LUT_RW_INDEX + amdgpu_crtc->crtc_offset, 0);
+	WREG32(mmDC_LUT_RW_INDEX + amdgpu_crtc->crtc_offset, 0);
 	for (i = 0; i < 256; i++) {
-		WREG32(EVERGREEN_DC_LUT_30_COLOR + amdgpu_crtc->crtc_offset,
+		WREG32(mmDC_LUT_30_COLOR + amdgpu_crtc->crtc_offset,
 		       (amdgpu_crtc->lut_r[i] << 20) |
 		       (amdgpu_crtc->lut_g[i] << 10) |
 		       (amdgpu_crtc->lut_b[i] << 0));
 	}
 
-	WREG32(NI_DEGAMMA_CONTROL + amdgpu_crtc->crtc_offset,
-	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
-		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
-		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
-		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
-	WREG32(NI_GAMUT_REMAP_CONTROL + amdgpu_crtc->crtc_offset,
-	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
-		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
-	WREG32(NI_REGAMMA_CONTROL + amdgpu_crtc->crtc_offset,
-	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
-		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
-	WREG32(NI_OUTPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset,
-	       (NI_OUTPUT_CSC_GRPH_MODE(0) |
-		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));
+	WREG32(mmDEGAMMA_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((0 << DEGAMMA_CONTROL__GRPH_DEGAMMA_MODE__SHIFT) |
+		(0 << DEGAMMA_CONTROL__OVL_DEGAMMA_MODE__SHIFT) |
+		ICON_DEGAMMA_MODE(0) |
+		(0 << DEGAMMA_CONTROL__CURSOR_DEGAMMA_MODE__SHIFT)));
+	WREG32(mmGAMUT_REMAP_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((0 << GAMUT_REMAP_CONTROL__GRPH_GAMUT_REMAP_MODE__SHIFT) |
+		(0 << GAMUT_REMAP_CONTROL__OVL_GAMUT_REMAP_MODE__SHIFT)));
+	WREG32(mmREGAMMA_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((0 << REGAMMA_CONTROL__GRPH_REGAMMA_MODE__SHIFT) |
+		(0 << REGAMMA_CONTROL__OVL_REGAMMA_MODE__SHIFT)));
+	WREG32(mmOUTPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset,
+	       ((0 << OUTPUT_CSC_CONTROL__OUTPUT_CSC_GRPH_MODE__SHIFT) |
+		(0 << OUTPUT_CSC_CONTROL__OUTPUT_CSC_OVL_MODE__SHIFT)));
 	/* XXX match this to the depth of the crtc fmt block, move to modeset? */
 	WREG32(0x1a50 + amdgpu_crtc->crtc_offset, 0);
 
@@ -1809,12 +1822,12 @@ static void dce_v6_0_lock_cursor(struct drm_crtc *crtc, bool lock)
 	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
 	uint32_t cur_lock;
 
-	cur_lock = RREG32(EVERGREEN_CUR_UPDATE + amdgpu_crtc->crtc_offset);
+	cur_lock = RREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset);
 	if (lock)
-		cur_lock |= EVERGREEN_CURSOR_UPDATE_LOCK;
+		cur_lock |= CUR_UPDATE__CURSOR_UPDATE_LOCK_MASK;
 	else
-		cur_lock &= ~EVERGREEN_CURSOR_UPDATE_LOCK;
-	WREG32(EVERGREEN_CUR_UPDATE + amdgpu_crtc->crtc_offset, cur_lock);
+		cur_lock &= ~CUR_UPDATE__CURSOR_UPDATE_LOCK_MASK;
+	WREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset, cur_lock);
 }
 
 static void dce_v6_0_hide_cursor(struct drm_crtc *crtc)
@@ -1822,9 +1835,9 @@ static void dce_v6_0_hide_cursor(struct drm_crtc *crtc)
 	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
 	struct amdgpu_device *adev = crtc->dev->dev_private;
 
-	WREG32_IDX(EVERGREEN_CUR_CONTROL + amdgpu_crtc->crtc_offset,
-		   EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT) |
-		   EVERGREEN_CURSOR_URGENT_CONTROL(EVERGREEN_CURSOR_URGENT_1_2));
+	WREG32_IDX(mmCUR_CONTROL + amdgpu_crtc->crtc_offset,
+		   (CURSOR_24_8_PRE_MULT << CUR_CONTROL__CURSOR_MODE__SHIFT) |
+		   (CURSOR_URGENT_1_2 << CUR_CONTROL__CURSOR_URGENT_CONTROL__SHIFT));
 
 
 }
@@ -1834,15 +1847,15 @@ static void dce_v6_0_show_cursor(struct drm_crtc *crtc) | |||
1834 | struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); | 1847 | struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc); |
1835 | struct amdgpu_device *adev = crtc->dev->dev_private; | 1848 | struct amdgpu_device *adev = crtc->dev->dev_private; |
1836 | 1849 | ||
1837 | WREG32(EVERGREEN_CUR_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, | 1850 | WREG32(mmCUR_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset, |
1838 | upper_32_bits(amdgpu_crtc->cursor_addr)); | 1851 | upper_32_bits(amdgpu_crtc->cursor_addr)); |
1839 | WREG32(EVERGREEN_CUR_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, | 1852 | WREG32(mmCUR_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset, |
1840 | lower_32_bits(amdgpu_crtc->cursor_addr)); | 1853 | lower_32_bits(amdgpu_crtc->cursor_addr)); |
1841 | 1854 | ||
1842 | WREG32_IDX(EVERGREEN_CUR_CONTROL + amdgpu_crtc->crtc_offset, | 1855 | WREG32_IDX(mmCUR_CONTROL + amdgpu_crtc->crtc_offset, |
1843 | EVERGREEN_CURSOR_EN | | 1856 | CUR_CONTROL__CURSOR_EN_MASK | |
1844 | EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT) | | 1857 | (CURSOR_24_8_PRE_MULT << CUR_CONTROL__CURSOR_MODE__SHIFT) | |
1845 | EVERGREEN_CURSOR_URGENT_CONTROL(EVERGREEN_CURSOR_URGENT_1_2)); | 1858 | (CURSOR_URGENT_1_2 << CUR_CONTROL__CURSOR_URGENT_CONTROL__SHIFT)); |
1846 | 1859 | ||
1847 | } | 1860 | } |
1848 | 1861 | ||
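The show_cursor conversion above builds CUR_CONTROL from an enable mask plus two shifted enumerants. The sketch below reproduces that composition, assuming the field layout implied by the CURSOR_MODE()/CURSOR_URGENT_CONTROL() helpers added to si_enums.h further down in this patch (mode at bit 8, urgency at bit 24); the DEMO_* shift names are stand-ins for the dce_6_0_sh_mask.h constants.

    #include <stdint.h>
    #include <stdio.h>

    /* Enumerants as added to si_enums.h in this patch. */
    #define CURSOR_EN            (1 << 0)
    #define CURSOR_24_8_PRE_MULT 2
    #define CURSOR_URGENT_1_2    4

    /* Assumed field positions, mirroring CURSOR_MODE() and
     * CURSOR_URGENT_CONTROL() from si_enums.h. */
    #define DEMO_CURSOR_MODE__SHIFT           8
    #define DEMO_CURSOR_URGENT_CONTROL__SHIFT 24

    int main(void)
    {
            uint32_t cur_control = CURSOR_EN |
                    (CURSOR_24_8_PRE_MULT << DEMO_CURSOR_MODE__SHIFT) |
                    (CURSOR_URGENT_1_2 << DEMO_CURSOR_URGENT_CONTROL__SHIFT);

            /* Expected: 0x04000201 under the assumed layout. */
            printf("CUR_CONTROL = 0x%08x\n", cur_control);
            return 0;
    }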
@@ -1869,9 +1882,9 @@ static int dce_v6_0_cursor_move_locked(struct drm_crtc *crtc, | |||
1869 | y = 0; | 1882 | y = 0; |
1870 | } | 1883 | } |
1871 | 1884 | ||
1872 | WREG32(EVERGREEN_CUR_POSITION + amdgpu_crtc->crtc_offset, (x << 16) | y); | 1885 | WREG32(mmCUR_POSITION + amdgpu_crtc->crtc_offset, (x << 16) | y); |
1873 | WREG32(EVERGREEN_CUR_HOT_SPOT + amdgpu_crtc->crtc_offset, (xorigin << 16) | yorigin); | 1886 | WREG32(mmCUR_HOT_SPOT + amdgpu_crtc->crtc_offset, (xorigin << 16) | yorigin); |
1874 | WREG32(EVERGREEN_CUR_SIZE + amdgpu_crtc->crtc_offset, | 1887 | WREG32(mmCUR_SIZE + amdgpu_crtc->crtc_offset, |
1875 | ((w - 1) << 16) | (amdgpu_crtc->cursor_height - 1)); | 1888 | ((w - 1) << 16) | (amdgpu_crtc->cursor_height - 1)); |
1876 | 1889 | ||
1877 | amdgpu_crtc->cursor_x = x; | 1890 | amdgpu_crtc->cursor_x = x; |
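The cursor-move hunk packs two 16-bit quantities into each register: position as (x << 16) | y, hotspot likewise, and size as (width - 1, height - 1). A tiny sketch of that packing, using the 64x64 cursor dimensions that si_enums.h defines below:

    #include <stdint.h>
    #include <stdio.h>

    /* Pack two coordinates the way the cursor registers expect:
     * first value in the high 16 bits, second in the low 16. */
    static uint32_t pack_hi_lo(uint32_t hi, uint32_t lo)
    {
            return (hi << 16) | (lo & 0xffff);
    }

    int main(void)
    {
            uint32_t x = 100, y = 50, w = 64, h = 64;

            printf("CUR_POSITION = 0x%08x\n", pack_hi_lo(x, y));
            /* CUR_SIZE stores width-1 in the high half, height-1 in the low. */
            printf("CUR_SIZE     = 0x%08x\n", pack_hi_lo(w - 1, h - 1));
            return 0;
    }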
@@ -2477,14 +2490,14 @@ static void dce_v6_0_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev, | |||
2477 | 2490 | ||
2478 | switch (state) { | 2491 | switch (state) { |
2479 | case AMDGPU_IRQ_STATE_DISABLE: | 2492 | case AMDGPU_IRQ_STATE_DISABLE: |
2480 | interrupt_mask = RREG32(INT_MASK + reg_block); | 2493 | interrupt_mask = RREG32(mmINT_MASK + reg_block); |
2481 | interrupt_mask &= ~VBLANK_INT_MASK; | 2494 | interrupt_mask &= ~VBLANK_INT_MASK; |
2482 | WREG32(INT_MASK + reg_block, interrupt_mask); | 2495 | WREG32(mmINT_MASK + reg_block, interrupt_mask); |
2483 | break; | 2496 | break; |
2484 | case AMDGPU_IRQ_STATE_ENABLE: | 2497 | case AMDGPU_IRQ_STATE_ENABLE: |
2485 | interrupt_mask = RREG32(INT_MASK + reg_block); | 2498 | interrupt_mask = RREG32(mmINT_MASK + reg_block); |
2486 | interrupt_mask |= VBLANK_INT_MASK; | 2499 | interrupt_mask |= VBLANK_INT_MASK; |
2487 | WREG32(INT_MASK + reg_block, interrupt_mask); | 2500 | WREG32(mmINT_MASK + reg_block, interrupt_mask); |
2488 | break; | 2501 | break; |
2489 | default: | 2502 | default: |
2490 | break; | 2503 | break; |
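The vblank hunk above is a plain read-modify-write of the per-CRTC INT_MASK register, toggling the VBLANK_INT_MASK bit that this patch adds to si_enums.h. A toy userspace model of the sequence follows; the fake_int_mask variable and the rreg32()/wreg32() wrappers are stand-ins for the driver's RREG32/WREG32 accessors, not their real signatures.

    #include <stdint.h>
    #include <stdio.h>

    #define VBLANK_INT_MASK (1 << 0)   /* as added to si_enums.h */

    /* Toy register backing store and accessors. */
    static uint32_t fake_int_mask;
    static uint32_t rreg32(void) { return fake_int_mask; }
    static void wreg32(uint32_t v) { fake_int_mask = v; }

    static void set_vblank_irq(int enable)
    {
            uint32_t m = rreg32();          /* read   */
            if (enable)
                    m |= VBLANK_INT_MASK;   /* modify */
            else
                    m &= ~VBLANK_INT_MASK;
            wreg32(m);                      /* write  */
    }

    int main(void)
    {
            set_vblank_irq(1);
            printf("INT_MASK = 0x%08x\n", fake_int_mask);
            set_vblank_irq(0);
            printf("INT_MASK = 0x%08x\n", fake_int_mask);
            return 0;
    }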
@@ -2512,14 +2525,14 @@ static int dce_v6_0_set_hpd_interrupt_state(struct amdgpu_device *adev, | |||
2512 | 2525 | ||
2513 | switch (state) { | 2526 | switch (state) { |
2514 | case AMDGPU_IRQ_STATE_DISABLE: | 2527 | case AMDGPU_IRQ_STATE_DISABLE: |
2515 | dc_hpd_int_cntl = RREG32(DC_HPD1_INT_CONTROL + hpd_offsets[type]); | 2528 | dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type]); |
2516 | dc_hpd_int_cntl &= ~DC_HPDx_INT_EN; | 2529 | dc_hpd_int_cntl &= ~DC_HPDx_INT_EN; |
2517 | WREG32(DC_HPD1_INT_CONTROL + hpd_offsets[type], dc_hpd_int_cntl); | 2530 | WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type], dc_hpd_int_cntl); |
2518 | break; | 2531 | break; |
2519 | case AMDGPU_IRQ_STATE_ENABLE: | 2532 | case AMDGPU_IRQ_STATE_ENABLE: |
2520 | dc_hpd_int_cntl = RREG32(DC_HPD1_INT_CONTROL + hpd_offsets[type]); | 2533 | dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type]); |
2521 | dc_hpd_int_cntl |= DC_HPDx_INT_EN; | 2534 | dc_hpd_int_cntl |= DC_HPDx_INT_EN; |
2522 | WREG32(DC_HPD1_INT_CONTROL + hpd_offsets[type], dc_hpd_int_cntl); | 2535 | WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type], dc_hpd_int_cntl); |
2523 | break; | 2536 | break; |
2524 | default: | 2537 | default: |
2525 | break; | 2538 | break; |
@@ -2587,7 +2600,7 @@ static int dce_v6_0_crtc_irq(struct amdgpu_device *adev, | |||
2587 | switch (entry->src_data) { | 2600 | switch (entry->src_data) { |
2588 | case 0: /* vblank */ | 2601 | case 0: /* vblank */ |
2589 | if (disp_int & interrupt_status_offsets[crtc].vblank) | 2602 | if (disp_int & interrupt_status_offsets[crtc].vblank) |
2590 | WREG32(VBLANK_STATUS + crtc_offsets[crtc], VBLANK_ACK); | 2603 | WREG32(mmVBLANK_STATUS + crtc_offsets[crtc], VBLANK_ACK); |
2591 | else | 2604 | else |
2592 | DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); | 2605 | DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
2593 | 2606 | ||
@@ -2598,7 +2611,7 @@ static int dce_v6_0_crtc_irq(struct amdgpu_device *adev, | |||
2598 | break; | 2611 | break; |
2599 | case 1: /* vline */ | 2612 | case 1: /* vline */ |
2600 | if (disp_int & interrupt_status_offsets[crtc].vline) | 2613 | if (disp_int & interrupt_status_offsets[crtc].vline) |
2601 | WREG32(VLINE_STATUS + crtc_offsets[crtc], VLINE_ACK); | 2614 | WREG32(mmVLINE_STATUS + crtc_offsets[crtc], VLINE_ACK); |
2602 | else | 2615 | else |
2603 | DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); | 2616 | DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
2604 | 2617 | ||
@@ -2624,12 +2637,12 @@ static int dce_v6_0_set_pageflip_interrupt_state(struct amdgpu_device *adev, | |||
2624 | return -EINVAL; | 2637 | return -EINVAL; |
2625 | } | 2638 | } |
2626 | 2639 | ||
2627 | reg = RREG32(GRPH_INT_CONTROL + crtc_offsets[type]); | 2640 | reg = RREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type]); |
2628 | if (state == AMDGPU_IRQ_STATE_DISABLE) | 2641 | if (state == AMDGPU_IRQ_STATE_DISABLE) |
2629 | WREG32(GRPH_INT_CONTROL + crtc_offsets[type], | 2642 | WREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type], |
2630 | reg & ~GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK); | 2643 | reg & ~GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK); |
2631 | else | 2644 | else |
2632 | WREG32(GRPH_INT_CONTROL + crtc_offsets[type], | 2645 | WREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type], |
2633 | reg | GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK); | 2646 | reg | GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK); |
2634 | 2647 | ||
2635 | return 0; | 2648 | return 0; |
@@ -2652,9 +2665,9 @@ static int dce_v6_0_pageflip_irq(struct amdgpu_device *adev, | |||
2652 | return -EINVAL; | 2665 | return -EINVAL; |
2653 | } | 2666 | } |
2654 | 2667 | ||
2655 | if (RREG32(GRPH_INT_STATUS + crtc_offsets[crtc_id]) & | 2668 | if (RREG32(mmGRPH_INTERRUPT_STATUS + crtc_offsets[crtc_id]) & |
2656 | GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_OCCURRED_MASK) | 2669 | GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_OCCURRED_MASK) |
2657 | WREG32(GRPH_INT_STATUS + crtc_offsets[crtc_id], | 2670 | WREG32(mmGRPH_INTERRUPT_STATUS + crtc_offsets[crtc_id], |
2658 | GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_CLEAR_MASK); | 2671 | GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_CLEAR_MASK); |
2659 | 2672 | ||
2660 | /* IRQ could occur when in initial stage */ | 2673 | /* IRQ could occur when in initial stage */ |
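The pageflip handler above checks the "occurred" bit in GRPH_INTERRUPT_STATUS and acknowledges it by writing the "clear" bit back. A loose sketch of that check-and-ack flow; the DEMO_* bit positions are placeholders (the real masks come from dce_6_0_sh_mask.h and are not shown in this diff), and the hardware's write-1-to-clear behaviour is only modelled as a plain assignment.

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder bit positions for the GRPH_INTERRUPT_STATUS fields. */
    #define DEMO_PFLIP_INT_OCCURRED_MASK (1u << 0)
    #define DEMO_PFLIP_INT_CLEAR_MASK    (1u << 8)

    static uint32_t fake_grph_int_status = DEMO_PFLIP_INT_OCCURRED_MASK;

    int main(void)
    {
            /* If the flip-done interrupt latched, acknowledge it by
             * writing the clear bit back to the status register. */
            if (fake_grph_int_status & DEMO_PFLIP_INT_OCCURRED_MASK) {
                    fake_grph_int_status = DEMO_PFLIP_INT_CLEAR_MASK;
                    printf("pageflip interrupt acknowledged\n");
            }
            return 0;
    }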
@@ -2705,9 +2718,9 @@ static int dce_v6_0_hpd_irq(struct amdgpu_device *adev, | |||
2705 | mask = interrupt_status_offsets[hpd].hpd; | 2718 | mask = interrupt_status_offsets[hpd].hpd; |
2706 | 2719 | ||
2707 | if (disp_int & mask) { | 2720 | if (disp_int & mask) { |
2708 | tmp = RREG32(DC_HPD1_INT_CONTROL + hpd_offsets[hpd]); | 2721 | tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]); |
2709 | tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_ACK_MASK; | 2722 | tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_ACK_MASK; |
2710 | WREG32(DC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp); | 2723 | WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp); |
2711 | schedule_work(&adev->hotplug_work); | 2724 | schedule_work(&adev->hotplug_work); |
2712 | DRM_INFO("IH: HPD%d\n", hpd + 1); | 2725 | DRM_INFO("IH: HPD%d\n", hpd + 1); |
2713 | } | 2726 | } |
diff --git a/drivers/gpu/drm/amd/amdgpu/si_enums.h b/drivers/gpu/drm/amd/amdgpu/si_enums.h index 3ecd36f30e2a..fde2086246fa 100644 --- a/drivers/gpu/drm/amd/amdgpu/si_enums.h +++ b/drivers/gpu/drm/amd/amdgpu/si_enums.h | |||
@@ -23,6 +23,84 @@ | |||
23 | #ifndef SI_ENUMS_H | 23 | #ifndef SI_ENUMS_H |
24 | #define SI_ENUMS_H | 24 | #define SI_ENUMS_H |
25 | 25 | ||
26 | #define VBLANK_INT_MASK (1 << 0) | ||
27 | #define DC_HPDx_INT_EN (1 << 16) | ||
28 | #define VBLANK_ACK (1 << 4) | ||
29 | #define VLINE_ACK (1 << 4) | ||
30 | |||
31 | #define CURSOR_WIDTH 64 | ||
32 | #define CURSOR_HEIGHT 64 | ||
33 | |||
34 | #define VGA_VSTATUS_CNTL 0xFFFCFFFF | ||
35 | #define PRIORITY_MARK_MASK 0x7fff | ||
36 | #define PRIORITY_OFF (1 << 16) | ||
37 | #define PRIORITY_ALWAYS_ON (1 << 20) | ||
38 | #define INTERLEAVE_EN (1 << 0) | ||
39 | |||
40 | #define LATENCY_WATERMARK_MASK(x) ((x) << 16) | ||
41 | #define DC_LB_MEMORY_CONFIG(x) ((x) << 20) | ||
42 | #define ICON_DEGAMMA_MODE(x) (((x) & 0x3) << 8) | ||
43 | |||
44 | #define GRPH_ENDIAN_SWAP(x) (((x) & 0x3) << 0) | ||
45 | #define GRPH_ENDIAN_NONE 0 | ||
46 | #define GRPH_ENDIAN_8IN16 1 | ||
47 | #define GRPH_ENDIAN_8IN32 2 | ||
48 | #define GRPH_ENDIAN_8IN64 3 | ||
49 | |||
50 | #define GRPH_DEPTH(x) (((x) & 0x3) << 0) | ||
51 | #define GRPH_DEPTH_8BPP 0 | ||
52 | #define GRPH_DEPTH_16BPP 1 | ||
53 | #define GRPH_DEPTH_32BPP 2 | ||
54 | |||
55 | #define GRPH_FORMAT(x) (((x) & 0x7) << 8) | ||
56 | #define GRPH_FORMAT_INDEXED 0 | ||
57 | #define GRPH_FORMAT_ARGB1555 0 | ||
58 | #define GRPH_FORMAT_ARGB565 1 | ||
59 | #define GRPH_FORMAT_ARGB4444 2 | ||
60 | #define GRPH_FORMAT_AI88 3 | ||
61 | #define GRPH_FORMAT_MONO16 4 | ||
62 | #define GRPH_FORMAT_BGRA5551 5 | ||
63 | #define GRPH_FORMAT_ARGB8888 0 | ||
64 | #define GRPH_FORMAT_ARGB2101010 1 | ||
65 | #define GRPH_FORMAT_32BPP_DIG 2 | ||
66 | #define GRPH_FORMAT_8B_ARGB2101010 3 | ||
67 | #define GRPH_FORMAT_BGRA1010102 4 | ||
68 | #define GRPH_FORMAT_8B_BGRA1010102 5 | ||
69 | #define GRPH_FORMAT_RGB111110 6 | ||
70 | #define GRPH_FORMAT_BGR101111 7 | ||
71 | |||
72 | #define GRPH_NUM_BANKS(x) (((x) & 0x3) << 2) | ||
73 | #define GRPH_ARRAY_MODE(x) (((x) & 0x7) << 20) | ||
74 | #define GRPH_ARRAY_LINEAR_GENERAL 0 | ||
75 | #define GRPH_ARRAY_LINEAR_ALIGNED 1 | ||
76 | #define GRPH_ARRAY_1D_TILED_THIN1 2 | ||
77 | #define GRPH_ARRAY_2D_TILED_THIN1 4 | ||
78 | #define GRPH_TILE_SPLIT(x) (((x) & 0x7) << 13) | ||
79 | #define GRPH_BANK_WIDTH(x) (((x) & 0x3) << 6) | ||
80 | #define GRPH_BANK_HEIGHT(x) (((x) & 0x3) << 11) | ||
81 | #define GRPH_MACRO_TILE_ASPECT(x) (((x) & 0x3) << 18) | ||
82 | #define GRPH_ARRAY_MODE(x) (((x) & 0x7) << 20) | ||
83 | #define GRPH_PIPE_CONFIG(x) (((x) & 0x1f) << 24) | ||
84 | |||
85 | #define CURSOR_EN (1 << 0) | ||
86 | #define CURSOR_MODE(x) (((x) & 0x3) << 8) | ||
87 | #define CURSOR_MONO 0 | ||
88 | #define CURSOR_24_1 1 | ||
89 | #define CURSOR_24_8_PRE_MULT 2 | ||
90 | #define CURSOR_24_8_UNPRE_MULT 3 | ||
91 | #define CURSOR_2X_MAGNIFY (1 << 16) | ||
92 | #define CURSOR_FORCE_MC_ON (1 << 20) | ||
93 | #define CURSOR_URGENT_CONTROL(x) (((x) & 0x7) << 24) | ||
94 | #define CURSOR_URGENT_ALWAYS 0 | ||
95 | #define CURSOR_URGENT_1_8 1 | ||
96 | #define CURSOR_URGENT_1_4 2 | ||
97 | #define CURSOR_URGENT_3_8 3 | ||
98 | #define CURSOR_URGENT_1_2 4 | ||
99 | #define CURSOR_UPDATE_PENDING (1 << 0) | ||
100 | #define CURSOR_UPDATE_TAKEN (1 << 1) | ||
101 | #define CURSOR_UPDATE_LOCK (1 << 16) | ||
102 | #define CURSOR_DISABLE_MULTIPLE_UPDATE (1 << 24) | ||
103 | |||
26 | #define AMDGPU_NUM_OF_VMIDS 8 | 104 | #define AMDGPU_NUM_OF_VMIDS 8 |
27 | #define SI_CRTC0_REGISTER_OFFSET 0 | 105 | #define SI_CRTC0_REGISTER_OFFSET 0 |
28 | #define SI_CRTC1_REGISTER_OFFSET 0x300 | 106 | #define SI_CRTC1_REGISTER_OFFSET 0x300 |
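The helpers added above encode per-field values for the graphics surface registers. As a quick check of how they compose, here is a minimal sketch that reuses the definitions verbatim; the choice of a linear 32bpp ARGB8888 surface is just an example, not taken from the driver.

    #include <stdint.h>
    #include <stdio.h>

    /* Field helpers exactly as added to si_enums.h by this patch. */
    #define GRPH_DEPTH(x)             (((x) & 0x3) << 0)
    #define GRPH_DEPTH_32BPP          2
    #define GRPH_FORMAT(x)            (((x) & 0x7) << 8)
    #define GRPH_FORMAT_ARGB8888      0
    #define GRPH_ARRAY_MODE(x)        (((x) & 0x7) << 20)
    #define GRPH_ARRAY_LINEAR_ALIGNED 1

    int main(void)
    {
            /* Compose a GRPH_CONTROL-style word for a linear ARGB8888 surface. */
            uint32_t grph_control = GRPH_DEPTH(GRPH_DEPTH_32BPP) |
                                    GRPH_FORMAT(GRPH_FORMAT_ARGB8888) |
                                    GRPH_ARRAY_MODE(GRPH_ARRAY_LINEAR_ALIGNED);

            printf("grph_control = 0x%08x\n", grph_control); /* 0x00100002 */
            return 0;
    }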
diff --git a/drivers/gpu/drm/amd/include/asic_reg/dce/dce_6_0_d.h b/drivers/gpu/drm/amd/include/asic_reg/dce/dce_6_0_d.h index a17973bb63a6..ae798f768853 100644 --- a/drivers/gpu/drm/amd/include/asic_reg/dce/dce_6_0_d.h +++ b/drivers/gpu/drm/amd/include/asic_reg/dce/dce_6_0_d.h | |||
@@ -4442,4 +4442,16 @@ | |||
4442 | #define mmXDMA_TEST_DEBUG_DATA 0x041D | 4442 | #define mmXDMA_TEST_DEBUG_DATA 0x041D |
4443 | #define mmXDMA_TEST_DEBUG_INDEX 0x041C | 4443 | #define mmXDMA_TEST_DEBUG_INDEX 0x041C |
4444 | 4444 | ||
4445 | /* Registers that spilled out of sid.h */ | ||
4446 | #define mmDATA_FORMAT 0x1AC0 | ||
4447 | #define mmDESKTOP_HEIGHT 0x1AC1 | ||
4448 | #define mmDC_LB_MEMORY_SPLIT 0x1AC3 | ||
4449 | #define mmPRIORITY_A_CNT 0x1AC6 | ||
4450 | #define mmPRIORITY_B_CNT 0x1AC7 | ||
4451 | #define mmDPG_PIPE_ARBITRATION_CONTROL3 0x1B32 | ||
4452 | #define mmINT_MASK 0x1AD0 | ||
4453 | #define mmVLINE_STATUS 0x1AEE | ||
4454 | #define mmVBLANK_STATUS 0x1AEF | ||
4455 | |||
4456 | |||
4445 | #endif | 4457 | #endif |
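These spilled-out registers are dword offsets that the driver adds to a per-CRTC block offset, as in WREG32(mmINT_MASK + crtc_offsets[crtc], ...) above. A small sketch of the addressing arithmetic, using only the two SI CRTC block offsets visible in the si_enums.h hunk of this patch; the remaining CRTC offsets are not shown here.

    #include <stdint.h>
    #include <stdio.h>

    /* Register dword offsets from the dce_6_0_d.h additions above. */
    #define mmINT_MASK      0x1AD0
    #define mmVBLANK_STATUS 0x1AEF

    /* Per-CRTC block offsets from si_enums.h (only the first two
     * appear in this diff). */
    static const uint32_t crtc_offsets[] = { 0x0, 0x300 };

    int main(void)
    {
            /* Each CRTC instance is addressed as base register + block offset. */
            for (unsigned int i = 0; i < 2; i++)
                    printf("CRTC%u: INT_MASK @ 0x%04x, VBLANK_STATUS @ 0x%04x\n",
                           i,
                           (unsigned int)(mmINT_MASK + crtc_offsets[i]),
                           (unsigned int)(mmVBLANK_STATUS + crtc_offsets[i]));
            return 0;
    }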