author		Tom St Denis <tom.stdenis@amd.com>	2017-06-12 13:50:53 -0400
committer	Alex Deucher <alexander.deucher@amd.com>	2017-06-15 11:50:35 -0400
commit		0ad6f0d387dad93c1a9fdd191e06326441b701ae (patch)
tree		6f479d8c27819f80922df854820f6b4186886818 /drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
parent		c5c1effd85248398f9296af55b1eb928098b3aa9 (diff)
drm/amd/amdgpu: Port VCN over to new SOC15 macros
Signed-off-by: Tom St Denis <tom.stdenis@amd.com>
Reviewed-by: Alex Deucher <alexander.deucher@amd.com>
Diffstat (limited to 'drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c')
-rw-r--r--	drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c	164
1 file changed, 82 insertions, 82 deletions
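
The conversion is mechanical: every WREG32(SOC15_REG_OFFSET(ip, inst, reg), val) in this file becomes WREG32_SOC15(ip, inst, reg, val), and every RREG32(SOC15_REG_OFFSET(ip, inst, reg)) becomes RREG32_SOC15(ip, inst, reg). A minimal sketch of the convenience wrappers this relies on (the actual definitions live in soc15_common.h and are not reproduced in this patch; shown here only to illustrate the equivalence):

	/* assumed shape of the SOC15 helpers used by this patch */
	#define RREG32_SOC15(ip, inst, reg) \
		RREG32(SOC15_REG_OFFSET(ip, inst, reg))

	#define WREG32_SOC15(ip, inst, reg, value) \
		WREG32(SOC15_REG_OFFSET(ip, inst, reg), value)

Masked read-modify-write accesses through WREG32_P() keep the explicit SOC15_REG_OFFSET() form and are left untouched by this patch.
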
diff --git a/drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c b/drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
index ec33e8fa83c1..21e7b88401e1 100644
--- a/drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c
@@ -249,34 +249,34 @@ static void vcn_v1_0_mc_resume(struct amdgpu_device *adev)
 {
 	uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4);
 
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW,
 		lower_32_bits(adev->vcn.gpu_addr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH,
 		upper_32_bits(adev->vcn.gpu_addr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CACHE_OFFSET0),
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_OFFSET0,
 		AMDGPU_UVD_FIRMWARE_OFFSET >> 3);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CACHE_SIZE0), size);
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_SIZE0, size);
 
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW,
 		lower_32_bits(adev->vcn.gpu_addr + size));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH,
 		upper_32_bits(adev->vcn.gpu_addr + size));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_HEAP_SIZE);
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_OFFSET1, 0);
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_SIZE1, AMDGPU_VCN_HEAP_SIZE);
 
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW,
 		lower_32_bits(adev->vcn.gpu_addr + size + AMDGPU_VCN_HEAP_SIZE));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH,
 		upper_32_bits(adev->vcn.gpu_addr + size + AMDGPU_VCN_HEAP_SIZE));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CACHE_OFFSET2), 0);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CACHE_SIZE2),
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_OFFSET2, 0);
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_SIZE2,
 		AMDGPU_VCN_STACK_SIZE + (AMDGPU_VCN_SESSION_SIZE * 40));
 
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_UDEC_ADDR_CONFIG),
+	WREG32_SOC15(UVD, 0, mmUVD_UDEC_ADDR_CONFIG,
 		adev->gfx.config.gb_addr_config);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_UDEC_DB_ADDR_CONFIG),
+	WREG32_SOC15(UVD, 0, mmUVD_UDEC_DB_ADDR_CONFIG,
 		adev->gfx.config.gb_addr_config);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_UDEC_DBW_ADDR_CONFIG),
+	WREG32_SOC15(UVD, 0, mmUVD_UDEC_DBW_ADDR_CONFIG,
 		adev->gfx.config.gb_addr_config);
 }
 
@@ -293,7 +293,7 @@ static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev, bool sw)
 	uint32_t data;
 
 	/* JPEG disable CGC */
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL);
 
 	if (sw)
 		data |= 1 << JPEG_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
@@ -302,14 +302,14 @@ static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev, bool sw)
 
 	data |= 1 << JPEG_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
 	data |= 4 << JPEG_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL, data);
 
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_GATE));
+	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE);
 	data &= ~(JPEG_CGC_GATE__JPEG_MASK | JPEG_CGC_GATE__JPEG2_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_GATE), data);
+	WREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE, data);
 
 	/* UVD disable CGC */
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
 	if (sw)
 		data |= 1 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
 	else
@@ -317,9 +317,9 @@ static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev, bool sw)
 
 	data |= 1 << UVD_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
 	data |= 4 << UVD_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);
 
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_GATE));
+	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_GATE);
 	data &= ~(UVD_CGC_GATE__SYS_MASK
 		| UVD_CGC_GATE__UDEC_MASK
 		| UVD_CGC_GATE__MPEG2_MASK
@@ -340,9 +340,9 @@ static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev, bool sw)
 		| UVD_CGC_GATE__WCB_MASK
 		| UVD_CGC_GATE__VCPU_MASK
 		| UVD_CGC_GATE__SCPU_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_GATE), data);
+	WREG32_SOC15(VCN, 0, mmUVD_CGC_GATE, data);
 
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
 	data &= ~(UVD_CGC_CTRL__UDEC_RE_MODE_MASK
 		| UVD_CGC_CTRL__UDEC_CM_MODE_MASK
 		| UVD_CGC_CTRL__UDEC_IT_MODE_MASK
@@ -363,10 +363,10 @@ static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev, bool sw)
 		| UVD_CGC_CTRL__WCB_MODE_MASK
 		| UVD_CGC_CTRL__VCPU_MODE_MASK
 		| UVD_CGC_CTRL__SCPU_MODE_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);
 
 	/* turn on */
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_SUVD_CGC_GATE));
+	data = RREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_GATE);
 	data |= (UVD_SUVD_CGC_GATE__SRE_MASK
 		| UVD_SUVD_CGC_GATE__SIT_MASK
 		| UVD_SUVD_CGC_GATE__SMP_MASK
@@ -391,9 +391,9 @@ static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev, bool sw)
 		| UVD_SUVD_CGC_GATE__SIT_VP9_DEC_MASK
 		| UVD_SUVD_CGC_GATE__SDB_VP9_MASK
 		| UVD_SUVD_CGC_GATE__IME_HEVC_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_SUVD_CGC_GATE), data);
+	WREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_GATE, data);
 
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_SUVD_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL);
 	data &= ~(UVD_SUVD_CGC_CTRL__SRE_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__SIT_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__SMP_MODE_MASK
@@ -404,7 +404,7 @@ static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev, bool sw)
 		| UVD_SUVD_CGC_CTRL__ENT_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__IME_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__SITE_MODE_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_SUVD_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL, data);
 }
 
 /**
@@ -420,30 +420,30 @@ static void vcn_v1_0_enable_clock_gating(struct amdgpu_device *adev, bool sw)
 	uint32_t data = 0;
 
 	/* enable JPEG CGC */
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL);
 	if (sw)
 		data |= 1 << JPEG_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
 	else
 		data |= 0 << JPEG_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
 	data |= 1 << JPEG_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
 	data |= 4 << JPEG_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL, data);
 
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_GATE));
+	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE);
 	data |= (JPEG_CGC_GATE__JPEG_MASK | JPEG_CGC_GATE__JPEG2_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmJPEG_CGC_GATE), data);
+	WREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE, data);
 
 	/* enable UVD CGC */
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
 	if (sw)
 		data |= 1 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
 	else
 		data |= 0 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
 	data |= 1 << UVD_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
 	data |= 4 << UVD_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);
 
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
 	data |= (UVD_CGC_CTRL__UDEC_RE_MODE_MASK
 		| UVD_CGC_CTRL__UDEC_CM_MODE_MASK
 		| UVD_CGC_CTRL__UDEC_IT_MODE_MASK
@@ -464,9 +464,9 @@ static void vcn_v1_0_enable_clock_gating(struct amdgpu_device *adev, bool sw)
 		| UVD_CGC_CTRL__WCB_MODE_MASK
 		| UVD_CGC_CTRL__VCPU_MODE_MASK
 		| UVD_CGC_CTRL__SCPU_MODE_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);
 
-	data = RREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_SUVD_CGC_CTRL));
+	data = RREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL);
 	data |= (UVD_SUVD_CGC_CTRL__SRE_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__SIT_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__SMP_MODE_MASK
@@ -477,7 +477,7 @@ static void vcn_v1_0_enable_clock_gating(struct amdgpu_device *adev, bool sw)
 		| UVD_SUVD_CGC_CTRL__ENT_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__IME_MODE_MASK
 		| UVD_SUVD_CGC_CTRL__SITE_MODE_MASK);
-	WREG32(SOC15_REG_OFFSET(VCN, 0, mmUVD_SUVD_CGC_CTRL), data);
+	WREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL, data);
 }
 
 /**
@@ -513,7 +513,7 @@ static int vcn_v1_0_start(struct amdgpu_device *adev)
 	mdelay(1);
 
 	/* put LMI, VCPU, RBC etc... into reset */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_SOFT_RESET),
+	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET,
 		UVD_SOFT_RESET__LMI_SOFT_RESET_MASK |
 		UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK |
 		UVD_SOFT_RESET__LBSI_SOFT_RESET_MASK |
@@ -525,7 +525,7 @@ static int vcn_v1_0_start(struct amdgpu_device *adev)
 	mdelay(5);
 
 	/* initialize VCN memory controller */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_CTRL),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_CTRL,
 		(0x40 << UVD_LMI_CTRL__WRITE_CLEAN_TIMER__SHIFT) |
 		UVD_LMI_CTRL__WRITE_CLEAN_TIMER_EN_MASK |
 		UVD_LMI_CTRL__DATA_COHERENCY_EN_MASK |
@@ -537,22 +537,22 @@ static int vcn_v1_0_start(struct amdgpu_device *adev)
 	/* swap (8 in 32) RB and IB */
 	lmi_swap_cntl = 0xa;
 #endif
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_SWAP_CNTL), lmi_swap_cntl);
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_SWAP_CNTL, lmi_swap_cntl);
 
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_MPC_SET_MUXA0), 0x40c2040);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_MPC_SET_MUXA1), 0x0);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_MPC_SET_MUXB0), 0x40c2040);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_MPC_SET_MUXB1), 0x0);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_MPC_SET_ALU), 0);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_MPC_SET_MUX), 0x88);
+	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXA0, 0x40c2040);
+	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXA1, 0x0);
+	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXB0, 0x40c2040);
+	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXB1, 0x0);
+	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_ALU, 0);
+	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUX, 0x88);
 
 	/* take all subblocks out of reset, except VCPU */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_SOFT_RESET),
+	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET,
 		UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK);
 	mdelay(5);
 
 	/* enable VCPU clock */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CNTL),
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CNTL,
 		UVD_VCPU_CNTL__CLK_EN_MASK);
 
 	/* enable UMC */
@@ -560,14 +560,14 @@ static int vcn_v1_0_start(struct amdgpu_device *adev)
 		~UVD_LMI_CTRL2__STALL_ARB_UMC_MASK);
 
 	/* boot up the VCPU */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_SOFT_RESET), 0);
+	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET, 0);
 	mdelay(10);
 
 	for (i = 0; i < 10; ++i) {
 		uint32_t status;
 
 		for (j = 0; j < 100; ++j) {
-			status = RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_STATUS));
+			status = RREG32_SOC15(UVD, 0, mmUVD_STATUS);
 			if (status & 2)
 				break;
 			mdelay(10);
@@ -608,44 +608,44 @@ static int vcn_v1_0_start(struct amdgpu_device *adev)
 	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_WPTR_POLL_EN, 0);
 	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_NO_UPDATE, 1);
 	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_RPTR_WR_EN, 1);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_CNTL), tmp);
+	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_CNTL, tmp);
 
 	/* set the write pointer delay */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_WPTR_CNTL), 0);
+	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR_CNTL, 0);
 
 	/* set the wb address */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_RPTR_ADDR),
+	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR_ADDR,
 		(upper_32_bits(ring->gpu_addr) >> 2));
 
 	/* programm the RB_BASE for ring buffer */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_RBC_RB_64BIT_BAR_LOW),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_RBC_RB_64BIT_BAR_LOW,
 		lower_32_bits(ring->gpu_addr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_RBC_RB_64BIT_BAR_HIGH),
+	WREG32_SOC15(UVD, 0, mmUVD_LMI_RBC_RB_64BIT_BAR_HIGH,
 		upper_32_bits(ring->gpu_addr));
 
 	/* Initialize the ring buffer's read and write pointers */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_RPTR), 0);
+	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR, 0);
 
-	ring->wptr = RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_RPTR));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_WPTR),
+	ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
+	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR,
 		lower_32_bits(ring->wptr));
 
 	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_CNTL), 0,
 		~UVD_RBC_RB_CNTL__RB_NO_FETCH_MASK);
 
 	ring = &adev->vcn.ring_enc[0];
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_RPTR), lower_32_bits(ring->wptr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_WPTR), lower_32_bits(ring->wptr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_BASE_LO), ring->gpu_addr);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_BASE_HI), upper_32_bits(ring->gpu_addr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_SIZE), ring->ring_size / 4);
+	WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
+	WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
+	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO, ring->gpu_addr);
+	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
+	WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE, ring->ring_size / 4);
 
 	ring = &adev->vcn.ring_enc[1];
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_RPTR2), lower_32_bits(ring->wptr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_WPTR2), lower_32_bits(ring->wptr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_BASE_LO2), ring->gpu_addr);
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_BASE_HI2), upper_32_bits(ring->gpu_addr));
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_SIZE2), ring->ring_size / 4);
+	WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
+	WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
+	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO2, ring->gpu_addr);
+	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
+	WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE2, ring->ring_size / 4);
 
 	return 0;
 }
@@ -660,7 +660,7 @@ static int vcn_v1_0_start(struct amdgpu_device *adev)
 static int vcn_v1_0_stop(struct amdgpu_device *adev)
 {
 	/* force RBC into idle state */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_CNTL), 0x11010101);
+	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_CNTL, 0x11010101);
 
 	/* Stall UMC and register bus before resetting VCPU */
 	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_CTRL2),
@@ -669,12 +669,12 @@ static int vcn_v1_0_stop(struct amdgpu_device *adev)
 	mdelay(1);
 
 	/* put VCPU into reset */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_SOFT_RESET),
+	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET,
 		UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK);
 	mdelay(5);
 
 	/* disable VCPU clock */
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_VCPU_CNTL), 0x0);
+	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CNTL, 0x0);
 
 	/* Unstall UMC and register bus */
 	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_CTRL2), 0,
@@ -704,7 +704,7 @@ static uint64_t vcn_v1_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
 {
 	struct amdgpu_device *adev = ring->adev;
 
-	return RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_RPTR));
+	return RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
 }
 
 /**
@@ -718,7 +718,7 @@ static uint64_t vcn_v1_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
 {
 	struct amdgpu_device *adev = ring->adev;
 
-	return RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_WPTR));
+	return RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR);
 }
 
 /**
@@ -732,7 +732,7 @@ static void vcn_v1_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
 {
 	struct amdgpu_device *adev = ring->adev;
 
-	WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_WPTR), lower_32_bits(ring->wptr));
+	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
 }
 
 /**
@@ -922,9 +922,9 @@ static uint64_t vcn_v1_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
 	struct amdgpu_device *adev = ring->adev;
 
 	if (ring == &adev->vcn.ring_enc[0])
-		return RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_RPTR));
+		return RREG32_SOC15(UVD, 0, mmUVD_RB_RPTR);
 	else
-		return RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_RPTR2));
+		return RREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2);
 }
 
 /**
@@ -939,9 +939,9 @@ static uint64_t vcn_v1_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
 	struct amdgpu_device *adev = ring->adev;
 
 	if (ring == &adev->vcn.ring_enc[0])
-		return RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_WPTR));
+		return RREG32_SOC15(UVD, 0, mmUVD_RB_WPTR);
 	else
-		return RREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_WPTR2));
+		return RREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2);
 }
 
 /**
@@ -956,10 +956,10 @@ static void vcn_v1_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
 	struct amdgpu_device *adev = ring->adev;
 
 	if (ring == &adev->vcn.ring_enc[0])
-		WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_WPTR),
+		WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR,
 			lower_32_bits(ring->wptr));
 	else
-		WREG32(SOC15_REG_OFFSET(UVD, 0, mmUVD_RB_WPTR2),
+		WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2,
 			lower_32_bits(ring->wptr));
 }
 