about summary refs log tree commit diff stats
diff options
context:
space:
mode:
author    Tom St Denis <tom.stdenis@amd.com>    2016-03-23 13:14:31 -0400
committer Alex Deucher <alexander.deucher@amd.com>    2016-05-02 15:25:40 -0400
commit    be3ecca7fe157aa0eadf4ada8bee2d3e47e83c76 (patch)
tree      244f2dedafba76f8325e29fc86c68350b4ea171c
parent    b2c0cbd657173f024138d6421774007690ceeffd (diff)
drm/amd/amdgpu: Add SW clock gating support to UVD 5 and 6
This patch adds support for software clock gating to UVD 5 and UVD 6
blocks with a preliminary commented out hardware gating routine.
Currently hardware gating does not work so it's not activated.

Signed-off-by: Tom St Denis <tom.stdenis@amd.com>
Reviewed-by: Alex Deucher <alexander.deucher@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
-rw-r--r--drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c115
-rw-r--r--drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c281
2 files changed, 232 insertions, 164 deletions
diff --git a/drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c b/drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
index 16476d80f475..de459c8000a7 100644
--- a/drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c
@@ -31,6 +31,7 @@
31#include "uvd/uvd_5_0_sh_mask.h" 31#include "uvd/uvd_5_0_sh_mask.h"
32#include "oss/oss_2_0_d.h" 32#include "oss/oss_2_0_d.h"
33#include "oss/oss_2_0_sh_mask.h" 33#include "oss/oss_2_0_sh_mask.h"
34#include "vi.h"
34 35
35static void uvd_v5_0_set_ring_funcs(struct amdgpu_device *adev); 36static void uvd_v5_0_set_ring_funcs(struct amdgpu_device *adev);
36static void uvd_v5_0_set_irq_funcs(struct amdgpu_device *adev); 37static void uvd_v5_0_set_irq_funcs(struct amdgpu_device *adev);
@@ -754,14 +755,128 @@ static int uvd_v5_0_process_interrupt(struct amdgpu_device *adev,
754 return 0; 755 return 0;
755} 756}
756 757
758static void uvd_v5_0_set_sw_clock_gating(struct amdgpu_device *adev)
759{
760 uint32_t data, data1, data2, suvd_flags;
761
762 data = RREG32(mmUVD_CGC_CTRL);
763 data1 = RREG32(mmUVD_SUVD_CGC_GATE);
764 data2 = RREG32(mmUVD_SUVD_CGC_CTRL);
765
766 data &= ~(UVD_CGC_CTRL__CLK_OFF_DELAY_MASK |
767 UVD_CGC_CTRL__CLK_GATE_DLY_TIMER_MASK);
768
769 suvd_flags = UVD_SUVD_CGC_GATE__SRE_MASK |
770 UVD_SUVD_CGC_GATE__SIT_MASK |
771 UVD_SUVD_CGC_GATE__SMP_MASK |
772 UVD_SUVD_CGC_GATE__SCM_MASK |
773 UVD_SUVD_CGC_GATE__SDB_MASK;
774
775 data |= UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK |
776 (1 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_GATE_DLY_TIMER)) |
777 (4 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_OFF_DELAY));
778
779 data &= ~(UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
780 UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
781 UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
782 UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
783 UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
784 UVD_CGC_CTRL__SYS_MODE_MASK |
785 UVD_CGC_CTRL__UDEC_MODE_MASK |
786 UVD_CGC_CTRL__MPEG2_MODE_MASK |
787 UVD_CGC_CTRL__REGS_MODE_MASK |
788 UVD_CGC_CTRL__RBC_MODE_MASK |
789 UVD_CGC_CTRL__LMI_MC_MODE_MASK |
790 UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
791 UVD_CGC_CTRL__IDCT_MODE_MASK |
792 UVD_CGC_CTRL__MPRD_MODE_MASK |
793 UVD_CGC_CTRL__MPC_MODE_MASK |
794 UVD_CGC_CTRL__LBSI_MODE_MASK |
795 UVD_CGC_CTRL__LRBBM_MODE_MASK |
796 UVD_CGC_CTRL__WCB_MODE_MASK |
797 UVD_CGC_CTRL__VCPU_MODE_MASK |
798 UVD_CGC_CTRL__JPEG_MODE_MASK |
799 UVD_CGC_CTRL__SCPU_MODE_MASK);
800 data2 &= ~(UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
801 UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
802 UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
803 UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
804 UVD_SUVD_CGC_CTRL__SDB_MODE_MASK);
805 data1 |= suvd_flags;
806
807 WREG32(mmUVD_CGC_CTRL, data);
808 WREG32(mmUVD_CGC_GATE, 0);
809 WREG32(mmUVD_SUVD_CGC_GATE, data1);
810 WREG32(mmUVD_SUVD_CGC_CTRL, data2);
811}
812
813#if 0
814static void uvd_v5_0_set_hw_clock_gating(struct amdgpu_device *adev)
815{
816 uint32_t data, data1, cgc_flags, suvd_flags;
817
818 data = RREG32(mmUVD_CGC_GATE);
819 data1 = RREG32(mmUVD_SUVD_CGC_GATE);
820
821 cgc_flags = UVD_CGC_GATE__SYS_MASK |
822 UVD_CGC_GATE__UDEC_MASK |
823 UVD_CGC_GATE__MPEG2_MASK |
824 UVD_CGC_GATE__RBC_MASK |
825 UVD_CGC_GATE__LMI_MC_MASK |
826 UVD_CGC_GATE__IDCT_MASK |
827 UVD_CGC_GATE__MPRD_MASK |
828 UVD_CGC_GATE__MPC_MASK |
829 UVD_CGC_GATE__LBSI_MASK |
830 UVD_CGC_GATE__LRBBM_MASK |
831 UVD_CGC_GATE__UDEC_RE_MASK |
832 UVD_CGC_GATE__UDEC_CM_MASK |
833 UVD_CGC_GATE__UDEC_IT_MASK |
834 UVD_CGC_GATE__UDEC_DB_MASK |
835 UVD_CGC_GATE__UDEC_MP_MASK |
836 UVD_CGC_GATE__WCB_MASK |
837 UVD_CGC_GATE__VCPU_MASK |
838 UVD_CGC_GATE__SCPU_MASK;
839
840 suvd_flags = UVD_SUVD_CGC_GATE__SRE_MASK |
841 UVD_SUVD_CGC_GATE__SIT_MASK |
842 UVD_SUVD_CGC_GATE__SMP_MASK |
843 UVD_SUVD_CGC_GATE__SCM_MASK |
844 UVD_SUVD_CGC_GATE__SDB_MASK;
845
846 data |= cgc_flags;
847 data1 |= suvd_flags;
848
849 WREG32(mmUVD_CGC_GATE, data);
850 WREG32(mmUVD_SUVD_CGC_GATE, data1);
851}
852#endif
853
757static int uvd_v5_0_set_clockgating_state(void *handle, 854static int uvd_v5_0_set_clockgating_state(void *handle,
758 enum amd_clockgating_state state) 855 enum amd_clockgating_state state)
759{ 856{
760 struct amdgpu_device *adev = (struct amdgpu_device *)handle; 857 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
858 bool enable = (state == AMD_CG_STATE_GATE) ? true : false;
859 static int curstate = -1;
761 860
762 if (!(adev->cg_flags & AMD_CG_SUPPORT_UVD_MGCG)) 861 if (!(adev->cg_flags & AMD_CG_SUPPORT_UVD_MGCG))
763 return 0; 862 return 0;
764 863
864 if (curstate == state)
865 return 0;
866
867 curstate = state;
868 if (enable) {
869 /* disable HW gating and enable Sw gating */
870 uvd_v5_0_set_sw_clock_gating(adev);
871 } else {
872 /* wait for STATUS to clear */
873 if (uvd_v5_0_wait_for_idle(handle))
874 return -EBUSY;
875
876 /* enable HW gates because UVD is idle */
877/* uvd_v5_0_set_hw_clock_gating(adev); */
878 }
879
765 return 0; 880 return 0;
766} 881}
767 882
diff --git a/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c b/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
index d49379145ef2..372d70a0daec 100644
--- a/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
@@ -31,11 +31,13 @@
31#include "uvd/uvd_6_0_sh_mask.h" 31#include "uvd/uvd_6_0_sh_mask.h"
32#include "oss/oss_2_0_d.h" 32#include "oss/oss_2_0_d.h"
33#include "oss/oss_2_0_sh_mask.h" 33#include "oss/oss_2_0_sh_mask.h"
34#include "vi.h"
34 35
35static void uvd_v6_0_set_ring_funcs(struct amdgpu_device *adev); 36static void uvd_v6_0_set_ring_funcs(struct amdgpu_device *adev);
36static void uvd_v6_0_set_irq_funcs(struct amdgpu_device *adev); 37static void uvd_v6_0_set_irq_funcs(struct amdgpu_device *adev);
37static int uvd_v6_0_start(struct amdgpu_device *adev); 38static int uvd_v6_0_start(struct amdgpu_device *adev);
38static void uvd_v6_0_stop(struct amdgpu_device *adev); 39static void uvd_v6_0_stop(struct amdgpu_device *adev);
40static void uvd_v6_0_set_sw_clock_gating(struct amdgpu_device *adev);
39 41
40/** 42/**
41 * uvd_v6_0_ring_get_rptr - get read pointer 43 * uvd_v6_0_ring_get_rptr - get read pointer
@@ -284,6 +286,7 @@ static void uvd_v6_0_mc_resume(struct amdgpu_device *adev)
284 WREG32(mmUVD_UDEC_DBW_ADDR_CONFIG, adev->gfx.config.gb_addr_config); 286 WREG32(mmUVD_UDEC_DBW_ADDR_CONFIG, adev->gfx.config.gb_addr_config);
285} 287}
286 288
289#if 0
287static void cz_set_uvd_clock_gating_branches(struct amdgpu_device *adev, 290static void cz_set_uvd_clock_gating_branches(struct amdgpu_device *adev,
288 bool enable) 291 bool enable)
289{ 292{
@@ -360,157 +363,7 @@ static void cz_set_uvd_clock_gating_branches(struct amdgpu_device *adev,
360 WREG32(mmUVD_CGC_GATE, data); 363 WREG32(mmUVD_CGC_GATE, data);
361 WREG32(mmUVD_SUVD_CGC_GATE, data1); 364 WREG32(mmUVD_SUVD_CGC_GATE, data1);
362} 365}
363 366#endif
364static void tonga_set_uvd_clock_gating_branches(struct amdgpu_device *adev,
365 bool enable)
366{
367 u32 data, data1;
368
369 data = RREG32(mmUVD_CGC_GATE);
370 data1 = RREG32(mmUVD_SUVD_CGC_GATE);
371 if (enable) {
372 data |= UVD_CGC_GATE__SYS_MASK |
373 UVD_CGC_GATE__UDEC_MASK |
374 UVD_CGC_GATE__MPEG2_MASK |
375 UVD_CGC_GATE__RBC_MASK |
376 UVD_CGC_GATE__LMI_MC_MASK |
377 UVD_CGC_GATE__IDCT_MASK |
378 UVD_CGC_GATE__MPRD_MASK |
379 UVD_CGC_GATE__MPC_MASK |
380 UVD_CGC_GATE__LBSI_MASK |
381 UVD_CGC_GATE__LRBBM_MASK |
382 UVD_CGC_GATE__UDEC_RE_MASK |
383 UVD_CGC_GATE__UDEC_CM_MASK |
384 UVD_CGC_GATE__UDEC_IT_MASK |
385 UVD_CGC_GATE__UDEC_DB_MASK |
386 UVD_CGC_GATE__UDEC_MP_MASK |
387 UVD_CGC_GATE__WCB_MASK |
388 UVD_CGC_GATE__VCPU_MASK |
389 UVD_CGC_GATE__SCPU_MASK;
390 data1 |= UVD_SUVD_CGC_GATE__SRE_MASK |
391 UVD_SUVD_CGC_GATE__SIT_MASK |
392 UVD_SUVD_CGC_GATE__SMP_MASK |
393 UVD_SUVD_CGC_GATE__SCM_MASK |
394 UVD_SUVD_CGC_GATE__SDB_MASK;
395 } else {
396 data &= ~(UVD_CGC_GATE__SYS_MASK |
397 UVD_CGC_GATE__UDEC_MASK |
398 UVD_CGC_GATE__MPEG2_MASK |
399 UVD_CGC_GATE__RBC_MASK |
400 UVD_CGC_GATE__LMI_MC_MASK |
401 UVD_CGC_GATE__LMI_UMC_MASK |
402 UVD_CGC_GATE__IDCT_MASK |
403 UVD_CGC_GATE__MPRD_MASK |
404 UVD_CGC_GATE__MPC_MASK |
405 UVD_CGC_GATE__LBSI_MASK |
406 UVD_CGC_GATE__LRBBM_MASK |
407 UVD_CGC_GATE__UDEC_RE_MASK |
408 UVD_CGC_GATE__UDEC_CM_MASK |
409 UVD_CGC_GATE__UDEC_IT_MASK |
410 UVD_CGC_GATE__UDEC_DB_MASK |
411 UVD_CGC_GATE__UDEC_MP_MASK |
412 UVD_CGC_GATE__WCB_MASK |
413 UVD_CGC_GATE__VCPU_MASK |
414 UVD_CGC_GATE__SCPU_MASK);
415 data1 &= ~(UVD_SUVD_CGC_GATE__SRE_MASK |
416 UVD_SUVD_CGC_GATE__SIT_MASK |
417 UVD_SUVD_CGC_GATE__SMP_MASK |
418 UVD_SUVD_CGC_GATE__SCM_MASK |
419 UVD_SUVD_CGC_GATE__SDB_MASK);
420 }
421 WREG32(mmUVD_CGC_GATE, data);
422 WREG32(mmUVD_SUVD_CGC_GATE, data1);
423}
424
425static void uvd_v6_0_set_uvd_dynamic_clock_mode(struct amdgpu_device *adev,
426 bool swmode)
427{
428 u32 data, data1 = 0, data2;
429
430 /* Always un-gate UVD REGS bit */
431 data = RREG32(mmUVD_CGC_GATE);
432 data &= ~(UVD_CGC_GATE__REGS_MASK);
433 WREG32(mmUVD_CGC_GATE, data);
434
435 data = RREG32(mmUVD_CGC_CTRL);
436 data &= ~(UVD_CGC_CTRL__CLK_OFF_DELAY_MASK |
437 UVD_CGC_CTRL__CLK_GATE_DLY_TIMER_MASK);
438 data |= UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK |
439 1 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_GATE_DLY_TIMER) |
440 4 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_OFF_DELAY);
441
442 data2 = RREG32(mmUVD_SUVD_CGC_CTRL);
443 if (swmode) {
444 data &= ~(UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
445 UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
446 UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
447 UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
448 UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
449 UVD_CGC_CTRL__SYS_MODE_MASK |
450 UVD_CGC_CTRL__UDEC_MODE_MASK |
451 UVD_CGC_CTRL__MPEG2_MODE_MASK |
452 UVD_CGC_CTRL__REGS_MODE_MASK |
453 UVD_CGC_CTRL__RBC_MODE_MASK |
454 UVD_CGC_CTRL__LMI_MC_MODE_MASK |
455 UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
456 UVD_CGC_CTRL__IDCT_MODE_MASK |
457 UVD_CGC_CTRL__MPRD_MODE_MASK |
458 UVD_CGC_CTRL__MPC_MODE_MASK |
459 UVD_CGC_CTRL__LBSI_MODE_MASK |
460 UVD_CGC_CTRL__LRBBM_MODE_MASK |
461 UVD_CGC_CTRL__WCB_MODE_MASK |
462 UVD_CGC_CTRL__VCPU_MODE_MASK |
463 UVD_CGC_CTRL__JPEG_MODE_MASK |
464 UVD_CGC_CTRL__SCPU_MODE_MASK);
465 data1 |= UVD_CGC_CTRL2__DYN_OCLK_RAMP_EN_MASK |
466 UVD_CGC_CTRL2__DYN_RCLK_RAMP_EN_MASK;
467 data1 &= ~UVD_CGC_CTRL2__GATER_DIV_ID_MASK;
468 data1 |= 7 << REG_FIELD_SHIFT(UVD_CGC_CTRL2, GATER_DIV_ID);
469 data2 &= ~(UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
470 UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
471 UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
472 UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
473 UVD_SUVD_CGC_CTRL__SDB_MODE_MASK);
474 } else {
475 data |= UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
476 UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
477 UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
478 UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
479 UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
480 UVD_CGC_CTRL__SYS_MODE_MASK |
481 UVD_CGC_CTRL__UDEC_MODE_MASK |
482 UVD_CGC_CTRL__MPEG2_MODE_MASK |
483 UVD_CGC_CTRL__REGS_MODE_MASK |
484 UVD_CGC_CTRL__RBC_MODE_MASK |
485 UVD_CGC_CTRL__LMI_MC_MODE_MASK |
486 UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
487 UVD_CGC_CTRL__IDCT_MODE_MASK |
488 UVD_CGC_CTRL__MPRD_MODE_MASK |
489 UVD_CGC_CTRL__MPC_MODE_MASK |
490 UVD_CGC_CTRL__LBSI_MODE_MASK |
491 UVD_CGC_CTRL__LRBBM_MODE_MASK |
492 UVD_CGC_CTRL__WCB_MODE_MASK |
493 UVD_CGC_CTRL__VCPU_MODE_MASK |
494 UVD_CGC_CTRL__SCPU_MODE_MASK;
495 data2 |= UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
496 UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
497 UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
498 UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
499 UVD_SUVD_CGC_CTRL__SDB_MODE_MASK;
500 }
501 WREG32(mmUVD_CGC_CTRL, data);
502 WREG32(mmUVD_SUVD_CGC_CTRL, data2);
503
504 data = RREG32_UVD_CTX(ixUVD_CGC_CTRL2);
505 data &= ~(REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_OCLK_RAMP_EN) |
506 REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_RCLK_RAMP_EN) |
507 REG_FIELD_MASK(UVD_CGC_CTRL2, GATER_DIV_ID));
508 data1 &= (REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_OCLK_RAMP_EN) |
509 REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_RCLK_RAMP_EN) |
510 REG_FIELD_MASK(UVD_CGC_CTRL2, GATER_DIV_ID));
511 data |= data1;
512 WREG32_UVD_CTX(ixUVD_CGC_CTRL2, data);
513}
514 367
515/** 368/**
516 * uvd_v6_0_start - start UVD block 369 * uvd_v6_0_start - start UVD block
@@ -538,11 +391,7 @@ static int uvd_v6_0_start(struct amdgpu_device *adev)
538 391
539 /* Set dynamic clock gating in S/W control mode */ 392 /* Set dynamic clock gating in S/W control mode */
540 if (adev->cg_flags & AMD_CG_SUPPORT_UVD_MGCG) { 393 if (adev->cg_flags & AMD_CG_SUPPORT_UVD_MGCG) {
541 if (adev->flags & AMD_IS_APU) 394 uvd_v6_0_set_sw_clock_gating(adev);
542 cz_set_uvd_clock_gating_branches(adev, false);
543 else
544 tonga_set_uvd_clock_gating_branches(adev, false);
545 uvd_v6_0_set_uvd_dynamic_clock_mode(adev, true);
546 } else { 395 } else {
547 /* disable clock gating */ 396 /* disable clock gating */
548 uint32_t data = RREG32(mmUVD_CGC_CTRL); 397 uint32_t data = RREG32(mmUVD_CGC_CTRL);
@@ -978,25 +827,129 @@ static int uvd_v6_0_process_interrupt(struct amdgpu_device *adev,
978 return 0; 827 return 0;
979} 828}
980 829
830static void uvd_v6_0_set_sw_clock_gating(struct amdgpu_device *adev)
831{
832 uint32_t data, data1, data2, suvd_flags;
833
834 data = RREG32(mmUVD_CGC_CTRL);
835 data1 = RREG32(mmUVD_SUVD_CGC_GATE);
836 data2 = RREG32(mmUVD_SUVD_CGC_CTRL);
837
838 data &= ~(UVD_CGC_CTRL__CLK_OFF_DELAY_MASK |
839 UVD_CGC_CTRL__CLK_GATE_DLY_TIMER_MASK);
840
841 suvd_flags = UVD_SUVD_CGC_GATE__SRE_MASK |
842 UVD_SUVD_CGC_GATE__SIT_MASK |
843 UVD_SUVD_CGC_GATE__SMP_MASK |
844 UVD_SUVD_CGC_GATE__SCM_MASK |
845 UVD_SUVD_CGC_GATE__SDB_MASK;
846
847 data |= UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK |
848 (1 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_GATE_DLY_TIMER)) |
849 (4 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_OFF_DELAY));
850
851 data &= ~(UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
852 UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
853 UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
854 UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
855 UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
856 UVD_CGC_CTRL__SYS_MODE_MASK |
857 UVD_CGC_CTRL__UDEC_MODE_MASK |
858 UVD_CGC_CTRL__MPEG2_MODE_MASK |
859 UVD_CGC_CTRL__REGS_MODE_MASK |
860 UVD_CGC_CTRL__RBC_MODE_MASK |
861 UVD_CGC_CTRL__LMI_MC_MODE_MASK |
862 UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
863 UVD_CGC_CTRL__IDCT_MODE_MASK |
864 UVD_CGC_CTRL__MPRD_MODE_MASK |
865 UVD_CGC_CTRL__MPC_MODE_MASK |
866 UVD_CGC_CTRL__LBSI_MODE_MASK |
867 UVD_CGC_CTRL__LRBBM_MODE_MASK |
868 UVD_CGC_CTRL__WCB_MODE_MASK |
869 UVD_CGC_CTRL__VCPU_MODE_MASK |
870 UVD_CGC_CTRL__JPEG_MODE_MASK |
871 UVD_CGC_CTRL__SCPU_MODE_MASK |
872 UVD_CGC_CTRL__JPEG2_MODE_MASK);
873 data2 &= ~(UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
874 UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
875 UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
876 UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
877 UVD_SUVD_CGC_CTRL__SDB_MODE_MASK);
878 data1 |= suvd_flags;
879
880 WREG32(mmUVD_CGC_CTRL, data);
881 WREG32(mmUVD_CGC_GATE, 0);
882 WREG32(mmUVD_SUVD_CGC_GATE, data1);
883 WREG32(mmUVD_SUVD_CGC_CTRL, data2);
884}
885
886#if 0
887static void uvd_v6_0_set_hw_clock_gating(struct amdgpu_device *adev)
888{
889 uint32_t data, data1, cgc_flags, suvd_flags;
890
891 data = RREG32(mmUVD_CGC_GATE);
892 data1 = RREG32(mmUVD_SUVD_CGC_GATE);
893
894 cgc_flags = UVD_CGC_GATE__SYS_MASK |
895 UVD_CGC_GATE__UDEC_MASK |
896 UVD_CGC_GATE__MPEG2_MASK |
897 UVD_CGC_GATE__RBC_MASK |
898 UVD_CGC_GATE__LMI_MC_MASK |
899 UVD_CGC_GATE__IDCT_MASK |
900 UVD_CGC_GATE__MPRD_MASK |
901 UVD_CGC_GATE__MPC_MASK |
902 UVD_CGC_GATE__LBSI_MASK |
903 UVD_CGC_GATE__LRBBM_MASK |
904 UVD_CGC_GATE__UDEC_RE_MASK |
905 UVD_CGC_GATE__UDEC_CM_MASK |
906 UVD_CGC_GATE__UDEC_IT_MASK |
907 UVD_CGC_GATE__UDEC_DB_MASK |
908 UVD_CGC_GATE__UDEC_MP_MASK |
909 UVD_CGC_GATE__WCB_MASK |
910 UVD_CGC_GATE__VCPU_MASK |
911 UVD_CGC_GATE__SCPU_MASK |
912 UVD_CGC_GATE__JPEG_MASK |
913 UVD_CGC_GATE__JPEG2_MASK;
914
915 suvd_flags = UVD_SUVD_CGC_GATE__SRE_MASK |
916 UVD_SUVD_CGC_GATE__SIT_MASK |
917 UVD_SUVD_CGC_GATE__SMP_MASK |
918 UVD_SUVD_CGC_GATE__SCM_MASK |
919 UVD_SUVD_CGC_GATE__SDB_MASK;
920
921 data |= cgc_flags;
922 data1 |= suvd_flags;
923
924 WREG32(mmUVD_CGC_GATE, data);
925 WREG32(mmUVD_SUVD_CGC_GATE, data1);
926}
927#endif
928
981static int uvd_v6_0_set_clockgating_state(void *handle, 929static int uvd_v6_0_set_clockgating_state(void *handle,
982 enum amd_clockgating_state state) 930 enum amd_clockgating_state state)
983{ 931{
984 struct amdgpu_device *adev = (struct amdgpu_device *)handle; 932 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
985 bool enable = (state == AMD_CG_STATE_GATE) ? true : false; 933 bool enable = (state == AMD_CG_STATE_GATE) ? true : false;
934 static int curstate = -1;
986 935
987 if (!(adev->cg_flags & AMD_CG_SUPPORT_UVD_MGCG)) 936 if (!(adev->cg_flags & AMD_CG_SUPPORT_UVD_MGCG))
988 return 0; 937 return 0;
989 938
939 if (curstate == state)
940 return 0;
941
942 curstate = state;
990 if (enable) { 943 if (enable) {
991 if (adev->flags & AMD_IS_APU) 944 /* disable HW gating and enable Sw gating */
992 cz_set_uvd_clock_gating_branches(adev, enable); 945 uvd_v6_0_set_sw_clock_gating(adev);
993 else
994 tonga_set_uvd_clock_gating_branches(adev, enable);
995 uvd_v6_0_set_uvd_dynamic_clock_mode(adev, true);
996 } else { 946 } else {
997 uint32_t data = RREG32(mmUVD_CGC_CTRL); 947 /* wait for STATUS to clear */
998 data &= ~UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK; 948 if (uvd_v6_0_wait_for_idle(handle))
999 WREG32(mmUVD_CGC_CTRL, data); 949 return -EBUSY;
950
951 /* enable HW gates because UVD is idle */
952/* uvd_v6_0_set_hw_clock_gating(adev); */
1000 } 953 }
1001 954
1002 return 0; 955 return 0;