Diffstat (limited to 'drivers/gpu/drm/i915/intel_display.c')
-rw-r--r--	drivers/gpu/drm/i915/intel_display.c	143
1 file changed, 133 insertions(+), 10 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_display.c b/drivers/gpu/drm/i915/intel_display.c
index c8fd15f146af..1b5cd833bc70 100644
--- a/drivers/gpu/drm/i915/intel_display.c
+++ b/drivers/gpu/drm/i915/intel_display.c
@@ -1188,25 +1188,30 @@ static void intel_update_fbc(struct drm_crtc *crtc,
 	if (intel_fb->obj->size > dev_priv->cfb_size) {
 		DRM_DEBUG_KMS("framebuffer too large, disabling "
 			      "compression\n");
+		dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL;
 		goto out_disable;
 	}
 	if ((mode->flags & DRM_MODE_FLAG_INTERLACE) ||
 	    (mode->flags & DRM_MODE_FLAG_DBLSCAN)) {
 		DRM_DEBUG_KMS("mode incompatible with compression, "
 			      "disabling\n");
+		dev_priv->no_fbc_reason = FBC_UNSUPPORTED_MODE;
 		goto out_disable;
 	}
 	if ((mode->hdisplay > 2048) ||
 	    (mode->vdisplay > 1536)) {
 		DRM_DEBUG_KMS("mode too large for compression, disabling\n");
+		dev_priv->no_fbc_reason = FBC_MODE_TOO_LARGE;
 		goto out_disable;
 	}
 	if ((IS_I915GM(dev) || IS_I945GM(dev)) && plane != 0) {
 		DRM_DEBUG_KMS("plane not 0, disabling compression\n");
+		dev_priv->no_fbc_reason = FBC_BAD_PLANE;
 		goto out_disable;
 	}
 	if (obj_priv->tiling_mode != I915_TILING_X) {
 		DRM_DEBUG_KMS("framebuffer not tiled, disabling compression\n");
+		dev_priv->no_fbc_reason = FBC_NOT_TILED;
 		goto out_disable;
 	}
 
@@ -2757,11 +2762,22 @@ static void i9xx_update_wm(struct drm_device *dev, int planea_clock,
 		srwm = total_size - sr_entries;
 		if (srwm < 0)
 			srwm = 1;
-		I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN | (srwm & 0x3f));
+
+		if (IS_I945G(dev) || IS_I945GM(dev))
+			I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_FIFO_MASK | (srwm & 0xff));
+		else if (IS_I915GM(dev)) {
+			/* 915M has a smaller SRWM field */
+			I915_WRITE(FW_BLC_SELF, srwm & 0x3f);
+			I915_WRITE(INSTPM, I915_READ(INSTPM) | INSTPM_SELF_EN);
+		}
 	} else {
 		/* Turn off self refresh if both pipes are enabled */
-		I915_WRITE(FW_BLC_SELF, I915_READ(FW_BLC_SELF)
-			   & ~FW_BLC_SELF_EN);
+		if (IS_I945G(dev) || IS_I945GM(dev)) {
+			I915_WRITE(FW_BLC_SELF, I915_READ(FW_BLC_SELF)
+				   & ~FW_BLC_SELF_EN);
+		} else if (IS_I915GM(dev)) {
+			I915_WRITE(INSTPM, I915_READ(INSTPM) & ~INSTPM_SELF_EN);
+		}
 	}
 
 	DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d, B: %d, C: %d, SR %d\n",
@@ -4010,6 +4026,11 @@ static void intel_idle_update(struct work_struct *work)
 
 	mutex_lock(&dev->struct_mutex);
 
+	if (IS_I945G(dev) || IS_I945GM(dev)) {
+		DRM_DEBUG_DRIVER("enable memory self refresh on 945\n");
+		I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN_MASK | FW_BLC_SELF_EN);
+	}
+
 	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
 		/* Skip inactive CRTCs */
 		if (!crtc->fb)
@@ -4043,9 +4064,17 @@ void intel_mark_busy(struct drm_device *dev, struct drm_gem_object *obj)
 	if (!drm_core_check_feature(dev, DRIVER_MODESET))
 		return;
 
-	if (!dev_priv->busy)
+	if (!dev_priv->busy) {
+		if (IS_I945G(dev) || IS_I945GM(dev)) {
+			u32 fw_blc_self;
+
+			DRM_DEBUG_DRIVER("disable memory self refresh on 945\n");
+			fw_blc_self = I915_READ(FW_BLC_SELF);
+			fw_blc_self &= ~FW_BLC_SELF_EN;
+			I915_WRITE(FW_BLC_SELF, fw_blc_self | FW_BLC_SELF_EN_MASK);
+		}
 		dev_priv->busy = true;
-	else
+	} else
 		mod_timer(&dev_priv->idle_timer, jiffies +
 			  msecs_to_jiffies(GPU_IDLE_TIMEOUT));
 
@@ -4057,6 +4086,14 @@ void intel_mark_busy(struct drm_device *dev, struct drm_gem_object *obj)
 		intel_fb = to_intel_framebuffer(crtc->fb);
 		if (intel_fb->obj == obj) {
 			if (!intel_crtc->busy) {
+				if (IS_I945G(dev) || IS_I945GM(dev)) {
+					u32 fw_blc_self;
+
+					DRM_DEBUG_DRIVER("disable memory self refresh on 945\n");
+					fw_blc_self = I915_READ(FW_BLC_SELF);
+					fw_blc_self &= ~FW_BLC_SELF_EN;
+					I915_WRITE(FW_BLC_SELF, fw_blc_self | FW_BLC_SELF_EN_MASK);
+				}
 				/* Non-busy -> busy, upclock */
 				intel_increase_pllclock(crtc, true);
 				intel_crtc->busy = true;
@@ -4586,6 +4623,91 @@ err_unref:
 	return NULL;
 }
 
+void ironlake_enable_drps(struct drm_device *dev)
+{
+	struct drm_i915_private *dev_priv = dev->dev_private;
+	u32 rgvmodectl = I915_READ(MEMMODECTL), rgvswctl;
+	u8 fmax, fmin, fstart, vstart;
+	int i = 0;
+
+	/* 100ms RC evaluation intervals */
+	I915_WRITE(RCUPEI, 100000);
+	I915_WRITE(RCDNEI, 100000);
+
+	/* Set max/min thresholds to 90ms and 80ms respectively */
+	I915_WRITE(RCBMAXAVG, 90000);
+	I915_WRITE(RCBMINAVG, 80000);
+
+	I915_WRITE(MEMIHYST, 1);
+
+	/* Set up min, max, and cur for interrupt handling */
+	fmax = (rgvmodectl & MEMMODE_FMAX_MASK) >> MEMMODE_FMAX_SHIFT;
+	fmin = (rgvmodectl & MEMMODE_FMIN_MASK);
+	fstart = (rgvmodectl & MEMMODE_FSTART_MASK) >>
+		MEMMODE_FSTART_SHIFT;
+	vstart = (I915_READ(PXVFREQ_BASE + (fstart * 4)) & PXVFREQ_PX_MASK) >>
+		PXVFREQ_PX_SHIFT;
+
+	dev_priv->max_delay = fstart; /* can't go to fmax w/o IPS */
+	dev_priv->min_delay = fmin;
+	dev_priv->cur_delay = fstart;
+
+	I915_WRITE(MEMINTREN, MEMINT_CX_SUPR_EN | MEMINT_EVAL_CHG_EN);
+
+	/*
+	 * Interrupts will be enabled in ironlake_irq_postinstall
+	 */
+
+	I915_WRITE(VIDSTART, vstart);
+	POSTING_READ(VIDSTART);
+
+	rgvmodectl |= MEMMODE_SWMODE_EN;
+	I915_WRITE(MEMMODECTL, rgvmodectl);
+
+	while (I915_READ(MEMSWCTL) & MEMCTL_CMD_STS) {
+		if (i++ > 100) {
+			DRM_ERROR("stuck trying to change perf mode\n");
+			break;
+		}
+		msleep(1);
+	}
+	msleep(1);
+
+	rgvswctl = (MEMCTL_CMD_CHFREQ << MEMCTL_CMD_SHIFT) |
+		(fstart << MEMCTL_FREQ_SHIFT) | MEMCTL_SFCAVM;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+	POSTING_READ(MEMSWCTL);
+
+	rgvswctl |= MEMCTL_CMD_STS;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+}
+
+void ironlake_disable_drps(struct drm_device *dev)
+{
+	struct drm_i915_private *dev_priv = dev->dev_private;
+	u32 rgvswctl;
+	u8 fstart;
+
+	/* Ack interrupts, disable EFC interrupt */
+	I915_WRITE(MEMINTREN, I915_READ(MEMINTREN) & ~MEMINT_EVAL_CHG_EN);
+	I915_WRITE(MEMINTRSTS, MEMINT_EVAL_CHG);
+	I915_WRITE(DEIER, I915_READ(DEIER) & ~DE_PCU_EVENT);
+	I915_WRITE(DEIIR, DE_PCU_EVENT);
+	I915_WRITE(DEIMR, I915_READ(DEIMR) | DE_PCU_EVENT);
+
+	/* Go back to the starting frequency */
+	fstart = (I915_READ(MEMMODECTL) & MEMMODE_FSTART_MASK) >>
+		MEMMODE_FSTART_SHIFT;
+	rgvswctl = (MEMCTL_CMD_CHFREQ << MEMCTL_CMD_SHIFT) |
+		(fstart << MEMCTL_FREQ_SHIFT) | MEMCTL_SFCAVM;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+	msleep(1);
+	rgvswctl |= MEMCTL_CMD_STS;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+	msleep(1);
+
+}
+
 void intel_init_clock_gating(struct drm_device *dev)
 {
 	struct drm_i915_private *dev_priv = dev->dev_private;
@@ -4769,11 +4891,6 @@ void intel_modeset_init(struct drm_device *dev)
 	DRM_DEBUG_KMS("%d display pipe%s available.\n",
 		      num_pipe, num_pipe > 1 ? "s" : "");
 
-	if (IS_I85X(dev))
-		pci_read_config_word(dev->pdev, HPLLCC, &dev_priv->orig_clock);
-	else if (IS_I9XX(dev) || IS_G4X(dev))
-		pci_read_config_word(dev->pdev, GCFGC, &dev_priv->orig_clock);
-
 	for (i = 0; i < num_pipe; i++) {
 		intel_crtc_init(dev, i);
 	}
@@ -4782,6 +4899,9 @@ void intel_modeset_init(struct drm_device *dev)
 
 	intel_init_clock_gating(dev);
 
+	if (IS_IRONLAKE_M(dev))
+		ironlake_enable_drps(dev);
+
 	INIT_WORK(&dev_priv->idle_work, intel_idle_update);
 	setup_timer(&dev_priv->idle_timer, intel_gpu_idle_timer,
 		    (unsigned long)dev);
@@ -4829,6 +4949,9 @@ void intel_modeset_cleanup(struct drm_device *dev)
 		drm_gem_object_unreference(dev_priv->pwrctx);
 	}
 
+	if (IS_IRONLAKE_M(dev))
+		ironlake_disable_drps(dev);
+
 	mutex_unlock(&dev->struct_mutex);
 
 	drm_mode_config_cleanup(dev);