Diffstat (limited to 'drivers/gpu/drm/i915/intel_display.c')

 drivers/gpu/drm/i915/intel_display.c | 216 ++++++++++++++++++++++++---------
 1 file changed, 167 insertions(+), 49 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_display.c b/drivers/gpu/drm/i915/intel_display.c
index b27202d23eb..9cd6de5f990 100644
--- a/drivers/gpu/drm/i915/intel_display.c
+++ b/drivers/gpu/drm/i915/intel_display.c
@@ -232,7 +232,7 @@ struct intel_limit {
 #define G4X_P2_DISPLAY_PORT_FAST	10
 #define G4X_P2_DISPLAY_PORT_LIMIT	0
 
-/* Ironlake */
+/* Ironlake / Sandybridge */
 /* as we calculate clock using (register_value + 2) for
    N/M1/M2, so here the range value for them is (actual_value-2).
  */
@@ -690,7 +690,7 @@ static const intel_limit_t *intel_limit(struct drm_crtc *crtc)
 	struct drm_device *dev = crtc->dev;
 	const intel_limit_t *limit;
 
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		limit = intel_ironlake_limit(crtc);
 	else if (IS_G4X(dev)) {
 		limit = intel_g4x_limit(crtc);
@@ -886,7 +886,7 @@ intel_g4x_find_best_PLL(const intel_limit_t *limit, struct drm_crtc *crtc,
 	if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)) {
 		int lvds_reg;
 
-		if (IS_IRONLAKE(dev))
+		if (HAS_PCH_SPLIT(dev))
 			lvds_reg = PCH_LVDS;
 		else
 			lvds_reg = LVDS;
@@ -1188,25 +1188,30 @@ static void intel_update_fbc(struct drm_crtc *crtc,
 	if (intel_fb->obj->size > dev_priv->cfb_size) {
 		DRM_DEBUG_KMS("framebuffer too large, disabling "
 			      "compression\n");
+		dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL;
 		goto out_disable;
 	}
 	if ((mode->flags & DRM_MODE_FLAG_INTERLACE) ||
 	    (mode->flags & DRM_MODE_FLAG_DBLSCAN)) {
 		DRM_DEBUG_KMS("mode incompatible with compression, "
 			      "disabling\n");
+		dev_priv->no_fbc_reason = FBC_UNSUPPORTED_MODE;
 		goto out_disable;
 	}
 	if ((mode->hdisplay > 2048) ||
 	    (mode->vdisplay > 1536)) {
 		DRM_DEBUG_KMS("mode too large for compression, disabling\n");
+		dev_priv->no_fbc_reason = FBC_MODE_TOO_LARGE;
 		goto out_disable;
 	}
 	if ((IS_I915GM(dev) || IS_I945GM(dev)) && plane != 0) {
 		DRM_DEBUG_KMS("plane not 0, disabling compression\n");
+		dev_priv->no_fbc_reason = FBC_BAD_PLANE;
 		goto out_disable;
 	}
 	if (obj_priv->tiling_mode != I915_TILING_X) {
 		DRM_DEBUG_KMS("framebuffer not tiled, disabling compression\n");
+		dev_priv->no_fbc_reason = FBC_NOT_TILED;
 		goto out_disable;
 	}
 
@@ -1366,7 +1371,7 @@ intel_pipe_set_base(struct drm_crtc *crtc, int x, int y,
 		dspcntr &= ~DISPPLANE_TILED;
 	}
 
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		/* must disable */
 		dspcntr |= DISPPLANE_TRICKLE_FEED_DISABLE;
 
@@ -1427,7 +1432,7 @@ static void i915_disable_vga (struct drm_device *dev)
 	u8 sr1;
 	u32 vga_reg;
 
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		vga_reg = CPU_VGACNTRL;
 	else
 		vga_reg = VGACNTRL;
@@ -2111,7 +2116,7 @@ static bool intel_crtc_mode_fixup(struct drm_crtc *crtc,
 				  struct drm_display_mode *adjusted_mode)
 {
 	struct drm_device *dev = crtc->dev;
-	if (IS_IRONLAKE(dev)) {
+	if (HAS_PCH_SPLIT(dev)) {
 		/* FDI link clock is fixed at 2.7G */
 		if (mode->clock * 3 > 27000 * 4)
 			return MODE_CLOCK_HIGH;
@@ -2757,11 +2762,22 @@ static void i9xx_update_wm(struct drm_device *dev, int planea_clock,
 		srwm = total_size - sr_entries;
 		if (srwm < 0)
 			srwm = 1;
-		I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN | (srwm & 0x3f));
+
+		if (IS_I945G(dev) || IS_I945GM(dev))
+			I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_FIFO_MASK | (srwm & 0xff));
+		else if (IS_I915GM(dev)) {
+			/* 915M has a smaller SRWM field */
+			I915_WRITE(FW_BLC_SELF, srwm & 0x3f);
+			I915_WRITE(INSTPM, I915_READ(INSTPM) | INSTPM_SELF_EN);
+		}
 	} else {
 		/* Turn off self refresh if both pipes are enabled */
-		I915_WRITE(FW_BLC_SELF, I915_READ(FW_BLC_SELF)
-			   & ~FW_BLC_SELF_EN);
+		if (IS_I945G(dev) || IS_I945GM(dev)) {
+			I915_WRITE(FW_BLC_SELF, I915_READ(FW_BLC_SELF)
+				   & ~FW_BLC_SELF_EN);
+		} else if (IS_I915GM(dev)) {
+			I915_WRITE(INSTPM, I915_READ(INSTPM) & ~INSTPM_SELF_EN);
+		}
 	}
 
 	DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d, B: %d, C: %d, SR %d\n",
@@ -2967,7 +2983,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 			      refclk / 1000);
 	} else if (IS_I9XX(dev)) {
 		refclk = 96000;
-		if (IS_IRONLAKE(dev))
+		if (HAS_PCH_SPLIT(dev))
 			refclk = 120000; /* 120Mhz refclk */
 	} else {
 		refclk = 48000;
@@ -3025,7 +3041,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 	}
 
 	/* FDI link */
-	if (IS_IRONLAKE(dev)) {
+	if (HAS_PCH_SPLIT(dev)) {
 		int lane, link_bw, bpp;
 		/* eDP doesn't require FDI link, so just set DP M/N
 		   according to current link config */
@@ -3102,7 +3118,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 	 * PCH B stepping, previous chipset stepping should be
 	 * ignoring this setting.
 	 */
-	if (IS_IRONLAKE(dev)) {
+	if (HAS_PCH_SPLIT(dev)) {
 		temp = I915_READ(PCH_DREF_CONTROL);
 		/* Always enable nonspread source */
 		temp &= ~DREF_NONSPREAD_SOURCE_MASK;
@@ -3149,7 +3165,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 			reduced_clock.m2;
 	}
 
-	if (!IS_IRONLAKE(dev))
+	if (!HAS_PCH_SPLIT(dev))
 		dpll = DPLL_VGA_MODE_DIS;
 
 	if (IS_I9XX(dev)) {
@@ -3162,7 +3178,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 			sdvo_pixel_multiply = adjusted_mode->clock / mode->clock;
 			if (IS_I945G(dev) || IS_I945GM(dev) || IS_G33(dev))
 				dpll |= (sdvo_pixel_multiply - 1) << SDVO_MULTIPLIER_SHIFT_HIRES;
-			else if (IS_IRONLAKE(dev))
+			else if (HAS_PCH_SPLIT(dev))
 				dpll |= (sdvo_pixel_multiply - 1) << PLL_REF_SDVO_HDMI_MULTIPLIER_SHIFT;
 		}
 		if (is_dp)
@@ -3174,7 +3190,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 		else {
 			dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
 			/* also FPA1 */
-			if (IS_IRONLAKE(dev))
+			if (HAS_PCH_SPLIT(dev))
 				dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;
 			if (IS_G4X(dev) && has_reduced_clock)
 				dpll |= (1 << (reduced_clock.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;
@@ -3193,7 +3209,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 			dpll |= DPLLB_LVDS_P2_CLOCK_DIV_14;
 			break;
 		}
-		if (IS_I965G(dev) && !IS_IRONLAKE(dev))
+		if (IS_I965G(dev) && !HAS_PCH_SPLIT(dev))
 			dpll |= (6 << PLL_LOAD_PULSE_PHASE_SHIFT);
 	} else {
 		if (is_lvds) {
@@ -3227,7 +3243,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 
 	/* Ironlake's plane is forced to pipe, bit 24 is to
 	   enable color space conversion */
-	if (!IS_IRONLAKE(dev)) {
+	if (!HAS_PCH_SPLIT(dev)) {
 		if (pipe == 0)
 			dspcntr &= ~DISPPLANE_SEL_PIPE_MASK;
 		else
@@ -3254,14 +3270,14 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 
 
 	/* Disable the panel fitter if it was on our pipe */
-	if (!IS_IRONLAKE(dev) && intel_panel_fitter_pipe(dev) == pipe)
+	if (!HAS_PCH_SPLIT(dev) && intel_panel_fitter_pipe(dev) == pipe)
 		I915_WRITE(PFIT_CONTROL, 0);
 
 	DRM_DEBUG_KMS("Mode for pipe %c:\n", pipe == 0 ? 'A' : 'B');
 	drm_mode_debug_printmodeline(mode);
 
 	/* assign to Ironlake registers */
-	if (IS_IRONLAKE(dev)) {
+	if (HAS_PCH_SPLIT(dev)) {
 		fp_reg = pch_fp_reg;
 		dpll_reg = pch_dpll_reg;
 	}
@@ -3282,7 +3298,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 	if (is_lvds) {
 		u32 lvds;
 
-		if (IS_IRONLAKE(dev))
+		if (HAS_PCH_SPLIT(dev))
 			lvds_reg = PCH_LVDS;
 
 		lvds = I915_READ(lvds_reg);
@@ -3304,12 +3320,12 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 		/* set the dithering flag */
 		if (IS_I965G(dev)) {
 			if (dev_priv->lvds_dither) {
-				if (IS_IRONLAKE(dev))
+				if (HAS_PCH_SPLIT(dev))
 					pipeconf |= PIPE_ENABLE_DITHER;
 				else
 					lvds |= LVDS_ENABLE_DITHER;
 			} else {
-				if (IS_IRONLAKE(dev))
+				if (HAS_PCH_SPLIT(dev))
 					pipeconf &= ~PIPE_ENABLE_DITHER;
 				else
 					lvds &= ~LVDS_ENABLE_DITHER;
@@ -3328,7 +3344,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 	/* Wait for the clocks to stabilize. */
 	udelay(150);
 
-	if (IS_I965G(dev) && !IS_IRONLAKE(dev)) {
+	if (IS_I965G(dev) && !HAS_PCH_SPLIT(dev)) {
 		if (is_sdvo) {
 			sdvo_pixel_multiply = adjusted_mode->clock / mode->clock;
 			I915_WRITE(dpll_md_reg, (0 << DPLL_MD_UDI_DIVIDER_SHIFT) |
@@ -3375,14 +3391,14 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
 	/* pipesrc and dspsize control the size that is scaled from, which should
 	 * always be the user's requested size.
 	 */
-	if (!IS_IRONLAKE(dev)) {
+	if (!HAS_PCH_SPLIT(dev)) {
 		I915_WRITE(dspsize_reg, ((mode->vdisplay - 1) << 16) |
 			   (mode->hdisplay - 1));
 		I915_WRITE(dsppos_reg, 0);
 	}
 	I915_WRITE(pipesrc_reg, ((mode->hdisplay - 1) << 16) | (mode->vdisplay - 1));
 
-	if (IS_IRONLAKE(dev)) {
+	if (HAS_PCH_SPLIT(dev)) {
 		I915_WRITE(data_m1_reg, TU_SIZE(m_n.tu) | m_n.gmch_m);
 		I915_WRITE(data_n1_reg, TU_SIZE(m_n.tu) | m_n.gmch_n);
 		I915_WRITE(link_m1_reg, m_n.link_m);
@@ -3438,7 +3454,7 @@ void intel_crtc_load_lut(struct drm_crtc *crtc)
 		return;
 
 	/* use legacy palette for Ironlake */
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		palreg = (intel_crtc->pipe == 0) ? LGC_PALETTE_A :
 			 LGC_PALETTE_B;
 
@@ -3553,11 +3569,10 @@ static int intel_crtc_cursor_set(struct drm_crtc *crtc,
 	intel_crtc->cursor_bo = bo;
 
 	return 0;
-fail:
-	mutex_lock(&dev->struct_mutex);
 fail_locked:
-	drm_gem_object_unreference(bo);
 	mutex_unlock(&dev->struct_mutex);
+fail:
+	drm_gem_object_unreference_unlocked(bo);
 	return ret;
 }
 
@@ -3922,7 +3937,7 @@ static void intel_increase_pllclock(struct drm_crtc *crtc, bool schedule)
 	int dpll_reg = (pipe == 0) ? DPLL_A : DPLL_B;
 	int dpll = I915_READ(dpll_reg);
 
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		return;
 
 	if (!dev_priv->lvds_downclock_avail)
@@ -3961,7 +3976,7 @@ static void intel_decrease_pllclock(struct drm_crtc *crtc)
 	int dpll_reg = (pipe == 0) ? DPLL_A : DPLL_B;
 	int dpll = I915_READ(dpll_reg);
 
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		return;
 
 	if (!dev_priv->lvds_downclock_avail)
@@ -4011,6 +4026,11 @@ static void intel_idle_update(struct work_struct *work)
 
 	mutex_lock(&dev->struct_mutex);
 
+	if (IS_I945G(dev) || IS_I945GM(dev)) {
+		DRM_DEBUG_DRIVER("enable memory self refresh on 945\n");
+		I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN_MASK | FW_BLC_SELF_EN);
+	}
+
 	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
 		/* Skip inactive CRTCs */
 		if (!crtc->fb)
@@ -4044,9 +4064,17 @@ void intel_mark_busy(struct drm_device *dev, struct drm_gem_object *obj)
 	if (!drm_core_check_feature(dev, DRIVER_MODESET))
 		return;
 
-	if (!dev_priv->busy)
+	if (!dev_priv->busy) {
+		if (IS_I945G(dev) || IS_I945GM(dev)) {
+			u32 fw_blc_self;
+
+			DRM_DEBUG_DRIVER("disable memory self refresh on 945\n");
+			fw_blc_self = I915_READ(FW_BLC_SELF);
+			fw_blc_self &= ~FW_BLC_SELF_EN;
+			I915_WRITE(FW_BLC_SELF, fw_blc_self | FW_BLC_SELF_EN_MASK);
+		}
 		dev_priv->busy = true;
-	else
+	} else
 		mod_timer(&dev_priv->idle_timer, jiffies +
 			  msecs_to_jiffies(GPU_IDLE_TIMEOUT));
 
@@ -4058,6 +4086,14 @@ void intel_mark_busy(struct drm_device *dev, struct drm_gem_object *obj)
 		intel_fb = to_intel_framebuffer(crtc->fb);
 		if (intel_fb->obj == obj) {
 			if (!intel_crtc->busy) {
+				if (IS_I945G(dev) || IS_I945GM(dev)) {
+					u32 fw_blc_self;
+
+					DRM_DEBUG_DRIVER("disable memory self refresh on 945\n");
+					fw_blc_self = I915_READ(FW_BLC_SELF);
+					fw_blc_self &= ~FW_BLC_SELF_EN;
+					I915_WRITE(FW_BLC_SELF, fw_blc_self | FW_BLC_SELF_EN_MASK);
+				}
 				/* Non-busy -> busy, upclock */
 				intel_increase_pllclock(crtc, true);
 				intel_crtc->busy = true;
@@ -4382,7 +4418,7 @@ static void intel_setup_outputs(struct drm_device *dev)
 	if (IS_MOBILE(dev) && !IS_I830(dev))
 		intel_lvds_init(dev);
 
-	if (IS_IRONLAKE(dev)) {
+	if (HAS_PCH_SPLIT(dev)) {
 		int found;
 
 		if (IS_MOBILE(dev) && (I915_READ(DP_A) & DP_DETECTED))
@@ -4451,7 +4487,7 @@ static void intel_setup_outputs(struct drm_device *dev)
 			DRM_DEBUG_KMS("probing DP_D\n");
 			intel_dp_init(dev, DP_D);
 		}
-	} else if (IS_I8XX(dev))
+	} else if (IS_GEN2(dev))
 		intel_dvo_init(dev);
 
 	if (SUPPORTS_TV(dev))
@@ -4476,9 +4512,7 @@ static void intel_user_framebuffer_destroy(struct drm_framebuffer *fb)
 		intelfb_remove(dev, fb);
 
 	drm_framebuffer_cleanup(fb);
-	mutex_lock(&dev->struct_mutex);
-	drm_gem_object_unreference(intel_fb->obj);
-	mutex_unlock(&dev->struct_mutex);
+	drm_gem_object_unreference_unlocked(intel_fb->obj);
 
 	kfree(intel_fb);
 }
@@ -4541,9 +4575,7 @@ intel_user_framebuffer_create(struct drm_device *dev,
 
 	ret = intel_framebuffer_create(dev, mode_cmd, &fb, obj);
 	if (ret) {
-		mutex_lock(&dev->struct_mutex);
-		drm_gem_object_unreference(obj);
-		mutex_unlock(&dev->struct_mutex);
+		drm_gem_object_unreference_unlocked(obj);
 		return NULL;
 	}
 
@@ -4591,6 +4623,91 @@ err_unref:
 	return NULL;
 }
 
+void ironlake_enable_drps(struct drm_device *dev)
+{
+	struct drm_i915_private *dev_priv = dev->dev_private;
+	u32 rgvmodectl = I915_READ(MEMMODECTL), rgvswctl;
+	u8 fmax, fmin, fstart, vstart;
+	int i = 0;
+
+	/* 100ms RC evaluation intervals */
+	I915_WRITE(RCUPEI, 100000);
+	I915_WRITE(RCDNEI, 100000);
+
+	/* Set max/min thresholds to 90ms and 80ms respectively */
+	I915_WRITE(RCBMAXAVG, 90000);
+	I915_WRITE(RCBMINAVG, 80000);
+
+	I915_WRITE(MEMIHYST, 1);
+
+	/* Set up min, max, and cur for interrupt handling */
+	fmax = (rgvmodectl & MEMMODE_FMAX_MASK) >> MEMMODE_FMAX_SHIFT;
+	fmin = (rgvmodectl & MEMMODE_FMIN_MASK);
+	fstart = (rgvmodectl & MEMMODE_FSTART_MASK) >>
+		MEMMODE_FSTART_SHIFT;
+	vstart = (I915_READ(PXVFREQ_BASE + (fstart * 4)) & PXVFREQ_PX_MASK) >>
+		PXVFREQ_PX_SHIFT;
+
+	dev_priv->max_delay = fstart; /* can't go to fmax w/o IPS */
+	dev_priv->min_delay = fmin;
+	dev_priv->cur_delay = fstart;
+
+	I915_WRITE(MEMINTREN, MEMINT_CX_SUPR_EN | MEMINT_EVAL_CHG_EN);
+
+	/*
+	 * Interrupts will be enabled in ironlake_irq_postinstall
+	 */
+
+	I915_WRITE(VIDSTART, vstart);
+	POSTING_READ(VIDSTART);
+
+	rgvmodectl |= MEMMODE_SWMODE_EN;
+	I915_WRITE(MEMMODECTL, rgvmodectl);
+
+	while (I915_READ(MEMSWCTL) & MEMCTL_CMD_STS) {
+		if (i++ > 100) {
+			DRM_ERROR("stuck trying to change perf mode\n");
+			break;
+		}
+		msleep(1);
+	}
+	msleep(1);
+
+	rgvswctl = (MEMCTL_CMD_CHFREQ << MEMCTL_CMD_SHIFT) |
+		(fstart << MEMCTL_FREQ_SHIFT) | MEMCTL_SFCAVM;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+	POSTING_READ(MEMSWCTL);
+
+	rgvswctl |= MEMCTL_CMD_STS;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+}
+
+void ironlake_disable_drps(struct drm_device *dev)
+{
+	struct drm_i915_private *dev_priv = dev->dev_private;
+	u32 rgvswctl;
+	u8 fstart;
+
+	/* Ack interrupts, disable EFC interrupt */
+	I915_WRITE(MEMINTREN, I915_READ(MEMINTREN) & ~MEMINT_EVAL_CHG_EN);
+	I915_WRITE(MEMINTRSTS, MEMINT_EVAL_CHG);
+	I915_WRITE(DEIER, I915_READ(DEIER) & ~DE_PCU_EVENT);
+	I915_WRITE(DEIIR, DE_PCU_EVENT);
+	I915_WRITE(DEIMR, I915_READ(DEIMR) | DE_PCU_EVENT);
+
+	/* Go back to the starting frequency */
+	fstart = (I915_READ(MEMMODECTL) & MEMMODE_FSTART_MASK) >>
+		MEMMODE_FSTART_SHIFT;
+	rgvswctl = (MEMCTL_CMD_CHFREQ << MEMCTL_CMD_SHIFT) |
+		(fstart << MEMCTL_FREQ_SHIFT) | MEMCTL_SFCAVM;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+	msleep(1);
+	rgvswctl |= MEMCTL_CMD_STS;
+	I915_WRITE(MEMSWCTL, rgvswctl);
+	msleep(1);
+
+}
+
 void intel_init_clock_gating(struct drm_device *dev)
 {
 	struct drm_i915_private *dev_priv = dev->dev_private;
@@ -4599,7 +4716,7 @@ void intel_init_clock_gating(struct drm_device *dev)
 	 * Disable clock gating reported to work incorrectly according to the
 	 * specs, but enable as much else as we can.
 	 */
-	if (IS_IRONLAKE(dev)) {
+	if (HAS_PCH_SPLIT(dev)) {
 		return;
 	} else if (IS_G4X(dev)) {
 		uint32_t dspclk_gate;
@@ -4672,7 +4789,7 @@ static void intel_init_display(struct drm_device *dev)
 	struct drm_i915_private *dev_priv = dev->dev_private;
 
 	/* We always want a DPMS function */
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		dev_priv->display.dpms = ironlake_crtc_dpms;
 	else
 		dev_priv->display.dpms = i9xx_crtc_dpms;
@@ -4715,7 +4832,7 @@ static void intel_init_display(struct drm_device *dev)
 			i830_get_display_clock_speed;
 
 	/* For FIFO watermark updates */
-	if (IS_IRONLAKE(dev))
+	if (HAS_PCH_SPLIT(dev))
 		dev_priv->display.update_wm = NULL;
 	else if (IS_G4X(dev))
 		dev_priv->display.update_wm = g4x_update_wm;
@@ -4774,11 +4891,6 @@ void intel_modeset_init(struct drm_device *dev)
 	DRM_DEBUG_KMS("%d display pipe%s available.\n",
 		      num_pipe, num_pipe > 1 ? "s" : "");
 
-	if (IS_I85X(dev))
-		pci_read_config_word(dev->pdev, HPLLCC, &dev_priv->orig_clock);
-	else if (IS_I9XX(dev) || IS_G4X(dev))
-		pci_read_config_word(dev->pdev, GCFGC, &dev_priv->orig_clock);
-
 	for (i = 0; i < num_pipe; i++) {
 		intel_crtc_init(dev, i);
 	}
@@ -4787,6 +4899,9 @@ void intel_modeset_init(struct drm_device *dev)
 
 	intel_init_clock_gating(dev);
 
+	if (IS_IRONLAKE_M(dev))
+		ironlake_enable_drps(dev);
+
 	INIT_WORK(&dev_priv->idle_work, intel_idle_update);
 	setup_timer(&dev_priv->idle_timer, intel_gpu_idle_timer,
 		    (unsigned long)dev);
@@ -4834,6 +4949,9 @@ void intel_modeset_cleanup(struct drm_device *dev)
 		drm_gem_object_unreference(dev_priv->pwrctx);
 	}
 
+	if (IS_IRONLAKE_M(dev))
+		ironlake_disable_drps(dev);
+
 	mutex_unlock(&dev->struct_mutex);
 
 	drm_mode_config_cleanup(dev);