Diffstat (limited to 'drivers/gpu/drm/i915/intel_atomic.c')
-rw-r--r--	drivers/gpu/drm/i915/intel_atomic.c	118
1 file changed, 71 insertions(+), 47 deletions(-)
diff --git a/drivers/gpu/drm/i915/intel_atomic.c b/drivers/gpu/drm/i915/intel_atomic.c
index b04952bacf77..a5a2c8fe58a7 100644
--- a/drivers/gpu/drm/i915/intel_atomic.c
+++ b/drivers/gpu/drm/i915/intel_atomic.c
@@ -203,6 +203,72 @@ intel_crtc_destroy_state(struct drm_crtc *crtc,
 	drm_atomic_helper_crtc_destroy_state(crtc, state);
 }
 
+static void intel_atomic_setup_scaler(struct intel_crtc_scaler_state *scaler_state,
+				      int num_scalers_need, struct intel_crtc *intel_crtc,
+				      const char *name, int idx,
+				      struct intel_plane_state *plane_state,
+				      int *scaler_id)
+{
+	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
+	int j;
+	u32 mode;
+
+	if (*scaler_id < 0) {
+		/* find a free scaler */
+		for (j = 0; j < intel_crtc->num_scalers; j++) {
+			if (scaler_state->scalers[j].in_use)
+				continue;
+
+			*scaler_id = j;
+			scaler_state->scalers[*scaler_id].in_use = 1;
+			break;
+		}
+	}
+
+	if (WARN(*scaler_id < 0, "Cannot find scaler for %s:%d\n", name, idx))
+		return;
+
+	/* set scaler mode */
+	if (plane_state && plane_state->base.fb &&
+	    plane_state->base.fb->format->is_yuv &&
+	    plane_state->base.fb->format->num_planes > 1) {
+		if (IS_GEN9(dev_priv) &&
+		    !IS_GEMINILAKE(dev_priv)) {
+			mode = SKL_PS_SCALER_MODE_NV12;
+		} else if (icl_is_hdr_plane(to_intel_plane(plane_state->base.plane))) {
+			/*
+			 * On gen11+'s HDR planes we only use the scaler for
+			 * scaling. They have a dedicated chroma upsampler, so
+			 * we don't need the scaler to upsample the UV plane.
+			 */
+			mode = PS_SCALER_MODE_NORMAL;
+		} else {
+			mode = PS_SCALER_MODE_PLANAR;
+
+			if (plane_state->linked_plane)
+				mode |= PS_PLANE_Y_SEL(plane_state->linked_plane->id);
+		}
+	} else if (INTEL_GEN(dev_priv) > 9 || IS_GEMINILAKE(dev_priv)) {
+		mode = PS_SCALER_MODE_NORMAL;
+	} else if (num_scalers_need == 1 && intel_crtc->num_scalers > 1) {
+		/*
+		 * when only 1 scaler is in use on a pipe with 2 scalers
+		 * scaler 0 operates in high quality (HQ) mode.
+		 * In this case use scaler 0 to take advantage of HQ mode
+		 */
+		scaler_state->scalers[*scaler_id].in_use = 0;
+		*scaler_id = 0;
+		scaler_state->scalers[0].in_use = 1;
+		mode = SKL_PS_SCALER_MODE_HQ;
+	} else {
+		mode = SKL_PS_SCALER_MODE_DYN;
+	}
+
+	DRM_DEBUG_KMS("Attached scaler id %u.%u to %s:%d\n",
+		      intel_crtc->pipe, *scaler_id, name, idx);
+	scaler_state->scalers[*scaler_id].mode = mode;
+}
+
 /**
  * intel_atomic_setup_scalers() - setup scalers for crtc per staged requests
  * @dev_priv: i915 device
@@ -232,7 +298,7 @@ int intel_atomic_setup_scalers(struct drm_i915_private *dev_priv,
 	struct drm_atomic_state *drm_state = crtc_state->base.state;
 	struct intel_atomic_state *intel_state = to_intel_atomic_state(drm_state);
 	int num_scalers_need;
-	int i, j;
+	int i;
 
 	num_scalers_need = hweight32(scaler_state->scaler_users);
 
@@ -304,59 +370,17 @@ int intel_atomic_setup_scalers(struct drm_i915_private *dev_priv,
 			idx = plane->base.id;
 
 			/* plane on different crtc cannot be a scaler user of this crtc */
-			if (WARN_ON(intel_plane->pipe != intel_crtc->pipe)) {
+			if (WARN_ON(intel_plane->pipe != intel_crtc->pipe))
 				continue;
-			}
 
 			plane_state = intel_atomic_get_new_plane_state(intel_state,
 								       intel_plane);
 			scaler_id = &plane_state->scaler_id;
 		}
 
-		if (*scaler_id < 0) {
-			/* find a free scaler */
-			for (j = 0; j < intel_crtc->num_scalers; j++) {
-				if (!scaler_state->scalers[j].in_use) {
-					scaler_state->scalers[j].in_use = 1;
-					*scaler_id = j;
-					DRM_DEBUG_KMS("Attached scaler id %u.%u to %s:%d\n",
-						      intel_crtc->pipe, *scaler_id, name, idx);
-					break;
-				}
-			}
-		}
-
-		if (WARN_ON(*scaler_id < 0)) {
-			DRM_DEBUG_KMS("Cannot find scaler for %s:%d\n", name, idx);
-			continue;
-		}
-
-		/* set scaler mode */
-		if ((INTEL_GEN(dev_priv) >= 9) &&
-		    plane_state && plane_state->base.fb &&
-		    plane_state->base.fb->format->format ==
-		    DRM_FORMAT_NV12) {
-			if (INTEL_GEN(dev_priv) == 9 &&
-			    !IS_GEMINILAKE(dev_priv) &&
-			    !IS_SKYLAKE(dev_priv))
-				scaler_state->scalers[*scaler_id].mode =
-					SKL_PS_SCALER_MODE_NV12;
-			else
-				scaler_state->scalers[*scaler_id].mode =
-					PS_SCALER_MODE_PLANAR;
-		} else if (num_scalers_need == 1 && intel_crtc->pipe != PIPE_C) {
-			/*
-			 * when only 1 scaler is in use on either pipe A or B,
-			 * scaler 0 operates in high quality (HQ) mode.
-			 * In this case use scaler 0 to take advantage of HQ mode
-			 */
-			*scaler_id = 0;
-			scaler_state->scalers[0].in_use = 1;
-			scaler_state->scalers[0].mode = PS_SCALER_MODE_HQ;
-			scaler_state->scalers[1].in_use = 0;
-		} else {
-			scaler_state->scalers[*scaler_id].mode = PS_SCALER_MODE_DYN;
-		}
+		intel_atomic_setup_scaler(scaler_state, num_scalers_need,
+					  intel_crtc, name, idx,
+					  plane_state, scaler_id);
 	}
 
 	return 0;
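
For context only (not part of the patch above): a minimal, standalone C sketch of the scaler-mode decision that the new intel_atomic_setup_scaler() helper centralizes. The enum, struct fake_plane, and pick_mode() names are illustrative stand-ins for the real i915 types and feature checks; only the branch ordering mirrors the hunk above.

/*
 * Simplified, userspace-compilable model of the mode selection in
 * intel_atomic_setup_scaler(). All type and helper names here are
 * hypothetical; the real code uses i915 structures and platform macros.
 */
#include <stdbool.h>
#include <stdio.h>

enum scaler_mode { MODE_NV12, MODE_NORMAL, MODE_PLANAR, MODE_HQ, MODE_DYN };

struct fake_plane {
	bool is_planar_yuv;	/* fb is multi-planar YUV (e.g. NV12) */
	bool is_hdr_plane;	/* gen11+ HDR plane with its own chroma upsampler */
};

static enum scaler_mode pick_mode(int gen, bool is_glk,
				  const struct fake_plane *plane,
				  int scalers_needed, int scalers_on_pipe)
{
	if (plane && plane->is_planar_yuv) {
		if (gen == 9 && !is_glk)
			return MODE_NV12;	/* SKL-era planar scaling mode */
		if (plane->is_hdr_plane)
			return MODE_NORMAL;	/* chroma upsampler handles UV */
		return MODE_PLANAR;		/* GLK/gen10+ planar mode */
	}
	if (gen > 9 || is_glk)
		return MODE_NORMAL;
	if (scalers_needed == 1 && scalers_on_pipe > 1)
		return MODE_HQ;			/* put scaler 0 in high-quality mode */
	return MODE_DYN;
}

int main(void)
{
	struct fake_plane nv12 = { .is_planar_yuv = true };

	printf("gen9 planar YUV        -> %d\n", pick_mode(9, false, &nv12, 1, 2));
	printf("gen9, 1 of 2 scalers   -> %d\n", pick_mode(9, false, NULL, 1, 2));
	printf("gen11, no planar YUV   -> %d\n", pick_mode(11, false, NULL, 2, 2));
	return 0;
}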