about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorArchit Taneja <architt@codeaurora.org>2017-03-23 06:28:05 -0400
committerRob Clark <robdclark@gmail.com>2017-04-08 06:59:34 -0400
commit0ddc3a630743adc36639f2c4d2134808f0835e9a (patch)
treeefdc57f560d3c10dbd5c0b4903b8fcb72a494df3
parentbcb877b7fdeaf0867d3363136644e4d378207e31 (diff)
drm/msm/mdp5: Start using parameters from CRTC state
In the last few commits, we've been adding params to mdp5_crtc_state, and assigning them in the atomic_check() funcs. Now it's time to actually start using them.

Remove the duplicated params from the mdp5_crtc struct, and start using them in the mdp5_crtc code. The majority of the references to these params is in code that executes after the atomic swap has occurred, so it's okay to use crtc->state in them. There are a couple of legacy LM cursor ops that may not use the updated state, but (I think) it's okay to live with that.

Now that we dynamically allocate a mixer to the CRTC, we can also remove the static assignment to it in mdp5_crtc_init, and also drop the code that skipped init-ing WB bound mixers (those will now be rejected by mdp5_mixer_assign()).

Signed-off-by: Archit Taneja <architt@codeaurora.org>
Signed-off-by: Rob Clark <robdclark@gmail.com>
-rw-r--r--drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c122
-rw-r--r--drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.c4
-rw-r--r--drivers/gpu/drm/msm/mdp/mdp5/mdp5_mixer.c4
3 files changed, 64 insertions, 66 deletions
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c
index 7913e93a1d90..c33855eeda24 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c
@@ -32,12 +32,8 @@ struct mdp5_crtc {
32 int id; 32 int id;
33 bool enabled; 33 bool enabled;
34 34
35 struct mdp5_hw_mixer *mixer;
36 spinlock_t lm_lock; /* protect REG_MDP5_LM_* registers */ 35 spinlock_t lm_lock; /* protect REG_MDP5_LM_* registers */
37 36
38 /* CTL used for this CRTC: */
39 struct mdp5_ctl *ctl;
40
41 /* if there is a pending flip, these will be non-null: */ 37 /* if there is a pending flip, these will be non-null: */
42 struct drm_pending_vblank_event *event; 38 struct drm_pending_vblank_event *event;
43 39
@@ -59,8 +55,6 @@ struct mdp5_crtc {
59 55
60 struct completion pp_completion; 56 struct completion pp_completion;
61 57
62 bool cmd_mode;
63
64 struct { 58 struct {
65 /* protect REG_MDP5_LM_CURSOR* registers and cursor scanout_bo*/ 59 /* protect REG_MDP5_LM_CURSOR* registers and cursor scanout_bo*/
66 spinlock_t lock; 60 spinlock_t lock;
@@ -95,10 +89,11 @@ static void request_pp_done_pending(struct drm_crtc *crtc)
95 89
96static u32 crtc_flush(struct drm_crtc *crtc, u32 flush_mask) 90static u32 crtc_flush(struct drm_crtc *crtc, u32 flush_mask)
97{ 91{
98 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 92 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
93 struct mdp5_ctl *ctl = mdp5_cstate->ctl;
99 94
100 DBG("%s: flush=%08x", crtc->name, flush_mask); 95 DBG("%s: flush=%08x", crtc->name, flush_mask);
101 return mdp5_ctl_commit(mdp5_crtc->ctl, flush_mask); 96 return mdp5_ctl_commit(ctl, flush_mask);
102} 97}
103 98
104/* 99/*
@@ -108,20 +103,20 @@ static u32 crtc_flush(struct drm_crtc *crtc, u32 flush_mask)
108 */ 103 */
109static u32 crtc_flush_all(struct drm_crtc *crtc) 104static u32 crtc_flush_all(struct drm_crtc *crtc)
110{ 105{
111 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 106 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
112 struct mdp5_hw_mixer *mixer; 107 struct mdp5_hw_mixer *mixer;
113 struct drm_plane *plane; 108 struct drm_plane *plane;
114 uint32_t flush_mask = 0; 109 uint32_t flush_mask = 0;
115 110
116 /* this should not happen: */ 111 /* this should not happen: */
117 if (WARN_ON(!mdp5_crtc->ctl)) 112 if (WARN_ON(!mdp5_cstate->ctl))
118 return 0; 113 return 0;
119 114
120 drm_atomic_crtc_for_each_plane(plane, crtc) { 115 drm_atomic_crtc_for_each_plane(plane, crtc) {
121 flush_mask |= mdp5_plane_get_flush(plane); 116 flush_mask |= mdp5_plane_get_flush(plane);
122 } 117 }
123 118
124 mixer = mdp5_crtc->mixer; 119 mixer = mdp5_cstate->pipeline.mixer;
125 flush_mask |= mdp_ctl_flush_mask_lm(mixer->lm); 120 flush_mask |= mdp_ctl_flush_mask_lm(mixer->lm);
126 121
127 return crtc_flush(crtc, flush_mask); 122 return crtc_flush(crtc, flush_mask);
@@ -130,7 +125,9 @@ static u32 crtc_flush_all(struct drm_crtc *crtc)
130/* if file!=NULL, this is preclose potential cancel-flip path */ 125/* if file!=NULL, this is preclose potential cancel-flip path */
131static void complete_flip(struct drm_crtc *crtc, struct drm_file *file) 126static void complete_flip(struct drm_crtc *crtc, struct drm_file *file)
132{ 127{
128 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
133 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 129 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
130 struct mdp5_ctl *ctl = mdp5_cstate->ctl;
134 struct drm_device *dev = crtc->dev; 131 struct drm_device *dev = crtc->dev;
135 struct drm_pending_vblank_event *event; 132 struct drm_pending_vblank_event *event;
136 unsigned long flags; 133 unsigned long flags;
@@ -144,10 +141,11 @@ static void complete_flip(struct drm_crtc *crtc, struct drm_file *file)
144 } 141 }
145 spin_unlock_irqrestore(&dev->event_lock, flags); 142 spin_unlock_irqrestore(&dev->event_lock, flags);
146 143
147 if (mdp5_crtc->ctl && !crtc->state->enable) { 144 if (ctl && !crtc->state->enable) {
148 /* set STAGE_UNUSED for all layers */ 145 /* set STAGE_UNUSED for all layers */
149 mdp5_ctl_blend(mdp5_crtc->ctl, NULL, 0, 0); 146 mdp5_ctl_blend(ctl, NULL, 0, 0);
150 mdp5_crtc->ctl = NULL; 147 /* XXX: What to do here? */
148 /* mdp5_crtc->ctl = NULL; */
151 } 149 }
152} 150}
153 151
@@ -196,13 +194,15 @@ static inline u32 mdp5_lm_use_fg_alpha_mask(enum mdp_mixer_stage_id stage)
196static void blend_setup(struct drm_crtc *crtc) 194static void blend_setup(struct drm_crtc *crtc)
197{ 195{
198 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 196 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
197 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
199 struct mdp5_kms *mdp5_kms = get_kms(crtc); 198 struct mdp5_kms *mdp5_kms = get_kms(crtc);
200 struct drm_plane *plane; 199 struct drm_plane *plane;
201 const struct mdp5_cfg_hw *hw_cfg; 200 const struct mdp5_cfg_hw *hw_cfg;
202 struct mdp5_plane_state *pstate, *pstates[STAGE_MAX + 1] = {NULL}; 201 struct mdp5_plane_state *pstate, *pstates[STAGE_MAX + 1] = {NULL};
203 const struct mdp_format *format; 202 const struct mdp_format *format;
204 struct mdp5_hw_mixer *mixer = mdp5_crtc->mixer; 203 struct mdp5_hw_mixer *mixer = mdp5_cstate->pipeline.mixer;
205 uint32_t lm = mixer->lm; 204 uint32_t lm = mixer->lm;
205 struct mdp5_ctl *ctl = mdp5_cstate->ctl;
206 uint32_t blend_op, fg_alpha, bg_alpha, ctl_blend_flags = 0; 206 uint32_t blend_op, fg_alpha, bg_alpha, ctl_blend_flags = 0;
207 unsigned long flags; 207 unsigned long flags;
208 enum mdp5_pipe stage[STAGE_MAX + 1] = { SSPP_NONE }; 208 enum mdp5_pipe stage[STAGE_MAX + 1] = { SSPP_NONE };
@@ -216,7 +216,8 @@ static void blend_setup(struct drm_crtc *crtc)
216 spin_lock_irqsave(&mdp5_crtc->lm_lock, flags); 216 spin_lock_irqsave(&mdp5_crtc->lm_lock, flags);
217 217
218 /* ctl could be released already when we are shutting down: */ 218 /* ctl could be released already when we are shutting down: */
219 if (!mdp5_crtc->ctl) 219 /* XXX: Can this happen now? */
220 if (!ctl)
220 goto out; 221 goto out;
221 222
222 /* Collect all plane information */ 223 /* Collect all plane information */
@@ -293,7 +294,7 @@ static void blend_setup(struct drm_crtc *crtc)
293 294
294 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_COLOR_OUT(lm), mixer_op_mode); 295 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_COLOR_OUT(lm), mixer_op_mode);
295 296
296 mdp5_ctl_blend(mdp5_crtc->ctl, stage, plane_cnt, ctl_blend_flags); 297 mdp5_ctl_blend(ctl, stage, plane_cnt, ctl_blend_flags);
297 298
298out: 299out:
299 spin_unlock_irqrestore(&mdp5_crtc->lm_lock, flags); 300 spin_unlock_irqrestore(&mdp5_crtc->lm_lock, flags);
@@ -302,8 +303,9 @@ out:
302static void mdp5_crtc_mode_set_nofb(struct drm_crtc *crtc) 303static void mdp5_crtc_mode_set_nofb(struct drm_crtc *crtc)
303{ 304{
304 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 305 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
306 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
305 struct mdp5_kms *mdp5_kms = get_kms(crtc); 307 struct mdp5_kms *mdp5_kms = get_kms(crtc);
306 struct mdp5_hw_mixer *mixer = mdp5_crtc->mixer; 308 struct mdp5_hw_mixer *mixer = mdp5_cstate->pipeline.mixer;
307 uint32_t lm = mixer->lm; 309 uint32_t lm = mixer->lm;
308 unsigned long flags; 310 unsigned long flags;
309 struct drm_display_mode *mode; 311 struct drm_display_mode *mode;
@@ -332,6 +334,7 @@ static void mdp5_crtc_mode_set_nofb(struct drm_crtc *crtc)
332static void mdp5_crtc_disable(struct drm_crtc *crtc) 334static void mdp5_crtc_disable(struct drm_crtc *crtc)
333{ 335{
334 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 336 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
337 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
335 struct mdp5_kms *mdp5_kms = get_kms(crtc); 338 struct mdp5_kms *mdp5_kms = get_kms(crtc);
336 339
337 DBG("%s", crtc->name); 340 DBG("%s", crtc->name);
@@ -339,7 +342,7 @@ static void mdp5_crtc_disable(struct drm_crtc *crtc)
339 if (WARN_ON(!mdp5_crtc->enabled)) 342 if (WARN_ON(!mdp5_crtc->enabled))
340 return; 343 return;
341 344
342 if (mdp5_crtc->cmd_mode) 345 if (mdp5_cstate->cmd_mode)
343 mdp_irq_unregister(&mdp5_kms->base, &mdp5_crtc->pp_done); 346 mdp_irq_unregister(&mdp5_kms->base, &mdp5_crtc->pp_done);
344 347
345 mdp_irq_unregister(&mdp5_kms->base, &mdp5_crtc->err); 348 mdp_irq_unregister(&mdp5_kms->base, &mdp5_crtc->err);
@@ -351,6 +354,7 @@ static void mdp5_crtc_disable(struct drm_crtc *crtc)
351static void mdp5_crtc_enable(struct drm_crtc *crtc) 354static void mdp5_crtc_enable(struct drm_crtc *crtc)
352{ 355{
353 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 356 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
357 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
354 struct mdp5_kms *mdp5_kms = get_kms(crtc); 358 struct mdp5_kms *mdp5_kms = get_kms(crtc);
355 359
356 DBG("%s", crtc->name); 360 DBG("%s", crtc->name);
@@ -361,7 +365,7 @@ static void mdp5_crtc_enable(struct drm_crtc *crtc)
361 mdp5_enable(mdp5_kms); 365 mdp5_enable(mdp5_kms);
362 mdp_irq_register(&mdp5_kms->base, &mdp5_crtc->err); 366 mdp_irq_register(&mdp5_kms->base, &mdp5_crtc->err);
363 367
364 if (mdp5_crtc->cmd_mode) 368 if (mdp5_cstate->cmd_mode)
365 mdp_irq_register(&mdp5_kms->base, &mdp5_crtc->pp_done); 369 mdp_irq_register(&mdp5_kms->base, &mdp5_crtc->pp_done);
366 370
367 mdp5_crtc->enabled = true; 371 mdp5_crtc->enabled = true;
@@ -508,6 +512,7 @@ static void mdp5_crtc_atomic_flush(struct drm_crtc *crtc,
508 struct drm_crtc_state *old_crtc_state) 512 struct drm_crtc_state *old_crtc_state)
509{ 513{
510 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 514 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
515 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
511 struct drm_device *dev = crtc->dev; 516 struct drm_device *dev = crtc->dev;
512 unsigned long flags; 517 unsigned long flags;
513 518
@@ -524,7 +529,8 @@ static void mdp5_crtc_atomic_flush(struct drm_crtc *crtc,
524 * it means we are trying to flush a CRTC whose state is disabled: 529 * it means we are trying to flush a CRTC whose state is disabled:
525 * nothing else needs to be done. 530 * nothing else needs to be done.
526 */ 531 */
527 if (unlikely(!mdp5_crtc->ctl)) 532 /* XXX: Can this happen now ? */
533 if (unlikely(!mdp5_cstate->ctl))
528 return; 534 return;
529 535
530 blend_setup(crtc); 536 blend_setup(crtc);
@@ -535,11 +541,16 @@ static void mdp5_crtc_atomic_flush(struct drm_crtc *crtc,
535 * This is safe because no pp_done will happen before SW trigger 541 * This is safe because no pp_done will happen before SW trigger
536 * in command mode. 542 * in command mode.
537 */ 543 */
538 if (mdp5_crtc->cmd_mode) 544 if (mdp5_cstate->cmd_mode)
539 request_pp_done_pending(crtc); 545 request_pp_done_pending(crtc);
540 546
541 mdp5_crtc->flushed_mask = crtc_flush_all(crtc); 547 mdp5_crtc->flushed_mask = crtc_flush_all(crtc);
542 548
549 /* XXX are we leaking out state here? */
550 mdp5_crtc->vblank.irqmask = mdp5_cstate->vblank_irqmask;
551 mdp5_crtc->err.irqmask = mdp5_cstate->err_irqmask;
552 mdp5_crtc->pp_done.irqmask = mdp5_cstate->pp_done_irqmask;
553
543 request_pending(crtc, PENDING_FLIP); 554 request_pending(crtc, PENDING_FLIP);
544} 555}
545 556
@@ -574,11 +585,13 @@ static int mdp5_crtc_cursor_set(struct drm_crtc *crtc,
574 uint32_t width, uint32_t height) 585 uint32_t width, uint32_t height)
575{ 586{
576 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 587 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
588 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
577 struct drm_device *dev = crtc->dev; 589 struct drm_device *dev = crtc->dev;
578 struct mdp5_kms *mdp5_kms = get_kms(crtc); 590 struct mdp5_kms *mdp5_kms = get_kms(crtc);
579 struct drm_gem_object *cursor_bo, *old_bo = NULL; 591 struct drm_gem_object *cursor_bo, *old_bo = NULL;
580 uint32_t blendcfg, stride; 592 uint32_t blendcfg, stride;
581 uint64_t cursor_addr; 593 uint64_t cursor_addr;
594 struct mdp5_ctl *ctl;
582 int ret, lm; 595 int ret, lm;
583 enum mdp5_cursor_alpha cur_alpha = CURSOR_ALPHA_PER_PIXEL; 596 enum mdp5_cursor_alpha cur_alpha = CURSOR_ALPHA_PER_PIXEL;
584 uint32_t flush_mask = mdp_ctl_flush_mask_cursor(0); 597 uint32_t flush_mask = mdp_ctl_flush_mask_cursor(0);
@@ -591,7 +604,8 @@ static int mdp5_crtc_cursor_set(struct drm_crtc *crtc,
591 return -EINVAL; 604 return -EINVAL;
592 } 605 }
593 606
594 if (NULL == mdp5_crtc->ctl) 607 ctl = mdp5_cstate->ctl;
608 if (!ctl)
595 return -EINVAL; 609 return -EINVAL;
596 610
597 if (!handle) { 611 if (!handle) {
@@ -608,7 +622,7 @@ static int mdp5_crtc_cursor_set(struct drm_crtc *crtc,
608 if (ret) 622 if (ret)
609 return -EINVAL; 623 return -EINVAL;
610 624
611 lm = mdp5_crtc->mixer->lm; 625 lm = mdp5_cstate->pipeline.mixer->lm;
612 stride = width * drm_format_plane_cpp(DRM_FORMAT_ARGB8888, 0); 626 stride = width * drm_format_plane_cpp(DRM_FORMAT_ARGB8888, 0);
613 627
614 spin_lock_irqsave(&mdp5_crtc->cursor.lock, flags); 628 spin_lock_irqsave(&mdp5_crtc->cursor.lock, flags);
@@ -638,7 +652,7 @@ static int mdp5_crtc_cursor_set(struct drm_crtc *crtc,
638 spin_unlock_irqrestore(&mdp5_crtc->cursor.lock, flags); 652 spin_unlock_irqrestore(&mdp5_crtc->cursor.lock, flags);
639 653
640set_cursor: 654set_cursor:
641 ret = mdp5_ctl_set_cursor(mdp5_crtc->ctl, 0, cursor_enable); 655 ret = mdp5_ctl_set_cursor(ctl, 0, cursor_enable);
642 if (ret) { 656 if (ret) {
643 dev_err(dev->dev, "failed to %sable cursor: %d\n", 657 dev_err(dev->dev, "failed to %sable cursor: %d\n",
644 cursor_enable ? "en" : "dis", ret); 658 cursor_enable ? "en" : "dis", ret);
@@ -660,7 +674,8 @@ static int mdp5_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
660{ 674{
661 struct mdp5_kms *mdp5_kms = get_kms(crtc); 675 struct mdp5_kms *mdp5_kms = get_kms(crtc);
662 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 676 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
663 uint32_t lm = mdp5_crtc->mixer->lm; 677 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
678 uint32_t lm = mdp5_cstate->pipeline.mixer->lm;
664 uint32_t flush_mask = mdp_ctl_flush_mask_cursor(0); 679 uint32_t flush_mask = mdp_ctl_flush_mask_cursor(0);
665 uint32_t roi_w; 680 uint32_t roi_w;
666 uint32_t roi_h; 681 uint32_t roi_h;
@@ -818,23 +833,26 @@ static void mdp5_crtc_wait_for_pp_done(struct drm_crtc *crtc)
818{ 833{
819 struct drm_device *dev = crtc->dev; 834 struct drm_device *dev = crtc->dev;
820 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 835 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
836 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
821 int ret; 837 int ret;
822 838
823 ret = wait_for_completion_timeout(&mdp5_crtc->pp_completion, 839 ret = wait_for_completion_timeout(&mdp5_crtc->pp_completion,
824 msecs_to_jiffies(50)); 840 msecs_to_jiffies(50));
825 if (ret == 0) 841 if (ret == 0)
826 dev_warn(dev->dev, "pp done time out, lm=%d\n", 842 dev_warn(dev->dev, "pp done time out, lm=%d\n",
827 mdp5_crtc->mixer->lm); 843 mdp5_cstate->pipeline.mixer->lm);
828} 844}
829 845
830static void mdp5_crtc_wait_for_flush_done(struct drm_crtc *crtc) 846static void mdp5_crtc_wait_for_flush_done(struct drm_crtc *crtc)
831{ 847{
832 struct drm_device *dev = crtc->dev; 848 struct drm_device *dev = crtc->dev;
833 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 849 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc);
850 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
851 struct mdp5_ctl *ctl = mdp5_cstate->ctl;
834 int ret; 852 int ret;
835 853
836 /* Should not call this function if crtc is disabled. */ 854 /* Should not call this function if crtc is disabled. */
837 if (!mdp5_crtc->ctl) 855 if (!ctl)
838 return; 856 return;
839 857
840 ret = drm_crtc_vblank_get(crtc); 858 ret = drm_crtc_vblank_get(crtc);
@@ -842,7 +860,7 @@ static void mdp5_crtc_wait_for_flush_done(struct drm_crtc *crtc)
842 return; 860 return;
843 861
844 ret = wait_event_timeout(dev->vblank[drm_crtc_index(crtc)].queue, 862 ret = wait_event_timeout(dev->vblank[drm_crtc_index(crtc)].queue,
845 ((mdp5_ctl_get_commit_status(mdp5_crtc->ctl) & 863 ((mdp5_ctl_get_commit_status(ctl) &
846 mdp5_crtc->flushed_mask) == 0), 864 mdp5_crtc->flushed_mask) == 0),
847 msecs_to_jiffies(50)); 865 msecs_to_jiffies(50));
848 if (ret <= 0) 866 if (ret <= 0)
@@ -862,50 +880,41 @@ uint32_t mdp5_crtc_vblank(struct drm_crtc *crtc)
862void mdp5_crtc_set_pipeline(struct drm_crtc *crtc, 880void mdp5_crtc_set_pipeline(struct drm_crtc *crtc,
863 struct mdp5_interface *intf, struct mdp5_ctl *ctl) 881 struct mdp5_interface *intf, struct mdp5_ctl *ctl)
864{ 882{
865 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 883 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
884 struct mdp5_hw_mixer *mixer = mdp5_cstate->pipeline.mixer;
866 struct mdp5_kms *mdp5_kms = get_kms(crtc); 885 struct mdp5_kms *mdp5_kms = get_kms(crtc);
867 struct mdp5_hw_mixer *mixer = mdp5_crtc->mixer;
868
869 /* now that we know what irq's we want: */
870 mdp5_crtc->err.irqmask = intf2err(intf->num);
871 mdp5_crtc->vblank.irqmask = intf2vblank(mixer, intf);
872
873 if ((intf->type == INTF_DSI) &&
874 (intf->mode == MDP5_INTF_DSI_MODE_COMMAND)) {
875 mdp5_crtc->pp_done.irqmask = lm2ppdone(mixer);
876 mdp5_crtc->pp_done.irq = mdp5_crtc_pp_done_irq;
877 mdp5_crtc->cmd_mode = true;
878 } else {
879 mdp5_crtc->pp_done.irqmask = 0;
880 mdp5_crtc->pp_done.irq = NULL;
881 mdp5_crtc->cmd_mode = false;
882 }
883 886
887 /* should this be done elsewhere ? */
884 mdp_irq_update(&mdp5_kms->base); 888 mdp_irq_update(&mdp5_kms->base);
885 889
886 mdp5_crtc->ctl = ctl;
887 mdp5_ctl_set_pipeline(ctl, intf, mixer); 890 mdp5_ctl_set_pipeline(ctl, intf, mixer);
888} 891}
889 892
890struct mdp5_ctl *mdp5_crtc_get_ctl(struct drm_crtc *crtc) 893struct mdp5_ctl *mdp5_crtc_get_ctl(struct drm_crtc *crtc)
891{ 894{
892 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 895 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
893 896
894 return mdp5_crtc->ctl; 897 return mdp5_cstate->ctl;
895} 898}
896 899
897struct mdp5_hw_mixer *mdp5_crtc_get_mixer(struct drm_crtc *crtc) 900struct mdp5_hw_mixer *mdp5_crtc_get_mixer(struct drm_crtc *crtc)
898{ 901{
899 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 902 struct mdp5_crtc_state *mdp5_cstate;
900 return WARN_ON(!crtc) || WARN_ON(!mdp5_crtc->mixer) ? 903
901 ERR_PTR(-EINVAL) : mdp5_crtc->mixer; 904 if (WARN_ON(!crtc))
905 return ERR_PTR(-EINVAL);
906
907 mdp5_cstate = to_mdp5_crtc_state(crtc->state);
908
909 return WARN_ON(!mdp5_cstate->pipeline.mixer) ?
910 ERR_PTR(-EINVAL) : mdp5_cstate->pipeline.mixer;
902} 911}
903 912
904void mdp5_crtc_wait_for_commit_done(struct drm_crtc *crtc) 913void mdp5_crtc_wait_for_commit_done(struct drm_crtc *crtc)
905{ 914{
906 struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); 915 struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state);
907 916
908 if (mdp5_crtc->cmd_mode) 917 if (mdp5_cstate->cmd_mode)
909 mdp5_crtc_wait_for_pp_done(crtc); 918 mdp5_crtc_wait_for_pp_done(crtc);
910 else 919 else
911 mdp5_crtc_wait_for_flush_done(crtc); 920 mdp5_crtc_wait_for_flush_done(crtc);
@@ -918,7 +927,6 @@ struct drm_crtc *mdp5_crtc_init(struct drm_device *dev,
918{ 927{
919 struct drm_crtc *crtc = NULL; 928 struct drm_crtc *crtc = NULL;
920 struct mdp5_crtc *mdp5_crtc; 929 struct mdp5_crtc *mdp5_crtc;
921 struct mdp5_kms *mdp5_kms;
922 930
923 mdp5_crtc = kzalloc(sizeof(*mdp5_crtc), GFP_KERNEL); 931 mdp5_crtc = kzalloc(sizeof(*mdp5_crtc), GFP_KERNEL);
924 if (!mdp5_crtc) 932 if (!mdp5_crtc)
@@ -934,6 +942,7 @@ struct drm_crtc *mdp5_crtc_init(struct drm_device *dev,
934 942
935 mdp5_crtc->vblank.irq = mdp5_crtc_vblank_irq; 943 mdp5_crtc->vblank.irq = mdp5_crtc_vblank_irq;
936 mdp5_crtc->err.irq = mdp5_crtc_err_irq; 944 mdp5_crtc->err.irq = mdp5_crtc_err_irq;
945 mdp5_crtc->pp_done.irq = mdp5_crtc_pp_done_irq;
937 946
938 if (cursor_plane) 947 if (cursor_plane)
939 drm_crtc_init_with_planes(dev, crtc, plane, cursor_plane, 948 drm_crtc_init_with_planes(dev, crtc, plane, cursor_plane,
@@ -948,8 +957,5 @@ struct drm_crtc *mdp5_crtc_init(struct drm_device *dev,
948 drm_crtc_helper_add(crtc, &mdp5_crtc_helper_funcs); 957 drm_crtc_helper_add(crtc, &mdp5_crtc_helper_funcs);
949 plane->crtc = crtc; 958 plane->crtc = crtc;
950 959
951 mdp5_kms = get_kms(crtc);
952 mdp5_crtc->mixer = mdp5_kms->hwmixers[id];
953
954 return crtc; 960 return crtc;
955} 961}
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.c
index 880c7d4208ec..24f76f48d56c 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.c
@@ -849,10 +849,6 @@ static int hwmixer_init(struct mdp5_kms *mdp5_kms)
849 return ret; 849 return ret;
850 } 850 }
851 851
852 /* Don't create LMs connected to WB for now */
853 if (!mixer)
854 continue;
855
856 mixer->idx = mdp5_kms->num_hwmixers; 852 mixer->idx = mdp5_kms->num_hwmixers;
857 mdp5_kms->hwmixers[mdp5_kms->num_hwmixers++] = mixer; 853 mdp5_kms->hwmixers[mdp5_kms->num_hwmixers++] = mixer;
858 } 854 }
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_mixer.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_mixer.c
index 6f353c4886e4..9bb1f824b2a9 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_mixer.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_mixer.c
@@ -84,10 +84,6 @@ struct mdp5_hw_mixer *mdp5_mixer_init(const struct mdp5_lm_instance *lm)
84{ 84{
85 struct mdp5_hw_mixer *mixer; 85 struct mdp5_hw_mixer *mixer;
86 86
87 /* ignore WB bound mixers for now */
88 if (lm->caps & MDP_LM_CAP_WB)
89 return NULL;
90
91 mixer = kzalloc(sizeof(*mixer), GFP_KERNEL); 87 mixer = kzalloc(sizeof(*mixer), GFP_KERNEL);
92 if (!mixer) 88 if (!mixer)
93 return ERR_PTR(-ENOMEM); 89 return ERR_PTR(-ENOMEM);