author      Archit Taneja <architt@codeaurora.org>    2017-03-23 06:28:06 -0400
committer   Rob Clark <robdclark@gmail.com>           2017-04-08 06:59:34 -0400
commit      f316b25a23da330678d476acb8a97048a793376d (patch)
tree        cf851ccb0dda73c4419d5a467c7d04e0e5da6e3f
parent      0ddc3a630743adc36639f2c4d2134808f0835e9a (diff)
drm/msm/mdp5: Remove mixer/intf pointers from mdp5_ctl
These are part of the CRTC state, so it doesn't feel right to leave them
hanging in the mdp5_ctl struct. Pass an mdp5_pipeline pointer instead
wherever it is needed.

We still have some params in mdp5_ctl, like start_mask, which are
derived from atomic state and should be rolled back if a commit fails,
but this doesn't seem to cause much trouble.
Signed-off-by: Archit Taneja <architt@codeaurora.org>
Signed-off-by: Rob Clark <robdclark@gmail.com>
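
For reference, the mdp5_pipeline that this patch threads through the CTL calls is the per-CRTC grouping of the interface and mixer that were previously cached inside mdp5_ctl. Below is a minimal sketch of its assumed shape, inferred from the pipeline->intf and pipeline->mixer accesses in the hunks that follow (the real definition lives in the mdp5 headers and may carry additional fields):

	/* Forward declarations for the opaque hardware objects. */
	struct mdp5_interface;
	struct mdp5_hw_mixer;

	/* Assumed shape of the per-CRTC pipeline state handed to the CTL ops. */
	struct mdp5_pipeline {
		struct mdp5_interface *intf;   /* drives START-signal checks and encoder flush masks */
		struct mdp5_hw_mixer *mixer;   /* drives LM flush masks, blend and cursor setup */
	};

Callers now fetch this from the CRTC state via mdp5_crtc_get_pipeline() and pass it explicitly to mdp5_ctl_commit(), mdp5_ctl_blend(), mdp5_ctl_set_cursor() and mdp5_ctl_set_encoder_state(), rather than having the CTL dereference its own cached ctl->intf and ctl->mixer pointers.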
-rw-r--r--   drivers/gpu/drm/msm/mdp/mdp5/mdp5_cmd_encoder.c | 15
-rw-r--r--   drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c        | 32
-rw-r--r--   drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.c         | 68
-rw-r--r--   drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.h         | 16
-rw-r--r--   drivers/gpu/drm/msm/mdp/mdp5/mdp5_encoder.c     | 13
-rw-r--r--   drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.h         | 4
-rw-r--r--   drivers/gpu/drm/msm/mdp/mdp5/mdp5_plane.c       | 3
7 files changed, 88 insertions, 63 deletions
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_cmd_encoder.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_cmd_encoder.c
index 18c967107bc4..8dafc7bdba48 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_cmd_encoder.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_cmd_encoder.c
@@ -132,8 +132,6 @@ void mdp5_cmd_encoder_mode_set(struct drm_encoder *encoder, | |||
132 | struct drm_display_mode *mode, | 132 | struct drm_display_mode *mode, |
133 | struct drm_display_mode *adjusted_mode) | 133 | struct drm_display_mode *adjusted_mode) |
134 | { | 134 | { |
135 | struct mdp5_encoder *mdp5_cmd_enc = to_mdp5_encoder(encoder); | ||
136 | |||
137 | mode = adjusted_mode; | 135 | mode = adjusted_mode; |
138 | 136 | ||
139 | DBG("set mode: %d:\"%s\" %d %d %d %d %d %d %d %d %d %d 0x%x 0x%x", | 137 | DBG("set mode: %d:\"%s\" %d %d %d %d %d %d %d %d %d %d 0x%x 0x%x", |
@@ -145,8 +143,7 @@ void mdp5_cmd_encoder_mode_set(struct drm_encoder *encoder, | |||
145 | mode->vsync_end, mode->vtotal, | 143 | mode->vsync_end, mode->vtotal, |
146 | mode->type, mode->flags); | 144 | mode->type, mode->flags); |
147 | pingpong_tearcheck_setup(encoder, mode); | 145 | pingpong_tearcheck_setup(encoder, mode); |
148 | mdp5_crtc_set_pipeline(encoder->crtc, mdp5_cmd_enc->intf, | 146 | mdp5_crtc_set_pipeline(encoder->crtc); |
149 | mdp5_cmd_enc->ctl); | ||
150 | } | 147 | } |
151 | 148 | ||
152 | void mdp5_cmd_encoder_disable(struct drm_encoder *encoder) | 149 | void mdp5_cmd_encoder_disable(struct drm_encoder *encoder) |
@@ -154,14 +151,15 @@ void mdp5_cmd_encoder_disable(struct drm_encoder *encoder) | |||
154 | struct mdp5_encoder *mdp5_cmd_enc = to_mdp5_encoder(encoder); | 151 | struct mdp5_encoder *mdp5_cmd_enc = to_mdp5_encoder(encoder); |
155 | struct mdp5_ctl *ctl = mdp5_cmd_enc->ctl; | 152 | struct mdp5_ctl *ctl = mdp5_cmd_enc->ctl; |
156 | struct mdp5_interface *intf = mdp5_cmd_enc->intf; | 153 | struct mdp5_interface *intf = mdp5_cmd_enc->intf; |
154 | struct mdp5_pipeline *pipeline = mdp5_crtc_get_pipeline(encoder->crtc); | ||
157 | 155 | ||
158 | if (WARN_ON(!mdp5_cmd_enc->enabled)) | 156 | if (WARN_ON(!mdp5_cmd_enc->enabled)) |
159 | return; | 157 | return; |
160 | 158 | ||
161 | pingpong_tearcheck_disable(encoder); | 159 | pingpong_tearcheck_disable(encoder); |
162 | 160 | ||
163 | mdp5_ctl_set_encoder_state(ctl, false); | 161 | mdp5_ctl_set_encoder_state(ctl, pipeline, false); |
164 | mdp5_ctl_commit(ctl, mdp_ctl_flush_mask_encoder(intf)); | 162 | mdp5_ctl_commit(ctl, pipeline, mdp_ctl_flush_mask_encoder(intf)); |
165 | 163 | ||
166 | bs_set(mdp5_cmd_enc, 0); | 164 | bs_set(mdp5_cmd_enc, 0); |
167 | 165 | ||
@@ -173,6 +171,7 @@ void mdp5_cmd_encoder_enable(struct drm_encoder *encoder) | |||
173 | struct mdp5_encoder *mdp5_cmd_enc = to_mdp5_encoder(encoder); | 171 | struct mdp5_encoder *mdp5_cmd_enc = to_mdp5_encoder(encoder); |
174 | struct mdp5_ctl *ctl = mdp5_cmd_enc->ctl; | 172 | struct mdp5_ctl *ctl = mdp5_cmd_enc->ctl; |
175 | struct mdp5_interface *intf = mdp5_cmd_enc->intf; | 173 | struct mdp5_interface *intf = mdp5_cmd_enc->intf; |
174 | struct mdp5_pipeline *pipeline = mdp5_crtc_get_pipeline(encoder->crtc); | ||
176 | 175 | ||
177 | if (WARN_ON(mdp5_cmd_enc->enabled)) | 176 | if (WARN_ON(mdp5_cmd_enc->enabled)) |
178 | return; | 177 | return; |
@@ -181,9 +180,9 @@ void mdp5_cmd_encoder_enable(struct drm_encoder *encoder) | |||
181 | if (pingpong_tearcheck_enable(encoder)) | 180 | if (pingpong_tearcheck_enable(encoder)) |
182 | return; | 181 | return; |
183 | 182 | ||
184 | mdp5_ctl_commit(ctl, mdp_ctl_flush_mask_encoder(intf)); | 183 | mdp5_ctl_commit(ctl, pipeline, mdp_ctl_flush_mask_encoder(intf)); |
185 | 184 | ||
186 | mdp5_ctl_set_encoder_state(ctl, true); | 185 | mdp5_ctl_set_encoder_state(ctl, pipeline, true); |
187 | 186 | ||
188 | mdp5_cmd_enc->enabled = true; | 187 | mdp5_cmd_enc->enabled = true; |
189 | } | 188 | } |
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c
index c33855eeda24..03a64bd39f22 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_crtc.c
@@ -91,9 +91,10 @@ static u32 crtc_flush(struct drm_crtc *crtc, u32 flush_mask) | |||
91 | { | 91 | { |
92 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); | 92 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); |
93 | struct mdp5_ctl *ctl = mdp5_cstate->ctl; | 93 | struct mdp5_ctl *ctl = mdp5_cstate->ctl; |
94 | struct mdp5_pipeline *pipeline = &mdp5_cstate->pipeline; | ||
94 | 95 | ||
95 | DBG("%s: flush=%08x", crtc->name, flush_mask); | 96 | DBG("%s: flush=%08x", crtc->name, flush_mask); |
96 | return mdp5_ctl_commit(ctl, flush_mask); | 97 | return mdp5_ctl_commit(ctl, pipeline, flush_mask); |
97 | } | 98 | } |
98 | 99 | ||
99 | /* | 100 | /* |
@@ -126,6 +127,7 @@ static u32 crtc_flush_all(struct drm_crtc *crtc) | |||
126 | static void complete_flip(struct drm_crtc *crtc, struct drm_file *file) | 127 | static void complete_flip(struct drm_crtc *crtc, struct drm_file *file) |
127 | { | 128 | { |
128 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); | 129 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); |
130 | struct mdp5_pipeline *pipeline = &mdp5_cstate->pipeline; | ||
129 | struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); | 131 | struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); |
130 | struct mdp5_ctl *ctl = mdp5_cstate->ctl; | 132 | struct mdp5_ctl *ctl = mdp5_cstate->ctl; |
131 | struct drm_device *dev = crtc->dev; | 133 | struct drm_device *dev = crtc->dev; |
@@ -143,7 +145,7 @@ static void complete_flip(struct drm_crtc *crtc, struct drm_file *file) | |||
143 | 145 | ||
144 | if (ctl && !crtc->state->enable) { | 146 | if (ctl && !crtc->state->enable) { |
145 | /* set STAGE_UNUSED for all layers */ | 147 | /* set STAGE_UNUSED for all layers */ |
146 | mdp5_ctl_blend(ctl, NULL, 0, 0); | 148 | mdp5_ctl_blend(ctl, pipeline, NULL, 0, 0); |
147 | /* XXX: What to do here? */ | 149 | /* XXX: What to do here? */ |
148 | /* mdp5_crtc->ctl = NULL; */ | 150 | /* mdp5_crtc->ctl = NULL; */ |
149 | } | 151 | } |
@@ -195,12 +197,13 @@ static void blend_setup(struct drm_crtc *crtc) | |||
195 | { | 197 | { |
196 | struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); | 198 | struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); |
197 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); | 199 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); |
200 | struct mdp5_pipeline *pipeline = &mdp5_cstate->pipeline; | ||
198 | struct mdp5_kms *mdp5_kms = get_kms(crtc); | 201 | struct mdp5_kms *mdp5_kms = get_kms(crtc); |
199 | struct drm_plane *plane; | 202 | struct drm_plane *plane; |
200 | const struct mdp5_cfg_hw *hw_cfg; | 203 | const struct mdp5_cfg_hw *hw_cfg; |
201 | struct mdp5_plane_state *pstate, *pstates[STAGE_MAX + 1] = {NULL}; | 204 | struct mdp5_plane_state *pstate, *pstates[STAGE_MAX + 1] = {NULL}; |
202 | const struct mdp_format *format; | 205 | const struct mdp_format *format; |
203 | struct mdp5_hw_mixer *mixer = mdp5_cstate->pipeline.mixer; | 206 | struct mdp5_hw_mixer *mixer = pipeline->mixer; |
204 | uint32_t lm = mixer->lm; | 207 | uint32_t lm = mixer->lm; |
205 | struct mdp5_ctl *ctl = mdp5_cstate->ctl; | 208 | struct mdp5_ctl *ctl = mdp5_cstate->ctl; |
206 | uint32_t blend_op, fg_alpha, bg_alpha, ctl_blend_flags = 0; | 209 | uint32_t blend_op, fg_alpha, bg_alpha, ctl_blend_flags = 0; |
@@ -294,7 +297,7 @@ static void blend_setup(struct drm_crtc *crtc) | |||
294 | 297 | ||
295 | mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_COLOR_OUT(lm), mixer_op_mode); | 298 | mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_COLOR_OUT(lm), mixer_op_mode); |
296 | 299 | ||
297 | mdp5_ctl_blend(ctl, stage, plane_cnt, ctl_blend_flags); | 300 | mdp5_ctl_blend(ctl, pipeline, stage, plane_cnt, ctl_blend_flags); |
298 | 301 | ||
299 | out: | 302 | out: |
300 | spin_unlock_irqrestore(&mdp5_crtc->lm_lock, flags); | 303 | spin_unlock_irqrestore(&mdp5_crtc->lm_lock, flags); |
@@ -586,6 +589,7 @@ static int mdp5_crtc_cursor_set(struct drm_crtc *crtc, | |||
586 | { | 589 | { |
587 | struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); | 590 | struct mdp5_crtc *mdp5_crtc = to_mdp5_crtc(crtc); |
588 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); | 591 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); |
592 | struct mdp5_pipeline *pipeline = &mdp5_cstate->pipeline; | ||
589 | struct drm_device *dev = crtc->dev; | 593 | struct drm_device *dev = crtc->dev; |
590 | struct mdp5_kms *mdp5_kms = get_kms(crtc); | 594 | struct mdp5_kms *mdp5_kms = get_kms(crtc); |
591 | struct drm_gem_object *cursor_bo, *old_bo = NULL; | 595 | struct drm_gem_object *cursor_bo, *old_bo = NULL; |
@@ -652,7 +656,7 @@ static int mdp5_crtc_cursor_set(struct drm_crtc *crtc, | |||
652 | spin_unlock_irqrestore(&mdp5_crtc->cursor.lock, flags); | 656 | spin_unlock_irqrestore(&mdp5_crtc->cursor.lock, flags); |
653 | 657 | ||
654 | set_cursor: | 658 | set_cursor: |
655 | ret = mdp5_ctl_set_cursor(ctl, 0, cursor_enable); | 659 | ret = mdp5_ctl_set_cursor(ctl, pipeline, 0, cursor_enable); |
656 | if (ret) { | 660 | if (ret) { |
657 | dev_err(dev->dev, "failed to %sable cursor: %d\n", | 661 | dev_err(dev->dev, "failed to %sable cursor: %d\n", |
658 | cursor_enable ? "en" : "dis", ret); | 662 | cursor_enable ? "en" : "dis", ret); |
@@ -877,17 +881,15 @@ uint32_t mdp5_crtc_vblank(struct drm_crtc *crtc) | |||
877 | return mdp5_crtc->vblank.irqmask; | 881 | return mdp5_crtc->vblank.irqmask; |
878 | } | 882 | } |
879 | 883 | ||
880 | void mdp5_crtc_set_pipeline(struct drm_crtc *crtc, | 884 | void mdp5_crtc_set_pipeline(struct drm_crtc *crtc) |
881 | struct mdp5_interface *intf, struct mdp5_ctl *ctl) | ||
882 | { | 885 | { |
883 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); | 886 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); |
884 | struct mdp5_hw_mixer *mixer = mdp5_cstate->pipeline.mixer; | ||
885 | struct mdp5_kms *mdp5_kms = get_kms(crtc); | 887 | struct mdp5_kms *mdp5_kms = get_kms(crtc); |
886 | 888 | ||
887 | /* should this be done elsewhere ? */ | 889 | /* should this be done elsewhere ? */ |
888 | mdp_irq_update(&mdp5_kms->base); | 890 | mdp_irq_update(&mdp5_kms->base); |
889 | 891 | ||
890 | mdp5_ctl_set_pipeline(ctl, intf, mixer); | 892 | mdp5_ctl_set_pipeline(mdp5_cstate->ctl, &mdp5_cstate->pipeline); |
891 | } | 893 | } |
892 | 894 | ||
893 | struct mdp5_ctl *mdp5_crtc_get_ctl(struct drm_crtc *crtc) | 895 | struct mdp5_ctl *mdp5_crtc_get_ctl(struct drm_crtc *crtc) |
@@ -910,6 +912,18 @@ struct mdp5_hw_mixer *mdp5_crtc_get_mixer(struct drm_crtc *crtc) | |||
910 | ERR_PTR(-EINVAL) : mdp5_cstate->pipeline.mixer; | 912 | ERR_PTR(-EINVAL) : mdp5_cstate->pipeline.mixer; |
911 | } | 913 | } |
912 | 914 | ||
915 | struct mdp5_pipeline *mdp5_crtc_get_pipeline(struct drm_crtc *crtc) | ||
916 | { | ||
917 | struct mdp5_crtc_state *mdp5_cstate; | ||
918 | |||
919 | if (WARN_ON(!crtc)) | ||
920 | return ERR_PTR(-EINVAL); | ||
921 | |||
922 | mdp5_cstate = to_mdp5_crtc_state(crtc->state); | ||
923 | |||
924 | return &mdp5_cstate->pipeline; | ||
925 | } | ||
926 | |||
913 | void mdp5_crtc_wait_for_commit_done(struct drm_crtc *crtc) | 927 | void mdp5_crtc_wait_for_commit_done(struct drm_crtc *crtc) |
914 | { | 928 | { |
915 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); | 929 | struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc->state); |
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.c
index a86f1fd359c3..9a0109410974 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.c
@@ -36,14 +36,10 @@ struct mdp5_ctl { | |||
36 | struct mdp5_ctl_manager *ctlm; | 36 | struct mdp5_ctl_manager *ctlm; |
37 | 37 | ||
38 | u32 id; | 38 | u32 id; |
39 | struct mdp5_hw_mixer *mixer; | ||
40 | 39 | ||
41 | /* CTL status bitmask */ | 40 | /* CTL status bitmask */ |
42 | u32 status; | 41 | u32 status; |
43 | 42 | ||
44 | /* Operation Mode Configuration for the Pipeline */ | ||
45 | struct mdp5_interface *intf; | ||
46 | |||
47 | bool encoder_enabled; | 43 | bool encoder_enabled; |
48 | uint32_t start_mask; | 44 | uint32_t start_mask; |
49 | 45 | ||
@@ -170,14 +166,12 @@ static void set_ctl_op(struct mdp5_ctl *ctl, struct mdp5_interface *intf) | |||
170 | spin_unlock_irqrestore(&ctl->hw_lock, flags); | 166 | spin_unlock_irqrestore(&ctl->hw_lock, flags); |
171 | } | 167 | } |
172 | 168 | ||
173 | int mdp5_ctl_set_pipeline(struct mdp5_ctl *ctl, struct mdp5_interface *intf, | 169 | int mdp5_ctl_set_pipeline(struct mdp5_ctl *ctl, struct mdp5_pipeline *pipeline) |
174 | struct mdp5_hw_mixer *mixer) | ||
175 | { | 170 | { |
176 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; | 171 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; |
177 | struct mdp5_kms *mdp5_kms = get_kms(ctl_mgr); | 172 | struct mdp5_kms *mdp5_kms = get_kms(ctl_mgr); |
178 | 173 | struct mdp5_interface *intf = pipeline->intf; | |
179 | ctl->mixer = mixer; | 174 | struct mdp5_hw_mixer *mixer = pipeline->mixer; |
180 | ctl->intf = intf; | ||
181 | 175 | ||
182 | ctl->start_mask = mdp_ctl_flush_mask_lm(mixer->lm) | | 176 | ctl->start_mask = mdp_ctl_flush_mask_lm(mixer->lm) | |
183 | mdp_ctl_flush_mask_encoder(intf); | 177 | mdp_ctl_flush_mask_encoder(intf); |
@@ -191,16 +185,19 @@ int mdp5_ctl_set_pipeline(struct mdp5_ctl *ctl, struct mdp5_interface *intf, | |||
191 | return 0; | 185 | return 0; |
192 | } | 186 | } |
193 | 187 | ||
194 | static bool start_signal_needed(struct mdp5_ctl *ctl) | 188 | static bool start_signal_needed(struct mdp5_ctl *ctl, |
189 | struct mdp5_pipeline *pipeline) | ||
195 | { | 190 | { |
191 | struct mdp5_interface *intf = pipeline->intf; | ||
192 | |||
196 | if (!ctl->encoder_enabled || ctl->start_mask != 0) | 193 | if (!ctl->encoder_enabled || ctl->start_mask != 0) |
197 | return false; | 194 | return false; |
198 | 195 | ||
199 | switch (ctl->intf->type) { | 196 | switch (intf->type) { |
200 | case INTF_WB: | 197 | case INTF_WB: |
201 | return true; | 198 | return true; |
202 | case INTF_DSI: | 199 | case INTF_DSI: |
203 | return ctl->intf->mode == MDP5_INTF_DSI_MODE_COMMAND; | 200 | return intf->mode == MDP5_INTF_DSI_MODE_COMMAND; |
204 | default: | 201 | default: |
205 | return false; | 202 | return false; |
206 | } | 203 | } |
@@ -222,11 +219,13 @@ static void send_start_signal(struct mdp5_ctl *ctl) | |||
222 | spin_unlock_irqrestore(&ctl->hw_lock, flags); | 219 | spin_unlock_irqrestore(&ctl->hw_lock, flags); |
223 | } | 220 | } |
224 | 221 | ||
225 | static void refill_start_mask(struct mdp5_ctl *ctl) | 222 | static void refill_start_mask(struct mdp5_ctl *ctl, |
223 | struct mdp5_pipeline *pipeline) | ||
226 | { | 224 | { |
227 | struct mdp5_interface *intf = ctl->intf; | 225 | struct mdp5_interface *intf = pipeline->intf; |
226 | struct mdp5_hw_mixer *mixer = pipeline->mixer; | ||
228 | 227 | ||
229 | ctl->start_mask = mdp_ctl_flush_mask_lm(ctl->mixer->lm); | 228 | ctl->start_mask = mdp_ctl_flush_mask_lm(mixer->lm); |
230 | 229 | ||
231 | /* | 230 | /* |
232 | * Writeback encoder needs to program & flush | 231 | * Writeback encoder needs to program & flush |
@@ -244,17 +243,21 @@ static void refill_start_mask(struct mdp5_ctl *ctl) | |||
244 | * Note: | 243 | * Note: |
245 | * This encoder state is needed to trigger START signal (data path kickoff). | 244 | * This encoder state is needed to trigger START signal (data path kickoff). |
246 | */ | 245 | */ |
247 | int mdp5_ctl_set_encoder_state(struct mdp5_ctl *ctl, bool enabled) | 246 | int mdp5_ctl_set_encoder_state(struct mdp5_ctl *ctl, |
247 | struct mdp5_pipeline *pipeline, | ||
248 | bool enabled) | ||
248 | { | 249 | { |
250 | struct mdp5_interface *intf = pipeline->intf; | ||
251 | |||
249 | if (WARN_ON(!ctl)) | 252 | if (WARN_ON(!ctl)) |
250 | return -EINVAL; | 253 | return -EINVAL; |
251 | 254 | ||
252 | ctl->encoder_enabled = enabled; | 255 | ctl->encoder_enabled = enabled; |
253 | DBG("intf_%d: %s", ctl->intf->num, enabled ? "on" : "off"); | 256 | DBG("intf_%d: %s", intf->num, enabled ? "on" : "off"); |
254 | 257 | ||
255 | if (start_signal_needed(ctl)) { | 258 | if (start_signal_needed(ctl, pipeline)) { |
256 | send_start_signal(ctl); | 259 | send_start_signal(ctl); |
257 | refill_start_mask(ctl); | 260 | refill_start_mask(ctl, pipeline); |
258 | } | 261 | } |
259 | 262 | ||
260 | return 0; | 263 | return 0; |
@@ -265,12 +268,13 @@ int mdp5_ctl_set_encoder_state(struct mdp5_ctl *ctl, bool enabled) | |||
265 | * CTL registers need to be flushed after calling this function | 268 | * CTL registers need to be flushed after calling this function |
266 | * (call mdp5_ctl_commit() with mdp_ctl_flush_mask_ctl() mask) | 269 | * (call mdp5_ctl_commit() with mdp_ctl_flush_mask_ctl() mask) |
267 | */ | 270 | */ |
268 | int mdp5_ctl_set_cursor(struct mdp5_ctl *ctl, int cursor_id, bool enable) | 271 | int mdp5_ctl_set_cursor(struct mdp5_ctl *ctl, struct mdp5_pipeline *pipeline, |
272 | int cursor_id, bool enable) | ||
269 | { | 273 | { |
270 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; | 274 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; |
271 | unsigned long flags; | 275 | unsigned long flags; |
272 | u32 blend_cfg; | 276 | u32 blend_cfg; |
273 | struct mdp5_hw_mixer *mixer = ctl->mixer; | 277 | struct mdp5_hw_mixer *mixer = pipeline->mixer; |
274 | 278 | ||
275 | if (unlikely(WARN_ON(!mixer))) { | 279 | if (unlikely(WARN_ON(!mixer))) { |
276 | dev_err(ctl_mgr->dev->dev, "CTL %d cannot find LM", | 280 | dev_err(ctl_mgr->dev->dev, "CTL %d cannot find LM", |
@@ -340,10 +344,10 @@ static u32 mdp_ctl_blend_ext_mask(enum mdp5_pipe pipe, | |||
340 | } | 344 | } |
341 | } | 345 | } |
342 | 346 | ||
343 | int mdp5_ctl_blend(struct mdp5_ctl *ctl, enum mdp5_pipe *stage, u32 stage_cnt, | 347 | int mdp5_ctl_blend(struct mdp5_ctl *ctl, struct mdp5_pipeline *pipeline, |
344 | u32 ctl_blend_op_flags) | 348 | enum mdp5_pipe *stage, u32 stage_cnt, u32 ctl_blend_op_flags) |
345 | { | 349 | { |
346 | struct mdp5_hw_mixer *mixer = ctl->mixer; | 350 | struct mdp5_hw_mixer *mixer = pipeline->mixer; |
347 | unsigned long flags; | 351 | unsigned long flags; |
348 | u32 blend_cfg = 0, blend_ext_cfg = 0; | 352 | u32 blend_cfg = 0, blend_ext_cfg = 0; |
349 | int i, start_stage; | 353 | int i, start_stage; |
@@ -430,7 +434,8 @@ u32 mdp_ctl_flush_mask_lm(int lm) | |||
430 | } | 434 | } |
431 | } | 435 | } |
432 | 436 | ||
433 | static u32 fix_sw_flush(struct mdp5_ctl *ctl, u32 flush_mask) | 437 | static u32 fix_sw_flush(struct mdp5_ctl *ctl, struct mdp5_pipeline *pipeline, |
438 | u32 flush_mask) | ||
434 | { | 439 | { |
435 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; | 440 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; |
436 | u32 sw_mask = 0; | 441 | u32 sw_mask = 0; |
@@ -439,7 +444,7 @@ static u32 fix_sw_flush(struct mdp5_ctl *ctl, u32 flush_mask) | |||
439 | 444 | ||
440 | /* for some targets, cursor bit is the same as LM bit */ | 445 | /* for some targets, cursor bit is the same as LM bit */ |
441 | if (BIT_NEEDS_SW_FIX(MDP5_CTL_FLUSH_CURSOR_0)) | 446 | if (BIT_NEEDS_SW_FIX(MDP5_CTL_FLUSH_CURSOR_0)) |
442 | sw_mask |= mdp_ctl_flush_mask_lm(ctl->mixer->lm); | 447 | sw_mask |= mdp_ctl_flush_mask_lm(pipeline->mixer->lm); |
443 | 448 | ||
444 | return sw_mask; | 449 | return sw_mask; |
445 | } | 450 | } |
@@ -485,7 +490,9 @@ static void fix_for_single_flush(struct mdp5_ctl *ctl, u32 *flush_mask, | |||
485 | * | 490 | * |
486 | * Return H/W flushed bit mask. | 491 | * Return H/W flushed bit mask. |
487 | */ | 492 | */ |
488 | u32 mdp5_ctl_commit(struct mdp5_ctl *ctl, u32 flush_mask) | 493 | u32 mdp5_ctl_commit(struct mdp5_ctl *ctl, |
494 | struct mdp5_pipeline *pipeline, | ||
495 | u32 flush_mask) | ||
489 | { | 496 | { |
490 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; | 497 | struct mdp5_ctl_manager *ctl_mgr = ctl->ctlm; |
491 | unsigned long flags; | 498 | unsigned long flags; |
@@ -502,7 +509,7 @@ u32 mdp5_ctl_commit(struct mdp5_ctl *ctl, u32 flush_mask) | |||
502 | ctl->pending_ctl_trigger = 0; | 509 | ctl->pending_ctl_trigger = 0; |
503 | } | 510 | } |
504 | 511 | ||
505 | flush_mask |= fix_sw_flush(ctl, flush_mask); | 512 | flush_mask |= fix_sw_flush(ctl, pipeline, flush_mask); |
506 | 513 | ||
507 | flush_mask &= ctl_mgr->flush_hw_mask; | 514 | flush_mask &= ctl_mgr->flush_hw_mask; |
508 | 515 | ||
@@ -516,9 +523,9 @@ u32 mdp5_ctl_commit(struct mdp5_ctl *ctl, u32 flush_mask) | |||
516 | spin_unlock_irqrestore(&ctl->hw_lock, flags); | 523 | spin_unlock_irqrestore(&ctl->hw_lock, flags); |
517 | } | 524 | } |
518 | 525 | ||
519 | if (start_signal_needed(ctl)) { | 526 | if (start_signal_needed(ctl, pipeline)) { |
520 | send_start_signal(ctl); | 527 | send_start_signal(ctl); |
521 | refill_start_mask(ctl); | 528 | refill_start_mask(ctl, pipeline); |
522 | } | 529 | } |
523 | 530 | ||
524 | return curr_ctl_flush_mask; | 531 | return curr_ctl_flush_mask; |
@@ -605,7 +612,6 @@ struct mdp5_ctl *mdp5_ctlm_request(struct mdp5_ctl_manager *ctl_mgr, | |||
605 | 612 | ||
606 | found: | 613 | found: |
607 | ctl = &ctl_mgr->ctls[c]; | 614 | ctl = &ctl_mgr->ctls[c]; |
608 | ctl->mixer = NULL; | ||
609 | ctl->status |= CTL_STAT_BUSY; | 615 | ctl->status |= CTL_STAT_BUSY; |
610 | ctl->pending_ctl_trigger = 0; | 616 | ctl->pending_ctl_trigger = 0; |
611 | DBG("CTL %d allocated", ctl->id); | 617 | DBG("CTL %d allocated", ctl->id); |
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.h b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.h
index 882c9d2be365..751dd861cfd8 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.h
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_ctl.h
@@ -37,11 +37,13 @@ struct mdp5_ctl *mdp5_ctlm_request(struct mdp5_ctl_manager *ctlm, int intf_num); | |||
37 | int mdp5_ctl_get_ctl_id(struct mdp5_ctl *ctl); | 37 | int mdp5_ctl_get_ctl_id(struct mdp5_ctl *ctl); |
38 | 38 | ||
39 | struct mdp5_interface; | 39 | struct mdp5_interface; |
40 | int mdp5_ctl_set_pipeline(struct mdp5_ctl *ctl, struct mdp5_interface *intf, | 40 | struct mdp5_pipeline; |
41 | struct mdp5_hw_mixer *lm); | 41 | int mdp5_ctl_set_pipeline(struct mdp5_ctl *ctl, struct mdp5_pipeline *p); |
42 | int mdp5_ctl_set_encoder_state(struct mdp5_ctl *ctl, bool enabled); | 42 | int mdp5_ctl_set_encoder_state(struct mdp5_ctl *ctl, struct mdp5_pipeline *p, |
43 | bool enabled); | ||
43 | 44 | ||
44 | int mdp5_ctl_set_cursor(struct mdp5_ctl *ctl, int cursor_id, bool enable); | 45 | int mdp5_ctl_set_cursor(struct mdp5_ctl *ctl, struct mdp5_pipeline *pipeline, |
46 | int cursor_id, bool enable); | ||
45 | int mdp5_ctl_pair(struct mdp5_ctl *ctlx, struct mdp5_ctl *ctly, bool enable); | 47 | int mdp5_ctl_pair(struct mdp5_ctl *ctlx, struct mdp5_ctl *ctly, bool enable); |
46 | 48 | ||
47 | /* | 49 | /* |
@@ -56,7 +58,8 @@ int mdp5_ctl_pair(struct mdp5_ctl *ctlx, struct mdp5_ctl *ctly, bool enable); | |||
56 | * (call mdp5_ctl_commit() with mdp_ctl_flush_mask_ctl() mask) | 58 | * (call mdp5_ctl_commit() with mdp_ctl_flush_mask_ctl() mask) |
57 | */ | 59 | */ |
58 | #define MDP5_CTL_BLEND_OP_FLAG_BORDER_OUT BIT(0) | 60 | #define MDP5_CTL_BLEND_OP_FLAG_BORDER_OUT BIT(0) |
59 | int mdp5_ctl_blend(struct mdp5_ctl *ctl, enum mdp5_pipe *stage, u32 stage_cnt, | 61 | int mdp5_ctl_blend(struct mdp5_ctl *ctl, struct mdp5_pipeline *pipeline, |
62 | enum mdp5_pipe *stage, u32 stage_cnt, | ||
60 | u32 ctl_blend_op_flags); | 63 | u32 ctl_blend_op_flags); |
61 | 64 | ||
62 | /** | 65 | /** |
@@ -71,7 +74,8 @@ u32 mdp_ctl_flush_mask_cursor(int cursor_id); | |||
71 | u32 mdp_ctl_flush_mask_encoder(struct mdp5_interface *intf); | 74 | u32 mdp_ctl_flush_mask_encoder(struct mdp5_interface *intf); |
72 | 75 | ||
73 | /* @flush_mask: see CTL flush masks definitions below */ | 76 | /* @flush_mask: see CTL flush masks definitions below */ |
74 | u32 mdp5_ctl_commit(struct mdp5_ctl *ctl, u32 flush_mask); | 77 | u32 mdp5_ctl_commit(struct mdp5_ctl *ctl, struct mdp5_pipeline *pipeline, |
78 | u32 flush_mask); | ||
75 | u32 mdp5_ctl_get_commit_status(struct mdp5_ctl *ctl); | 79 | u32 mdp5_ctl_get_commit_status(struct mdp5_ctl *ctl); |
76 | 80 | ||
77 | 81 | ||
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_encoder.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_encoder.c
index 6d535140ef21..c2ab0f033031 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_encoder.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_encoder.c
@@ -206,8 +206,7 @@ static void mdp5_vid_encoder_mode_set(struct drm_encoder *encoder, | |||
206 | 206 | ||
207 | spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags); | 207 | spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags); |
208 | 208 | ||
209 | mdp5_crtc_set_pipeline(encoder->crtc, mdp5_encoder->intf, | 209 | mdp5_crtc_set_pipeline(encoder->crtc); |
210 | mdp5_encoder->ctl); | ||
211 | } | 210 | } |
212 | 211 | ||
213 | static void mdp5_vid_encoder_disable(struct drm_encoder *encoder) | 212 | static void mdp5_vid_encoder_disable(struct drm_encoder *encoder) |
@@ -215,6 +214,7 @@ static void mdp5_vid_encoder_disable(struct drm_encoder *encoder) | |||
215 | struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder); | 214 | struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder); |
216 | struct mdp5_kms *mdp5_kms = get_kms(encoder); | 215 | struct mdp5_kms *mdp5_kms = get_kms(encoder); |
217 | struct mdp5_ctl *ctl = mdp5_encoder->ctl; | 216 | struct mdp5_ctl *ctl = mdp5_encoder->ctl; |
217 | struct mdp5_pipeline *pipeline = mdp5_crtc_get_pipeline(encoder->crtc); | ||
218 | struct mdp5_hw_mixer *mixer = mdp5_crtc_get_mixer(encoder->crtc); | 218 | struct mdp5_hw_mixer *mixer = mdp5_crtc_get_mixer(encoder->crtc); |
219 | struct mdp5_interface *intf = mdp5_encoder->intf; | 219 | struct mdp5_interface *intf = mdp5_encoder->intf; |
220 | int intfn = mdp5_encoder->intf->num; | 220 | int intfn = mdp5_encoder->intf->num; |
@@ -223,12 +223,12 @@ static void mdp5_vid_encoder_disable(struct drm_encoder *encoder) | |||
223 | if (WARN_ON(!mdp5_encoder->enabled)) | 223 | if (WARN_ON(!mdp5_encoder->enabled)) |
224 | return; | 224 | return; |
225 | 225 | ||
226 | mdp5_ctl_set_encoder_state(ctl, false); | 226 | mdp5_ctl_set_encoder_state(ctl, pipeline, false); |
227 | 227 | ||
228 | spin_lock_irqsave(&mdp5_encoder->intf_lock, flags); | 228 | spin_lock_irqsave(&mdp5_encoder->intf_lock, flags); |
229 | mdp5_write(mdp5_kms, REG_MDP5_INTF_TIMING_ENGINE_EN(intfn), 0); | 229 | mdp5_write(mdp5_kms, REG_MDP5_INTF_TIMING_ENGINE_EN(intfn), 0); |
230 | spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags); | 230 | spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags); |
231 | mdp5_ctl_commit(ctl, mdp_ctl_flush_mask_encoder(intf)); | 231 | mdp5_ctl_commit(ctl, pipeline, mdp_ctl_flush_mask_encoder(intf)); |
232 | 232 | ||
233 | /* | 233 | /* |
234 | * Wait for a vsync so we know the ENABLE=0 latched before | 234 | * Wait for a vsync so we know the ENABLE=0 latched before |
@@ -251,6 +251,7 @@ static void mdp5_vid_encoder_enable(struct drm_encoder *encoder) | |||
251 | struct mdp5_kms *mdp5_kms = get_kms(encoder); | 251 | struct mdp5_kms *mdp5_kms = get_kms(encoder); |
252 | struct mdp5_ctl *ctl = mdp5_encoder->ctl; | 252 | struct mdp5_ctl *ctl = mdp5_encoder->ctl; |
253 | struct mdp5_interface *intf = mdp5_encoder->intf; | 253 | struct mdp5_interface *intf = mdp5_encoder->intf; |
254 | struct mdp5_pipeline *pipeline = mdp5_crtc_get_pipeline(encoder->crtc); | ||
254 | int intfn = intf->num; | 255 | int intfn = intf->num; |
255 | unsigned long flags; | 256 | unsigned long flags; |
256 | 257 | ||
@@ -261,9 +262,9 @@ static void mdp5_vid_encoder_enable(struct drm_encoder *encoder) | |||
261 | spin_lock_irqsave(&mdp5_encoder->intf_lock, flags); | 262 | spin_lock_irqsave(&mdp5_encoder->intf_lock, flags); |
262 | mdp5_write(mdp5_kms, REG_MDP5_INTF_TIMING_ENGINE_EN(intfn), 1); | 263 | mdp5_write(mdp5_kms, REG_MDP5_INTF_TIMING_ENGINE_EN(intfn), 1); |
263 | spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags); | 264 | spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags); |
264 | mdp5_ctl_commit(ctl, mdp_ctl_flush_mask_encoder(intf)); | 265 | mdp5_ctl_commit(ctl, pipeline, mdp_ctl_flush_mask_encoder(intf)); |
265 | 266 | ||
266 | mdp5_ctl_set_encoder_state(ctl, true); | 267 | mdp5_ctl_set_encoder_state(ctl, pipeline, true); |
267 | 268 | ||
268 | mdp5_encoder->enabled = true; | 269 | mdp5_encoder->enabled = true; |
269 | } | 270 | } |
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.h b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.h
index 35a21e6c5856..fcd067f65a93 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.h
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_kms.h
@@ -280,8 +280,8 @@ struct mdp5_ctl *mdp5_crtc_get_ctl(struct drm_crtc *crtc); | |||
280 | uint32_t mdp5_crtc_vblank(struct drm_crtc *crtc); | 280 | uint32_t mdp5_crtc_vblank(struct drm_crtc *crtc); |
281 | 281 | ||
282 | struct mdp5_hw_mixer *mdp5_crtc_get_mixer(struct drm_crtc *crtc); | 282 | struct mdp5_hw_mixer *mdp5_crtc_get_mixer(struct drm_crtc *crtc); |
283 | void mdp5_crtc_set_pipeline(struct drm_crtc *crtc, | 283 | struct mdp5_pipeline *mdp5_crtc_get_pipeline(struct drm_crtc *crtc); |
284 | struct mdp5_interface *intf, struct mdp5_ctl *ctl); | 284 | void mdp5_crtc_set_pipeline(struct drm_crtc *crtc); |
285 | void mdp5_crtc_wait_for_commit_done(struct drm_crtc *crtc); | 285 | void mdp5_crtc_wait_for_commit_done(struct drm_crtc *crtc); |
286 | struct drm_crtc *mdp5_crtc_init(struct drm_device *dev, | 286 | struct drm_crtc *mdp5_crtc_init(struct drm_device *dev, |
287 | struct drm_plane *plane, | 287 | struct drm_plane *plane, |
diff --git a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_plane.c b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_plane.c
index 49e50debda90..6086a2de3d27 100644
--- a/drivers/gpu/drm/msm/mdp/mdp5/mdp5_plane.c
+++ b/drivers/gpu/drm/msm/mdp/mdp5/mdp5_plane.c
@@ -937,6 +937,7 @@ static int mdp5_update_cursor_plane_legacy(struct drm_plane *plane, | |||
937 | 937 | ||
938 | if (new_plane_state->visible) { | 938 | if (new_plane_state->visible) { |
939 | struct mdp5_ctl *ctl; | 939 | struct mdp5_ctl *ctl; |
940 | struct mdp5_pipeline *pipeline = mdp5_crtc_get_pipeline(crtc); | ||
940 | 941 | ||
941 | ret = mdp5_plane_mode_set(plane, crtc, fb, | 942 | ret = mdp5_plane_mode_set(plane, crtc, fb, |
942 | &new_plane_state->src, | 943 | &new_plane_state->src, |
@@ -945,7 +946,7 @@ static int mdp5_update_cursor_plane_legacy(struct drm_plane *plane, | |||
945 | 946 | ||
946 | ctl = mdp5_crtc_get_ctl(crtc); | 947 | ctl = mdp5_crtc_get_ctl(crtc); |
947 | 948 | ||
948 | mdp5_ctl_commit(ctl, mdp5_plane_get_flush(plane)); | 949 | mdp5_ctl_commit(ctl, pipeline, mdp5_plane_get_flush(plane)); |
949 | } | 950 | } |
950 | 951 | ||
951 | *to_mdp5_plane_state(plane_state) = | 952 | *to_mdp5_plane_state(plane_state) = |