author     Dave Airlie <airlied@redhat.com>   2016-11-06 23:20:43 -0500
committer  Dave Airlie <airlied@redhat.com>   2016-11-06 23:20:43 -0500
commit     afdd548f742ca454fc343696de472f3aaa5dc488 (patch)
tree       29f3d0087452fdcf7166d83c82a936af26f3e8e0
parent     7b624ad8fea1be7ff4c22643e212191aa6a2a3c2 (diff)
parent     b27add13f500469127afdf011dbcc9c649e16e54 (diff)
Merge branch 'linux-4.10' of git://github.com/skeggsb/linux into drm-next
- Initial atomic modesetting support. The atomic code is used internally to
  back the "legacy" KMS interfaces; the atomic ioctl itself is not exposed by
  default, but a command-line option can enable it (see the sketch after this
  list).
- Initial DP 1.2 MST support
- Misc other code cleanups + fixes
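For orientation, a minimal hedged sketch of what "enable it" means in practice:
the driver only advertises atomic support when the opt-in option is set (a
nouveau module parameter; the exact spelling, e.g. nouveau.atomic=1, is an
assumption here, not taken from this merge), and a userspace client must then
additionally request the standard DRM atomic client capability before the
atomic ioctl becomes usable. The device node path below is illustrative.

    /* Hedged sketch: userspace opt-in to the atomic ioctl via libdrm. */
    #include <fcntl.h>
    #include <stdio.h>
    #include <unistd.h>
    #include <xf86drm.h>

    int main(void)
    {
            int fd = open("/dev/dri/card0", O_RDWR | O_CLOEXEC);
            if (fd < 0)
                    return 1;

            /* Fails unless the driver advertises DRIVER_ATOMIC, i.e. unless
             * the opt-in option mentioned above was used when loading nouveau. */
            if (drmSetClientCap(fd, DRM_CLIENT_CAP_ATOMIC, 1))
                    fprintf(stderr, "atomic ioctl not exposed by this driver\n");

            close(fd);
            return 0;
    }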
* 'linux-4.10' of git://github.com/skeggsb/linux: (64 commits)
drm/nouveau/fifo/gf100-: protect channel preempt with subdev mutex
drm/nouveau/gr: fallback to legacy paths during firmware lookup
drm/nouveau/kms/nv50: initial support for DP 1.2 multi-stream
drm/nouveau/kms/nv50: allow encoder update to be called from other modules
drm/nouveau/kms/nv50: rename remaining nv50_crtc to nv50_head
drm/nouveau/kms/nv50: remove code to create ctxdma for every framebuffer
drm/nouveau/kms/nv50: remove code to support non-atomic page flips
drm/nouveau/kms/nv50: remove code to support non-atomic connector properties
drm/nouveau/kms/nv50: remove code to support non-atomic dpms
drm/nouveau/kms/nv50: remove code to support non-atomic modesets
drm/nouveau/kms/nv50: transition to atomic interfaces internally
drm/nouveau/kms/nv50: turn mode_set_base_atomic() into a stub
drm/nouveau/kms/nv50: convert encoder mode_fixup into an atomic_check()
drm/nouveau/kms/nv50: clean-up encoder functions
drm/nouveau/kms/nv50: ensure encoder normal power state is enabled at startup
drm/nouveau/kms/nv50: prepare ctxdma interface to be usable with atomic
drm/nouveau/kms/nv50: separate out cursor channel commit
drm/nouveau/kms/nv50: separate out base channel commit
drm/nouveau/kms/nv50: separate out vblank dmi commit
drm/nouveau/kms/nv50: separate out procamp commit
...
104 files changed, 4130 insertions, 1865 deletions
diff --git a/drivers/gpu/drm/nouveau/dispnv04/overlay.c b/drivers/gpu/drm/nouveau/dispnv04/overlay.c
index ec444eac6258..a79514d440b3 100644
--- a/drivers/gpu/drm/nouveau/dispnv04/overlay.c
+++ b/drivers/gpu/drm/nouveau/dispnv04/overlay.c
@@ -33,7 +33,7 @@ | |||
33 | #include "nouveau_connector.h" | 33 | #include "nouveau_connector.h" |
34 | #include "nouveau_display.h" | 34 | #include "nouveau_display.h" |
35 | #include "nvreg.h" | 35 | #include "nvreg.h" |
36 | 36 | #include "disp.h" | |
37 | 37 | ||
38 | struct nouveau_plane { | 38 | struct nouveau_plane { |
39 | struct drm_plane base; | 39 | struct drm_plane base; |
diff --git a/drivers/gpu/drm/nouveau/include/nvif/cl5070.h b/drivers/gpu/drm/nouveau/include/nvif/cl5070.h
index d15c296b5f33..ae49dfd1f97b 100644
--- a/drivers/gpu/drm/nouveau/include/nvif/cl5070.h
+++ b/drivers/gpu/drm/nouveau/include/nvif/cl5070.h
@@ -34,6 +34,8 @@ struct nv50_disp_mthd_v1 { | |||
34 | #define NV50_DISP_MTHD_V1_SOR_HDMI_PWR 0x22 | 34 | #define NV50_DISP_MTHD_V1_SOR_HDMI_PWR 0x22 |
35 | #define NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT 0x23 | 35 | #define NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT 0x23 |
36 | #define NV50_DISP_MTHD_V1_SOR_DP_PWR 0x24 | 36 | #define NV50_DISP_MTHD_V1_SOR_DP_PWR 0x24 |
37 | #define NV50_DISP_MTHD_V1_SOR_DP_MST_LINK 0x25 | ||
38 | #define NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI 0x26 | ||
37 | #define NV50_DISP_MTHD_V1_PIOR_PWR 0x30 | 39 | #define NV50_DISP_MTHD_V1_PIOR_PWR 0x30 |
38 | __u8 method; | 40 | __u8 method; |
39 | __u16 hasht; | 41 | __u16 hasht; |
@@ -90,6 +92,21 @@ struct nv50_disp_sor_dp_pwr_v0 { | |||
90 | __u8 pad02[6]; | 92 | __u8 pad02[6]; |
91 | }; | 93 | }; |
92 | 94 | ||
95 | struct nv50_disp_sor_dp_mst_link_v0 { | ||
96 | __u8 version; | ||
97 | __u8 state; | ||
98 | __u8 pad02[6]; | ||
99 | }; | ||
100 | |||
101 | struct nv50_disp_sor_dp_mst_vcpi_v0 { | ||
102 | __u8 version; | ||
103 | __u8 pad01[1]; | ||
104 | __u8 start_slot; | ||
105 | __u8 num_slots; | ||
106 | __u16 pbn; | ||
107 | __u16 aligned_pbn; | ||
108 | }; | ||
109 | |||
93 | struct nv50_disp_pior_pwr_v0 { | 110 | struct nv50_disp_pior_pwr_v0 { |
94 | __u8 version; | 111 | __u8 version; |
95 | __u8 state; | 112 | __u8 state; |
diff --git a/drivers/gpu/drm/nouveau/include/nvif/object.h b/drivers/gpu/drm/nouveau/include/nvif/object.h
index 8d815967767f..9e58b305b020 100644
--- a/drivers/gpu/drm/nouveau/include/nvif/object.h
+++ b/drivers/gpu/drm/nouveau/include/nvif/object.h
@@ -66,6 +66,35 @@ void nvif_object_unmap(struct nvif_object *); | |||
66 | 66 | ||
67 | #define nvif_mthd(a,b,c,d) nvif_object_mthd((a), (b), (c), (d)) | 67 | #define nvif_mthd(a,b,c,d) nvif_object_mthd((a), (b), (c), (d)) |
68 | 68 | ||
69 | struct nvif_mclass { | ||
70 | s32 oclass; | ||
71 | int version; | ||
72 | }; | ||
73 | |||
74 | #define nvif_mclass(o,m) ({ \ | ||
75 | struct nvif_object *object = (o); \ | ||
76 | struct nvif_sclass *sclass; \ | ||
77 | const typeof(m[0]) *mclass = (m); \ | ||
78 | int ret = -ENODEV; \ | ||
79 | int cnt, i, j; \ | ||
80 | \ | ||
81 | cnt = nvif_object_sclass_get(object, &sclass); \ | ||
82 | if (cnt >= 0) { \ | ||
83 | for (i = 0; ret < 0 && mclass[i].oclass; i++) { \ | ||
84 | for (j = 0; j < cnt; j++) { \ | ||
85 | if (mclass[i].oclass == sclass[j].oclass && \ | ||
86 | mclass[i].version >= sclass[j].minver && \ | ||
87 | mclass[i].version <= sclass[j].maxver) { \ | ||
88 | ret = i; \ | ||
89 | break; \ | ||
90 | } \ | ||
91 | } \ | ||
92 | } \ | ||
93 | nvif_object_sclass_put(&sclass); \ | ||
94 | } \ | ||
95 | ret; \ | ||
96 | }) | ||
97 | |||
69 | /*XXX*/ | 98 | /*XXX*/ |
70 | #include <core/object.h> | 99 | #include <core/object.h> |
71 | #define nvxx_object(a) ({ \ | 100 | #define nvxx_object(a) ({ \ |
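As an aside, a hedged sketch of how the nvif_mclass() helper introduced above
is meant to be used: the caller supplies a zero-terminated array of
(oclass, version) candidates in order of preference, and the macro returns the
index of the first candidate the given nvif object supports, or a negative
error code (-ENODEV if none match). The function name, the class list, and the
parent object below are illustrative assumptions, not taken from this patch
(assumes <nvif/device.h> and <nvif/class.h>).

    /* Hypothetical usage example, not part of the patch. */
    static int
    demo_pick_disp_class(struct nvif_device *device, s32 *oclass)
    {
            static const struct nvif_mclass disp_mclass[] = {
                    { GP104_DISP, 0 },      /* try the newest class first */
                    { GM200_DISP, 0 },
                    { NV50_DISP,  0 },
                    {}                      /* zero oclass terminates the list */
            };
            int cid = nvif_mclass(&device->object, disp_mclass);

            if (cid < 0)
                    return cid;             /* none of the listed classes is supported */

            *oclass = disp_mclass[cid].oclass;      /* best supported class */
            return 0;
    }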
diff --git a/drivers/gpu/drm/nouveau/include/nvkm/subdev/fb.h b/drivers/gpu/drm/nouveau/include/nvkm/subdev/fb.h
index 3a410275fa71..65ce79a85d37 100644
--- a/drivers/gpu/drm/nouveau/include/nvkm/subdev/fb.h
+++ b/drivers/gpu/drm/nouveau/include/nvkm/subdev/fb.h
@@ -93,6 +93,7 @@ int gk104_fb_new(struct nvkm_device *, int, struct nvkm_fb **); | |||
93 | int gk20a_fb_new(struct nvkm_device *, int, struct nvkm_fb **); | 93 | int gk20a_fb_new(struct nvkm_device *, int, struct nvkm_fb **); |
94 | int gm107_fb_new(struct nvkm_device *, int, struct nvkm_fb **); | 94 | int gm107_fb_new(struct nvkm_device *, int, struct nvkm_fb **); |
95 | int gm200_fb_new(struct nvkm_device *, int, struct nvkm_fb **); | 95 | int gm200_fb_new(struct nvkm_device *, int, struct nvkm_fb **); |
96 | int gm20b_fb_new(struct nvkm_device *, int, struct nvkm_fb **); | ||
96 | int gp100_fb_new(struct nvkm_device *, int, struct nvkm_fb **); | 97 | int gp100_fb_new(struct nvkm_device *, int, struct nvkm_fb **); |
97 | int gp104_fb_new(struct nvkm_device *, int, struct nvkm_fb **); | 98 | int gp104_fb_new(struct nvkm_device *, int, struct nvkm_fb **); |
98 | 99 | ||
@@ -156,4 +157,6 @@ struct nvkm_ram_func { | |||
156 | int (*prog)(struct nvkm_ram *); | 157 | int (*prog)(struct nvkm_ram *); |
157 | void (*tidy)(struct nvkm_ram *); | 158 | void (*tidy)(struct nvkm_ram *); |
158 | }; | 159 | }; |
160 | |||
161 | extern const u8 gf100_pte_storage_type_map[256]; | ||
159 | #endif | 162 | #endif |
diff --git a/drivers/gpu/drm/nouveau/nouveau_bios.c b/drivers/gpu/drm/nouveau/nouveau_bios.c
index a1570b109434..23ffe8571a99 100644
--- a/drivers/gpu/drm/nouveau/nouveau_bios.c
+++ b/drivers/gpu/drm/nouveau/nouveau_bios.c
@@ -333,6 +333,9 @@ get_fp_strap(struct drm_device *dev, struct nvbios *bios) | |||
333 | if (bios->major_version < 5 && bios->data[0x48] & 0x4) | 333 | if (bios->major_version < 5 && bios->data[0x48] & 0x4) |
334 | return NVReadVgaCrtc5758(dev, 0, 0xf) & 0xf; | 334 | return NVReadVgaCrtc5758(dev, 0, 0xf) & 0xf; |
335 | 335 | ||
336 | if (drm->device.info.family >= NV_DEVICE_INFO_V0_MAXWELL) | ||
337 | return nvif_rd32(device, 0x001800) & 0x0000000f; | ||
338 | else | ||
336 | if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA) | 339 | if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA) |
337 | return (nvif_rd32(device, NV_PEXTDEV_BOOT_0) >> 24) & 0xf; | 340 | return (nvif_rd32(device, NV_PEXTDEV_BOOT_0) >> 24) & 0xf; |
338 | else | 341 | else |
diff --git a/drivers/gpu/drm/nouveau/nouveau_connector.c b/drivers/gpu/drm/nouveau/nouveau_connector.c
index c1084088f9e4..947c200655b4 100644
--- a/drivers/gpu/drm/nouveau/nouveau_connector.c
+++ b/drivers/gpu/drm/nouveau/nouveau_connector.c
@@ -30,6 +30,7 @@ | |||
30 | #include <linux/vga_switcheroo.h> | 30 | #include <linux/vga_switcheroo.h> |
31 | 31 | ||
32 | #include <drm/drmP.h> | 32 | #include <drm/drmP.h> |
33 | #include <drm/drm_atomic_helper.h> | ||
33 | #include <drm/drm_edid.h> | 34 | #include <drm/drm_edid.h> |
34 | #include <drm/drm_crtc_helper.h> | 35 | #include <drm/drm_crtc_helper.h> |
35 | 36 | ||
@@ -47,6 +48,301 @@ | |||
47 | #include <nvif/cl0046.h> | 48 | #include <nvif/cl0046.h> |
48 | #include <nvif/event.h> | 49 | #include <nvif/event.h> |
49 | 50 | ||
51 | struct drm_display_mode * | ||
52 | nouveau_conn_native_mode(struct drm_connector *connector) | ||
53 | { | ||
54 | const struct drm_connector_helper_funcs *helper = connector->helper_private; | ||
55 | struct nouveau_drm *drm = nouveau_drm(connector->dev); | ||
56 | struct drm_device *dev = connector->dev; | ||
57 | struct drm_display_mode *mode, *largest = NULL; | ||
58 | int high_w = 0, high_h = 0, high_v = 0; | ||
59 | |||
60 | list_for_each_entry(mode, &connector->probed_modes, head) { | ||
61 | mode->vrefresh = drm_mode_vrefresh(mode); | ||
62 | if (helper->mode_valid(connector, mode) != MODE_OK || | ||
63 | (mode->flags & DRM_MODE_FLAG_INTERLACE)) | ||
64 | continue; | ||
65 | |||
66 | /* Use preferred mode if there is one.. */ | ||
67 | if (mode->type & DRM_MODE_TYPE_PREFERRED) { | ||
68 | NV_DEBUG(drm, "native mode from preferred\n"); | ||
69 | return drm_mode_duplicate(dev, mode); | ||
70 | } | ||
71 | |||
72 | /* Otherwise, take the resolution with the largest width, then | ||
73 | * height, then vertical refresh | ||
74 | */ | ||
75 | if (mode->hdisplay < high_w) | ||
76 | continue; | ||
77 | |||
78 | if (mode->hdisplay == high_w && mode->vdisplay < high_h) | ||
79 | continue; | ||
80 | |||
81 | if (mode->hdisplay == high_w && mode->vdisplay == high_h && | ||
82 | mode->vrefresh < high_v) | ||
83 | continue; | ||
84 | |||
85 | high_w = mode->hdisplay; | ||
86 | high_h = mode->vdisplay; | ||
87 | high_v = mode->vrefresh; | ||
88 | largest = mode; | ||
89 | } | ||
90 | |||
91 | NV_DEBUG(drm, "native mode from largest: %dx%d@%d\n", | ||
92 | high_w, high_h, high_v); | ||
93 | return largest ? drm_mode_duplicate(dev, largest) : NULL; | ||
94 | } | ||
95 | |||
96 | int | ||
97 | nouveau_conn_atomic_get_property(struct drm_connector *connector, | ||
98 | const struct drm_connector_state *state, | ||
99 | struct drm_property *property, u64 *val) | ||
100 | { | ||
101 | struct nouveau_conn_atom *asyc = nouveau_conn_atom(state); | ||
102 | struct nouveau_display *disp = nouveau_display(connector->dev); | ||
103 | struct drm_device *dev = connector->dev; | ||
104 | |||
105 | if (property == dev->mode_config.scaling_mode_property) | ||
106 | *val = asyc->scaler.mode; | ||
107 | else if (property == disp->underscan_property) | ||
108 | *val = asyc->scaler.underscan.mode; | ||
109 | else if (property == disp->underscan_hborder_property) | ||
110 | *val = asyc->scaler.underscan.hborder; | ||
111 | else if (property == disp->underscan_vborder_property) | ||
112 | *val = asyc->scaler.underscan.vborder; | ||
113 | else if (property == disp->dithering_mode) | ||
114 | *val = asyc->dither.mode; | ||
115 | else if (property == disp->dithering_depth) | ||
116 | *val = asyc->dither.depth; | ||
117 | else if (property == disp->vibrant_hue_property) | ||
118 | *val = asyc->procamp.vibrant_hue; | ||
119 | else if (property == disp->color_vibrance_property) | ||
120 | *val = asyc->procamp.color_vibrance; | ||
121 | else | ||
122 | return -EINVAL; | ||
123 | |||
124 | return 0; | ||
125 | } | ||
126 | |||
127 | int | ||
128 | nouveau_conn_atomic_set_property(struct drm_connector *connector, | ||
129 | struct drm_connector_state *state, | ||
130 | struct drm_property *property, u64 val) | ||
131 | { | ||
132 | struct drm_device *dev = connector->dev; | ||
133 | struct nouveau_conn_atom *asyc = nouveau_conn_atom(state); | ||
134 | struct nouveau_display *disp = nouveau_display(dev); | ||
135 | |||
136 | if (property == dev->mode_config.scaling_mode_property) { | ||
137 | switch (val) { | ||
138 | case DRM_MODE_SCALE_NONE: | ||
139 | /* We allow 'None' for EDID modes, even on a fixed | ||
140 | * panel (some exist with support for lower refresh | ||
141 | * rates, which people might want to use for power- | ||
142 | * saving purposes). | ||
143 | * | ||
144 | * Non-EDID modes will force the use of GPU scaling | ||
145 | * to the native mode regardless of this setting. | ||
146 | */ | ||
147 | switch (connector->connector_type) { | ||
148 | case DRM_MODE_CONNECTOR_LVDS: | ||
149 | case DRM_MODE_CONNECTOR_eDP: | ||
150 | /* ... except prior to G80, where the code | ||
151 | * doesn't support such things. | ||
152 | */ | ||
153 | if (disp->disp.oclass < NV50_DISP) | ||
154 | return -EINVAL; | ||
155 | break; | ||
156 | default: | ||
157 | break; | ||
158 | } | ||
159 | case DRM_MODE_SCALE_FULLSCREEN: | ||
160 | case DRM_MODE_SCALE_CENTER: | ||
161 | case DRM_MODE_SCALE_ASPECT: | ||
162 | break; | ||
163 | default: | ||
164 | return -EINVAL; | ||
165 | } | ||
166 | |||
167 | if (asyc->scaler.mode != val) { | ||
168 | asyc->scaler.mode = val; | ||
169 | asyc->set.scaler = true; | ||
170 | } | ||
171 | } else | ||
172 | if (property == disp->underscan_property) { | ||
173 | if (asyc->scaler.underscan.mode != val) { | ||
174 | asyc->scaler.underscan.mode = val; | ||
175 | asyc->set.scaler = true; | ||
176 | } | ||
177 | } else | ||
178 | if (property == disp->underscan_hborder_property) { | ||
179 | if (asyc->scaler.underscan.hborder != val) { | ||
180 | asyc->scaler.underscan.hborder = val; | ||
181 | asyc->set.scaler = true; | ||
182 | } | ||
183 | } else | ||
184 | if (property == disp->underscan_vborder_property) { | ||
185 | if (asyc->scaler.underscan.vborder != val) { | ||
186 | asyc->scaler.underscan.vborder = val; | ||
187 | asyc->set.scaler = true; | ||
188 | } | ||
189 | } else | ||
190 | if (property == disp->dithering_mode) { | ||
191 | if (asyc->dither.mode != val) { | ||
192 | asyc->dither.mode = val; | ||
193 | asyc->set.dither = true; | ||
194 | } | ||
195 | } else | ||
196 | if (property == disp->dithering_depth) { | ||
197 | if (asyc->dither.mode != val) { | ||
198 | asyc->dither.depth = val; | ||
199 | asyc->set.dither = true; | ||
200 | } | ||
201 | } else | ||
202 | if (property == disp->vibrant_hue_property) { | ||
203 | if (asyc->procamp.vibrant_hue != val) { | ||
204 | asyc->procamp.vibrant_hue = val; | ||
205 | asyc->set.procamp = true; | ||
206 | } | ||
207 | } else | ||
208 | if (property == disp->color_vibrance_property) { | ||
209 | if (asyc->procamp.color_vibrance != val) { | ||
210 | asyc->procamp.color_vibrance = val; | ||
211 | asyc->set.procamp = true; | ||
212 | } | ||
213 | } else { | ||
214 | return -EINVAL; | ||
215 | } | ||
216 | |||
217 | return 0; | ||
218 | } | ||
219 | |||
220 | void | ||
221 | nouveau_conn_atomic_destroy_state(struct drm_connector *connector, | ||
222 | struct drm_connector_state *state) | ||
223 | { | ||
224 | struct nouveau_conn_atom *asyc = nouveau_conn_atom(state); | ||
225 | __drm_atomic_helper_connector_destroy_state(&asyc->state); | ||
226 | kfree(asyc); | ||
227 | } | ||
228 | |||
229 | struct drm_connector_state * | ||
230 | nouveau_conn_atomic_duplicate_state(struct drm_connector *connector) | ||
231 | { | ||
232 | struct nouveau_conn_atom *armc = nouveau_conn_atom(connector->state); | ||
233 | struct nouveau_conn_atom *asyc; | ||
234 | if (!(asyc = kmalloc(sizeof(*asyc), GFP_KERNEL))) | ||
235 | return NULL; | ||
236 | __drm_atomic_helper_connector_duplicate_state(connector, &asyc->state); | ||
237 | asyc->dither = armc->dither; | ||
238 | asyc->scaler = armc->scaler; | ||
239 | asyc->procamp = armc->procamp; | ||
240 | asyc->set.mask = 0; | ||
241 | return &asyc->state; | ||
242 | } | ||
243 | |||
244 | void | ||
245 | nouveau_conn_reset(struct drm_connector *connector) | ||
246 | { | ||
247 | struct nouveau_conn_atom *asyc; | ||
248 | |||
249 | if (WARN_ON(!(asyc = kzalloc(sizeof(*asyc), GFP_KERNEL)))) | ||
250 | return; | ||
251 | |||
252 | if (connector->state) | ||
253 | __drm_atomic_helper_connector_destroy_state(connector->state); | ||
254 | __drm_atomic_helper_connector_reset(connector, &asyc->state); | ||
255 | asyc->dither.mode = DITHERING_MODE_AUTO; | ||
256 | asyc->dither.depth = DITHERING_DEPTH_AUTO; | ||
257 | asyc->scaler.mode = DRM_MODE_SCALE_NONE; | ||
258 | asyc->scaler.underscan.mode = UNDERSCAN_OFF; | ||
259 | asyc->procamp.color_vibrance = 150; | ||
260 | asyc->procamp.vibrant_hue = 90; | ||
261 | |||
262 | if (nouveau_display(connector->dev)->disp.oclass < NV50_DISP) { | ||
263 | switch (connector->connector_type) { | ||
264 | case DRM_MODE_CONNECTOR_LVDS: | ||
265 | /* See note in nouveau_conn_atomic_set_property(). */ | ||
266 | asyc->scaler.mode = DRM_MODE_SCALE_FULLSCREEN; | ||
267 | break; | ||
268 | default: | ||
269 | break; | ||
270 | } | ||
271 | } | ||
272 | } | ||
273 | |||
274 | void | ||
275 | nouveau_conn_attach_properties(struct drm_connector *connector) | ||
276 | { | ||
277 | struct drm_device *dev = connector->dev; | ||
278 | struct nouveau_conn_atom *armc = nouveau_conn_atom(connector->state); | ||
279 | struct nouveau_display *disp = nouveau_display(dev); | ||
280 | |||
281 | /* Init DVI-I specific properties. */ | ||
282 | if (connector->connector_type == DRM_MODE_CONNECTOR_DVII) | ||
283 | drm_object_attach_property(&connector->base, dev->mode_config. | ||
284 | dvi_i_subconnector_property, 0); | ||
285 | |||
286 | /* Add overscan compensation options to digital outputs. */ | ||
287 | if (disp->underscan_property && | ||
288 | (connector->connector_type == DRM_MODE_CONNECTOR_DVID || | ||
289 | connector->connector_type == DRM_MODE_CONNECTOR_DVII || | ||
290 | connector->connector_type == DRM_MODE_CONNECTOR_HDMIA || | ||
291 | connector->connector_type == DRM_MODE_CONNECTOR_DisplayPort)) { | ||
292 | drm_object_attach_property(&connector->base, | ||
293 | disp->underscan_property, | ||
294 | UNDERSCAN_OFF); | ||
295 | drm_object_attach_property(&connector->base, | ||
296 | disp->underscan_hborder_property, 0); | ||
297 | drm_object_attach_property(&connector->base, | ||
298 | disp->underscan_vborder_property, 0); | ||
299 | } | ||
300 | |||
301 | /* Add hue and saturation options. */ | ||
302 | if (disp->vibrant_hue_property) | ||
303 | drm_object_attach_property(&connector->base, | ||
304 | disp->vibrant_hue_property, | ||
305 | armc->procamp.vibrant_hue); | ||
306 | if (disp->color_vibrance_property) | ||
307 | drm_object_attach_property(&connector->base, | ||
308 | disp->color_vibrance_property, | ||
309 | armc->procamp.color_vibrance); | ||
310 | |||
311 | /* Scaling mode property. */ | ||
312 | switch (connector->connector_type) { | ||
313 | case DRM_MODE_CONNECTOR_TV: | ||
314 | break; | ||
315 | case DRM_MODE_CONNECTOR_VGA: | ||
316 | if (disp->disp.oclass < NV50_DISP) | ||
317 | break; /* Can only scale on DFPs. */ | ||
318 | /* Fall-through. */ | ||
319 | default: | ||
320 | drm_object_attach_property(&connector->base, dev->mode_config. | ||
321 | scaling_mode_property, | ||
322 | armc->scaler.mode); | ||
323 | break; | ||
324 | } | ||
325 | |||
326 | /* Dithering properties. */ | ||
327 | switch (connector->connector_type) { | ||
328 | case DRM_MODE_CONNECTOR_TV: | ||
329 | case DRM_MODE_CONNECTOR_VGA: | ||
330 | break; | ||
331 | default: | ||
332 | if (disp->dithering_mode) { | ||
333 | drm_object_attach_property(&connector->base, | ||
334 | disp->dithering_mode, | ||
335 | armc->dither.mode); | ||
336 | } | ||
337 | if (disp->dithering_depth) { | ||
338 | drm_object_attach_property(&connector->base, | ||
339 | disp->dithering_depth, | ||
340 | armc->dither.depth); | ||
341 | } | ||
342 | break; | ||
343 | } | ||
344 | } | ||
345 | |||
50 | MODULE_PARM_DESC(tv_disable, "Disable TV-out detection"); | 346 | MODULE_PARM_DESC(tv_disable, "Disable TV-out detection"); |
51 | int nouveau_tv_disable = 0; | 347 | int nouveau_tv_disable = 0; |
52 | module_param_named(tv_disable, nouveau_tv_disable, int, 0400); | 348 | module_param_named(tv_disable, nouveau_tv_disable, int, 0400); |
@@ -151,7 +447,9 @@ nouveau_connector_ddc_detect(struct drm_connector *connector) | |||
151 | 447 | ||
152 | if (nv_encoder->dcb->type == DCB_OUTPUT_DP) { | 448 | if (nv_encoder->dcb->type == DCB_OUTPUT_DP) { |
153 | int ret = nouveau_dp_detect(nv_encoder); | 449 | int ret = nouveau_dp_detect(nv_encoder); |
154 | if (ret == 0) | 450 | if (ret == NOUVEAU_DP_MST) |
451 | return NULL; | ||
452 | if (ret == NOUVEAU_DP_SST) | ||
155 | break; | 453 | break; |
156 | } else | 454 | } else |
157 | if ((vga_switcheroo_handler_flags() & | 455 | if ((vga_switcheroo_handler_flags() & |
@@ -465,199 +763,39 @@ static int | |||
465 | nouveau_connector_set_property(struct drm_connector *connector, | 763 | nouveau_connector_set_property(struct drm_connector *connector, |
466 | struct drm_property *property, uint64_t value) | 764 | struct drm_property *property, uint64_t value) |
467 | { | 765 | { |
468 | struct nouveau_display *disp = nouveau_display(connector->dev); | 766 | struct nouveau_conn_atom *asyc = nouveau_conn_atom(connector->state); |
469 | struct nouveau_connector *nv_connector = nouveau_connector(connector); | 767 | struct nouveau_connector *nv_connector = nouveau_connector(connector); |
470 | struct nouveau_encoder *nv_encoder = nv_connector->detected_encoder; | 768 | struct nouveau_encoder *nv_encoder = nv_connector->detected_encoder; |
471 | struct drm_encoder *encoder = to_drm_encoder(nv_encoder); | 769 | struct drm_encoder *encoder = to_drm_encoder(nv_encoder); |
472 | struct drm_device *dev = connector->dev; | ||
473 | struct nouveau_crtc *nv_crtc; | ||
474 | int ret; | 770 | int ret; |
475 | 771 | ||
476 | nv_crtc = NULL; | 772 | if (connector->dev->mode_config.funcs->atomic_commit) |
477 | if (connector->encoder && connector->encoder->crtc) | 773 | return drm_atomic_helper_connector_set_property(connector, property, value); |
478 | nv_crtc = nouveau_crtc(connector->encoder->crtc); | ||
479 | |||
480 | /* Scaling mode */ | ||
481 | if (property == dev->mode_config.scaling_mode_property) { | ||
482 | bool modeset = false; | ||
483 | |||
484 | switch (value) { | ||
485 | case DRM_MODE_SCALE_NONE: | ||
486 | /* We allow 'None' for EDID modes, even on a fixed | ||
487 | * panel (some exist with support for lower refresh | ||
488 | * rates, which people might want to use for power | ||
489 | * saving purposes). | ||
490 | * | ||
491 | * Non-EDID modes will force the use of GPU scaling | ||
492 | * to the native mode regardless of this setting. | ||
493 | */ | ||
494 | switch (nv_connector->type) { | ||
495 | case DCB_CONNECTOR_LVDS: | ||
496 | case DCB_CONNECTOR_LVDS_SPWG: | ||
497 | case DCB_CONNECTOR_eDP: | ||
498 | /* ... except prior to G80, where the code | ||
499 | * doesn't support such things. | ||
500 | */ | ||
501 | if (disp->disp.oclass < NV50_DISP) | ||
502 | return -EINVAL; | ||
503 | break; | ||
504 | default: | ||
505 | break; | ||
506 | } | ||
507 | break; | ||
508 | case DRM_MODE_SCALE_FULLSCREEN: | ||
509 | case DRM_MODE_SCALE_CENTER: | ||
510 | case DRM_MODE_SCALE_ASPECT: | ||
511 | break; | ||
512 | default: | ||
513 | return -EINVAL; | ||
514 | } | ||
515 | |||
516 | /* Changing between GPU and panel scaling requires a full | ||
517 | * modeset | ||
518 | */ | ||
519 | if ((nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) || | ||
520 | (value == DRM_MODE_SCALE_NONE)) | ||
521 | modeset = true; | ||
522 | nv_connector->scaling_mode = value; | ||
523 | |||
524 | if (!nv_crtc) | ||
525 | return 0; | ||
526 | |||
527 | if (modeset || !nv_crtc->set_scale) { | ||
528 | ret = drm_crtc_helper_set_mode(&nv_crtc->base, | ||
529 | &nv_crtc->base.mode, | ||
530 | nv_crtc->base.x, | ||
531 | nv_crtc->base.y, NULL); | ||
532 | if (!ret) | ||
533 | return -EINVAL; | ||
534 | } else { | ||
535 | ret = nv_crtc->set_scale(nv_crtc, true); | ||
536 | if (ret) | ||
537 | return ret; | ||
538 | } | ||
539 | |||
540 | return 0; | ||
541 | } | ||
542 | |||
543 | /* Underscan */ | ||
544 | if (property == disp->underscan_property) { | ||
545 | if (nv_connector->underscan != value) { | ||
546 | nv_connector->underscan = value; | ||
547 | if (!nv_crtc || !nv_crtc->set_scale) | ||
548 | return 0; | ||
549 | |||
550 | return nv_crtc->set_scale(nv_crtc, true); | ||
551 | } | ||
552 | |||
553 | return 0; | ||
554 | } | ||
555 | |||
556 | if (property == disp->underscan_hborder_property) { | ||
557 | if (nv_connector->underscan_hborder != value) { | ||
558 | nv_connector->underscan_hborder = value; | ||
559 | if (!nv_crtc || !nv_crtc->set_scale) | ||
560 | return 0; | ||
561 | |||
562 | return nv_crtc->set_scale(nv_crtc, true); | ||
563 | } | ||
564 | |||
565 | return 0; | ||
566 | } | ||
567 | |||
568 | if (property == disp->underscan_vborder_property) { | ||
569 | if (nv_connector->underscan_vborder != value) { | ||
570 | nv_connector->underscan_vborder = value; | ||
571 | if (!nv_crtc || !nv_crtc->set_scale) | ||
572 | return 0; | ||
573 | |||
574 | return nv_crtc->set_scale(nv_crtc, true); | ||
575 | } | ||
576 | |||
577 | return 0; | ||
578 | } | ||
579 | 774 | ||
580 | /* Dithering */ | 775 | ret = connector->funcs->atomic_set_property(&nv_connector->base, |
581 | if (property == disp->dithering_mode) { | 776 | &asyc->state, |
582 | nv_connector->dithering_mode = value; | 777 | property, value); |
583 | if (!nv_crtc || !nv_crtc->set_dither) | 778 | if (ret) { |
584 | return 0; | 779 | if (nv_encoder && nv_encoder->dcb->type == DCB_OUTPUT_TV) |
585 | 780 | return get_slave_funcs(encoder)->set_property( | |
586 | return nv_crtc->set_dither(nv_crtc, true); | 781 | encoder, connector, property, value); |
587 | } | 782 | return ret; |
588 | |||
589 | if (property == disp->dithering_depth) { | ||
590 | nv_connector->dithering_depth = value; | ||
591 | if (!nv_crtc || !nv_crtc->set_dither) | ||
592 | return 0; | ||
593 | |||
594 | return nv_crtc->set_dither(nv_crtc, true); | ||
595 | } | ||
596 | |||
597 | if (nv_crtc && nv_crtc->set_color_vibrance) { | ||
598 | /* Hue */ | ||
599 | if (property == disp->vibrant_hue_property) { | ||
600 | nv_crtc->vibrant_hue = value - 90; | ||
601 | return nv_crtc->set_color_vibrance(nv_crtc, true); | ||
602 | } | ||
603 | /* Saturation */ | ||
604 | if (property == disp->color_vibrance_property) { | ||
605 | nv_crtc->color_vibrance = value - 100; | ||
606 | return nv_crtc->set_color_vibrance(nv_crtc, true); | ||
607 | } | ||
608 | } | 783 | } |
609 | 784 | ||
610 | if (nv_encoder && nv_encoder->dcb->type == DCB_OUTPUT_TV) | 785 | nv_connector->scaling_mode = asyc->scaler.mode; |
611 | return get_slave_funcs(encoder)->set_property( | 786 | nv_connector->dithering_mode = asyc->dither.mode; |
612 | encoder, connector, property, value); | ||
613 | |||
614 | return -EINVAL; | ||
615 | } | ||
616 | |||
617 | static struct drm_display_mode * | ||
618 | nouveau_connector_native_mode(struct drm_connector *connector) | ||
619 | { | ||
620 | const struct drm_connector_helper_funcs *helper = connector->helper_private; | ||
621 | struct nouveau_drm *drm = nouveau_drm(connector->dev); | ||
622 | struct nouveau_connector *nv_connector = nouveau_connector(connector); | ||
623 | struct drm_device *dev = connector->dev; | ||
624 | struct drm_display_mode *mode, *largest = NULL; | ||
625 | int high_w = 0, high_h = 0, high_v = 0; | ||
626 | 787 | ||
627 | list_for_each_entry(mode, &nv_connector->base.probed_modes, head) { | 788 | if (connector->encoder && connector->encoder->crtc) { |
628 | mode->vrefresh = drm_mode_vrefresh(mode); | 789 | ret = drm_crtc_helper_set_mode(connector->encoder->crtc, |
629 | if (helper->mode_valid(connector, mode) != MODE_OK || | 790 | &connector->encoder->crtc->mode, |
630 | (mode->flags & DRM_MODE_FLAG_INTERLACE)) | 791 | connector->encoder->crtc->x, |
631 | continue; | 792 | connector->encoder->crtc->y, |
632 | 793 | NULL); | |
633 | /* Use preferred mode if there is one.. */ | 794 | if (!ret) |
634 | if (mode->type & DRM_MODE_TYPE_PREFERRED) { | 795 | return -EINVAL; |
635 | NV_DEBUG(drm, "native mode from preferred\n"); | ||
636 | return drm_mode_duplicate(dev, mode); | ||
637 | } | ||
638 | |||
639 | /* Otherwise, take the resolution with the largest width, then | ||
640 | * height, then vertical refresh | ||
641 | */ | ||
642 | if (mode->hdisplay < high_w) | ||
643 | continue; | ||
644 | |||
645 | if (mode->hdisplay == high_w && mode->vdisplay < high_h) | ||
646 | continue; | ||
647 | |||
648 | if (mode->hdisplay == high_w && mode->vdisplay == high_h && | ||
649 | mode->vrefresh < high_v) | ||
650 | continue; | ||
651 | |||
652 | high_w = mode->hdisplay; | ||
653 | high_h = mode->vdisplay; | ||
654 | high_v = mode->vrefresh; | ||
655 | largest = mode; | ||
656 | } | 796 | } |
657 | 797 | ||
658 | NV_DEBUG(drm, "native mode from largest: %dx%d@%d\n", | 798 | return 0; |
659 | high_w, high_h, high_v); | ||
660 | return largest ? drm_mode_duplicate(dev, largest) : NULL; | ||
661 | } | 799 | } |
662 | 800 | ||
663 | struct moderec { | 801 | struct moderec { |
@@ -805,8 +943,7 @@ nouveau_connector_get_modes(struct drm_connector *connector) | |||
805 | * the list of modes. | 943 | * the list of modes. |
806 | */ | 944 | */ |
807 | if (!nv_connector->native_mode) | 945 | if (!nv_connector->native_mode) |
808 | nv_connector->native_mode = | 946 | nv_connector->native_mode = nouveau_conn_native_mode(connector); |
809 | nouveau_connector_native_mode(connector); | ||
810 | if (ret == 0 && nv_connector->native_mode) { | 947 | if (ret == 0 && nv_connector->native_mode) { |
811 | struct drm_display_mode *mode; | 948 | struct drm_display_mode *mode; |
812 | 949 | ||
@@ -934,56 +1071,42 @@ nouveau_connector_helper_funcs = { | |||
934 | .best_encoder = nouveau_connector_best_encoder, | 1071 | .best_encoder = nouveau_connector_best_encoder, |
935 | }; | 1072 | }; |
936 | 1073 | ||
1074 | static int | ||
1075 | nouveau_connector_dpms(struct drm_connector *connector, int mode) | ||
1076 | { | ||
1077 | if (connector->dev->mode_config.funcs->atomic_commit) | ||
1078 | return drm_atomic_helper_connector_dpms(connector, mode); | ||
1079 | return drm_helper_connector_dpms(connector, mode); | ||
1080 | } | ||
1081 | |||
937 | static const struct drm_connector_funcs | 1082 | static const struct drm_connector_funcs |
938 | nouveau_connector_funcs = { | 1083 | nouveau_connector_funcs = { |
939 | .dpms = drm_helper_connector_dpms, | 1084 | .dpms = nouveau_connector_dpms, |
1085 | .reset = nouveau_conn_reset, | ||
940 | .detect = nouveau_connector_detect, | 1086 | .detect = nouveau_connector_detect, |
941 | .destroy = nouveau_connector_destroy, | 1087 | .force = nouveau_connector_force, |
942 | .fill_modes = drm_helper_probe_single_connector_modes, | 1088 | .fill_modes = drm_helper_probe_single_connector_modes, |
943 | .set_property = nouveau_connector_set_property, | 1089 | .set_property = nouveau_connector_set_property, |
944 | .force = nouveau_connector_force | 1090 | .destroy = nouveau_connector_destroy, |
1091 | .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state, | ||
1092 | .atomic_destroy_state = nouveau_conn_atomic_destroy_state, | ||
1093 | .atomic_set_property = nouveau_conn_atomic_set_property, | ||
1094 | .atomic_get_property = nouveau_conn_atomic_get_property, | ||
945 | }; | 1095 | }; |
946 | 1096 | ||
947 | static const struct drm_connector_funcs | 1097 | static const struct drm_connector_funcs |
948 | nouveau_connector_funcs_lvds = { | 1098 | nouveau_connector_funcs_lvds = { |
949 | .dpms = drm_helper_connector_dpms, | 1099 | .dpms = nouveau_connector_dpms, |
1100 | .reset = nouveau_conn_reset, | ||
950 | .detect = nouveau_connector_detect_lvds, | 1101 | .detect = nouveau_connector_detect_lvds, |
951 | .destroy = nouveau_connector_destroy, | 1102 | .force = nouveau_connector_force, |
952 | .fill_modes = drm_helper_probe_single_connector_modes, | 1103 | .fill_modes = drm_helper_probe_single_connector_modes, |
953 | .set_property = nouveau_connector_set_property, | 1104 | .set_property = nouveau_connector_set_property, |
954 | .force = nouveau_connector_force | ||
955 | }; | ||
956 | |||
957 | static int | ||
958 | nouveau_connector_dp_dpms(struct drm_connector *connector, int mode) | ||
959 | { | ||
960 | struct nouveau_encoder *nv_encoder = NULL; | ||
961 | |||
962 | if (connector->encoder) | ||
963 | nv_encoder = nouveau_encoder(connector->encoder); | ||
964 | if (nv_encoder && nv_encoder->dcb && | ||
965 | nv_encoder->dcb->type == DCB_OUTPUT_DP) { | ||
966 | if (mode == DRM_MODE_DPMS_ON) { | ||
967 | u8 data = DP_SET_POWER_D0; | ||
968 | nvkm_wraux(nv_encoder->aux, DP_SET_POWER, &data, 1); | ||
969 | usleep_range(1000, 2000); | ||
970 | } else { | ||
971 | u8 data = DP_SET_POWER_D3; | ||
972 | nvkm_wraux(nv_encoder->aux, DP_SET_POWER, &data, 1); | ||
973 | } | ||
974 | } | ||
975 | |||
976 | return drm_helper_connector_dpms(connector, mode); | ||
977 | } | ||
978 | |||
979 | static const struct drm_connector_funcs | ||
980 | nouveau_connector_funcs_dp = { | ||
981 | .dpms = nouveau_connector_dp_dpms, | ||
982 | .detect = nouveau_connector_detect, | ||
983 | .destroy = nouveau_connector_destroy, | 1105 | .destroy = nouveau_connector_destroy, |
984 | .fill_modes = drm_helper_probe_single_connector_modes, | 1106 | .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state, |
985 | .set_property = nouveau_connector_set_property, | 1107 | .atomic_destroy_state = nouveau_conn_atomic_destroy_state, |
986 | .force = nouveau_connector_force | 1108 | .atomic_set_property = nouveau_conn_atomic_set_property, |
1109 | .atomic_get_property = nouveau_conn_atomic_get_property, | ||
987 | }; | 1110 | }; |
988 | 1111 | ||
989 | static int | 1112 | static int |
@@ -995,19 +1118,20 @@ nouveau_connector_hotplug(struct nvif_notify *notify) | |||
995 | struct nouveau_drm *drm = nouveau_drm(connector->dev); | 1118 | struct nouveau_drm *drm = nouveau_drm(connector->dev); |
996 | const struct nvif_notify_conn_rep_v0 *rep = notify->data; | 1119 | const struct nvif_notify_conn_rep_v0 *rep = notify->data; |
997 | const char *name = connector->name; | 1120 | const char *name = connector->name; |
1121 | struct nouveau_encoder *nv_encoder; | ||
998 | 1122 | ||
999 | if (rep->mask & NVIF_NOTIFY_CONN_V0_IRQ) { | 1123 | if (rep->mask & NVIF_NOTIFY_CONN_V0_IRQ) { |
1124 | NV_DEBUG(drm, "service %s\n", name); | ||
1125 | if ((nv_encoder = find_encoder(connector, DCB_OUTPUT_DP))) | ||
1126 | nv50_mstm_service(nv_encoder->dp.mstm); | ||
1000 | } else { | 1127 | } else { |
1001 | bool plugged = (rep->mask != NVIF_NOTIFY_CONN_V0_UNPLUG); | 1128 | bool plugged = (rep->mask != NVIF_NOTIFY_CONN_V0_UNPLUG); |
1002 | 1129 | ||
1003 | NV_DEBUG(drm, "%splugged %s\n", plugged ? "" : "un", name); | 1130 | NV_DEBUG(drm, "%splugged %s\n", plugged ? "" : "un", name); |
1004 | 1131 | if ((nv_encoder = find_encoder(connector, DCB_OUTPUT_DP))) { | |
1005 | mutex_lock(&drm->dev->mode_config.mutex); | 1132 | if (!plugged) |
1006 | if (plugged) | 1133 | nv50_mstm_remove(nv_encoder->dp.mstm); |
1007 | drm_helper_connector_dpms(connector, DRM_MODE_DPMS_ON); | 1134 | } |
1008 | else | ||
1009 | drm_helper_connector_dpms(connector, DRM_MODE_DPMS_OFF); | ||
1010 | mutex_unlock(&drm->dev->mode_config.mutex); | ||
1011 | 1135 | ||
1012 | drm_helper_hpd_irq_event(connector->dev); | 1136 | drm_helper_hpd_irq_event(connector->dev); |
1013 | } | 1137 | } |
@@ -1188,7 +1312,7 @@ nouveau_connector_create(struct drm_device *dev, int index) | |||
1188 | return ERR_PTR(ret); | 1312 | return ERR_PTR(ret); |
1189 | } | 1313 | } |
1190 | 1314 | ||
1191 | funcs = &nouveau_connector_funcs_dp; | 1315 | funcs = &nouveau_connector_funcs; |
1192 | break; | 1316 | break; |
1193 | default: | 1317 | default: |
1194 | funcs = &nouveau_connector_funcs; | 1318 | funcs = &nouveau_connector_funcs; |
@@ -1202,38 +1326,10 @@ nouveau_connector_create(struct drm_device *dev, int index) | |||
1202 | drm_connector_init(dev, connector, funcs, type); | 1326 | drm_connector_init(dev, connector, funcs, type); |
1203 | drm_connector_helper_add(connector, &nouveau_connector_helper_funcs); | 1327 | drm_connector_helper_add(connector, &nouveau_connector_helper_funcs); |
1204 | 1328 | ||
1205 | /* Init DVI-I specific properties */ | 1329 | connector->funcs->reset(connector); |
1206 | if (nv_connector->type == DCB_CONNECTOR_DVI_I) | 1330 | nouveau_conn_attach_properties(connector); |
1207 | drm_object_attach_property(&connector->base, dev->mode_config.dvi_i_subconnector_property, 0); | ||
1208 | 1331 | ||
1209 | /* Add overscan compensation options to digital outputs */ | 1332 | /* Default scaling mode */ |
1210 | if (disp->underscan_property && | ||
1211 | (type == DRM_MODE_CONNECTOR_DVID || | ||
1212 | type == DRM_MODE_CONNECTOR_DVII || | ||
1213 | type == DRM_MODE_CONNECTOR_HDMIA || | ||
1214 | type == DRM_MODE_CONNECTOR_DisplayPort)) { | ||
1215 | drm_object_attach_property(&connector->base, | ||
1216 | disp->underscan_property, | ||
1217 | UNDERSCAN_OFF); | ||
1218 | drm_object_attach_property(&connector->base, | ||
1219 | disp->underscan_hborder_property, | ||
1220 | 0); | ||
1221 | drm_object_attach_property(&connector->base, | ||
1222 | disp->underscan_vborder_property, | ||
1223 | 0); | ||
1224 | } | ||
1225 | |||
1226 | /* Add hue and saturation options */ | ||
1227 | if (disp->vibrant_hue_property) | ||
1228 | drm_object_attach_property(&connector->base, | ||
1229 | disp->vibrant_hue_property, | ||
1230 | 90); | ||
1231 | if (disp->color_vibrance_property) | ||
1232 | drm_object_attach_property(&connector->base, | ||
1233 | disp->color_vibrance_property, | ||
1234 | 150); | ||
1235 | |||
1236 | /* default scaling mode */ | ||
1237 | switch (nv_connector->type) { | 1333 | switch (nv_connector->type) { |
1238 | case DCB_CONNECTOR_LVDS: | 1334 | case DCB_CONNECTOR_LVDS: |
1239 | case DCB_CONNECTOR_LVDS_SPWG: | 1335 | case DCB_CONNECTOR_LVDS_SPWG: |
@@ -1250,23 +1346,6 @@ nouveau_connector_create(struct drm_device *dev, int index) | |||
1250 | break; | 1346 | break; |
1251 | } | 1347 | } |
1252 | 1348 | ||
1253 | /* scaling mode property */ | ||
1254 | switch (nv_connector->type) { | ||
1255 | case DCB_CONNECTOR_TV_0: | ||
1256 | case DCB_CONNECTOR_TV_1: | ||
1257 | case DCB_CONNECTOR_TV_3: | ||
1258 | break; | ||
1259 | case DCB_CONNECTOR_VGA: | ||
1260 | if (disp->disp.oclass < NV50_DISP) | ||
1261 | break; /* can only scale on DFPs */ | ||
1262 | /* fall-through */ | ||
1263 | default: | ||
1264 | drm_object_attach_property(&connector->base, dev->mode_config. | ||
1265 | scaling_mode_property, | ||
1266 | nv_connector->scaling_mode); | ||
1267 | break; | ||
1268 | } | ||
1269 | |||
1270 | /* dithering properties */ | 1349 | /* dithering properties */ |
1271 | switch (nv_connector->type) { | 1350 | switch (nv_connector->type) { |
1272 | case DCB_CONNECTOR_TV_0: | 1351 | case DCB_CONNECTOR_TV_0: |
@@ -1275,20 +1354,7 @@ nouveau_connector_create(struct drm_device *dev, int index) | |||
1275 | case DCB_CONNECTOR_VGA: | 1354 | case DCB_CONNECTOR_VGA: |
1276 | break; | 1355 | break; |
1277 | default: | 1356 | default: |
1278 | if (disp->dithering_mode) { | 1357 | nv_connector->dithering_mode = DITHERING_MODE_AUTO; |
1279 | nv_connector->dithering_mode = DITHERING_MODE_AUTO; | ||
1280 | drm_object_attach_property(&connector->base, | ||
1281 | disp->dithering_mode, | ||
1282 | nv_connector-> | ||
1283 | dithering_mode); | ||
1284 | } | ||
1285 | if (disp->dithering_depth) { | ||
1286 | nv_connector->dithering_depth = DITHERING_DEPTH_AUTO; | ||
1287 | drm_object_attach_property(&connector->base, | ||
1288 | disp->dithering_depth, | ||
1289 | nv_connector-> | ||
1290 | dithering_depth); | ||
1291 | } | ||
1292 | break; | 1358 | break; |
1293 | } | 1359 | } |
1294 | 1360 | ||
diff --git a/drivers/gpu/drm/nouveau/nouveau_connector.h b/drivers/gpu/drm/nouveau/nouveau_connector.h
index 7446ee66ea04..096983c42a1f 100644
--- a/drivers/gpu/drm/nouveau/nouveau_connector.h
+++ b/drivers/gpu/drm/nouveau/nouveau_connector.h
@@ -35,30 +35,6 @@ | |||
35 | 35 | ||
36 | struct nvkm_i2c_port; | 36 | struct nvkm_i2c_port; |
37 | 37 | ||
38 | enum nouveau_underscan_type { | ||
39 | UNDERSCAN_OFF, | ||
40 | UNDERSCAN_ON, | ||
41 | UNDERSCAN_AUTO, | ||
42 | }; | ||
43 | |||
44 | /* the enum values specifically defined here match nv50/nvd0 hw values, and | ||
45 | * the code relies on this | ||
46 | */ | ||
47 | enum nouveau_dithering_mode { | ||
48 | DITHERING_MODE_OFF = 0x00, | ||
49 | DITHERING_MODE_ON = 0x01, | ||
50 | DITHERING_MODE_DYNAMIC2X2 = 0x10 | DITHERING_MODE_ON, | ||
51 | DITHERING_MODE_STATIC2X2 = 0x18 | DITHERING_MODE_ON, | ||
52 | DITHERING_MODE_TEMPORAL = 0x20 | DITHERING_MODE_ON, | ||
53 | DITHERING_MODE_AUTO | ||
54 | }; | ||
55 | |||
56 | enum nouveau_dithering_depth { | ||
57 | DITHERING_DEPTH_6BPC = 0x00, | ||
58 | DITHERING_DEPTH_8BPC = 0x02, | ||
59 | DITHERING_DEPTH_AUTO | ||
60 | }; | ||
61 | |||
62 | struct nouveau_connector { | 38 | struct nouveau_connector { |
63 | struct drm_connector base; | 39 | struct drm_connector base; |
64 | enum dcb_connector_type type; | 40 | enum dcb_connector_type type; |
@@ -70,12 +46,7 @@ struct nouveau_connector { | |||
70 | struct drm_dp_aux aux; | 46 | struct drm_dp_aux aux; |
71 | 47 | ||
72 | int dithering_mode; | 48 | int dithering_mode; |
73 | int dithering_depth; | ||
74 | int scaling_mode; | 49 | int scaling_mode; |
75 | bool scaling_full; | ||
76 | enum nouveau_underscan_type underscan; | ||
77 | u32 underscan_hborder; | ||
78 | u32 underscan_vborder; | ||
79 | 50 | ||
80 | struct nouveau_encoder *detected_encoder; | 51 | struct nouveau_encoder *detected_encoder; |
81 | struct edid *edid; | 52 | struct edid *edid; |
@@ -109,5 +80,74 @@ nouveau_connector_create(struct drm_device *, int index); | |||
109 | extern int nouveau_tv_disable; | 80 | extern int nouveau_tv_disable; |
110 | extern int nouveau_ignorelid; | 81 | extern int nouveau_ignorelid; |
111 | extern int nouveau_duallink; | 82 | extern int nouveau_duallink; |
83 | extern int nouveau_hdmimhz; | ||
84 | |||
85 | #include <drm/drm_crtc.h> | ||
86 | #define nouveau_conn_atom(p) \ | ||
87 | container_of((p), struct nouveau_conn_atom, state) | ||
88 | |||
89 | struct nouveau_conn_atom { | ||
90 | struct drm_connector_state state; | ||
91 | |||
92 | struct { | ||
93 | /* The enum values specifically defined here match nv50/gf119 | ||
94 | * hw values, and the code relies on this. | ||
95 | */ | ||
96 | enum { | ||
97 | DITHERING_MODE_OFF = 0x00, | ||
98 | DITHERING_MODE_ON = 0x01, | ||
99 | DITHERING_MODE_DYNAMIC2X2 = 0x10 | DITHERING_MODE_ON, | ||
100 | DITHERING_MODE_STATIC2X2 = 0x18 | DITHERING_MODE_ON, | ||
101 | DITHERING_MODE_TEMPORAL = 0x20 | DITHERING_MODE_ON, | ||
102 | DITHERING_MODE_AUTO | ||
103 | } mode; | ||
104 | enum { | ||
105 | DITHERING_DEPTH_6BPC = 0x00, | ||
106 | DITHERING_DEPTH_8BPC = 0x02, | ||
107 | DITHERING_DEPTH_AUTO | ||
108 | } depth; | ||
109 | } dither; | ||
110 | |||
111 | struct { | ||
112 | int mode; /* DRM_MODE_SCALE_* */ | ||
113 | struct { | ||
114 | enum { | ||
115 | UNDERSCAN_OFF, | ||
116 | UNDERSCAN_ON, | ||
117 | UNDERSCAN_AUTO, | ||
118 | } mode; | ||
119 | u32 hborder; | ||
120 | u32 vborder; | ||
121 | } underscan; | ||
122 | bool full; | ||
123 | } scaler; | ||
124 | |||
125 | struct { | ||
126 | int color_vibrance; | ||
127 | int vibrant_hue; | ||
128 | } procamp; | ||
129 | |||
130 | union { | ||
131 | struct { | ||
132 | bool dither:1; | ||
133 | bool scaler:1; | ||
134 | bool procamp:1; | ||
135 | }; | ||
136 | u8 mask; | ||
137 | } set; | ||
138 | }; | ||
112 | 139 | ||
140 | void nouveau_conn_attach_properties(struct drm_connector *); | ||
141 | void nouveau_conn_reset(struct drm_connector *); | ||
142 | struct drm_connector_state * | ||
143 | nouveau_conn_atomic_duplicate_state(struct drm_connector *); | ||
144 | void nouveau_conn_atomic_destroy_state(struct drm_connector *, | ||
145 | struct drm_connector_state *); | ||
146 | int nouveau_conn_atomic_set_property(struct drm_connector *, | ||
147 | struct drm_connector_state *, | ||
148 | struct drm_property *, u64); | ||
149 | int nouveau_conn_atomic_get_property(struct drm_connector *, | ||
150 | const struct drm_connector_state *, | ||
151 | struct drm_property *, u64 *); | ||
152 | struct drm_display_mode *nouveau_conn_native_mode(struct drm_connector *); | ||
113 | #endif /* __NOUVEAU_CONNECTOR_H__ */ | 153 | #endif /* __NOUVEAU_CONNECTOR_H__ */ |
diff --git a/drivers/gpu/drm/nouveau/nouveau_crtc.h b/drivers/gpu/drm/nouveau/nouveau_crtc.h
index 863f10b8d818..050fcf30a0d2 100644
--- a/drivers/gpu/drm/nouveau/nouveau_crtc.h
+++ b/drivers/gpu/drm/nouveau/nouveau_crtc.h
@@ -38,8 +38,6 @@ struct nouveau_crtc { | |||
38 | uint32_t dpms_saved_fp_control; | 38 | uint32_t dpms_saved_fp_control; |
39 | uint32_t fp_users; | 39 | uint32_t fp_users; |
40 | int saturation; | 40 | int saturation; |
41 | int color_vibrance; | ||
42 | int vibrant_hue; | ||
43 | int sharpness; | 41 | int sharpness; |
44 | int last_dpms; | 42 | int last_dpms; |
45 | 43 | ||
@@ -54,7 +52,6 @@ struct nouveau_crtc { | |||
54 | 52 | ||
55 | struct { | 53 | struct { |
56 | struct nouveau_bo *nvbo; | 54 | struct nouveau_bo *nvbo; |
57 | bool visible; | ||
58 | uint32_t offset; | 55 | uint32_t offset; |
59 | void (*set_offset)(struct nouveau_crtc *, uint32_t offset); | 56 | void (*set_offset)(struct nouveau_crtc *, uint32_t offset); |
60 | void (*set_pos)(struct nouveau_crtc *, int x, int y); | 57 | void (*set_pos)(struct nouveau_crtc *, int x, int y); |
@@ -70,10 +67,6 @@ struct nouveau_crtc { | |||
70 | int depth; | 67 | int depth; |
71 | } lut; | 68 | } lut; |
72 | 69 | ||
73 | int (*set_dither)(struct nouveau_crtc *crtc, bool update); | ||
74 | int (*set_scale)(struct nouveau_crtc *crtc, bool update); | ||
75 | int (*set_color_vibrance)(struct nouveau_crtc *crtc, bool update); | ||
76 | |||
77 | void (*save)(struct drm_crtc *crtc); | 70 | void (*save)(struct drm_crtc *crtc); |
78 | void (*restore)(struct drm_crtc *crtc); | 71 | void (*restore)(struct drm_crtc *crtc); |
79 | }; | 72 | }; |
diff --git a/drivers/gpu/drm/nouveau/nouveau_display.c b/drivers/gpu/drm/nouveau/nouveau_display.c
index afbf557b23d4..75c90a8da18a 100644
--- a/drivers/gpu/drm/nouveau/nouveau_display.c
+++ b/drivers/gpu/drm/nouveau/nouveau_display.c
@@ -25,6 +25,8 @@ | |||
25 | */ | 25 | */ |
26 | 26 | ||
27 | #include <drm/drmP.h> | 27 | #include <drm/drmP.h> |
28 | #include <drm/drm_atomic.h> | ||
29 | #include <drm/drm_atomic_helper.h> | ||
28 | #include <drm/drm_crtc_helper.h> | 30 | #include <drm/drm_crtc_helper.h> |
29 | 31 | ||
30 | #include <nvif/class.h> | 32 | #include <nvif/class.h> |
@@ -92,7 +94,7 @@ calc(int blanks, int blanke, int total, int line) | |||
92 | return line; | 94 | return line; |
93 | } | 95 | } |
94 | 96 | ||
95 | int | 97 | static int |
96 | nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos, | 98 | nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos, |
97 | ktime_t *stime, ktime_t *etime) | 99 | ktime_t *stime, ktime_t *etime) |
98 | { | 100 | { |
@@ -158,9 +160,13 @@ nouveau_display_vblstamp(struct drm_device *dev, unsigned int pipe, | |||
158 | 160 | ||
159 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { | 161 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { |
160 | if (nouveau_crtc(crtc)->index == pipe) { | 162 | if (nouveau_crtc(crtc)->index == pipe) { |
163 | struct drm_display_mode *mode; | ||
164 | if (dev->mode_config.funcs->atomic_commit) | ||
165 | mode = &crtc->state->adjusted_mode; | ||
166 | else | ||
167 | mode = &crtc->hwmode; | ||
161 | return drm_calc_vbltimestamp_from_scanoutpos(dev, | 168 | return drm_calc_vbltimestamp_from_scanoutpos(dev, |
162 | pipe, max_error, time, flags, | 169 | pipe, max_error, time, flags, mode); |
163 | &crtc->hwmode); | ||
164 | } | 170 | } |
165 | } | 171 | } |
166 | 172 | ||
@@ -217,10 +223,6 @@ static void | |||
217 | nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb) | 223 | nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb) |
218 | { | 224 | { |
219 | struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb); | 225 | struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb); |
220 | struct nouveau_display *disp = nouveau_display(drm_fb->dev); | ||
221 | |||
222 | if (disp->fb_dtor) | ||
223 | disp->fb_dtor(drm_fb); | ||
224 | 226 | ||
225 | if (fb->nvbo) | 227 | if (fb->nvbo) |
226 | drm_gem_object_unreference_unlocked(&fb->nvbo->gem); | 228 | drm_gem_object_unreference_unlocked(&fb->nvbo->gem); |
@@ -245,57 +247,45 @@ static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = { | |||
245 | }; | 247 | }; |
246 | 248 | ||
247 | int | 249 | int |
248 | nouveau_framebuffer_init(struct drm_device *dev, | 250 | nouveau_framebuffer_new(struct drm_device *dev, |
249 | struct nouveau_framebuffer *nv_fb, | 251 | const struct drm_mode_fb_cmd2 *mode_cmd, |
250 | const struct drm_mode_fb_cmd2 *mode_cmd, | 252 | struct nouveau_bo *nvbo, |
251 | struct nouveau_bo *nvbo) | 253 | struct nouveau_framebuffer **pfb) |
252 | { | 254 | { |
253 | struct nouveau_display *disp = nouveau_display(dev); | 255 | struct nouveau_framebuffer *fb; |
254 | struct drm_framebuffer *fb = &nv_fb->base; | ||
255 | int ret; | 256 | int ret; |
256 | 257 | ||
257 | drm_helper_mode_fill_fb_struct(fb, mode_cmd); | 258 | if (!(fb = *pfb = kzalloc(sizeof(*fb), GFP_KERNEL))) |
258 | nv_fb->nvbo = nvbo; | 259 | return -ENOMEM; |
259 | |||
260 | ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs); | ||
261 | if (ret) | ||
262 | return ret; | ||
263 | 260 | ||
264 | if (disp->fb_ctor) { | 261 | drm_helper_mode_fill_fb_struct(&fb->base, mode_cmd); |
265 | ret = disp->fb_ctor(fb); | 262 | fb->nvbo = nvbo; |
266 | if (ret) | ||
267 | disp->fb_dtor(fb); | ||
268 | } | ||
269 | 263 | ||
264 | ret = drm_framebuffer_init(dev, &fb->base, &nouveau_framebuffer_funcs); | ||
265 | if (ret) | ||
266 | kfree(fb); | ||
270 | return ret; | 267 | return ret; |
271 | } | 268 | } |
272 | 269 | ||
273 | static struct drm_framebuffer * | 270 | struct drm_framebuffer * |
274 | nouveau_user_framebuffer_create(struct drm_device *dev, | 271 | nouveau_user_framebuffer_create(struct drm_device *dev, |
275 | struct drm_file *file_priv, | 272 | struct drm_file *file_priv, |
276 | const struct drm_mode_fb_cmd2 *mode_cmd) | 273 | const struct drm_mode_fb_cmd2 *mode_cmd) |
277 | { | 274 | { |
278 | struct nouveau_framebuffer *nouveau_fb; | 275 | struct nouveau_framebuffer *fb; |
276 | struct nouveau_bo *nvbo; | ||
279 | struct drm_gem_object *gem; | 277 | struct drm_gem_object *gem; |
280 | int ret = -ENOMEM; | 278 | int ret; |
281 | 279 | ||
282 | gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]); | 280 | gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]); |
283 | if (!gem) | 281 | if (!gem) |
284 | return ERR_PTR(-ENOENT); | 282 | return ERR_PTR(-ENOENT); |
283 | nvbo = nouveau_gem_object(gem); | ||
285 | 284 | ||
286 | nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL); | 285 | ret = nouveau_framebuffer_new(dev, mode_cmd, nvbo, &fb); |
287 | if (!nouveau_fb) | 286 | if (ret == 0) |
288 | goto err_unref; | 287 | return &fb->base; |
289 | |||
290 | ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem)); | ||
291 | if (ret) | ||
292 | goto err; | ||
293 | |||
294 | return &nouveau_fb->base; | ||
295 | 288 | ||
296 | err: | ||
297 | kfree(nouveau_fb); | ||
298 | err_unref: | ||
299 | drm_gem_object_unreference_unlocked(gem); | 289 | drm_gem_object_unreference_unlocked(gem); |
300 | return ERR_PTR(ret); | 290 | return ERR_PTR(ret); |
301 | } | 291 | } |
@@ -385,13 +375,16 @@ nouveau_display_init(struct drm_device *dev) | |||
385 | } | 375 | } |
386 | 376 | ||
387 | void | 377 | void |
388 | nouveau_display_fini(struct drm_device *dev) | 378 | nouveau_display_fini(struct drm_device *dev, bool suspend) |
389 | { | 379 | { |
390 | struct nouveau_display *disp = nouveau_display(dev); | 380 | struct nouveau_display *disp = nouveau_display(dev); |
391 | struct nouveau_drm *drm = nouveau_drm(dev); | 381 | struct nouveau_drm *drm = nouveau_drm(dev); |
392 | struct drm_connector *connector; | 382 | struct drm_connector *connector; |
393 | int head; | 383 | int head; |
394 | 384 | ||
385 | if (!suspend) | ||
386 | drm_crtc_force_disable_all(dev); | ||
387 | |||
395 | /* Make sure that drm and hw vblank irqs get properly disabled. */ | 388 | /* Make sure that drm and hw vblank irqs get properly disabled. */ |
396 | for (head = 0; head < dev->mode_config.num_crtc; head++) | 389 | for (head = 0; head < dev->mode_config.num_crtc; head++) |
397 | drm_vblank_off(dev, head); | 390 | drm_vblank_off(dev, head); |
@@ -530,6 +523,8 @@ nouveau_display_create(struct drm_device *dev) | |||
530 | if (ret) | 523 | if (ret) |
531 | goto disp_create_err; | 524 | goto disp_create_err; |
532 | 525 | ||
526 | drm_mode_config_reset(dev); | ||
527 | |||
533 | if (dev->mode_config.num_crtc) { | 528 | if (dev->mode_config.num_crtc) { |
534 | ret = nouveau_display_vblank_init(dev); | 529 | ret = nouveau_display_vblank_init(dev); |
535 | if (ret) | 530 | if (ret) |
@@ -556,7 +551,6 @@ nouveau_display_destroy(struct drm_device *dev) | |||
556 | nouveau_display_vblank_fini(dev); | 551 | nouveau_display_vblank_fini(dev); |
557 | 552 | ||
558 | drm_kms_helper_poll_fini(dev); | 553 | drm_kms_helper_poll_fini(dev); |
559 | drm_crtc_force_disable_all(dev); | ||
560 | drm_mode_config_cleanup(dev); | 554 | drm_mode_config_cleanup(dev); |
561 | 555 | ||
562 | if (disp->dtor) | 556 | if (disp->dtor) |
@@ -568,12 +562,138 @@ nouveau_display_destroy(struct drm_device *dev) | |||
568 | kfree(disp); | 562 | kfree(disp); |
569 | } | 563 | } |
570 | 564 | ||
565 | static int | ||
566 | nouveau_atomic_disable_connector(struct drm_atomic_state *state, | ||
567 | struct drm_connector *connector) | ||
568 | { | ||
569 | struct drm_connector_state *connector_state; | ||
570 | struct drm_crtc *crtc; | ||
571 | struct drm_crtc_state *crtc_state; | ||
572 | struct drm_plane_state *plane_state; | ||
573 | struct drm_plane *plane; | ||
574 | int ret; | ||
575 | |||
576 | if (!(crtc = connector->state->crtc)) | ||
577 | return 0; | ||
578 | |||
579 | connector_state = drm_atomic_get_connector_state(state, connector); | ||
580 | if (IS_ERR(connector_state)) | ||
581 | return PTR_ERR(connector_state); | ||
582 | |||
583 | ret = drm_atomic_set_crtc_for_connector(connector_state, NULL); | ||
584 | if (ret) | ||
585 | return ret; | ||
586 | |||
587 | crtc_state = drm_atomic_get_crtc_state(state, crtc); | ||
588 | if (IS_ERR(crtc_state)) | ||
589 | return PTR_ERR(crtc_state); | ||
590 | |||
591 | ret = drm_atomic_set_mode_for_crtc(crtc_state, NULL); | ||
592 | if (ret) | ||
593 | return ret; | ||
594 | |||
595 | crtc_state->active = false; | ||
596 | |||
597 | drm_for_each_plane_mask(plane, connector->dev, crtc_state->plane_mask) { | ||
598 | plane_state = drm_atomic_get_plane_state(state, plane); | ||
599 | if (IS_ERR(plane_state)) | ||
600 | return PTR_ERR(plane_state); | ||
601 | |||
602 | ret = drm_atomic_set_crtc_for_plane(plane_state, NULL); | ||
603 | if (ret) | ||
604 | return ret; | ||
605 | |||
606 | drm_atomic_set_fb_for_plane(plane_state, NULL); | ||
607 | } | ||
608 | |||
609 | return 0; | ||
610 | } | ||
611 | |||
612 | static int | ||
613 | nouveau_atomic_disable(struct drm_device *dev, | ||
614 | struct drm_modeset_acquire_ctx *ctx) | ||
615 | { | ||
616 | struct drm_atomic_state *state; | ||
617 | struct drm_connector *connector; | ||
618 | int ret; | ||
619 | |||
620 | state = drm_atomic_state_alloc(dev); | ||
621 | if (!state) | ||
622 | return -ENOMEM; | ||
623 | |||
624 | state->acquire_ctx = ctx; | ||
625 | |||
626 | drm_for_each_connector(connector, dev) { | ||
627 | ret = nouveau_atomic_disable_connector(state, connector); | ||
628 | if (ret) | ||
629 | break; | ||
630 | } | ||
631 | |||
632 | if (ret == 0) | ||
633 | ret = drm_atomic_commit(state); | ||
634 | drm_atomic_state_put(state); | ||
635 | return ret; | ||
636 | } | ||
637 | |||
638 | static struct drm_atomic_state * | ||
639 | nouveau_atomic_suspend(struct drm_device *dev) | ||
640 | { | ||
641 | struct drm_modeset_acquire_ctx ctx; | ||
642 | struct drm_atomic_state *state; | ||
643 | int ret; | ||
644 | |||
645 | drm_modeset_acquire_init(&ctx, 0); | ||
646 | |||
647 | retry: | ||
648 | ret = drm_modeset_lock_all_ctx(dev, &ctx); | ||
649 | if (ret < 0) { | ||
650 | state = ERR_PTR(ret); | ||
651 | goto unlock; | ||
652 | } | ||
653 | |||
654 | state = drm_atomic_helper_duplicate_state(dev, &ctx); | ||
655 | if (IS_ERR(state)) | ||
656 | goto unlock; | ||
657 | |||
658 | ret = nouveau_atomic_disable(dev, &ctx); | ||
659 | if (ret < 0) { | ||
660 | drm_atomic_state_put(state); | ||
661 | state = ERR_PTR(ret); | ||
662 | goto unlock; | ||
663 | } | ||
664 | |||
665 | unlock: | ||
666 | if (PTR_ERR(state) == -EDEADLK) { | ||
667 | drm_modeset_backoff(&ctx); | ||
668 | goto retry; | ||
669 | } | ||
670 | |||
671 | drm_modeset_drop_locks(&ctx); | ||
672 | drm_modeset_acquire_fini(&ctx); | ||
673 | return state; | ||
674 | } | ||
675 | |||
571 | int | 676 | int |
572 | nouveau_display_suspend(struct drm_device *dev, bool runtime) | 677 | nouveau_display_suspend(struct drm_device *dev, bool runtime) |
573 | { | 678 | { |
679 | struct nouveau_display *disp = nouveau_display(dev); | ||
574 | struct drm_crtc *crtc; | 680 | struct drm_crtc *crtc; |
575 | 681 | ||
576 | nouveau_display_fini(dev); | 682 | if (dev->mode_config.funcs->atomic_commit) { |
683 | if (!runtime) { | ||
684 | disp->suspend = nouveau_atomic_suspend(dev); | ||
685 | if (IS_ERR(disp->suspend)) { | ||
686 | int ret = PTR_ERR(disp->suspend); | ||
687 | disp->suspend = NULL; | ||
688 | return ret; | ||
689 | } | ||
690 | } | ||
691 | |||
692 | nouveau_display_fini(dev, true); | ||
693 | return 0; | ||
694 | } | ||
695 | |||
696 | nouveau_display_fini(dev, true); | ||
577 | 697 | ||
578 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { | 698 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { |
579 | struct nouveau_framebuffer *nouveau_fb; | 699 | struct nouveau_framebuffer *nouveau_fb; |
@@ -600,10 +720,20 @@ nouveau_display_suspend(struct drm_device *dev, bool runtime) | |||
600 | void | 720 | void |
601 | nouveau_display_resume(struct drm_device *dev, bool runtime) | 721 | nouveau_display_resume(struct drm_device *dev, bool runtime) |
602 | { | 722 | { |
723 | struct nouveau_display *disp = nouveau_display(dev); | ||
603 | struct nouveau_drm *drm = nouveau_drm(dev); | 724 | struct nouveau_drm *drm = nouveau_drm(dev); |
604 | struct drm_crtc *crtc; | 725 | struct drm_crtc *crtc; |
605 | int ret, head; | 726 | int ret, head; |
606 | 727 | ||
728 | if (dev->mode_config.funcs->atomic_commit) { | ||
729 | nouveau_display_init(dev); | ||
730 | if (disp->suspend) { | ||
731 | drm_atomic_helper_resume(dev, disp->suspend); | ||
732 | disp->suspend = NULL; | ||
733 | } | ||
734 | return; | ||
735 | } | ||
736 | |||
607 | /* re-pin fb/cursors */ | 737 | /* re-pin fb/cursors */ |
608 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { | 738 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { |
609 | struct nouveau_framebuffer *nouveau_fb; | 739 | struct nouveau_framebuffer *nouveau_fb; |
@@ -692,10 +822,7 @@ nouveau_page_flip_emit(struct nouveau_channel *chan, | |||
692 | if (ret) | 822 | if (ret) |
693 | goto fail; | 823 | goto fail; |
694 | 824 | ||
695 | if (drm->device.info.family < NV_DEVICE_INFO_V0_FERMI) | 825 | BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1); |
696 | BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1); | ||
697 | else | ||
698 | BEGIN_NVC0(chan, FermiSw, NV_SW_PAGE_FLIP, 1); | ||
699 | OUT_RING (chan, 0x00000000); | 826 | OUT_RING (chan, 0x00000000); |
700 | FIRE_RING (chan); | 827 | FIRE_RING (chan); |
701 | 828 | ||
@@ -724,6 +851,8 @@ nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb, | |||
724 | struct nouveau_channel *chan; | 851 | struct nouveau_channel *chan; |
725 | struct nouveau_cli *cli; | 852 | struct nouveau_cli *cli; |
726 | struct nouveau_fence *fence; | 853 | struct nouveau_fence *fence; |
854 | struct nv04_display *dispnv04 = nv04_display(dev); | ||
855 | int head = nouveau_crtc(crtc)->index; | ||
727 | int ret; | 856 | int ret; |
728 | 857 | ||
729 | chan = drm->channel; | 858 | chan = drm->channel; |
@@ -770,32 +899,23 @@ nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb, | |||
770 | drm_crtc_vblank_get(crtc); | 899 | drm_crtc_vblank_get(crtc); |
771 | 900 | ||
772 | /* Emit a page flip */ | 901 | /* Emit a page flip */ |
773 | if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA) { | 902 | if (swap_interval) { |
774 | ret = nv50_display_flip_next(crtc, fb, chan, swap_interval); | 903 | ret = RING_SPACE(chan, 8); |
775 | if (ret) | 904 | if (ret) |
776 | goto fail_unreserve; | 905 | goto fail_unreserve; |
777 | } else { | ||
778 | struct nv04_display *dispnv04 = nv04_display(dev); | ||
779 | int head = nouveau_crtc(crtc)->index; | ||
780 | |||
781 | if (swap_interval) { | ||
782 | ret = RING_SPACE(chan, 8); | ||
783 | if (ret) | ||
784 | goto fail_unreserve; | ||
785 | |||
786 | BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1); | ||
787 | OUT_RING (chan, 0); | ||
788 | BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1); | ||
789 | OUT_RING (chan, head); | ||
790 | BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1); | ||
791 | OUT_RING (chan, 0); | ||
792 | BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1); | ||
793 | OUT_RING (chan, 0); | ||
794 | } | ||
795 | 906 | ||
796 | nouveau_bo_ref(new_bo, &dispnv04->image[head]); | 907 | BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1); |
908 | OUT_RING (chan, 0); | ||
909 | BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1); | ||
910 | OUT_RING (chan, head); | ||
911 | BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1); | ||
912 | OUT_RING (chan, 0); | ||
913 | BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1); | ||
914 | OUT_RING (chan, 0); | ||
797 | } | 915 | } |
798 | 916 | ||
917 | nouveau_bo_ref(new_bo, &dispnv04->image[head]); | ||
918 | |||
799 | ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence); | 919 | ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence); |
800 | if (ret) | 920 | if (ret) |
801 | goto fail_unreserve; | 921 | goto fail_unreserve; |
@@ -843,16 +963,8 @@ nouveau_finish_page_flip(struct nouveau_channel *chan, | |||
843 | 963 | ||
844 | s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head); | 964 | s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head); |
845 | if (s->event) { | 965 | if (s->event) { |
846 | if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) { | 966 | drm_crtc_arm_vblank_event(s->crtc, s->event); |
847 | drm_crtc_arm_vblank_event(s->crtc, s->event); | 967 | } else { |
848 | } else { | ||
849 | drm_crtc_send_vblank_event(s->crtc, s->event); | ||
850 | |||
851 | /* Give up ownership of vblank for page-flipped crtc */ | ||
852 | drm_crtc_vblank_put(s->crtc); | ||
853 | } | ||
854 | } | ||
855 | else { | ||
856 | /* Give up ownership of vblank for page-flipped crtc */ | 968 | /* Give up ownership of vblank for page-flipped crtc */ |
857 | drm_crtc_vblank_put(s->crtc); | 969 | drm_crtc_vblank_put(s->crtc); |
858 | } | 970 | } |
@@ -874,12 +986,10 @@ nouveau_flip_complete(struct nvif_notify *notify) | |||
874 | struct nouveau_page_flip_state state; | 986 | struct nouveau_page_flip_state state; |
875 | 987 | ||
876 | if (!nouveau_finish_page_flip(chan, &state)) { | 988 | if (!nouveau_finish_page_flip(chan, &state)) { |
877 | if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) { | 989 | nv_set_crtc_base(drm->dev, drm_crtc_index(state.crtc), |
878 | nv_set_crtc_base(drm->dev, drm_crtc_index(state.crtc), | 990 | state.offset + state.crtc->y * |
879 | state.offset + state.crtc->y * | 991 | state.pitch + state.crtc->x * |
880 | state.pitch + state.crtc->x * | 992 | state.bpp / 8); |
881 | state.bpp / 8); | ||
882 | } | ||
883 | } | 993 | } |
884 | 994 | ||
885 | return NVIF_NOTIFY_KEEP; | 995 | return NVIF_NOTIFY_KEEP; |
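Note: the nouveau_display_suspend()/nouveau_display_resume() hunks above open-code the standard DRM atomic suspend pattern — duplicate the full atomic state, disable all outputs through a throwaway commit, then replay the saved state on resume. A minimal, hypothetical sketch of the same pattern using the generic helpers; the function names below are made up for illustration and the modeset-lock retry loop shown in the diff is omitted:

#include <drm/drm_atomic_helper.h>

static struct drm_atomic_state *saved_state;   /* analogous to disp->suspend above */

static int example_suspend(struct drm_device *dev)
{
	/* drm_atomic_helper_suspend() bundles the duplicate-state + disable-all
	 * steps that nouveau_atomic_suspend()/nouveau_atomic_disable() perform. */
	saved_state = drm_atomic_helper_suspend(dev);
	return IS_ERR(saved_state) ? PTR_ERR(saved_state) : 0;
}

static int example_resume(struct drm_device *dev)
{
	/* Replay the state captured at suspend time, as the resume hunk does. */
	return drm_atomic_helper_resume(dev, saved_state);
}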
diff --git a/drivers/gpu/drm/nouveau/nouveau_display.h b/drivers/gpu/drm/nouveau/nouveau_display.h index 0420ee861ea4..330fe0fc5c11 100644 --- a/drivers/gpu/drm/nouveau/nouveau_display.h +++ b/drivers/gpu/drm/nouveau/nouveau_display.h | |||
@@ -22,8 +22,9 @@ nouveau_framebuffer(struct drm_framebuffer *fb) | |||
22 | return container_of(fb, struct nouveau_framebuffer, base); | 22 | return container_of(fb, struct nouveau_framebuffer, base); |
23 | } | 23 | } |
24 | 24 | ||
25 | int nouveau_framebuffer_init(struct drm_device *, struct nouveau_framebuffer *, | 25 | int nouveau_framebuffer_new(struct drm_device *, |
26 | const struct drm_mode_fb_cmd2 *, struct nouveau_bo *); | 26 | const struct drm_mode_fb_cmd2 *, |
27 | struct nouveau_bo *, struct nouveau_framebuffer **); | ||
27 | 28 | ||
28 | struct nouveau_page_flip_state { | 29 | struct nouveau_page_flip_state { |
29 | struct list_head head; | 30 | struct list_head head; |
@@ -39,9 +40,6 @@ struct nouveau_display { | |||
39 | int (*init)(struct drm_device *); | 40 | int (*init)(struct drm_device *); |
40 | void (*fini)(struct drm_device *); | 41 | void (*fini)(struct drm_device *); |
41 | 42 | ||
42 | int (*fb_ctor)(struct drm_framebuffer *); | ||
43 | void (*fb_dtor)(struct drm_framebuffer *); | ||
44 | |||
45 | struct nvif_object disp; | 43 | struct nvif_object disp; |
46 | 44 | ||
47 | struct drm_property *dithering_mode; | 45 | struct drm_property *dithering_mode; |
@@ -52,6 +50,8 @@ struct nouveau_display { | |||
52 | /* not really hue and saturation: */ | 50 | /* not really hue and saturation: */ |
53 | struct drm_property *vibrant_hue_property; | 51 | struct drm_property *vibrant_hue_property; |
54 | struct drm_property *color_vibrance_property; | 52 | struct drm_property *color_vibrance_property; |
53 | |||
54 | struct drm_atomic_state *suspend; | ||
55 | }; | 55 | }; |
56 | 56 | ||
57 | static inline struct nouveau_display * | 57 | static inline struct nouveau_display * |
@@ -63,7 +63,7 @@ nouveau_display(struct drm_device *dev) | |||
63 | int nouveau_display_create(struct drm_device *dev); | 63 | int nouveau_display_create(struct drm_device *dev); |
64 | void nouveau_display_destroy(struct drm_device *dev); | 64 | void nouveau_display_destroy(struct drm_device *dev); |
65 | int nouveau_display_init(struct drm_device *dev); | 65 | int nouveau_display_init(struct drm_device *dev); |
66 | void nouveau_display_fini(struct drm_device *dev); | 66 | void nouveau_display_fini(struct drm_device *dev, bool suspend); |
67 | int nouveau_display_suspend(struct drm_device *dev, bool runtime); | 67 | int nouveau_display_suspend(struct drm_device *dev, bool runtime); |
68 | void nouveau_display_resume(struct drm_device *dev, bool runtime); | 68 | void nouveau_display_resume(struct drm_device *dev, bool runtime); |
69 | int nouveau_display_vblank_enable(struct drm_device *, unsigned int); | 69 | int nouveau_display_vblank_enable(struct drm_device *, unsigned int); |
@@ -103,4 +103,7 @@ nouveau_backlight_exit(struct drm_device *dev) { | |||
103 | } | 103 | } |
104 | #endif | 104 | #endif |
105 | 105 | ||
106 | struct drm_framebuffer * | ||
107 | nouveau_user_framebuffer_create(struct drm_device *, struct drm_file *, | ||
108 | const struct drm_mode_fb_cmd2 *); | ||
106 | #endif | 109 | #endif |
diff --git a/drivers/gpu/drm/nouveau/nouveau_dp.c b/drivers/gpu/drm/nouveau/nouveau_dp.c index 87d52d36f4fc..0d052e1660f8 100644 --- a/drivers/gpu/drm/nouveau/nouveau_dp.c +++ b/drivers/gpu/drm/nouveau/nouveau_dp.c | |||
@@ -30,6 +30,13 @@ | |||
30 | #include "nouveau_encoder.h" | 30 | #include "nouveau_encoder.h" |
31 | #include "nouveau_crtc.h" | 31 | #include "nouveau_crtc.h" |
32 | 32 | ||
33 | #include <nvif/class.h> | ||
34 | #include <nvif/cl5070.h> | ||
35 | |||
36 | MODULE_PARM_DESC(mst, "Enable DisplayPort multi-stream (default: enabled)"); | ||
37 | static int nouveau_mst = 1; | ||
38 | module_param_named(mst, nouveau_mst, int, 0400); | ||
39 | |||
33 | static void | 40 | static void |
34 | nouveau_dp_probe_oui(struct drm_device *dev, struct nvkm_i2c_aux *aux, u8 *dpcd) | 41 | nouveau_dp_probe_oui(struct drm_device *dev, struct nvkm_i2c_aux *aux, u8 *dpcd) |
35 | { | 42 | { |
@@ -55,14 +62,14 @@ nouveau_dp_detect(struct nouveau_encoder *nv_encoder) | |||
55 | struct drm_device *dev = nv_encoder->base.base.dev; | 62 | struct drm_device *dev = nv_encoder->base.base.dev; |
56 | struct nouveau_drm *drm = nouveau_drm(dev); | 63 | struct nouveau_drm *drm = nouveau_drm(dev); |
57 | struct nvkm_i2c_aux *aux; | 64 | struct nvkm_i2c_aux *aux; |
58 | u8 *dpcd = nv_encoder->dp.dpcd; | 65 | u8 dpcd[8]; |
59 | int ret; | 66 | int ret; |
60 | 67 | ||
61 | aux = nv_encoder->aux; | 68 | aux = nv_encoder->aux; |
62 | if (!aux) | 69 | if (!aux) |
63 | return -ENODEV; | 70 | return -ENODEV; |
64 | 71 | ||
65 | ret = nvkm_rdaux(aux, DP_DPCD_REV, dpcd, 8); | 72 | ret = nvkm_rdaux(aux, DP_DPCD_REV, dpcd, sizeof(dpcd)); |
66 | if (ret) | 73 | if (ret) |
67 | return ret; | 74 | return ret; |
68 | 75 | ||
@@ -84,5 +91,11 @@ nouveau_dp_detect(struct nouveau_encoder *nv_encoder) | |||
84 | nv_encoder->dp.link_nr, nv_encoder->dp.link_bw); | 91 | nv_encoder->dp.link_nr, nv_encoder->dp.link_bw); |
85 | 92 | ||
86 | nouveau_dp_probe_oui(dev, aux, dpcd); | 93 | nouveau_dp_probe_oui(dev, aux, dpcd); |
87 | return 0; | 94 | |
95 | ret = nv50_mstm_detect(nv_encoder->dp.mstm, dpcd, nouveau_mst); | ||
96 | if (ret == 1) | ||
97 | return NOUVEAU_DP_MST; | ||
98 | if (ret == 0) | ||
99 | return NOUVEAU_DP_SST; | ||
100 | return ret; | ||
88 | } | 101 | } |
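Note: with the hunk above, nouveau_dp_detect() now returns a tri-state result rather than plain 0/errno: a negative errno on failure, NOUVEAU_DP_MST when nv50_mstm_detect() claims the sink for multi-stream, or NOUVEAU_DP_SST for an ordinary single-stream sink. A hypothetical caller (illustrative name only, not driver code) would branch on it like this:

static int example_dp_probe(struct nouveau_encoder *nv_encoder)
{
	int ret = nouveau_dp_detect(nv_encoder);

	if (ret < 0)
		return ret;             /* AUX read failed, no DPCD, etc. */
	if (ret == NOUVEAU_DP_MST)
		return 0;               /* sink is driven via the MST topology */
	return 0;                       /* NOUVEAU_DP_SST: single-stream path */
}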
diff --git a/drivers/gpu/drm/nouveau/nouveau_drm.c b/drivers/gpu/drm/nouveau/nouveau_drm.c index 6adf94789417..9876e6fcfcf0 100644 --- a/drivers/gpu/drm/nouveau/nouveau_drm.c +++ b/drivers/gpu/drm/nouveau/nouveau_drm.c | |||
@@ -519,7 +519,7 @@ nouveau_drm_unload(struct drm_device *dev) | |||
519 | nouveau_debugfs_fini(drm); | 519 | nouveau_debugfs_fini(drm); |
520 | 520 | ||
521 | if (dev->mode_config.num_crtc) | 521 | if (dev->mode_config.num_crtc) |
522 | nouveau_display_fini(dev); | 522 | nouveau_display_fini(dev, false); |
523 | nouveau_display_destroy(dev); | 523 | nouveau_display_destroy(dev); |
524 | 524 | ||
525 | nouveau_bios_takedown(dev); | 525 | nouveau_bios_takedown(dev); |
@@ -1037,6 +1037,7 @@ static void nouveau_display_options(void) | |||
1037 | DRM_DEBUG_DRIVER("... modeset : %d\n", nouveau_modeset); | 1037 | DRM_DEBUG_DRIVER("... modeset : %d\n", nouveau_modeset); |
1038 | DRM_DEBUG_DRIVER("... runpm : %d\n", nouveau_runtime_pm); | 1038 | DRM_DEBUG_DRIVER("... runpm : %d\n", nouveau_runtime_pm); |
1039 | DRM_DEBUG_DRIVER("... vram_pushbuf : %d\n", nouveau_vram_pushbuf); | 1039 | DRM_DEBUG_DRIVER("... vram_pushbuf : %d\n", nouveau_vram_pushbuf); |
1040 | DRM_DEBUG_DRIVER("... hdmimhz : %d\n", nouveau_hdmimhz); | ||
1040 | } | 1041 | } |
1041 | 1042 | ||
1042 | static const struct dev_pm_ops nouveau_pm_ops = { | 1043 | static const struct dev_pm_ops nouveau_pm_ops = { |
diff --git a/drivers/gpu/drm/nouveau/nouveau_drv.h b/drivers/gpu/drm/nouveau/nouveau_drv.h index c0e2b3207503..4cd47bae73c7 100644 --- a/drivers/gpu/drm/nouveau/nouveau_drv.h +++ b/drivers/gpu/drm/nouveau/nouveau_drv.h | |||
@@ -204,6 +204,10 @@ void nouveau_drm_device_remove(struct drm_device *dev); | |||
204 | if (unlikely(drm_debug & DRM_UT_DRIVER)) \ | 204 | if (unlikely(drm_debug & DRM_UT_DRIVER)) \ |
205 | NV_PRINTK(info, &(drm)->client, f, ##a); \ | 205 | NV_PRINTK(info, &(drm)->client, f, ##a); \ |
206 | } while(0) | 206 | } while(0) |
207 | #define NV_ATOMIC(drm,f,a...) do { \ | ||
208 | if (unlikely(drm_debug & DRM_UT_ATOMIC)) \ | ||
209 | NV_PRINTK(info, &(drm)->client, f, ##a); \ | ||
210 | } while(0) | ||
207 | 211 | ||
208 | extern int nouveau_modeset; | 212 | extern int nouveau_modeset; |
209 | 213 | ||
diff --git a/drivers/gpu/drm/nouveau/nouveau_encoder.h b/drivers/gpu/drm/nouveau/nouveau_encoder.h index ee6a6d3fc80f..198e5f27682f 100644 --- a/drivers/gpu/drm/nouveau/nouveau_encoder.h +++ b/drivers/gpu/drm/nouveau/nouveau_encoder.h | |||
@@ -30,6 +30,7 @@ | |||
30 | #include <subdev/bios/dcb.h> | 30 | #include <subdev/bios/dcb.h> |
31 | 31 | ||
32 | #include <drm/drm_encoder_slave.h> | 32 | #include <drm/drm_encoder_slave.h> |
33 | #include <drm/drm_dp_mst_helper.h> | ||
33 | #include "dispnv04/disp.h" | 34 | #include "dispnv04/disp.h" |
34 | 35 | ||
35 | #define NV_DPMS_CLEARED 0x80 | 36 | #define NV_DPMS_CLEARED 0x80 |
@@ -57,15 +58,16 @@ struct nouveau_encoder { | |||
57 | 58 | ||
58 | union { | 59 | union { |
59 | struct { | 60 | struct { |
60 | u8 dpcd[8]; | 61 | struct nv50_mstm *mstm; |
61 | int link_nr; | 62 | int link_nr; |
62 | int link_bw; | 63 | int link_bw; |
63 | u32 datarate; | ||
64 | } dp; | 64 | } dp; |
65 | }; | 65 | }; |
66 | 66 | ||
67 | void (*enc_save)(struct drm_encoder *encoder); | 67 | void (*enc_save)(struct drm_encoder *encoder); |
68 | void (*enc_restore)(struct drm_encoder *encoder); | 68 | void (*enc_restore)(struct drm_encoder *encoder); |
69 | void (*update)(struct nouveau_encoder *, u8 head, | ||
70 | struct drm_display_mode *, u8 proto, u8 depth); | ||
69 | }; | 71 | }; |
70 | 72 | ||
71 | struct nouveau_encoder * | 73 | struct nouveau_encoder * |
@@ -90,9 +92,17 @@ get_slave_funcs(struct drm_encoder *enc) | |||
90 | } | 92 | } |
91 | 93 | ||
92 | /* nouveau_dp.c */ | 94 | /* nouveau_dp.c */ |
95 | enum nouveau_dp_status { | ||
96 | NOUVEAU_DP_SST, | ||
97 | NOUVEAU_DP_MST, | ||
98 | }; | ||
99 | |||
93 | int nouveau_dp_detect(struct nouveau_encoder *); | 100 | int nouveau_dp_detect(struct nouveau_encoder *); |
94 | 101 | ||
95 | struct nouveau_connector * | 102 | struct nouveau_connector * |
96 | nouveau_encoder_connector_get(struct nouveau_encoder *encoder); | 103 | nouveau_encoder_connector_get(struct nouveau_encoder *encoder); |
97 | 104 | ||
105 | int nv50_mstm_detect(struct nv50_mstm *, u8 dpcd[8], int allow); | ||
106 | void nv50_mstm_remove(struct nv50_mstm *); | ||
107 | void nv50_mstm_service(struct nv50_mstm *); | ||
98 | #endif /* __NOUVEAU_ENCODER_H__ */ | 108 | #endif /* __NOUVEAU_ENCODER_H__ */ |
diff --git a/drivers/gpu/drm/nouveau/nouveau_fbcon.c b/drivers/gpu/drm/nouveau/nouveau_fbcon.c index 9f5692726c16..cc8c58337981 100644 --- a/drivers/gpu/drm/nouveau/nouveau_fbcon.c +++ b/drivers/gpu/drm/nouveau/nouveau_fbcon.c | |||
@@ -58,7 +58,7 @@ static void | |||
58 | nouveau_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) | 58 | nouveau_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) |
59 | { | 59 | { |
60 | struct nouveau_fbdev *fbcon = info->par; | 60 | struct nouveau_fbdev *fbcon = info->par; |
61 | struct nouveau_drm *drm = nouveau_drm(fbcon->dev); | 61 | struct nouveau_drm *drm = nouveau_drm(fbcon->helper.dev); |
62 | struct nvif_device *device = &drm->device; | 62 | struct nvif_device *device = &drm->device; |
63 | int ret; | 63 | int ret; |
64 | 64 | ||
@@ -90,7 +90,7 @@ static void | |||
90 | nouveau_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *image) | 90 | nouveau_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *image) |
91 | { | 91 | { |
92 | struct nouveau_fbdev *fbcon = info->par; | 92 | struct nouveau_fbdev *fbcon = info->par; |
93 | struct nouveau_drm *drm = nouveau_drm(fbcon->dev); | 93 | struct nouveau_drm *drm = nouveau_drm(fbcon->helper.dev); |
94 | struct nvif_device *device = &drm->device; | 94 | struct nvif_device *device = &drm->device; |
95 | int ret; | 95 | int ret; |
96 | 96 | ||
@@ -122,7 +122,7 @@ static void | |||
122 | nouveau_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) | 122 | nouveau_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) |
123 | { | 123 | { |
124 | struct nouveau_fbdev *fbcon = info->par; | 124 | struct nouveau_fbdev *fbcon = info->par; |
125 | struct nouveau_drm *drm = nouveau_drm(fbcon->dev); | 125 | struct nouveau_drm *drm = nouveau_drm(fbcon->helper.dev); |
126 | struct nvif_device *device = &drm->device; | 126 | struct nvif_device *device = &drm->device; |
127 | int ret; | 127 | int ret; |
128 | 128 | ||
@@ -154,7 +154,7 @@ static int | |||
154 | nouveau_fbcon_sync(struct fb_info *info) | 154 | nouveau_fbcon_sync(struct fb_info *info) |
155 | { | 155 | { |
156 | struct nouveau_fbdev *fbcon = info->par; | 156 | struct nouveau_fbdev *fbcon = info->par; |
157 | struct nouveau_drm *drm = nouveau_drm(fbcon->dev); | 157 | struct nouveau_drm *drm = nouveau_drm(fbcon->helper.dev); |
158 | struct nouveau_channel *chan = drm->channel; | 158 | struct nouveau_channel *chan = drm->channel; |
159 | int ret; | 159 | int ret; |
160 | 160 | ||
@@ -181,7 +181,7 @@ static int | |||
181 | nouveau_fbcon_open(struct fb_info *info, int user) | 181 | nouveau_fbcon_open(struct fb_info *info, int user) |
182 | { | 182 | { |
183 | struct nouveau_fbdev *fbcon = info->par; | 183 | struct nouveau_fbdev *fbcon = info->par; |
184 | struct nouveau_drm *drm = nouveau_drm(fbcon->dev); | 184 | struct nouveau_drm *drm = nouveau_drm(fbcon->helper.dev); |
185 | int ret = pm_runtime_get_sync(drm->dev->dev); | 185 | int ret = pm_runtime_get_sync(drm->dev->dev); |
186 | if (ret < 0 && ret != -EACCES) | 186 | if (ret < 0 && ret != -EACCES) |
187 | return ret; | 187 | return ret; |
@@ -192,7 +192,7 @@ static int | |||
192 | nouveau_fbcon_release(struct fb_info *info, int user) | 192 | nouveau_fbcon_release(struct fb_info *info, int user) |
193 | { | 193 | { |
194 | struct nouveau_fbdev *fbcon = info->par; | 194 | struct nouveau_fbdev *fbcon = info->par; |
195 | struct nouveau_drm *drm = nouveau_drm(fbcon->dev); | 195 | struct nouveau_drm *drm = nouveau_drm(fbcon->helper.dev); |
196 | pm_runtime_put(drm->dev->dev); | 196 | pm_runtime_put(drm->dev->dev); |
197 | return 0; | 197 | return 0; |
198 | } | 198 | } |
@@ -333,16 +333,15 @@ nouveau_fbcon_create(struct drm_fb_helper *helper, | |||
333 | { | 333 | { |
334 | struct nouveau_fbdev *fbcon = | 334 | struct nouveau_fbdev *fbcon = |
335 | container_of(helper, struct nouveau_fbdev, helper); | 335 | container_of(helper, struct nouveau_fbdev, helper); |
336 | struct drm_device *dev = fbcon->dev; | 336 | struct drm_device *dev = fbcon->helper.dev; |
337 | struct nouveau_drm *drm = nouveau_drm(dev); | 337 | struct nouveau_drm *drm = nouveau_drm(dev); |
338 | struct nvif_device *device = &drm->device; | 338 | struct nvif_device *device = &drm->device; |
339 | struct fb_info *info; | 339 | struct fb_info *info; |
340 | struct drm_framebuffer *fb; | 340 | struct nouveau_framebuffer *fb; |
341 | struct nouveau_framebuffer *nouveau_fb; | ||
342 | struct nouveau_channel *chan; | 341 | struct nouveau_channel *chan; |
343 | struct nouveau_bo *nvbo; | 342 | struct nouveau_bo *nvbo; |
344 | struct drm_mode_fb_cmd2 mode_cmd; | 343 | struct drm_mode_fb_cmd2 mode_cmd; |
345 | int size, ret; | 344 | int ret; |
346 | 345 | ||
347 | mode_cmd.width = sizes->surface_width; | 346 | mode_cmd.width = sizes->surface_width; |
348 | mode_cmd.height = sizes->surface_height; | 347 | mode_cmd.height = sizes->surface_height; |
@@ -353,16 +352,17 @@ nouveau_fbcon_create(struct drm_fb_helper *helper, | |||
353 | mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp, | 352 | mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp, |
354 | sizes->surface_depth); | 353 | sizes->surface_depth); |
355 | 354 | ||
356 | size = mode_cmd.pitches[0] * mode_cmd.height; | 355 | ret = nouveau_gem_new(dev, mode_cmd.pitches[0] * mode_cmd.height, |
357 | size = roundup(size, PAGE_SIZE); | 356 | 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0x0000, &nvbo); |
358 | |||
359 | ret = nouveau_gem_new(dev, size, 0, NOUVEAU_GEM_DOMAIN_VRAM, | ||
360 | 0, 0x0000, &nvbo); | ||
361 | if (ret) { | 357 | if (ret) { |
362 | NV_ERROR(drm, "failed to allocate framebuffer\n"); | 358 | NV_ERROR(drm, "failed to allocate framebuffer\n"); |
363 | goto out; | 359 | goto out; |
364 | } | 360 | } |
365 | 361 | ||
362 | ret = nouveau_framebuffer_new(dev, &mode_cmd, nvbo, &fb); | ||
363 | if (ret) | ||
364 | goto out_unref; | ||
365 | |||
366 | ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, false); | 366 | ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, false); |
367 | if (ret) { | 367 | if (ret) { |
368 | NV_ERROR(drm, "failed to pin fb: %d\n", ret); | 368 | NV_ERROR(drm, "failed to pin fb: %d\n", ret); |
@@ -377,8 +377,7 @@ nouveau_fbcon_create(struct drm_fb_helper *helper, | |||
377 | 377 | ||
378 | chan = nouveau_nofbaccel ? NULL : drm->channel; | 378 | chan = nouveau_nofbaccel ? NULL : drm->channel; |
379 | if (chan && device->info.family >= NV_DEVICE_INFO_V0_TESLA) { | 379 | if (chan && device->info.family >= NV_DEVICE_INFO_V0_TESLA) { |
380 | ret = nouveau_bo_vma_add(nvbo, drm->client.vm, | 380 | ret = nouveau_bo_vma_add(nvbo, drm->client.vm, &fb->vma); |
381 | &fbcon->nouveau_fb.vma); | ||
382 | if (ret) { | 381 | if (ret) { |
383 | NV_ERROR(drm, "failed to map fb into chan: %d\n", ret); | 382 | NV_ERROR(drm, "failed to map fb into chan: %d\n", ret); |
384 | chan = NULL; | 383 | chan = NULL; |
@@ -394,13 +393,8 @@ nouveau_fbcon_create(struct drm_fb_helper *helper, | |||
394 | 393 | ||
395 | info->par = fbcon; | 394 | info->par = fbcon; |
396 | 395 | ||
397 | nouveau_framebuffer_init(dev, &fbcon->nouveau_fb, &mode_cmd, nvbo); | ||
398 | |||
399 | nouveau_fb = &fbcon->nouveau_fb; | ||
400 | fb = &nouveau_fb->base; | ||
401 | |||
402 | /* setup helper */ | 396 | /* setup helper */ |
403 | fbcon->helper.fb = fb; | 397 | fbcon->helper.fb = &fb->base; |
404 | 398 | ||
405 | strcpy(info->fix.id, "nouveaufb"); | 399 | strcpy(info->fix.id, "nouveaufb"); |
406 | if (!chan) | 400 | if (!chan) |
@@ -411,14 +405,14 @@ nouveau_fbcon_create(struct drm_fb_helper *helper, | |||
411 | FBINFO_HWACCEL_IMAGEBLIT; | 405 | FBINFO_HWACCEL_IMAGEBLIT; |
412 | info->flags |= FBINFO_CAN_FORCE_OUTPUT; | 406 | info->flags |= FBINFO_CAN_FORCE_OUTPUT; |
413 | info->fbops = &nouveau_fbcon_sw_ops; | 407 | info->fbops = &nouveau_fbcon_sw_ops; |
414 | info->fix.smem_start = nvbo->bo.mem.bus.base + | 408 | info->fix.smem_start = fb->nvbo->bo.mem.bus.base + |
415 | nvbo->bo.mem.bus.offset; | 409 | fb->nvbo->bo.mem.bus.offset; |
416 | info->fix.smem_len = size; | 410 | info->fix.smem_len = fb->nvbo->bo.mem.num_pages << PAGE_SHIFT; |
417 | 411 | ||
418 | info->screen_base = nvbo_kmap_obj_iovirtual(nouveau_fb->nvbo); | 412 | info->screen_base = nvbo_kmap_obj_iovirtual(fb->nvbo); |
419 | info->screen_size = size; | 413 | info->screen_size = fb->nvbo->bo.mem.num_pages << PAGE_SHIFT; |
420 | 414 | ||
421 | drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth); | 415 | drm_fb_helper_fill_fix(info, fb->base.pitches[0], fb->base.depth); |
422 | drm_fb_helper_fill_var(info, &fbcon->helper, sizes->fb_width, sizes->fb_height); | 416 | drm_fb_helper_fill_var(info, &fbcon->helper, sizes->fb_width, sizes->fb_height); |
423 | 417 | ||
424 | /* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */ | 418 | /* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */ |
@@ -429,20 +423,19 @@ nouveau_fbcon_create(struct drm_fb_helper *helper, | |||
429 | 423 | ||
430 | /* To allow resizeing without swapping buffers */ | 424 | /* To allow resizeing without swapping buffers */ |
431 | NV_INFO(drm, "allocated %dx%d fb: 0x%llx, bo %p\n", | 425 | NV_INFO(drm, "allocated %dx%d fb: 0x%llx, bo %p\n", |
432 | nouveau_fb->base.width, nouveau_fb->base.height, | 426 | fb->base.width, fb->base.height, fb->nvbo->bo.offset, nvbo); |
433 | nvbo->bo.offset, nvbo); | ||
434 | 427 | ||
435 | vga_switcheroo_client_fb_set(dev->pdev, info); | 428 | vga_switcheroo_client_fb_set(dev->pdev, info); |
436 | return 0; | 429 | return 0; |
437 | 430 | ||
438 | out_unlock: | 431 | out_unlock: |
439 | if (chan) | 432 | if (chan) |
440 | nouveau_bo_vma_del(nvbo, &fbcon->nouveau_fb.vma); | 433 | nouveau_bo_vma_del(fb->nvbo, &fb->vma); |
441 | nouveau_bo_unmap(nvbo); | 434 | nouveau_bo_unmap(fb->nvbo); |
442 | out_unpin: | 435 | out_unpin: |
443 | nouveau_bo_unpin(nvbo); | 436 | nouveau_bo_unpin(fb->nvbo); |
444 | out_unref: | 437 | out_unref: |
445 | nouveau_bo_ref(NULL, &nvbo); | 438 | nouveau_bo_ref(NULL, &fb->nvbo); |
446 | out: | 439 | out: |
447 | return ret; | 440 | return ret; |
448 | } | 441 | } |
@@ -458,28 +451,26 @@ nouveau_fbcon_output_poll_changed(struct drm_device *dev) | |||
458 | static int | 451 | static int |
459 | nouveau_fbcon_destroy(struct drm_device *dev, struct nouveau_fbdev *fbcon) | 452 | nouveau_fbcon_destroy(struct drm_device *dev, struct nouveau_fbdev *fbcon) |
460 | { | 453 | { |
461 | struct nouveau_framebuffer *nouveau_fb = &fbcon->nouveau_fb; | 454 | struct nouveau_framebuffer *nouveau_fb = nouveau_framebuffer(fbcon->helper.fb); |
462 | 455 | ||
463 | drm_fb_helper_unregister_fbi(&fbcon->helper); | 456 | drm_fb_helper_unregister_fbi(&fbcon->helper); |
464 | drm_fb_helper_release_fbi(&fbcon->helper); | 457 | drm_fb_helper_release_fbi(&fbcon->helper); |
458 | drm_fb_helper_fini(&fbcon->helper); | ||
465 | 459 | ||
466 | if (nouveau_fb->nvbo) { | 460 | if (nouveau_fb->nvbo) { |
467 | nouveau_bo_unmap(nouveau_fb->nvbo); | ||
468 | nouveau_bo_vma_del(nouveau_fb->nvbo, &nouveau_fb->vma); | 461 | nouveau_bo_vma_del(nouveau_fb->nvbo, &nouveau_fb->vma); |
462 | nouveau_bo_unmap(nouveau_fb->nvbo); | ||
469 | nouveau_bo_unpin(nouveau_fb->nvbo); | 463 | nouveau_bo_unpin(nouveau_fb->nvbo); |
470 | drm_gem_object_unreference_unlocked(&nouveau_fb->nvbo->gem); | 464 | drm_framebuffer_unreference(&nouveau_fb->base); |
471 | nouveau_fb->nvbo = NULL; | ||
472 | } | 465 | } |
473 | drm_fb_helper_fini(&fbcon->helper); | 466 | |
474 | drm_framebuffer_unregister_private(&nouveau_fb->base); | ||
475 | drm_framebuffer_cleanup(&nouveau_fb->base); | ||
476 | return 0; | 467 | return 0; |
477 | } | 468 | } |
478 | 469 | ||
479 | void nouveau_fbcon_gpu_lockup(struct fb_info *info) | 470 | void nouveau_fbcon_gpu_lockup(struct fb_info *info) |
480 | { | 471 | { |
481 | struct nouveau_fbdev *fbcon = info->par; | 472 | struct nouveau_fbdev *fbcon = info->par; |
482 | struct nouveau_drm *drm = nouveau_drm(fbcon->dev); | 473 | struct nouveau_drm *drm = nouveau_drm(fbcon->helper.dev); |
483 | 474 | ||
484 | NV_ERROR(drm, "GPU lockup - switching to software fbcon\n"); | 475 | NV_ERROR(drm, "GPU lockup - switching to software fbcon\n"); |
485 | info->flags |= FBINFO_HWACCEL_DISABLED; | 476 | info->flags |= FBINFO_HWACCEL_DISABLED; |
@@ -522,7 +513,6 @@ nouveau_fbcon_init(struct drm_device *dev) | |||
522 | if (!fbcon) | 513 | if (!fbcon) |
523 | return -ENOMEM; | 514 | return -ENOMEM; |
524 | 515 | ||
525 | fbcon->dev = dev; | ||
526 | drm->fbcon = fbcon; | 516 | drm->fbcon = fbcon; |
527 | 517 | ||
528 | drm_fb_helper_prepare(dev, &fbcon->helper, &nouveau_fbcon_helper_funcs); | 518 | drm_fb_helper_prepare(dev, &fbcon->helper, &nouveau_fbcon_helper_funcs); |
@@ -545,7 +535,8 @@ nouveau_fbcon_init(struct drm_device *dev) | |||
545 | preferred_bpp = 32; | 535 | preferred_bpp = 32; |
546 | 536 | ||
547 | /* disable all the possible outputs/crtcs before entering KMS mode */ | 537 | /* disable all the possible outputs/crtcs before entering KMS mode */ |
548 | drm_helper_disable_unused_functions(dev); | 538 | if (!dev->mode_config.funcs->atomic_commit) |
539 | drm_helper_disable_unused_functions(dev); | ||
549 | 540 | ||
550 | ret = drm_fb_helper_initial_config(&fbcon->helper, preferred_bpp); | 541 | ret = drm_fb_helper_initial_config(&fbcon->helper, preferred_bpp); |
551 | if (ret) | 542 | if (ret) |
diff --git a/drivers/gpu/drm/nouveau/nouveau_fbcon.h b/drivers/gpu/drm/nouveau/nouveau_fbcon.h index ca77ad001978..e2bca729721e 100644 --- a/drivers/gpu/drm/nouveau/nouveau_fbcon.h +++ b/drivers/gpu/drm/nouveau/nouveau_fbcon.h | |||
@@ -33,8 +33,6 @@ | |||
33 | 33 | ||
34 | struct nouveau_fbdev { | 34 | struct nouveau_fbdev { |
35 | struct drm_fb_helper helper; | 35 | struct drm_fb_helper helper; |
36 | struct nouveau_framebuffer nouveau_fb; | ||
37 | struct drm_device *dev; | ||
38 | unsigned int saved_flags; | 36 | unsigned int saved_flags; |
39 | struct nvif_object surf2d; | 37 | struct nvif_object surf2d; |
40 | struct nvif_object clip; | 38 | struct nvif_object clip; |
diff --git a/drivers/gpu/drm/nouveau/nouveau_fence.c b/drivers/gpu/drm/nouveau/nouveau_fence.c index e9529ee6bc23..f2f348f0160c 100644 --- a/drivers/gpu/drm/nouveau/nouveau_fence.c +++ b/drivers/gpu/drm/nouveau/nouveau_fence.c | |||
@@ -586,5 +586,5 @@ static const struct dma_fence_ops nouveau_fence_ops_uevent = { | |||
586 | .enable_signaling = nouveau_fence_enable_signaling, | 586 | .enable_signaling = nouveau_fence_enable_signaling, |
587 | .signaled = nouveau_fence_is_signaled, | 587 | .signaled = nouveau_fence_is_signaled, |
588 | .wait = dma_fence_default_wait, | 588 | .wait = dma_fence_default_wait, |
589 | .release = NULL | 589 | .release = nouveau_fence_release |
590 | }; | 590 | }; |
diff --git a/drivers/gpu/drm/nouveau/nouveau_fence.h b/drivers/gpu/drm/nouveau/nouveau_fence.h index 41f3c019e534..ccdce1b4eec4 100644 --- a/drivers/gpu/drm/nouveau/nouveau_fence.h +++ b/drivers/gpu/drm/nouveau/nouveau_fence.h | |||
@@ -92,7 +92,6 @@ struct nv84_fence_chan { | |||
92 | struct nouveau_fence_chan base; | 92 | struct nouveau_fence_chan base; |
93 | struct nvkm_vma vma; | 93 | struct nvkm_vma vma; |
94 | struct nvkm_vma vma_gart; | 94 | struct nvkm_vma vma_gart; |
95 | struct nvkm_vma dispc_vma[4]; | ||
96 | }; | 95 | }; |
97 | 96 | ||
98 | struct nv84_fence_priv { | 97 | struct nv84_fence_priv { |
@@ -102,7 +101,6 @@ struct nv84_fence_priv { | |||
102 | u32 *suspend; | 101 | u32 *suspend; |
103 | }; | 102 | }; |
104 | 103 | ||
105 | u64 nv84_fence_crtc(struct nouveau_channel *, int); | ||
106 | int nv84_fence_context_new(struct nouveau_channel *); | 104 | int nv84_fence_context_new(struct nouveau_channel *); |
107 | 105 | ||
108 | #endif | 106 | #endif |
diff --git a/drivers/gpu/drm/nouveau/nouveau_gem.c b/drivers/gpu/drm/nouveau/nouveau_gem.c index 7f083c95f422..201b52b750dd 100644 --- a/drivers/gpu/drm/nouveau/nouveau_gem.c +++ b/drivers/gpu/drm/nouveau/nouveau_gem.c | |||
@@ -369,7 +369,7 @@ validate_init(struct nouveau_channel *chan, struct drm_file *file_priv, | |||
369 | { | 369 | { |
370 | struct nouveau_cli *cli = nouveau_cli(file_priv); | 370 | struct nouveau_cli *cli = nouveau_cli(file_priv); |
371 | int trycnt = 0; | 371 | int trycnt = 0; |
372 | int ret, i; | 372 | int ret = -EINVAL, i; |
373 | struct nouveau_bo *res_bo = NULL; | 373 | struct nouveau_bo *res_bo = NULL; |
374 | LIST_HEAD(gart_list); | 374 | LIST_HEAD(gart_list); |
375 | LIST_HEAD(vram_list); | 375 | LIST_HEAD(vram_list); |
diff --git a/drivers/gpu/drm/nouveau/nv04_fbcon.c b/drivers/gpu/drm/nouveau/nv04_fbcon.c index da8fd5ff9d0f..6a2b187e3c3b 100644 --- a/drivers/gpu/drm/nouveau/nv04_fbcon.c +++ b/drivers/gpu/drm/nouveau/nv04_fbcon.c | |||
@@ -30,7 +30,7 @@ int | |||
30 | nv04_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *region) | 30 | nv04_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *region) |
31 | { | 31 | { |
32 | struct nouveau_fbdev *nfbdev = info->par; | 32 | struct nouveau_fbdev *nfbdev = info->par; |
33 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 33 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
34 | struct nouveau_channel *chan = drm->channel; | 34 | struct nouveau_channel *chan = drm->channel; |
35 | int ret; | 35 | int ret; |
36 | 36 | ||
@@ -50,7 +50,7 @@ int | |||
50 | nv04_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) | 50 | nv04_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) |
51 | { | 51 | { |
52 | struct nouveau_fbdev *nfbdev = info->par; | 52 | struct nouveau_fbdev *nfbdev = info->par; |
53 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 53 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
54 | struct nouveau_channel *chan = drm->channel; | 54 | struct nouveau_channel *chan = drm->channel; |
55 | int ret; | 55 | int ret; |
56 | 56 | ||
@@ -77,7 +77,7 @@ int | |||
77 | nv04_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) | 77 | nv04_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) |
78 | { | 78 | { |
79 | struct nouveau_fbdev *nfbdev = info->par; | 79 | struct nouveau_fbdev *nfbdev = info->par; |
80 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 80 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
81 | struct nouveau_channel *chan = drm->channel; | 81 | struct nouveau_channel *chan = drm->channel; |
82 | uint32_t fg; | 82 | uint32_t fg; |
83 | uint32_t bg; | 83 | uint32_t bg; |
@@ -133,7 +133,7 @@ int | |||
133 | nv04_fbcon_accel_init(struct fb_info *info) | 133 | nv04_fbcon_accel_init(struct fb_info *info) |
134 | { | 134 | { |
135 | struct nouveau_fbdev *nfbdev = info->par; | 135 | struct nouveau_fbdev *nfbdev = info->par; |
136 | struct drm_device *dev = nfbdev->dev; | 136 | struct drm_device *dev = nfbdev->helper.dev; |
137 | struct nouveau_drm *drm = nouveau_drm(dev); | 137 | struct nouveau_drm *drm = nouveau_drm(dev); |
138 | struct nouveau_channel *chan = drm->channel; | 138 | struct nouveau_channel *chan = drm->channel; |
139 | struct nvif_device *device = &drm->device; | 139 | struct nvif_device *device = &drm->device; |
diff --git a/drivers/gpu/drm/nouveau/nv10_fence.c b/drivers/gpu/drm/nouveau/nv10_fence.c index f99fcf56928a..2998bde29211 100644 --- a/drivers/gpu/drm/nouveau/nv10_fence.c +++ b/drivers/gpu/drm/nouveau/nv10_fence.c | |||
@@ -57,16 +57,13 @@ void | |||
57 | nv10_fence_context_del(struct nouveau_channel *chan) | 57 | nv10_fence_context_del(struct nouveau_channel *chan) |
58 | { | 58 | { |
59 | struct nv10_fence_chan *fctx = chan->fence; | 59 | struct nv10_fence_chan *fctx = chan->fence; |
60 | int i; | ||
61 | nouveau_fence_context_del(&fctx->base); | 60 | nouveau_fence_context_del(&fctx->base); |
62 | for (i = 0; i < ARRAY_SIZE(fctx->head); i++) | ||
63 | nvif_object_fini(&fctx->head[i]); | ||
64 | nvif_object_fini(&fctx->sema); | 61 | nvif_object_fini(&fctx->sema); |
65 | chan->fence = NULL; | 62 | chan->fence = NULL; |
66 | nouveau_fence_context_free(&fctx->base); | 63 | nouveau_fence_context_free(&fctx->base); |
67 | } | 64 | } |
68 | 65 | ||
69 | int | 66 | static int |
70 | nv10_fence_context_new(struct nouveau_channel *chan) | 67 | nv10_fence_context_new(struct nouveau_channel *chan) |
71 | { | 68 | { |
72 | struct nv10_fence_chan *fctx; | 69 | struct nv10_fence_chan *fctx; |
diff --git a/drivers/gpu/drm/nouveau/nv10_fence.h b/drivers/gpu/drm/nouveau/nv10_fence.h index a87259f3983a..b7a508585304 100644 --- a/drivers/gpu/drm/nouveau/nv10_fence.h +++ b/drivers/gpu/drm/nouveau/nv10_fence.h | |||
@@ -7,7 +7,6 @@ | |||
7 | struct nv10_fence_chan { | 7 | struct nv10_fence_chan { |
8 | struct nouveau_fence_chan base; | 8 | struct nouveau_fence_chan base; |
9 | struct nvif_object sema; | 9 | struct nvif_object sema; |
10 | struct nvif_object head[4]; | ||
11 | }; | 10 | }; |
12 | 11 | ||
13 | struct nv10_fence_priv { | 12 | struct nv10_fence_priv { |
diff --git a/drivers/gpu/drm/nouveau/nv50_display.c b/drivers/gpu/drm/nouveau/nv50_display.c index 7d0edcbcfca7..a9855a4ec532 100644 --- a/drivers/gpu/drm/nouveau/nv50_display.c +++ b/drivers/gpu/drm/nouveau/nv50_display.c | |||
@@ -25,10 +25,12 @@ | |||
25 | #include <linux/dma-mapping.h> | 25 | #include <linux/dma-mapping.h> |
26 | 26 | ||
27 | #include <drm/drmP.h> | 27 | #include <drm/drmP.h> |
28 | #include <drm/drm_atomic.h> | ||
29 | #include <drm/drm_atomic_helper.h> | ||
28 | #include <drm/drm_crtc_helper.h> | 30 | #include <drm/drm_crtc_helper.h> |
29 | #include <drm/drm_plane_helper.h> | ||
30 | #include <drm/drm_dp_helper.h> | 31 | #include <drm/drm_dp_helper.h> |
31 | #include <drm/drm_fb_helper.h> | 32 | #include <drm/drm_fb_helper.h> |
33 | #include <drm/drm_plane_helper.h> | ||
32 | 34 | ||
33 | #include <nvif/class.h> | 35 | #include <nvif/class.h> |
34 | #include <nvif/cl0002.h> | 36 | #include <nvif/cl0002.h> |
@@ -38,6 +40,7 @@ | |||
38 | #include <nvif/cl507c.h> | 40 | #include <nvif/cl507c.h> |
39 | #include <nvif/cl507d.h> | 41 | #include <nvif/cl507d.h> |
40 | #include <nvif/cl507e.h> | 42 | #include <nvif/cl507e.h> |
43 | #include <nvif/event.h> | ||
41 | 44 | ||
42 | #include "nouveau_drv.h" | 45 | #include "nouveau_drv.h" |
43 | #include "nouveau_dma.h" | 46 | #include "nouveau_dma.h" |
@@ -46,6 +49,7 @@ | |||
46 | #include "nouveau_encoder.h" | 49 | #include "nouveau_encoder.h" |
47 | #include "nouveau_crtc.h" | 50 | #include "nouveau_crtc.h" |
48 | #include "nouveau_fence.h" | 51 | #include "nouveau_fence.h" |
52 | #include "nouveau_fbcon.h" | ||
49 | #include "nv50_display.h" | 53 | #include "nv50_display.h" |
50 | 54 | ||
51 | #define EVO_DMA_NR 9 | 55 | #define EVO_DMA_NR 9 |
@@ -61,6 +65,227 @@ | |||
61 | #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00) | 65 | #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00) |
62 | #define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00) | 66 | #define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00) |
63 | #define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10) | 67 | #define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10) |
68 | #define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20) | ||
69 | #define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30) | ||
70 | |||
71 | /****************************************************************************** | ||
72 | * Atomic state | ||
73 | *****************************************************************************/ | ||
74 | #define nv50_atom(p) container_of((p), struct nv50_atom, state) | ||
75 | |||
76 | struct nv50_atom { | ||
77 | struct drm_atomic_state state; | ||
78 | |||
79 | struct list_head outp; | ||
80 | bool lock_core; | ||
81 | bool flush_disable; | ||
82 | }; | ||
83 | |||
84 | struct nv50_outp_atom { | ||
85 | struct list_head head; | ||
86 | |||
87 | struct drm_encoder *encoder; | ||
88 | bool flush_disable; | ||
89 | |||
90 | union { | ||
91 | struct { | ||
92 | bool ctrl:1; | ||
93 | }; | ||
94 | u8 mask; | ||
95 | } clr; | ||
96 | |||
97 | union { | ||
98 | struct { | ||
99 | bool ctrl:1; | ||
100 | }; | ||
101 | u8 mask; | ||
102 | } set; | ||
103 | }; | ||
104 | |||
105 | #define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state) | ||
106 | |||
107 | struct nv50_head_atom { | ||
108 | struct drm_crtc_state state; | ||
109 | |||
110 | struct { | ||
111 | u16 iW; | ||
112 | u16 iH; | ||
113 | u16 oW; | ||
114 | u16 oH; | ||
115 | } view; | ||
116 | |||
117 | struct nv50_head_mode { | ||
118 | bool interlace; | ||
119 | u32 clock; | ||
120 | struct { | ||
121 | u16 active; | ||
122 | u16 synce; | ||
123 | u16 blanke; | ||
124 | u16 blanks; | ||
125 | } h; | ||
126 | struct { | ||
127 | u32 active; | ||
128 | u16 synce; | ||
129 | u16 blanke; | ||
130 | u16 blanks; | ||
131 | u16 blank2s; | ||
132 | u16 blank2e; | ||
133 | u16 blankus; | ||
134 | } v; | ||
135 | } mode; | ||
136 | |||
137 | struct { | ||
138 | u32 handle; | ||
139 | u64 offset:40; | ||
140 | } lut; | ||
141 | |||
142 | struct { | ||
143 | bool visible; | ||
144 | u32 handle; | ||
145 | u64 offset:40; | ||
146 | u8 format; | ||
147 | u8 kind:7; | ||
148 | u8 layout:1; | ||
149 | u8 block:4; | ||
150 | u32 pitch:20; | ||
151 | u16 x; | ||
152 | u16 y; | ||
153 | u16 w; | ||
154 | u16 h; | ||
155 | } core; | ||
156 | |||
157 | struct { | ||
158 | bool visible; | ||
159 | u32 handle; | ||
160 | u64 offset:40; | ||
161 | u8 layout:1; | ||
162 | u8 format:1; | ||
163 | } curs; | ||
164 | |||
165 | struct { | ||
166 | u8 depth; | ||
167 | u8 cpp; | ||
168 | u16 x; | ||
169 | u16 y; | ||
170 | u16 w; | ||
171 | u16 h; | ||
172 | } base; | ||
173 | |||
174 | struct { | ||
175 | u8 cpp; | ||
176 | } ovly; | ||
177 | |||
178 | struct { | ||
179 | bool enable:1; | ||
180 | u8 bits:2; | ||
181 | u8 mode:4; | ||
182 | } dither; | ||
183 | |||
184 | struct { | ||
185 | struct { | ||
186 | u16 cos:12; | ||
187 | u16 sin:12; | ||
188 | } sat; | ||
189 | } procamp; | ||
190 | |||
191 | union { | ||
192 | struct { | ||
193 | bool core:1; | ||
194 | bool curs:1; | ||
195 | }; | ||
196 | u8 mask; | ||
197 | } clr; | ||
198 | |||
199 | union { | ||
200 | struct { | ||
201 | bool core:1; | ||
202 | bool curs:1; | ||
203 | bool view:1; | ||
204 | bool mode:1; | ||
205 | bool base:1; | ||
206 | bool ovly:1; | ||
207 | bool dither:1; | ||
208 | bool procamp:1; | ||
209 | }; | ||
210 | u16 mask; | ||
211 | } set; | ||
212 | }; | ||
213 | |||
214 | static inline struct nv50_head_atom * | ||
215 | nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc) | ||
216 | { | ||
217 | struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc); | ||
218 | if (IS_ERR(statec)) | ||
219 | return (void *)statec; | ||
220 | return nv50_head_atom(statec); | ||
221 | } | ||
222 | |||
223 | #define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state) | ||
224 | |||
225 | struct nv50_wndw_atom { | ||
226 | struct drm_plane_state state; | ||
227 | u8 interval; | ||
228 | |||
229 | struct drm_rect clip; | ||
230 | |||
231 | struct { | ||
232 | u32 handle; | ||
233 | u16 offset:12; | ||
234 | bool awaken:1; | ||
235 | } ntfy; | ||
236 | |||
237 | struct { | ||
238 | u32 handle; | ||
239 | u16 offset:12; | ||
240 | u32 acquire; | ||
241 | u32 release; | ||
242 | } sema; | ||
243 | |||
244 | struct { | ||
245 | u8 enable:2; | ||
246 | } lut; | ||
247 | |||
248 | struct { | ||
249 | u8 mode:2; | ||
250 | u8 interval:4; | ||
251 | |||
252 | u8 format; | ||
253 | u8 kind:7; | ||
254 | u8 layout:1; | ||
255 | u8 block:4; | ||
256 | u32 pitch:20; | ||
257 | u16 w; | ||
258 | u16 h; | ||
259 | |||
260 | u32 handle; | ||
261 | u64 offset; | ||
262 | } image; | ||
263 | |||
264 | struct { | ||
265 | u16 x; | ||
266 | u16 y; | ||
267 | } point; | ||
268 | |||
269 | union { | ||
270 | struct { | ||
271 | bool ntfy:1; | ||
272 | bool sema:1; | ||
273 | bool image:1; | ||
274 | }; | ||
275 | u8 mask; | ||
276 | } clr; | ||
277 | |||
278 | union { | ||
279 | struct { | ||
280 | bool ntfy:1; | ||
281 | bool sema:1; | ||
282 | bool image:1; | ||
283 | bool lut:1; | ||
284 | bool point:1; | ||
285 | }; | ||
286 | u8 mask; | ||
287 | } set; | ||
288 | }; | ||
64 | 289 | ||
65 | /****************************************************************************** | 290 | /****************************************************************************** |
66 | * EVO channel | 291 | * EVO channel |
@@ -133,34 +358,6 @@ nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp, | |||
133 | } | 358 | } |
134 | 359 | ||
135 | /****************************************************************************** | 360 | /****************************************************************************** |
136 | * Cursor Immediate | ||
137 | *****************************************************************************/ | ||
138 | |||
139 | struct nv50_curs { | ||
140 | struct nv50_pioc base; | ||
141 | }; | ||
142 | |||
143 | static int | ||
144 | nv50_curs_create(struct nvif_device *device, struct nvif_object *disp, | ||
145 | int head, struct nv50_curs *curs) | ||
146 | { | ||
147 | struct nv50_disp_cursor_v0 args = { | ||
148 | .head = head, | ||
149 | }; | ||
150 | static const s32 oclass[] = { | ||
151 | GK104_DISP_CURSOR, | ||
152 | GF110_DISP_CURSOR, | ||
153 | GT214_DISP_CURSOR, | ||
154 | G82_DISP_CURSOR, | ||
155 | NV50_DISP_CURSOR, | ||
156 | 0 | ||
157 | }; | ||
158 | |||
159 | return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args), | ||
160 | &curs->base); | ||
161 | } | ||
162 | |||
163 | /****************************************************************************** | ||
164 | * Overlay Immediate | 361 | * Overlay Immediate |
165 | *****************************************************************************/ | 362 | *****************************************************************************/ |
166 | 363 | ||
@@ -192,6 +389,11 @@ nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp, | |||
192 | * DMA EVO channel | 389 | * DMA EVO channel |
193 | *****************************************************************************/ | 390 | *****************************************************************************/ |
194 | 391 | ||
392 | struct nv50_dmac_ctxdma { | ||
393 | struct list_head head; | ||
394 | struct nvif_object object; | ||
395 | }; | ||
396 | |||
195 | struct nv50_dmac { | 397 | struct nv50_dmac { |
196 | struct nv50_chan base; | 398 | struct nv50_chan base; |
197 | dma_addr_t handle; | 399 | dma_addr_t handle; |
@@ -199,6 +401,7 @@ struct nv50_dmac { | |||
199 | 401 | ||
200 | struct nvif_object sync; | 402 | struct nvif_object sync; |
201 | struct nvif_object vram; | 403 | struct nvif_object vram; |
404 | struct list_head ctxdma; | ||
202 | 405 | ||
203 | /* Protects against concurrent pushbuf access to this channel, lock is | 406 | /* Protects against concurrent pushbuf access to this channel, lock is |
204 | * grabbed by evo_wait (if the pushbuf reservation is successful) and | 407 | * grabbed by evo_wait (if the pushbuf reservation is successful) and |
@@ -207,9 +410,82 @@ struct nv50_dmac { | |||
207 | }; | 410 | }; |
208 | 411 | ||
209 | static void | 412 | static void |
413 | nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma) | ||
414 | { | ||
415 | nvif_object_fini(&ctxdma->object); | ||
416 | list_del(&ctxdma->head); | ||
417 | kfree(ctxdma); | ||
418 | } | ||
419 | |||
420 | static struct nv50_dmac_ctxdma * | ||
421 | nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb) | ||
422 | { | ||
423 | struct nouveau_drm *drm = nouveau_drm(fb->base.dev); | ||
424 | struct nv50_dmac_ctxdma *ctxdma; | ||
425 | const u8 kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8; | ||
426 | const u32 handle = 0xfb000000 | kind; | ||
427 | struct { | ||
428 | struct nv_dma_v0 base; | ||
429 | union { | ||
430 | struct nv50_dma_v0 nv50; | ||
431 | struct gf100_dma_v0 gf100; | ||
432 | struct gf119_dma_v0 gf119; | ||
433 | }; | ||
434 | } args = {}; | ||
435 | u32 argc = sizeof(args.base); | ||
436 | int ret; | ||
437 | |||
438 | list_for_each_entry(ctxdma, &dmac->ctxdma, head) { | ||
439 | if (ctxdma->object.handle == handle) | ||
440 | return ctxdma; | ||
441 | } | ||
442 | |||
443 | if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL))) | ||
444 | return ERR_PTR(-ENOMEM); | ||
445 | list_add(&ctxdma->head, &dmac->ctxdma); | ||
446 | |||
447 | args.base.target = NV_DMA_V0_TARGET_VRAM; | ||
448 | args.base.access = NV_DMA_V0_ACCESS_RDWR; | ||
449 | args.base.start = 0; | ||
450 | args.base.limit = drm->device.info.ram_user - 1; | ||
451 | |||
452 | if (drm->device.info.chipset < 0x80) { | ||
453 | args.nv50.part = NV50_DMA_V0_PART_256; | ||
454 | argc += sizeof(args.nv50); | ||
455 | } else | ||
456 | if (drm->device.info.chipset < 0xc0) { | ||
457 | args.nv50.part = NV50_DMA_V0_PART_256; | ||
458 | args.nv50.kind = kind; | ||
459 | argc += sizeof(args.nv50); | ||
460 | } else | ||
461 | if (drm->device.info.chipset < 0xd0) { | ||
462 | args.gf100.kind = kind; | ||
463 | argc += sizeof(args.gf100); | ||
464 | } else { | ||
465 | args.gf119.page = GF119_DMA_V0_PAGE_LP; | ||
466 | args.gf119.kind = kind; | ||
467 | argc += sizeof(args.gf119); | ||
468 | } | ||
469 | |||
470 | ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY, | ||
471 | &args, argc, &ctxdma->object); | ||
472 | if (ret) { | ||
473 | nv50_dmac_ctxdma_del(ctxdma); | ||
474 | return ERR_PTR(ret); | ||
475 | } | ||
476 | |||
477 | return ctxdma; | ||
478 | } | ||
479 | |||
480 | static void | ||
210 | nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp) | 481 | nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp) |
211 | { | 482 | { |
212 | struct nvif_device *device = dmac->base.device; | 483 | struct nvif_device *device = dmac->base.device; |
484 | struct nv50_dmac_ctxdma *ctxdma, *ctxtmp; | ||
485 | |||
486 | list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) { | ||
487 | nv50_dmac_ctxdma_del(ctxdma); | ||
488 | } | ||
213 | 489 | ||
214 | nvif_object_fini(&dmac->vram); | 490 | nvif_object_fini(&dmac->vram); |
215 | nvif_object_fini(&dmac->sync); | 491 | nvif_object_fini(&dmac->sync); |
@@ -278,6 +554,7 @@ nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp, | |||
278 | if (ret) | 554 | if (ret) |
279 | return ret; | 555 | return ret; |
280 | 556 | ||
557 | INIT_LIST_HEAD(&dmac->ctxdma); | ||
281 | return ret; | 558 | return ret; |
282 | } | 559 | } |
283 | 560 | ||
@@ -381,34 +658,23 @@ nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp, | |||
381 | 658 | ||
382 | struct nv50_head { | 659 | struct nv50_head { |
383 | struct nouveau_crtc base; | 660 | struct nouveau_crtc base; |
384 | struct nouveau_bo *image; | ||
385 | struct nv50_curs curs; | ||
386 | struct nv50_sync sync; | ||
387 | struct nv50_ovly ovly; | 661 | struct nv50_ovly ovly; |
388 | struct nv50_oimm oimm; | 662 | struct nv50_oimm oimm; |
389 | }; | 663 | }; |
390 | 664 | ||
391 | #define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c)) | 665 | #define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c)) |
392 | #define nv50_curs(c) (&nv50_head(c)->curs) | ||
393 | #define nv50_sync(c) (&nv50_head(c)->sync) | ||
394 | #define nv50_ovly(c) (&nv50_head(c)->ovly) | 666 | #define nv50_ovly(c) (&nv50_head(c)->ovly) |
395 | #define nv50_oimm(c) (&nv50_head(c)->oimm) | 667 | #define nv50_oimm(c) (&nv50_head(c)->oimm) |
396 | #define nv50_chan(c) (&(c)->base.base) | 668 | #define nv50_chan(c) (&(c)->base.base) |
397 | #define nv50_vers(c) nv50_chan(c)->user.oclass | 669 | #define nv50_vers(c) nv50_chan(c)->user.oclass |
398 | 670 | ||
399 | struct nv50_fbdma { | ||
400 | struct list_head head; | ||
401 | struct nvif_object core; | ||
402 | struct nvif_object base[4]; | ||
403 | }; | ||
404 | |||
405 | struct nv50_disp { | 671 | struct nv50_disp { |
406 | struct nvif_object *disp; | 672 | struct nvif_object *disp; |
407 | struct nv50_mast mast; | 673 | struct nv50_mast mast; |
408 | 674 | ||
409 | struct list_head fbdma; | ||
410 | |||
411 | struct nouveau_bo *sync; | 675 | struct nouveau_bo *sync; |
676 | |||
677 | struct mutex mutex; | ||
412 | }; | 678 | }; |
413 | 679 | ||
414 | static struct nv50_disp * | 680 | static struct nv50_disp * |
@@ -419,12 +685,6 @@ nv50_disp(struct drm_device *dev) | |||
419 | 685 | ||
420 | #define nv50_mast(d) (&nv50_disp(d)->mast) | 686 | #define nv50_mast(d) (&nv50_disp(d)->mast) |
421 | 687 | ||
422 | static struct drm_crtc * | ||
423 | nv50_display_crtc_get(struct drm_encoder *encoder) | ||
424 | { | ||
425 | return nouveau_encoder(encoder)->crtc; | ||
426 | } | ||
427 | |||
428 | /****************************************************************************** | 688 | /****************************************************************************** |
429 | * EVO channel helpers | 689 | * EVO channel helpers |
430 | *****************************************************************************/ | 690 | *****************************************************************************/ |
@@ -463,812 +723,1460 @@ evo_kick(u32 *push, void *evoc) | |||
463 | mutex_unlock(&dmac->lock); | 723 | mutex_unlock(&dmac->lock); |
464 | } | 724 | } |
465 | 725 | ||
466 | #if 1 | ||
467 | #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m)) | ||
468 | #define evo_data(p,d) *((p)++) = (d) | ||
469 | #else | ||
470 | #define evo_mthd(p,m,s) do { \ | 726 | #define evo_mthd(p,m,s) do { \ |
471 | const u32 _m = (m), _s = (s); \ | 727 | const u32 _m = (m), _s = (s); \ |
472 | printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__); \ | 728 | if (drm_debug & DRM_UT_KMS) \ |
729 | printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__); \ | ||
473 | *((p)++) = ((_s << 18) | _m); \ | 730 | *((p)++) = ((_s << 18) | _m); \ |
474 | } while(0) | 731 | } while(0) |
732 | |||
475 | #define evo_data(p,d) do { \ | 733 | #define evo_data(p,d) do { \ |
476 | const u32 _d = (d); \ | 734 | const u32 _d = (d); \ |
477 | printk(KERN_ERR "\t%08x\n", _d); \ | 735 | if (drm_debug & DRM_UT_KMS) \ |
736 | printk(KERN_ERR "\t%08x\n", _d); \ | ||
478 | *((p)++) = _d; \ | 737 | *((p)++) = _d; \ |
479 | } while(0) | 738 | } while(0) |
480 | #endif | ||
481 | 739 | ||
482 | static bool | 740 | /****************************************************************************** |
483 | evo_sync_wait(void *data) | 741 | * Plane |
742 | *****************************************************************************/ | ||
743 | #define nv50_wndw(p) container_of((p), struct nv50_wndw, plane) | ||
744 | |||
745 | struct nv50_wndw { | ||
746 | const struct nv50_wndw_func *func; | ||
747 | struct nv50_dmac *dmac; | ||
748 | |||
749 | struct drm_plane plane; | ||
750 | |||
751 | struct nvif_notify notify; | ||
752 | u16 ntfy; | ||
753 | u16 sema; | ||
754 | u32 data; | ||
755 | }; | ||
756 | |||
757 | struct nv50_wndw_func { | ||
758 | void *(*dtor)(struct nv50_wndw *); | ||
759 | int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw, | ||
760 | struct nv50_head_atom *asyh); | ||
761 | void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw, | ||
762 | struct nv50_head_atom *asyh); | ||
763 | void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh, | ||
764 | struct nv50_wndw_atom *asyw); | ||
765 | |||
766 | void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *); | ||
767 | void (*sema_clr)(struct nv50_wndw *); | ||
768 | void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *); | ||
769 | void (*ntfy_clr)(struct nv50_wndw *); | ||
770 | int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *); | ||
771 | void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *); | ||
772 | void (*image_clr)(struct nv50_wndw *); | ||
773 | void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *); | ||
774 | void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *); | ||
775 | |||
776 | u32 (*update)(struct nv50_wndw *, u32 interlock); | ||
777 | }; | ||
778 | |||
779 | static int | ||
780 | nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw) | ||
484 | { | 781 | { |
485 | if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000) | 782 | if (asyw->set.ntfy) |
486 | return true; | 783 | return wndw->func->ntfy_wait_begun(wndw, asyw); |
487 | usleep_range(1, 2); | 784 | return 0; |
488 | return false; | ||
489 | } | 785 | } |
490 | 786 | ||
491 | static int | 787 | static u32 |
492 | evo_sync(struct drm_device *dev) | 788 | nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush, |
789 | struct nv50_wndw_atom *asyw) | ||
493 | { | 790 | { |
494 | struct nvif_device *device = &nouveau_drm(dev)->device; | 791 | if (asyw->clr.sema && (!asyw->set.sema || flush)) |
495 | struct nv50_disp *disp = nv50_disp(dev); | 792 | wndw->func->sema_clr(wndw); |
496 | struct nv50_mast *mast = nv50_mast(dev); | 793 | if (asyw->clr.ntfy && (!asyw->set.ntfy || flush)) |
497 | u32 *push = evo_wait(mast, 8); | 794 | wndw->func->ntfy_clr(wndw); |
498 | if (push) { | 795 | if (asyw->clr.image && (!asyw->set.image || flush)) |
499 | nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000); | 796 | wndw->func->image_clr(wndw); |
500 | evo_mthd(push, 0x0084, 1); | 797 | |
501 | evo_data(push, 0x80000000 | EVO_MAST_NTFY); | 798 | return flush ? wndw->func->update(wndw, interlock) : 0; |
502 | evo_mthd(push, 0x0080, 2); | 799 | } |
503 | evo_data(push, 0x00000000); | 800 | |
504 | evo_data(push, 0x00000000); | 801 | static u32 |
505 | evo_kick(push, mast); | 802 | nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock, |
506 | if (nvif_msec(device, 2000, | 803 | struct nv50_wndw_atom *asyw) |
507 | if (evo_sync_wait(disp->sync)) | 804 | { |
508 | break; | 805 | if (interlock) { |
509 | ) >= 0) | 806 | asyw->image.mode = 0; |
510 | return 0; | 807 | asyw->image.interval = 1; |
511 | } | 808 | } |
512 | 809 | ||
513 | return -EBUSY; | 810 | if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw); |
811 | if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw); | ||
812 | if (asyw->set.image) wndw->func->image_set(wndw, asyw); | ||
813 | if (asyw->set.lut ) wndw->func->lut (wndw, asyw); | ||
814 | if (asyw->set.point) wndw->func->point (wndw, asyw); | ||
815 | |||
816 | return wndw->func->update(wndw, interlock); | ||
514 | } | 817 | } |
515 | 818 | ||
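nv50_wndw_flush_set()/_clr() above walk per-window "set" and "clr" flag groups so a commit only reprograms the pieces of channel state (semaphore, notifier, image, LUT, cursor point) that actually changed, then issue a single update. A simplified sketch of that pattern, with plain bools standing in for the driver's flag members (names here are illustrative, not the driver's):

#include <stdbool.h>
#include <stdio.h>

/* Illustrative flag group, similar in spirit to the asyw->set / asyw->clr
 * members that nv50_wndw_flush_set()/_clr() walk; the driver itself uses
 * unions of bitfields, plain bools keep the sketch simple. */
struct wndw_flags {
	bool sema;
	bool ntfy;
	bool image;
};

/* Program only the pieces of state whose flag is raised. */
static void flush_set(const struct wndw_flags *set)
{
	if (set->sema)
		printf("program semaphore\n");
	if (set->ntfy)
		printf("program notifier\n");
	if (set->image)
		printf("program image\n");
}

int main(void)
{
	struct wndw_flags set = { .image = true };

	flush_set(&set);
	return 0;
}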
516 | /****************************************************************************** | 819 | static void |
517 | * Page flipping channel | 820 | nv50_wndw_atomic_check_release(struct nv50_wndw *wndw, |
518 | *****************************************************************************/ | 821 | struct nv50_wndw_atom *asyw, |
519 | struct nouveau_bo * | 822 | struct nv50_head_atom *asyh) |
520 | nv50_display_crtc_sema(struct drm_device *dev, int crtc) | ||
521 | { | 823 | { |
522 | return nv50_disp(dev)->sync; | 824 | struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev); |
825 | NV_ATOMIC(drm, "%s release\n", wndw->plane.name); | ||
826 | wndw->func->release(wndw, asyw, asyh); | ||
827 | asyw->ntfy.handle = 0; | ||
828 | asyw->sema.handle = 0; | ||
523 | } | 829 | } |
524 | 830 | ||
525 | struct nv50_display_flip { | 831 | static int |
526 | struct nv50_disp *disp; | 832 | nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw, |
527 | struct nv50_sync *chan; | 833 | struct nv50_wndw_atom *asyw, |
528 | }; | 834 | struct nv50_head_atom *asyh) |
529 | |||
530 | static bool | ||
531 | nv50_display_flip_wait(void *data) | ||
532 | { | 835 | { |
533 | struct nv50_display_flip *flip = data; | 836 | struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb); |
534 | if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) == | 837 | struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev); |
535 | flip->chan->data) | 838 | int ret; |
536 | return true; | 839 | |
537 | usleep_range(1, 2); | 840 | NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name); |
538 | return false; | 841 | asyw->clip.x1 = 0; |
842 | asyw->clip.y1 = 0; | ||
843 | asyw->clip.x2 = asyh->state.mode.hdisplay; | ||
844 | asyw->clip.y2 = asyh->state.mode.vdisplay; | ||
845 | |||
846 | asyw->image.w = fb->base.width; | ||
847 | asyw->image.h = fb->base.height; | ||
848 | asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8; | ||
849 | if (asyw->image.kind) { | ||
850 | asyw->image.layout = 0; | ||
851 | if (drm->device.info.chipset >= 0xc0) | ||
852 | asyw->image.block = fb->nvbo->tile_mode >> 4; | ||
853 | else | ||
854 | asyw->image.block = fb->nvbo->tile_mode; | ||
855 | asyw->image.pitch = (fb->base.pitches[0] / 4) << 4; | ||
856 | } else { | ||
857 | asyw->image.layout = 1; | ||
858 | asyw->image.block = 0; | ||
859 | asyw->image.pitch = fb->base.pitches[0]; | ||
860 | } | ||
861 | |||
862 | ret = wndw->func->acquire(wndw, asyw, asyh); | ||
863 | if (ret) | ||
864 | return ret; | ||
865 | |||
866 | if (asyw->set.image) { | ||
867 | if (!(asyw->image.mode = asyw->interval ? 0 : 1)) | ||
868 | asyw->image.interval = asyw->interval; | ||
869 | else | ||
870 | asyw->image.interval = 0; | ||
871 | } | ||
872 | |||
873 | return 0; | ||
539 | } | 874 | } |
540 | 875 | ||
541 | void | 876 | static int |
542 | nv50_display_flip_stop(struct drm_crtc *crtc) | 877 | nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state) |
543 | { | 878 | { |
544 | struct nvif_device *device = &nouveau_drm(crtc->dev)->device; | 879 | struct nouveau_drm *drm = nouveau_drm(plane->dev); |
545 | struct nv50_display_flip flip = { | 880 | struct nv50_wndw *wndw = nv50_wndw(plane); |
546 | .disp = nv50_disp(crtc->dev), | 881 | struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state); |
547 | .chan = nv50_sync(crtc), | 882 | struct nv50_wndw_atom *asyw = nv50_wndw_atom(state); |
548 | }; | 883 | struct nv50_head_atom *harm = NULL, *asyh = NULL; |
549 | u32 *push; | 884 | bool varm = false, asyv = false, asym = false; |
885 | int ret; | ||
550 | 886 | ||
551 | push = evo_wait(flip.chan, 8); | 887 | NV_ATOMIC(drm, "%s atomic_check\n", plane->name); |
552 | if (push) { | 888 | if (asyw->state.crtc) { |
553 | evo_mthd(push, 0x0084, 1); | 889 | asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc); |
554 | evo_data(push, 0x00000000); | 890 | if (IS_ERR(asyh)) |
555 | evo_mthd(push, 0x0094, 1); | 891 | return PTR_ERR(asyh); |
556 | evo_data(push, 0x00000000); | 892 | asym = drm_atomic_crtc_needs_modeset(&asyh->state); |
557 | evo_mthd(push, 0x00c0, 1); | 893 | asyv = asyh->state.active; |
558 | evo_data(push, 0x00000000); | ||
559 | evo_mthd(push, 0x0080, 1); | ||
560 | evo_data(push, 0x00000000); | ||
561 | evo_kick(push, flip.chan); | ||
562 | } | 894 | } |
563 | 895 | ||
564 | nvif_msec(device, 2000, | 896 | if (armw->state.crtc) { |
565 | if (nv50_display_flip_wait(&flip)) | 897 | harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc); |
566 | break; | 898 | if (IS_ERR(harm)) |
567 | ); | 899 | return PTR_ERR(harm); |
900 | varm = harm->state.crtc->state->active; | ||
901 | } | ||
902 | |||
903 | if (asyv) { | ||
904 | asyw->point.x = asyw->state.crtc_x; | ||
905 | asyw->point.y = asyw->state.crtc_y; | ||
906 | if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point))) | ||
907 | asyw->set.point = true; | ||
908 | |||
909 | if (!varm || asym || armw->state.fb != asyw->state.fb) { | ||
910 | ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh); | ||
911 | if (ret) | ||
912 | return ret; | ||
913 | } | ||
914 | } else | ||
915 | if (varm) { | ||
916 | nv50_wndw_atomic_check_release(wndw, asyw, harm); | ||
917 | } else { | ||
918 | return 0; | ||
919 | } | ||
920 | |||
921 | if (!asyv || asym) { | ||
922 | asyw->clr.ntfy = armw->ntfy.handle != 0; | ||
923 | asyw->clr.sema = armw->sema.handle != 0; | ||
924 | if (wndw->func->image_clr) | ||
925 | asyw->clr.image = armw->image.handle != 0; | ||
926 | asyw->set.lut = wndw->func->lut && asyv; | ||
927 | } | ||
928 | |||
929 | return 0; | ||
568 | } | 930 | } |
569 | 931 | ||
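nv50_wndw_atomic_check() above compares the armed (currently programmed) window state against the pending one: if the plane will be visible on an active head it acquires new image/point state, if it was visible and is now being disabled it releases it, and a modeset or visibility change additionally schedules the clr flags. A reduced sketch of just that visibility decision, under those assumptions:

#include <stdbool.h>
#include <stdio.h>

enum wndw_action { WNDW_NONE, WNDW_ACQUIRE, WNDW_RELEASE };

/* Reduced form of the visibility decision in nv50_wndw_atomic_check():
 * asyv = plane will be visible on an active head,
 * varm = plane is currently visible,
 * asym = the new head state needs a full modeset (forces a clr+set cycle
 *        in the real check; ignored in this sketch). */
static enum wndw_action wndw_check(bool asyv, bool varm, bool asym)
{
	(void)asym;
	if (asyv)
		return WNDW_ACQUIRE;
	if (varm)
		return WNDW_RELEASE;
	return WNDW_NONE;
}

int main(void)
{
	printf("%d\n", wndw_check(true, false, false));  /* 1: acquire */
	printf("%d\n", wndw_check(false, true, false));  /* 2: release */
	printf("%d\n", wndw_check(false, false, false)); /* 0: nothing */
	return 0;
}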
570 | int | 932 | static void |
571 | nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb, | 933 | nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state) |
572 | struct nouveau_channel *chan, u32 swap_interval) | ||
573 | { | 934 | { |
574 | struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb); | 935 | struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb); |
575 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 936 | struct nouveau_drm *drm = nouveau_drm(plane->dev); |
576 | struct nv50_head *head = nv50_head(crtc); | ||
577 | struct nv50_sync *sync = nv50_sync(crtc); | ||
578 | u32 *push; | ||
579 | int ret; | ||
580 | 937 | ||
581 | if (crtc->primary->fb->width != fb->width || | 938 | NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb); |
582 | crtc->primary->fb->height != fb->height) | 939 | if (!old_state->fb) |
583 | return -EINVAL; | 940 | return; |
941 | |||
942 | nouveau_bo_unpin(fb->nvbo); | ||
943 | } | ||
584 | 944 | ||
585 | swap_interval <<= 4; | 945 | static int |
586 | if (swap_interval == 0) | 946 | nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state) |
587 | swap_interval |= 0x100; | 947 | { |
588 | if (chan == NULL) | 948 | struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb); |
589 | evo_sync(crtc->dev); | 949 | struct nouveau_drm *drm = nouveau_drm(plane->dev); |
950 | struct nv50_wndw *wndw = nv50_wndw(plane); | ||
951 | struct nv50_wndw_atom *asyw = nv50_wndw_atom(state); | ||
952 | struct nv50_head_atom *asyh; | ||
953 | struct nv50_dmac_ctxdma *ctxdma; | ||
954 | int ret; | ||
590 | 955 | ||
591 | push = evo_wait(sync, 128); | 956 | NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb); |
592 | if (unlikely(push == NULL)) | 957 | if (!asyw->state.fb) |
593 | return -EBUSY; | 958 | return 0; |
594 | 959 | ||
595 | if (chan && chan->user.oclass < G82_CHANNEL_GPFIFO) { | 960 | ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true); |
596 | ret = RING_SPACE(chan, 8); | 961 | if (ret) |
597 | if (ret) | 962 | return ret; |
598 | return ret; | ||
599 | 963 | ||
600 | BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2); | 964 | ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb); |
601 | OUT_RING (chan, NvEvoSema0 + nv_crtc->index); | 965 | if (IS_ERR(ctxdma)) { |
602 | OUT_RING (chan, sync->addr ^ 0x10); | 966 | nouveau_bo_unpin(fb->nvbo); |
603 | BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1); | 967 | return PTR_ERR(ctxdma); |
604 | OUT_RING (chan, sync->data + 1); | 968 | } |
605 | BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2); | ||
606 | OUT_RING (chan, sync->addr); | ||
607 | OUT_RING (chan, sync->data); | ||
608 | } else | ||
609 | if (chan && chan->user.oclass < FERMI_CHANNEL_GPFIFO) { | ||
610 | u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr; | ||
611 | ret = RING_SPACE(chan, 12); | ||
612 | if (ret) | ||
613 | return ret; | ||
614 | 969 | ||
615 | BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1); | 970 | asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv); |
616 | OUT_RING (chan, chan->vram.handle); | 971 | asyw->image.handle = ctxdma->object.handle; |
617 | BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); | 972 | asyw->image.offset = fb->nvbo->bo.offset; |
618 | OUT_RING (chan, upper_32_bits(addr ^ 0x10)); | ||
619 | OUT_RING (chan, lower_32_bits(addr ^ 0x10)); | ||
620 | OUT_RING (chan, sync->data + 1); | ||
621 | OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG); | ||
622 | BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); | ||
623 | OUT_RING (chan, upper_32_bits(addr)); | ||
624 | OUT_RING (chan, lower_32_bits(addr)); | ||
625 | OUT_RING (chan, sync->data); | ||
626 | OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL); | ||
627 | } else | ||
628 | if (chan) { | ||
629 | u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr; | ||
630 | ret = RING_SPACE(chan, 10); | ||
631 | if (ret) | ||
632 | return ret; | ||
633 | 973 | ||
634 | BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); | 974 | if (wndw->func->prepare) { |
635 | OUT_RING (chan, upper_32_bits(addr ^ 0x10)); | 975 | asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc); |
636 | OUT_RING (chan, lower_32_bits(addr ^ 0x10)); | 976 | if (IS_ERR(asyh)) |
637 | OUT_RING (chan, sync->data + 1); | 977 | return PTR_ERR(asyh); |
638 | OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG | | 978 | |
639 | NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD); | 979 | wndw->func->prepare(wndw, asyh, asyw); |
640 | BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); | ||
641 | OUT_RING (chan, upper_32_bits(addr)); | ||
642 | OUT_RING (chan, lower_32_bits(addr)); | ||
643 | OUT_RING (chan, sync->data); | ||
644 | OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL | | ||
645 | NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD); | ||
646 | } | ||
647 | |||
648 | if (chan) { | ||
649 | sync->addr ^= 0x10; | ||
650 | sync->data++; | ||
651 | FIRE_RING (chan); | ||
652 | } | ||
653 | |||
654 | /* queue the flip */ | ||
655 | evo_mthd(push, 0x0100, 1); | ||
656 | evo_data(push, 0xfffe0000); | ||
657 | evo_mthd(push, 0x0084, 1); | ||
658 | evo_data(push, swap_interval); | ||
659 | if (!(swap_interval & 0x00000100)) { | ||
660 | evo_mthd(push, 0x00e0, 1); | ||
661 | evo_data(push, 0x40000000); | ||
662 | } | ||
663 | evo_mthd(push, 0x0088, 4); | ||
664 | evo_data(push, sync->addr); | ||
665 | evo_data(push, sync->data++); | ||
666 | evo_data(push, sync->data); | ||
667 | evo_data(push, sync->base.sync.handle); | ||
668 | evo_mthd(push, 0x00a0, 2); | ||
669 | evo_data(push, 0x00000000); | ||
670 | evo_data(push, 0x00000000); | ||
671 | evo_mthd(push, 0x00c0, 1); | ||
672 | evo_data(push, nv_fb->r_handle); | ||
673 | evo_mthd(push, 0x0110, 2); | ||
674 | evo_data(push, 0x00000000); | ||
675 | evo_data(push, 0x00000000); | ||
676 | if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) { | ||
677 | evo_mthd(push, 0x0800, 5); | ||
678 | evo_data(push, nv_fb->nvbo->bo.offset >> 8); | ||
679 | evo_data(push, 0); | ||
680 | evo_data(push, (fb->height << 16) | fb->width); | ||
681 | evo_data(push, nv_fb->r_pitch); | ||
682 | evo_data(push, nv_fb->r_format); | ||
683 | } else { | ||
684 | evo_mthd(push, 0x0400, 5); | ||
685 | evo_data(push, nv_fb->nvbo->bo.offset >> 8); | ||
686 | evo_data(push, 0); | ||
687 | evo_data(push, (fb->height << 16) | fb->width); | ||
688 | evo_data(push, nv_fb->r_pitch); | ||
689 | evo_data(push, nv_fb->r_format); | ||
690 | } | 980 | } |
691 | evo_mthd(push, 0x0080, 1); | ||
692 | evo_data(push, 0x00000000); | ||
693 | evo_kick(push, sync); | ||
694 | 981 | ||
695 | nouveau_bo_ref(nv_fb->nvbo, &head->image); | 982 | return 0; |
983 | } | ||
984 | |||
985 | static const struct drm_plane_helper_funcs | ||
986 | nv50_wndw_helper = { | ||
987 | .prepare_fb = nv50_wndw_prepare_fb, | ||
988 | .cleanup_fb = nv50_wndw_cleanup_fb, | ||
989 | .atomic_check = nv50_wndw_atomic_check, | ||
990 | }; | ||
991 | |||
992 | static void | ||
993 | nv50_wndw_atomic_destroy_state(struct drm_plane *plane, | ||
994 | struct drm_plane_state *state) | ||
995 | { | ||
996 | struct nv50_wndw_atom *asyw = nv50_wndw_atom(state); | ||
997 | __drm_atomic_helper_plane_destroy_state(&asyw->state); | ||
998 | dma_fence_put(asyw->state.fence); | ||
999 | kfree(asyw); | ||
1000 | } | ||
1001 | |||
1002 | static struct drm_plane_state * | ||
1003 | nv50_wndw_atomic_duplicate_state(struct drm_plane *plane) | ||
1004 | { | ||
1005 | struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state); | ||
1006 | struct nv50_wndw_atom *asyw; | ||
1007 | if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL))) | ||
1008 | return NULL; | ||
1009 | __drm_atomic_helper_plane_duplicate_state(plane, &asyw->state); | ||
1010 | asyw->state.fence = NULL; | ||
1011 | asyw->interval = 1; | ||
1012 | asyw->sema = armw->sema; | ||
1013 | asyw->ntfy = armw->ntfy; | ||
1014 | asyw->image = armw->image; | ||
1015 | asyw->point = armw->point; | ||
1016 | asyw->lut = armw->lut; | ||
1017 | asyw->clr.mask = 0; | ||
1018 | asyw->set.mask = 0; | ||
1019 | return &asyw->state; | ||
1020 | } | ||
1021 | |||
1022 | static void | ||
1023 | nv50_wndw_reset(struct drm_plane *plane) | ||
1024 | { | ||
1025 | struct nv50_wndw_atom *asyw; | ||
1026 | |||
1027 | if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL)))) | ||
1028 | return; | ||
1029 | |||
1030 | if (plane->state) | ||
1031 | plane->funcs->atomic_destroy_state(plane, plane->state); | ||
1032 | plane->state = &asyw->state; | ||
1033 | plane->state->plane = plane; | ||
1034 | plane->state->rotation = DRM_ROTATE_0; | ||
1035 | } | ||
1036 | |||
1037 | static void | ||
1038 | nv50_wndw_destroy(struct drm_plane *plane) | ||
1039 | { | ||
1040 | struct nv50_wndw *wndw = nv50_wndw(plane); | ||
1041 | void *data; | ||
1042 | nvif_notify_fini(&wndw->notify); | ||
1043 | data = wndw->func->dtor(wndw); | ||
1044 | drm_plane_cleanup(&wndw->plane); | ||
1045 | kfree(data); | ||
1046 | } | ||
1047 | |||
1048 | static const struct drm_plane_funcs | ||
1049 | nv50_wndw = { | ||
1050 | .update_plane = drm_atomic_helper_update_plane, | ||
1051 | .disable_plane = drm_atomic_helper_disable_plane, | ||
1052 | .destroy = nv50_wndw_destroy, | ||
1053 | .reset = nv50_wndw_reset, | ||
1054 | .set_property = drm_atomic_helper_plane_set_property, | ||
1055 | .atomic_duplicate_state = nv50_wndw_atomic_duplicate_state, | ||
1056 | .atomic_destroy_state = nv50_wndw_atomic_destroy_state, | ||
1057 | }; | ||
1058 | |||
1059 | static void | ||
1060 | nv50_wndw_fini(struct nv50_wndw *wndw) | ||
1061 | { | ||
1062 | nvif_notify_put(&wndw->notify); | ||
1063 | } | ||
1064 | |||
1065 | static void | ||
1066 | nv50_wndw_init(struct nv50_wndw *wndw) | ||
1067 | { | ||
1068 | nvif_notify_get(&wndw->notify); | ||
1069 | } | ||
1070 | |||
1071 | static int | ||
1072 | nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev, | ||
1073 | enum drm_plane_type type, const char *name, int index, | ||
1074 | struct nv50_dmac *dmac, const u32 *format, int nformat, | ||
1075 | struct nv50_wndw *wndw) | ||
1076 | { | ||
1077 | int ret; | ||
1078 | |||
1079 | wndw->func = func; | ||
1080 | wndw->dmac = dmac; | ||
1081 | |||
1082 | ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format, | ||
1083 | nformat, type, "%s-%d", name, index); | ||
1084 | if (ret) | ||
1085 | return ret; | ||
1086 | |||
1087 | drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper); | ||
696 | return 0; | 1088 | return 0; |
697 | } | 1089 | } |
698 | 1090 | ||
699 | /****************************************************************************** | 1091 | /****************************************************************************** |
700 | * CRTC | 1092 | * Cursor plane |
701 | *****************************************************************************/ | 1093 | *****************************************************************************/ |
1094 | #define nv50_curs(p) container_of((p), struct nv50_curs, wndw) | ||
1095 | |||
1096 | struct nv50_curs { | ||
1097 | struct nv50_wndw wndw; | ||
1098 | struct nvif_object chan; | ||
1099 | }; | ||
1100 | |||
1101 | static u32 | ||
1102 | nv50_curs_update(struct nv50_wndw *wndw, u32 interlock) | ||
1103 | { | ||
1104 | struct nv50_curs *curs = nv50_curs(wndw); | ||
1105 | nvif_wr32(&curs->chan, 0x0080, 0x00000000); | ||
1106 | return 0; | ||
1107 | } | ||
1108 | |||
1109 | static void | ||
1110 | nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw) | ||
1111 | { | ||
1112 | struct nv50_curs *curs = nv50_curs(wndw); | ||
1113 | nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x); | ||
1114 | } | ||
1115 | |||
1116 | static void | ||
1117 | nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh, | ||
1118 | struct nv50_wndw_atom *asyw) | ||
1119 | { | ||
1120 | asyh->curs.handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle; | ||
1121 | asyh->curs.offset = asyw->image.offset; | ||
1122 | asyh->set.curs = asyh->curs.visible; | ||
1123 | } | ||
1124 | |||
1125 | static void | ||
1126 | nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw, | ||
1127 | struct nv50_head_atom *asyh) | ||
1128 | { | ||
1129 | asyh->curs.visible = false; | ||
1130 | } | ||
1131 | |||
702 | static int | 1132 | static int |
703 | nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update) | 1133 | nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw, |
1134 | struct nv50_head_atom *asyh) | ||
704 | { | 1135 | { |
705 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | 1136 | int ret; |
706 | struct nouveau_connector *nv_connector; | ||
707 | struct drm_connector *connector; | ||
708 | u32 *push, mode = 0x00; | ||
709 | 1137 | ||
710 | nv_connector = nouveau_crtc_connector_get(nv_crtc); | 1138 | ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip, |
711 | connector = &nv_connector->base; | 1139 | DRM_PLANE_HELPER_NO_SCALING, |
712 | if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) { | 1140 | DRM_PLANE_HELPER_NO_SCALING, |
713 | if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3) | 1141 | true, true); |
714 | mode = DITHERING_MODE_DYNAMIC2X2; | 1142 | asyh->curs.visible = asyw->state.visible; |
715 | } else { | 1143 | if (ret || !asyh->curs.visible) |
716 | mode = nv_connector->dithering_mode; | 1144 | return ret; |
717 | } | ||
718 | 1145 | ||
719 | if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) { | 1146 | switch (asyw->state.fb->width) { |
720 | if (connector->display_info.bpc >= 8) | 1147 | case 32: asyh->curs.layout = 0; break; |
721 | mode |= DITHERING_DEPTH_8BPC; | 1148 | case 64: asyh->curs.layout = 1; break; |
722 | } else { | 1149 | default: |
723 | mode |= nv_connector->dithering_depth; | 1150 | return -EINVAL; |
724 | } | 1151 | } |
725 | 1152 | ||
726 | push = evo_wait(mast, 4); | 1153 | if (asyw->state.fb->width != asyw->state.fb->height) |
727 | if (push) { | 1154 | return -EINVAL; |
728 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | ||
729 | evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1); | ||
730 | evo_data(push, mode); | ||
731 | } else | ||
732 | if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) { | ||
733 | evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1); | ||
734 | evo_data(push, mode); | ||
735 | } else { | ||
736 | evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1); | ||
737 | evo_data(push, mode); | ||
738 | } | ||
739 | 1155 | ||
740 | if (update) { | 1156 | switch (asyw->state.fb->pixel_format) { |
741 | evo_mthd(push, 0x0080, 1); | 1157 | case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break; |
742 | evo_data(push, 0x00000000); | 1158 | default: |
743 | } | 1159 | WARN_ON(1); |
744 | evo_kick(push, mast); | 1160 | return -EINVAL; |
745 | } | 1161 | } |
746 | 1162 | ||
747 | return 0; | 1163 | return 0; |
748 | } | 1164 | } |
749 | 1165 | ||
1166 | static void * | ||
1167 | nv50_curs_dtor(struct nv50_wndw *wndw) | ||
1168 | { | ||
1169 | struct nv50_curs *curs = nv50_curs(wndw); | ||
1170 | nvif_object_fini(&curs->chan); | ||
1171 | return curs; | ||
1172 | } | ||
1173 | |||
1174 | static const u32 | ||
1175 | nv50_curs_format[] = { | ||
1176 | DRM_FORMAT_ARGB8888, | ||
1177 | }; | ||
1178 | |||
1179 | static const struct nv50_wndw_func | ||
1180 | nv50_curs = { | ||
1181 | .dtor = nv50_curs_dtor, | ||
1182 | .acquire = nv50_curs_acquire, | ||
1183 | .release = nv50_curs_release, | ||
1184 | .prepare = nv50_curs_prepare, | ||
1185 | .point = nv50_curs_point, | ||
1186 | .update = nv50_curs_update, | ||
1187 | }; | ||
1188 | |||
750 | static int | 1189 | static int |
751 | nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update) | 1190 | nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head, |
1191 | struct nv50_curs **pcurs) | ||
752 | { | 1192 | { |
753 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | 1193 | static const struct nvif_mclass curses[] = { |
754 | struct drm_display_mode *omode, *umode = &nv_crtc->base.mode; | 1194 | { GK104_DISP_CURSOR, 0 }, |
755 | struct drm_crtc *crtc = &nv_crtc->base; | 1195 | { GF110_DISP_CURSOR, 0 }, |
756 | struct nouveau_connector *nv_connector; | 1196 | { GT214_DISP_CURSOR, 0 }, |
757 | int mode = DRM_MODE_SCALE_NONE; | 1197 | { G82_DISP_CURSOR, 0 }, |
758 | u32 oX, oY, *push; | 1198 | { NV50_DISP_CURSOR, 0 }, |
1199 | {} | ||
1200 | }; | ||
1201 | struct nv50_disp_cursor_v0 args = { | ||
1202 | .head = head->base.index, | ||
1203 | }; | ||
1204 | struct nv50_disp *disp = nv50_disp(drm->dev); | ||
1205 | struct nv50_curs *curs; | ||
1206 | int cid, ret; | ||
1207 | |||
1208 | cid = nvif_mclass(disp->disp, curses); | ||
1209 | if (cid < 0) { | ||
1210 | NV_ERROR(drm, "No supported cursor immediate class\n"); | ||
1211 | return cid; | ||
1212 | } | ||
759 | 1213 | ||
760 | /* start off at the resolution we programmed the crtc for, this | 1214 | if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL))) |
761 | * effectively handles NONE/FULL scaling | 1215 | return -ENOMEM; |
762 | */ | 1216 | |
763 | nv_connector = nouveau_crtc_connector_get(nv_crtc); | 1217 | ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR, |
764 | if (nv_connector && nv_connector->native_mode) { | 1218 | "curs", head->base.index, &disp->mast.base, |
765 | mode = nv_connector->scaling_mode; | 1219 | nv50_curs_format, ARRAY_SIZE(nv50_curs_format), |
766 | if (nv_connector->scaling_full) /* non-EDID LVDS/eDP mode */ | 1220 | &curs->wndw); |
767 | mode = DRM_MODE_SCALE_FULLSCREEN; | 1221 | if (ret) { |
1222 | kfree(curs); | ||
1223 | return ret; | ||
768 | } | 1224 | } |
769 | 1225 | ||
770 | if (mode != DRM_MODE_SCALE_NONE) | 1226 | ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args, |
771 | omode = nv_connector->native_mode; | 1227 | sizeof(args), &curs->chan); |
772 | else | 1228 | if (ret) { |
773 | omode = umode; | 1229 | NV_ERROR(drm, "curs%04x allocation failed: %d\n", |
1230 | curses[cid].oclass, ret); | ||
1231 | return ret; | ||
1232 | } | ||
774 | 1233 | ||
775 | oX = omode->hdisplay; | 1234 | return 0; |
776 | oY = omode->vdisplay; | 1235 | } |
777 | if (omode->flags & DRM_MODE_FLAG_DBLSCAN) | ||
778 | oY *= 2; | ||
779 | 1236 | ||
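nv50_curs_new() above probes for a cursor immediate class by handing nvif_mclass() a newest-first, zero-terminated table and taking the first entry the parent display object supports. A generic sketch of that probe pattern; the supported() predicate and the numeric values are stand-ins for illustration only:

#include <stdio.h>
#include <stdbool.h>

struct mclass {
	unsigned oclass;	/* 0 terminates the list */
};

/* Stand-in for the capability query nvif_mclass() performs against the
 * parent object; here "support" is just a numeric cut-off. */
static bool supported(unsigned oclass, unsigned newest_supported)
{
	return oclass <= newest_supported;
}

/* Return the index of the first (newest) supported class, or -1. */
static int pick_class(const struct mclass *list, unsigned newest_supported)
{
	for (int i = 0; list[i].oclass; i++) {
		if (supported(list[i].oclass, newest_supported))
			return i;
	}
	return -1;
}

int main(void)
{
	/* Newest-first, zero-terminated, like the curses[] table above;
	 * the numbers are placeholders, not real class ids. */
	static const struct mclass list[] = { { 3 }, { 2 }, { 1 }, {} };

	printf("picked index %d\n", pick_class(list, 2));
	return 0;
}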
780 | /* add overscan compensation if necessary, will keep the aspect | 1237 | /****************************************************************************** |
781 | * ratio the same as the backend mode unless overridden by the | 1238 | * Primary plane |
782 | * user setting both hborder and vborder properties. | 1239 | *****************************************************************************/ |
783 | */ | 1240 | #define nv50_base(p) container_of((p), struct nv50_base, wndw) |
784 | if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON || | ||
785 | (nv_connector->underscan == UNDERSCAN_AUTO && | ||
786 | drm_detect_hdmi_monitor(nv_connector->edid)))) { | ||
787 | u32 bX = nv_connector->underscan_hborder; | ||
788 | u32 bY = nv_connector->underscan_vborder; | ||
789 | u32 aspect = (oY << 19) / oX; | ||
790 | 1241 | ||
791 | if (bX) { | 1242 | struct nv50_base { |
792 | oX -= (bX * 2); | 1243 | struct nv50_wndw wndw; |
793 | if (bY) oY -= (bY * 2); | 1244 | struct nv50_sync chan; |
794 | else oY = ((oX * aspect) + (aspect / 2)) >> 19; | 1245 | int id; |
795 | } else { | 1246 | }; |
796 | oX -= (oX >> 4) + 32; | 1247 | |
797 | if (bY) oY -= (bY * 2); | 1248 | static int |
798 | else oY = ((oX * aspect) + (aspect / 2)) >> 19; | 1249 | nv50_base_notify(struct nvif_notify *notify) |
799 | } | 1250 | { |
1251 | return NVIF_NOTIFY_KEEP; | ||
1252 | } | ||
1253 | |||
1254 | static void | ||
1255 | nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw) | ||
1256 | { | ||
1257 | struct nv50_base *base = nv50_base(wndw); | ||
1258 | u32 *push; | ||
1259 | if ((push = evo_wait(&base->chan, 2))) { | ||
1260 | evo_mthd(push, 0x00e0, 1); | ||
1261 | evo_data(push, asyw->lut.enable << 30); | ||
1262 | evo_kick(push, &base->chan); | ||
800 | } | 1263 | } |
1264 | } | ||
801 | 1265 | ||
802 | /* handle CENTER/ASPECT scaling, taking into account the areas | 1266 | static void |
803 | * removed already for overscan compensation | 1267 | nv50_base_image_clr(struct nv50_wndw *wndw) |
804 | */ | 1268 | { |
805 | switch (mode) { | 1269 | struct nv50_base *base = nv50_base(wndw); |
806 | case DRM_MODE_SCALE_CENTER: | 1270 | u32 *push; |
807 | oX = min((u32)umode->hdisplay, oX); | 1271 | if ((push = evo_wait(&base->chan, 4))) { |
808 | oY = min((u32)umode->vdisplay, oY); | 1272 | evo_mthd(push, 0x0084, 1); |
809 | /* fall-through */ | 1273 | evo_data(push, 0x00000000); |
810 | case DRM_MODE_SCALE_ASPECT: | 1274 | evo_mthd(push, 0x00c0, 1); |
811 | if (oY < oX) { | 1275 | evo_data(push, 0x00000000); |
812 | u32 aspect = (umode->hdisplay << 19) / umode->vdisplay; | 1276 | evo_kick(push, &base->chan); |
813 | oX = ((oY * aspect) + (aspect / 2)) >> 19; | ||
814 | } else { | ||
815 | u32 aspect = (umode->vdisplay << 19) / umode->hdisplay; | ||
816 | oY = ((oX * aspect) + (aspect / 2)) >> 19; | ||
817 | } | ||
818 | break; | ||
819 | default: | ||
820 | break; | ||
821 | } | 1277 | } |
1278 | } | ||
822 | 1279 | ||
823 | push = evo_wait(mast, 8); | 1280 | static void |
824 | if (push) { | 1281 | nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw) |
825 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1282 | { |
826 | /*XXX: SCALE_CTRL_ACTIVE??? */ | 1283 | struct nv50_base *base = nv50_base(wndw); |
827 | evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2); | 1284 | const s32 oclass = base->chan.base.base.user.oclass; |
828 | evo_data(push, (oY << 16) | oX); | 1285 | u32 *push; |
829 | evo_data(push, (oY << 16) | oX); | 1286 | if ((push = evo_wait(&base->chan, 10))) { |
830 | evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1); | 1287 | evo_mthd(push, 0x0084, 1); |
1288 | evo_data(push, (asyw->image.mode << 8) | | ||
1289 | (asyw->image.interval << 4)); | ||
1290 | evo_mthd(push, 0x00c0, 1); | ||
1291 | evo_data(push, asyw->image.handle); | ||
1292 | if (oclass < G82_DISP_BASE_CHANNEL_DMA) { | ||
1293 | evo_mthd(push, 0x0800, 5); | ||
1294 | evo_data(push, asyw->image.offset >> 8); | ||
1295 | evo_data(push, 0x00000000); | ||
1296 | evo_data(push, (asyw->image.h << 16) | asyw->image.w); | ||
1297 | evo_data(push, (asyw->image.layout << 20) | | ||
1298 | asyw->image.pitch | | ||
1299 | asyw->image.block); | ||
1300 | evo_data(push, (asyw->image.kind << 16) | | ||
1301 | (asyw->image.format << 8)); | ||
1302 | } else | ||
1303 | if (oclass < GF110_DISP_BASE_CHANNEL_DMA) { | ||
1304 | evo_mthd(push, 0x0800, 5); | ||
1305 | evo_data(push, asyw->image.offset >> 8); | ||
831 | evo_data(push, 0x00000000); | 1306 | evo_data(push, 0x00000000); |
832 | evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1); | 1307 | evo_data(push, (asyw->image.h << 16) | asyw->image.w); |
833 | evo_data(push, umode->vdisplay << 16 | umode->hdisplay); | 1308 | evo_data(push, (asyw->image.layout << 20) | |
1309 | asyw->image.pitch | | ||
1310 | asyw->image.block); | ||
1311 | evo_data(push, asyw->image.format << 8); | ||
834 | } else { | 1312 | } else { |
835 | evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3); | 1313 | evo_mthd(push, 0x0400, 5); |
836 | evo_data(push, (oY << 16) | oX); | 1314 | evo_data(push, asyw->image.offset >> 8); |
837 | evo_data(push, (oY << 16) | oX); | ||
838 | evo_data(push, (oY << 16) | oX); | ||
839 | evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1); | ||
840 | evo_data(push, 0x00000000); | 1315 | evo_data(push, 0x00000000); |
841 | evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1); | 1316 | evo_data(push, (asyw->image.h << 16) | asyw->image.w); |
842 | evo_data(push, umode->vdisplay << 16 | umode->hdisplay); | 1317 | evo_data(push, (asyw->image.layout << 24) | |
1318 | asyw->image.pitch | | ||
1319 | asyw->image.block); | ||
1320 | evo_data(push, asyw->image.format << 8); | ||
843 | } | 1321 | } |
1322 | evo_kick(push, &base->chan); | ||
1323 | } | ||
1324 | } | ||
844 | 1325 | ||
845 | evo_kick(push, mast); | 1326 | static void |
1327 | nv50_base_ntfy_clr(struct nv50_wndw *wndw) | ||
1328 | { | ||
1329 | struct nv50_base *base = nv50_base(wndw); | ||
1330 | u32 *push; | ||
1331 | if ((push = evo_wait(&base->chan, 2))) { | ||
1332 | evo_mthd(push, 0x00a4, 1); | ||
1333 | evo_data(push, 0x00000000); | ||
1334 | evo_kick(push, &base->chan); | ||
1335 | } | ||
1336 | } | ||
846 | 1337 | ||
847 | if (update) { | 1338 | static void |
848 | nv50_display_flip_stop(crtc); | 1339 | nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw) |
849 | nv50_display_flip_next(crtc, crtc->primary->fb, | 1340 | { |
850 | NULL, 1); | 1341 | struct nv50_base *base = nv50_base(wndw); |
851 | } | 1342 | u32 *push; |
1343 | if ((push = evo_wait(&base->chan, 3))) { | ||
1344 | evo_mthd(push, 0x00a0, 2); | ||
1345 | evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset); | ||
1346 | evo_data(push, asyw->ntfy.handle); | ||
1347 | evo_kick(push, &base->chan); | ||
852 | } | 1348 | } |
1349 | } | ||
853 | 1350 | ||
854 | return 0; | 1351 | static void |
1352 | nv50_base_sema_clr(struct nv50_wndw *wndw) | ||
1353 | { | ||
1354 | struct nv50_base *base = nv50_base(wndw); | ||
1355 | u32 *push; | ||
1356 | if ((push = evo_wait(&base->chan, 2))) { | ||
1357 | evo_mthd(push, 0x0094, 1); | ||
1358 | evo_data(push, 0x00000000); | ||
1359 | evo_kick(push, &base->chan); | ||
1360 | } | ||
855 | } | 1361 | } |
856 | 1362 | ||
857 | static int | 1363 | static void |
858 | nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec) | 1364 | nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw) |
859 | { | 1365 | { |
860 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | 1366 | struct nv50_base *base = nv50_base(wndw); |
861 | u32 *push; | 1367 | u32 *push; |
1368 | if ((push = evo_wait(&base->chan, 5))) { | ||
1369 | evo_mthd(push, 0x0088, 4); | ||
1370 | evo_data(push, asyw->sema.offset); | ||
1371 | evo_data(push, asyw->sema.acquire); | ||
1372 | evo_data(push, asyw->sema.release); | ||
1373 | evo_data(push, asyw->sema.handle); | ||
1374 | evo_kick(push, &base->chan); | ||
1375 | } | ||
1376 | } | ||
862 | 1377 | ||
863 | push = evo_wait(mast, 8); | 1378 | static u32 |
864 | if (!push) | 1379 | nv50_base_update(struct nv50_wndw *wndw, u32 interlock) |
865 | return -ENOMEM; | 1380 | { |
1381 | struct nv50_base *base = nv50_base(wndw); | ||
1382 | u32 *push; | ||
866 | 1383 | ||
867 | evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1); | 1384 | if (!(push = evo_wait(&base->chan, 2))) |
868 | evo_data(push, usec); | 1385 | return 0; |
869 | evo_kick(push, mast); | 1386 | evo_mthd(push, 0x0080, 1); |
1387 | evo_data(push, interlock); | ||
1388 | evo_kick(push, &base->chan); | ||
1389 | |||
1390 | if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) | ||
1391 | return interlock ? 2 << (base->id * 8) : 0; | ||
1392 | return interlock ? 2 << (base->id * 4) : 0; | ||
1393 | } | ||
1394 | |||
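nv50_base_update() above returns the interlock bits the core channel later uses to synchronise its update with this base channel; the per-head field is 8 bits wide on pre-GF110 classes and 4 bits wide on GF110 and newer, which is all the two shifts express. A tiny sketch of that mask arithmetic:

#include <stdint.h>
#include <stdio.h>

/* Mirror of the shift arithmetic in nv50_base_update(): each head owns a
 * field of interlock bits, 8 bits wide before GF110 and 4 bits wide on
 * GF110 and newer. */
static uint32_t base_interlock(int head, int bits_per_head)
{
	return (uint32_t)2 << (head * bits_per_head);
}

int main(void)
{
	for (int head = 0; head < 4; head++)
		printf("head %d: pre-GF110 %08x, GF110+ %08x\n", head,
		       base_interlock(head, 8), base_interlock(head, 4));
	return 0;
}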
1395 | static int | ||
1396 | nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw) | ||
1397 | { | ||
1398 | struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev); | ||
1399 | struct nv50_disp *disp = nv50_disp(wndw->plane.dev); | ||
1400 | if (nvif_msec(&drm->device, 2000ULL, | ||
1401 | u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4); | ||
1402 | if ((data & 0xc0000000) == 0x40000000) | ||
1403 | break; | ||
1404 | usleep_range(1, 2); | ||
1405 | ) < 0) | ||
1406 | return -ETIMEDOUT; | ||
870 | return 0; | 1407 | return 0; |
871 | } | 1408 | } |
872 | 1409 | ||
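nv50_base_ntfy_wait_begun() above polls the notifier dword in the shared sync buffer until bits 31:30 read back as 0x40000000 (flip begun), giving up after the 2ms nvif_msec() window. A user-space sketch of the same poll against an ordinary memory word, with a bounded retry count standing in for the real timeout:

#include <stdint.h>
#include <stdio.h>

/* Poll a notifier word until bits 31:30 read back as 0b01, the "flip has
 * begun" pattern nv50_base_ntfy_wait_begun() waits for; a bounded retry
 * count stands in for the driver's 2ms nvif_msec() timeout. */
static int ntfy_wait_begun(const volatile uint32_t *ntfy, int max_polls)
{
	for (int i = 0; i < max_polls; i++) {
		if ((*ntfy & 0xc0000000) == 0x40000000)
			return 0;
	}
	return -1;	/* -ETIMEDOUT in the driver */
}

int main(void)
{
	uint32_t ntfy = 0x40000000;	/* pretend the flip already began */

	printf("wait result: %d\n", ntfy_wait_begun(&ntfy, 1000));
	return 0;
}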
1410 | static void | ||
1411 | nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw, | ||
1412 | struct nv50_head_atom *asyh) | ||
1413 | { | ||
1414 | asyh->base.cpp = 0; | ||
1415 | } | ||
1416 | |||
873 | static int | 1417 | static int |
874 | nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update) | 1418 | nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw, |
1419 | struct nv50_head_atom *asyh) | ||
875 | { | 1420 | { |
876 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | 1421 | const u32 format = asyw->state.fb->pixel_format; |
877 | u32 *push, hue, vib; | 1422 | const struct drm_format_info *info; |
878 | int adj; | 1423 | int ret; |
879 | 1424 | ||
880 | adj = (nv_crtc->color_vibrance > 0) ? 50 : 0; | 1425 | info = drm_format_info(format); |
881 | vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff; | 1426 | if (!info || !info->depth) |
882 | hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff; | 1427 | return -EINVAL; |
883 | 1428 | ||
884 | push = evo_wait(mast, 16); | 1429 | ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip, |
885 | if (push) { | 1430 | DRM_PLANE_HELPER_NO_SCALING, |
886 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1431 | DRM_PLANE_HELPER_NO_SCALING, |
887 | evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1); | 1432 | false, true); |
888 | evo_data(push, (hue << 20) | (vib << 8)); | 1433 | if (ret) |
889 | } else { | 1434 | return ret; |
890 | evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1); | ||
891 | evo_data(push, (hue << 20) | (vib << 8)); | ||
892 | } | ||
893 | 1435 | ||
894 | if (update) { | 1436 | asyh->base.depth = info->depth; |
895 | evo_mthd(push, 0x0080, 1); | 1437 | asyh->base.cpp = info->cpp[0]; |
896 | evo_data(push, 0x00000000); | 1438 | asyh->base.x = asyw->state.src.x1 >> 16; |
897 | } | 1439 | asyh->base.y = asyw->state.src.y1 >> 16; |
898 | evo_kick(push, mast); | 1440 | asyh->base.w = asyw->state.fb->width; |
1441 | asyh->base.h = asyw->state.fb->height; | ||
1442 | |||
1443 | switch (format) { | ||
1444 | case DRM_FORMAT_C8 : asyw->image.format = 0x1e; break; | ||
1445 | case DRM_FORMAT_RGB565 : asyw->image.format = 0xe8; break; | ||
1446 | case DRM_FORMAT_XRGB1555 : | ||
1447 | case DRM_FORMAT_ARGB1555 : asyw->image.format = 0xe9; break; | ||
1448 | case DRM_FORMAT_XRGB8888 : | ||
1449 | case DRM_FORMAT_ARGB8888 : asyw->image.format = 0xcf; break; | ||
1450 | case DRM_FORMAT_XBGR2101010: | ||
1451 | case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break; | ||
1452 | case DRM_FORMAT_XBGR8888 : | ||
1453 | case DRM_FORMAT_ABGR8888 : asyw->image.format = 0xd5; break; | ||
1454 | default: | ||
1455 | WARN_ON(1); | ||
1456 | return -EINVAL; | ||
899 | } | 1457 | } |
900 | 1458 | ||
1459 | asyw->lut.enable = 1; | ||
1460 | asyw->set.image = true; | ||
901 | return 0; | 1461 | return 0; |
902 | } | 1462 | } |
903 | 1463 | ||
1464 | static void * | ||
1465 | nv50_base_dtor(struct nv50_wndw *wndw) | ||
1466 | { | ||
1467 | struct nv50_disp *disp = nv50_disp(wndw->plane.dev); | ||
1468 | struct nv50_base *base = nv50_base(wndw); | ||
1469 | nv50_dmac_destroy(&base->chan.base, disp->disp); | ||
1470 | return base; | ||
1471 | } | ||
1472 | |||
1473 | static const u32 | ||
1474 | nv50_base_format[] = { | ||
1475 | DRM_FORMAT_C8, | ||
1476 | DRM_FORMAT_RGB565, | ||
1477 | DRM_FORMAT_XRGB1555, | ||
1478 | DRM_FORMAT_ARGB1555, | ||
1479 | DRM_FORMAT_XRGB8888, | ||
1480 | DRM_FORMAT_ARGB8888, | ||
1481 | DRM_FORMAT_XBGR2101010, | ||
1482 | DRM_FORMAT_ABGR2101010, | ||
1483 | DRM_FORMAT_XBGR8888, | ||
1484 | DRM_FORMAT_ABGR8888, | ||
1485 | }; | ||
1486 | |||
1487 | static const struct nv50_wndw_func | ||
1488 | nv50_base = { | ||
1489 | .dtor = nv50_base_dtor, | ||
1490 | .acquire = nv50_base_acquire, | ||
1491 | .release = nv50_base_release, | ||
1492 | .sema_set = nv50_base_sema_set, | ||
1493 | .sema_clr = nv50_base_sema_clr, | ||
1494 | .ntfy_set = nv50_base_ntfy_set, | ||
1495 | .ntfy_clr = nv50_base_ntfy_clr, | ||
1496 | .ntfy_wait_begun = nv50_base_ntfy_wait_begun, | ||
1497 | .image_set = nv50_base_image_set, | ||
1498 | .image_clr = nv50_base_image_clr, | ||
1499 | .lut = nv50_base_lut, | ||
1500 | .update = nv50_base_update, | ||
1501 | }; | ||
1502 | |||
904 | static int | 1503 | static int |
905 | nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb, | 1504 | nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head, |
906 | int x, int y, bool update) | 1505 | struct nv50_base **pbase) |
1506 | { | ||
1507 | struct nv50_disp *disp = nv50_disp(drm->dev); | ||
1508 | struct nv50_base *base; | ||
1509 | int ret; | ||
1510 | |||
1511 | if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL))) | ||
1512 | return -ENOMEM; | ||
1513 | base->id = head->base.index; | ||
1514 | base->wndw.ntfy = EVO_FLIP_NTFY0(base->id); | ||
1515 | base->wndw.sema = EVO_FLIP_SEM0(base->id); | ||
1516 | base->wndw.data = 0x00000000; | ||
1517 | |||
1518 | ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY, | ||
1519 | "base", base->id, &base->chan.base, | ||
1520 | nv50_base_format, ARRAY_SIZE(nv50_base_format), | ||
1521 | &base->wndw); | ||
1522 | if (ret) { | ||
1523 | kfree(base); | ||
1524 | return ret; | ||
1525 | } | ||
1526 | |||
1527 | ret = nv50_base_create(&drm->device, disp->disp, base->id, | ||
1528 | disp->sync->bo.offset, &base->chan); | ||
1529 | if (ret) | ||
1530 | return ret; | ||
1531 | |||
1532 | return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify, | ||
1533 | false, | ||
1534 | NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT, | ||
1535 | &(struct nvif_notify_uevent_req) {}, | ||
1536 | sizeof(struct nvif_notify_uevent_req), | ||
1537 | sizeof(struct nvif_notify_uevent_rep), | ||
1538 | &base->wndw.notify); | ||
1539 | } | ||
1540 | |||
1541 | /****************************************************************************** | ||
1542 | * Head | ||
1543 | *****************************************************************************/ | ||
1544 | static void | ||
1545 | nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh) | ||
907 | { | 1546 | { |
908 | struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb); | 1547 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
909 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | ||
910 | u32 *push; | 1548 | u32 *push; |
1549 | if ((push = evo_wait(core, 2))) { | ||
1550 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) | ||
1551 | evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1); | ||
1552 | else | ||
1553 | evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1); | ||
1554 | evo_data(push, (asyh->procamp.sat.sin << 20) | | ||
1555 | (asyh->procamp.sat.cos << 8)); | ||
1556 | evo_kick(push, core); | ||
1557 | } | ||
1558 | } | ||
911 | 1559 | ||
912 | push = evo_wait(mast, 16); | 1560 | static void |
913 | if (push) { | 1561 | nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh) |
914 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1562 | { |
915 | evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1); | 1563 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
916 | evo_data(push, nvfb->nvbo->bo.offset >> 8); | 1564 | u32 *push; |
917 | evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3); | 1565 | if ((push = evo_wait(core, 2))) { |
918 | evo_data(push, (fb->height << 16) | fb->width); | 1566 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) |
919 | evo_data(push, nvfb->r_pitch); | 1567 | evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1); |
920 | evo_data(push, nvfb->r_format); | 1568 | else |
921 | evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1); | 1569 | if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA) |
922 | evo_data(push, (y << 16) | x); | 1570 | evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1); |
923 | if (nv50_vers(mast) > NV50_DISP_CORE_CHANNEL_DMA) { | 1571 | else |
924 | evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); | 1572 | evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1); |
925 | evo_data(push, nvfb->r_handle); | 1573 | evo_data(push, (asyh->dither.mode << 3) | |
926 | } | 1574 | (asyh->dither.bits << 1) | |
927 | } else { | 1575 | asyh->dither.enable); |
928 | evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1); | 1576 | evo_kick(push, core); |
929 | evo_data(push, nvfb->nvbo->bo.offset >> 8); | 1577 | } |
930 | evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4); | 1578 | } |
931 | evo_data(push, (fb->height << 16) | fb->width); | ||
932 | evo_data(push, nvfb->r_pitch); | ||
933 | evo_data(push, nvfb->r_format); | ||
934 | evo_data(push, nvfb->r_handle); | ||
935 | evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1); | ||
936 | evo_data(push, (y << 16) | x); | ||
937 | } | ||
938 | 1579 | ||
939 | if (update) { | 1580 | static void |
940 | evo_mthd(push, 0x0080, 1); | 1581 | nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh) |
941 | evo_data(push, 0x00000000); | 1582 | { |
1583 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; | ||
1584 | u32 bounds = 0; | ||
1585 | u32 *push; | ||
1586 | |||
1587 | if (asyh->base.cpp) { | ||
1588 | switch (asyh->base.cpp) { | ||
1589 | case 8: bounds |= 0x00000500; break; | ||
1590 | case 4: bounds |= 0x00000300; break; | ||
1591 | case 2: bounds |= 0x00000100; break; | ||
1592 | default: | ||
1593 | WARN_ON(1); | ||
1594 | break; | ||
942 | } | 1595 | } |
943 | evo_kick(push, mast); | 1596 | bounds |= 0x00000001; |
944 | } | 1597 | } |
945 | 1598 | ||
946 | nv_crtc->fb.handle = nvfb->r_handle; | 1599 | if ((push = evo_wait(core, 2))) { |
947 | return 0; | 1600 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) |
1601 | evo_mthd(push, 0x0904 + head->base.index * 0x400, 1); | ||
1602 | else | ||
1603 | evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1); | ||
1604 | evo_data(push, bounds); | ||
1605 | evo_kick(push, core); | ||
1606 | } | ||
948 | } | 1607 | } |
949 | 1608 | ||
950 | static void | 1609 | static void |
951 | nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc) | 1610 | nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh) |
952 | { | 1611 | { |
953 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | 1612 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
954 | u32 *push = evo_wait(mast, 16); | 1613 | u32 bounds = 0; |
955 | if (push) { | 1614 | u32 *push; |
956 | if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) { | 1615 | |
957 | evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2); | 1616 | if (asyh->base.cpp) { |
958 | evo_data(push, 0x85000000); | 1617 | switch (asyh->base.cpp) { |
959 | evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); | 1618 | case 8: bounds |= 0x00000500; break; |
960 | } else | 1619 | case 4: bounds |= 0x00000300; break; |
961 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1620 | case 2: bounds |= 0x00000100; break; |
962 | evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2); | 1621 | case 1: bounds |= 0x00000000; break; |
963 | evo_data(push, 0x85000000); | 1622 | default: |
964 | evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); | 1623 | WARN_ON(1); |
965 | evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1); | 1624 | break; |
966 | evo_data(push, mast->base.vram.handle); | ||
967 | } else { | ||
968 | evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2); | ||
969 | evo_data(push, 0x85000000); | ||
970 | evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); | ||
971 | evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1); | ||
972 | evo_data(push, mast->base.vram.handle); | ||
973 | } | 1625 | } |
974 | evo_kick(push, mast); | 1626 | bounds |= 0x00000001; |
1627 | } | ||
1628 | |||
1629 | if ((push = evo_wait(core, 2))) { | ||
1630 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) | ||
1631 | evo_mthd(push, 0x0900 + head->base.index * 0x400, 1); | ||
1632 | else | ||
1633 | evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1); | ||
1634 | evo_data(push, bounds); | ||
1635 | evo_kick(push, core); | ||
975 | } | 1636 | } |
976 | nv_crtc->cursor.visible = true; | ||
977 | } | 1637 | } |
978 | 1638 | ||
979 | static void | 1639 | static void |
980 | nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc) | 1640 | nv50_head_curs_clr(struct nv50_head *head) |
981 | { | 1641 | { |
982 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | 1642 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
983 | u32 *push = evo_wait(mast, 16); | 1643 | u32 *push; |
984 | if (push) { | 1644 | if ((push = evo_wait(core, 4))) { |
985 | if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) { | 1645 | if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) { |
986 | evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1); | 1646 | evo_mthd(push, 0x0880 + head->base.index * 0x400, 1); |
987 | evo_data(push, 0x05000000); | 1647 | evo_data(push, 0x05000000); |
988 | } else | 1648 | } else |
989 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1649 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) { |
990 | evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1); | 1650 | evo_mthd(push, 0x0880 + head->base.index * 0x400, 1); |
991 | evo_data(push, 0x05000000); | 1651 | evo_data(push, 0x05000000); |
992 | evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1); | 1652 | evo_mthd(push, 0x089c + head->base.index * 0x400, 1); |
993 | evo_data(push, 0x00000000); | 1653 | evo_data(push, 0x00000000); |
994 | } else { | 1654 | } else { |
995 | evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1); | 1655 | evo_mthd(push, 0x0480 + head->base.index * 0x300, 1); |
996 | evo_data(push, 0x05000000); | 1656 | evo_data(push, 0x05000000); |
997 | evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1); | 1657 | evo_mthd(push, 0x048c + head->base.index * 0x300, 1); |
998 | evo_data(push, 0x00000000); | 1658 | evo_data(push, 0x00000000); |
999 | } | 1659 | } |
1000 | evo_kick(push, mast); | 1660 | evo_kick(push, core); |
1001 | } | 1661 | } |
1002 | nv_crtc->cursor.visible = false; | ||
1003 | } | 1662 | } |
1004 | 1663 | ||
1005 | static void | 1664 | static void |
1006 | nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update) | 1665 | nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh) |
1007 | { | 1666 | { |
1008 | struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); | 1667 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
1009 | 1668 | u32 *push; | |
1010 | if (show && nv_crtc->cursor.nvbo && nv_crtc->base.enabled) | 1669 | if ((push = evo_wait(core, 5))) { |
1011 | nv50_crtc_cursor_show(nv_crtc); | 1670 | if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) { |
1012 | else | 1671 | evo_mthd(push, 0x0880 + head->base.index * 0x400, 2); |
1013 | nv50_crtc_cursor_hide(nv_crtc); | 1672 | evo_data(push, 0x80000000 | (asyh->curs.layout << 26) | |
1014 | 1673 | (asyh->curs.format << 24)); | |
1015 | if (update) { | 1674 | evo_data(push, asyh->curs.offset >> 8); |
1016 | u32 *push = evo_wait(mast, 2); | 1675 | } else |
1017 | if (push) { | 1676 | if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) { |
1018 | evo_mthd(push, 0x0080, 1); | 1677 | evo_mthd(push, 0x0880 + head->base.index * 0x400, 2); |
1019 | evo_data(push, 0x00000000); | 1678 | evo_data(push, 0x80000000 | (asyh->curs.layout << 26) | |
1020 | evo_kick(push, mast); | 1679 | (asyh->curs.format << 24)); |
1680 | evo_data(push, asyh->curs.offset >> 8); | ||
1681 | evo_mthd(push, 0x089c + head->base.index * 0x400, 1); | ||
1682 | evo_data(push, asyh->curs.handle); | ||
1683 | } else { | ||
1684 | evo_mthd(push, 0x0480 + head->base.index * 0x300, 2); | ||
1685 | evo_data(push, 0x80000000 | (asyh->curs.layout << 26) | | ||
1686 | (asyh->curs.format << 24)); | ||
1687 | evo_data(push, asyh->curs.offset >> 8); | ||
1688 | evo_mthd(push, 0x048c + head->base.index * 0x300, 1); | ||
1689 | evo_data(push, asyh->curs.handle); | ||
1021 | } | 1690 | } |
1691 | evo_kick(push, core); | ||
1022 | } | 1692 | } |
1023 | } | 1693 | } |
1024 | 1694 | ||
1025 | static void | 1695 | static void |
1026 | nv50_crtc_dpms(struct drm_crtc *crtc, int mode) | 1696 | nv50_head_core_clr(struct nv50_head *head) |
1027 | { | 1697 | { |
1698 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; | ||
1699 | u32 *push; | ||
1700 | if ((push = evo_wait(core, 2))) { | ||
1701 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) | ||
1702 | evo_mthd(push, 0x0874 + head->base.index * 0x400, 1); | ||
1703 | else | ||
1704 | evo_mthd(push, 0x0474 + head->base.index * 0x300, 1); | ||
1705 | evo_data(push, 0x00000000); | ||
1706 | evo_kick(push, core); | ||
1707 | } | ||
1028 | } | 1708 | } |
1029 | 1709 | ||
1030 | static void | 1710 | static void |
1031 | nv50_crtc_prepare(struct drm_crtc *crtc) | 1711 | nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh) |
1032 | { | 1712 | { |
1033 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 1713 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
1034 | struct nv50_mast *mast = nv50_mast(crtc->dev); | ||
1035 | u32 *push; | 1714 | u32 *push; |
1715 | if ((push = evo_wait(core, 9))) { | ||
1716 | if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) { | ||
1717 | evo_mthd(push, 0x0860 + head->base.index * 0x400, 1); | ||
1718 | evo_data(push, asyh->core.offset >> 8); | ||
1719 | evo_mthd(push, 0x0868 + head->base.index * 0x400, 4); | ||
1720 | evo_data(push, (asyh->core.h << 16) | asyh->core.w); | ||
1721 | evo_data(push, asyh->core.layout << 20 | | ||
1722 | (asyh->core.pitch >> 8) << 8 | | ||
1723 | asyh->core.block); | ||
1724 | evo_data(push, asyh->core.kind << 16 | | ||
1725 | asyh->core.format << 8); | ||
1726 | evo_data(push, asyh->core.handle); | ||
1727 | evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1); | ||
1728 | evo_data(push, (asyh->core.y << 16) | asyh->core.x); | ||
1729 | } else | ||
1730 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) { | ||
1731 | evo_mthd(push, 0x0860 + head->base.index * 0x400, 1); | ||
1732 | evo_data(push, asyh->core.offset >> 8); | ||
1733 | evo_mthd(push, 0x0868 + head->base.index * 0x400, 4); | ||
1734 | evo_data(push, (asyh->core.h << 16) | asyh->core.w); | ||
1735 | evo_data(push, asyh->core.layout << 20 | | ||
1736 | (asyh->core.pitch >> 8) << 8 | | ||
1737 | asyh->core.block); | ||
1738 | evo_data(push, asyh->core.format << 8); | ||
1739 | evo_data(push, asyh->core.handle); | ||
1740 | evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1); | ||
1741 | evo_data(push, (asyh->core.y << 16) | asyh->core.x); | ||
1742 | } else { | ||
1743 | evo_mthd(push, 0x0460 + head->base.index * 0x300, 1); | ||
1744 | evo_data(push, asyh->core.offset >> 8); | ||
1745 | evo_mthd(push, 0x0468 + head->base.index * 0x300, 4); | ||
1746 | evo_data(push, (asyh->core.h << 16) | asyh->core.w); | ||
1747 | evo_data(push, asyh->core.layout << 24 | | ||
1748 | (asyh->core.pitch >> 8) << 8 | | ||
1749 | asyh->core.block); | ||
1750 | evo_data(push, asyh->core.format << 8); | ||
1751 | evo_data(push, asyh->core.handle); | ||
1752 | evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1); | ||
1753 | evo_data(push, (asyh->core.y << 16) | asyh->core.x); | ||
1754 | } | ||
1755 | evo_kick(push, core); | ||
1756 | } | ||
1757 | } | ||
1036 | 1758 | ||
1037 | nv50_display_flip_stop(crtc); | 1759 | static void |
1038 | 1760 | nv50_head_lut_clr(struct nv50_head *head) | |
1039 | push = evo_wait(mast, 6); | 1761 | { |
1040 | if (push) { | 1762 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
1041 | if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) { | 1763 | u32 *push; |
1042 | evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); | 1764 | if ((push = evo_wait(core, 4))) { |
1043 | evo_data(push, 0x00000000); | 1765 | if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) { |
1044 | evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1); | 1766 | evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1); |
1045 | evo_data(push, 0x40000000); | 1767 | evo_data(push, 0x40000000); |
1046 | } else | 1768 | } else |
1047 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1769 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) { |
1048 | evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); | 1770 | evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1); |
1049 | evo_data(push, 0x00000000); | ||
1050 | evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1); | ||
1051 | evo_data(push, 0x40000000); | 1771 | evo_data(push, 0x40000000); |
1052 | evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1); | 1772 | evo_mthd(push, 0x085c + (head->base.index * 0x400), 1); |
1053 | evo_data(push, 0x00000000); | 1773 | evo_data(push, 0x00000000); |
1054 | } else { | 1774 | } else { |
1055 | evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1); | 1775 | evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1); |
1056 | evo_data(push, 0x00000000); | ||
1057 | evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1); | ||
1058 | evo_data(push, 0x03000000); | 1776 | evo_data(push, 0x03000000); |
1059 | evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1); | 1777 | evo_mthd(push, 0x045c + (head->base.index * 0x300), 1); |
1060 | evo_data(push, 0x00000000); | 1778 | evo_data(push, 0x00000000); |
1061 | } | 1779 | } |
1062 | 1780 | evo_kick(push, core); | |
1063 | evo_kick(push, mast); | ||
1064 | } | 1781 | } |
1065 | |||
1066 | nv50_crtc_cursor_show_hide(nv_crtc, false, false); | ||
1067 | } | 1782 | } |
1068 | 1783 | ||
1069 | static void | 1784 | static void |
1070 | nv50_crtc_commit(struct drm_crtc *crtc) | 1785 | nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh) |
1071 | { | 1786 | { |
1072 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 1787 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; |
1073 | struct nv50_mast *mast = nv50_mast(crtc->dev); | ||
1074 | u32 *push; | 1788 | u32 *push; |
1075 | 1789 | if ((push = evo_wait(core, 7))) { | |
1076 | push = evo_wait(mast, 32); | 1790 | if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) { |
1077 | if (push) { | 1791 | evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2); |
1078 | if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) { | ||
1079 | evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); | ||
1080 | evo_data(push, nv_crtc->fb.handle); | ||
1081 | evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2); | ||
1082 | evo_data(push, 0xc0000000); | 1792 | evo_data(push, 0xc0000000); |
1083 | evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); | 1793 | evo_data(push, asyh->lut.offset >> 8); |
1084 | } else | 1794 | } else |
1085 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1795 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) { |
1086 | evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); | 1796 | evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2); |
1087 | evo_data(push, nv_crtc->fb.handle); | ||
1088 | evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2); | ||
1089 | evo_data(push, 0xc0000000); | 1797 | evo_data(push, 0xc0000000); |
1090 | evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); | 1798 | evo_data(push, asyh->lut.offset >> 8); |
1091 | evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1); | 1799 | evo_mthd(push, 0x085c + (head->base.index * 0x400), 1); |
1092 | evo_data(push, mast->base.vram.handle); | 1800 | evo_data(push, asyh->lut.handle); |
1093 | } else { | 1801 | } else { |
1094 | evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1); | 1802 | evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4); |
1095 | evo_data(push, nv_crtc->fb.handle); | ||
1096 | evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4); | ||
1097 | evo_data(push, 0x83000000); | 1803 | evo_data(push, 0x83000000); |
1098 | evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); | 1804 | evo_data(push, asyh->lut.offset >> 8); |
1099 | evo_data(push, 0x00000000); | 1805 | evo_data(push, 0x00000000); |
1100 | evo_data(push, 0x00000000); | 1806 | evo_data(push, 0x00000000); |
1101 | evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1); | 1807 | evo_mthd(push, 0x045c + (head->base.index * 0x300), 1); |
1102 | evo_data(push, mast->base.vram.handle); | 1808 | evo_data(push, asyh->lut.handle); |
1103 | evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1); | 1809 | } |
1810 | evo_kick(push, core); | ||
1811 | } | ||
1812 | } | ||
1813 | |||
1814 | static void | ||
1815 | nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh) | ||
1816 | { | ||
1817 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; | ||
1818 | struct nv50_head_mode *m = &asyh->mode; | ||
1819 | u32 *push; | ||
1820 | if ((push = evo_wait(core, 14))) { | ||
1821 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) { | ||
1822 | evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2); | ||
1823 | evo_data(push, 0x00800000 | m->clock); | ||
1824 | evo_data(push, m->interlace ? 0x00000002 : 0x00000000); | ||
1825 | evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7); | ||
1826 | evo_data(push, 0x00000000); | ||
1827 | evo_data(push, (m->v.active << 16) | m->h.active ); | ||
1828 | evo_data(push, (m->v.synce << 16) | m->h.synce ); | ||
1829 | evo_data(push, (m->v.blanke << 16) | m->h.blanke ); | ||
1830 | evo_data(push, (m->v.blanks << 16) | m->h.blanks ); | ||
1831 | evo_data(push, (m->v.blank2e << 16) | m->v.blank2s); | ||
1832 | evo_data(push, asyh->mode.v.blankus); | ||
1833 | evo_mthd(push, 0x082c + (head->base.index * 0x400), 1); | ||
1834 | evo_data(push, 0x00000000); | ||
1835 | } else { | ||
1836 | evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6); | ||
1837 | evo_data(push, 0x00000000); | ||
1838 | evo_data(push, (m->v.active << 16) | m->h.active ); | ||
1839 | evo_data(push, (m->v.synce << 16) | m->h.synce ); | ||
1840 | evo_data(push, (m->v.blanke << 16) | m->h.blanke ); | ||
1841 | evo_data(push, (m->v.blanks << 16) | m->h.blanks ); | ||
1842 | evo_data(push, (m->v.blank2e << 16) | m->v.blank2s); | ||
1843 | evo_mthd(push, 0x042c + (head->base.index * 0x300), 2); | ||
1844 | evo_data(push, 0x00000000); /* ??? */ | ||
1104 | evo_data(push, 0xffffff00); | 1845 | evo_data(push, 0xffffff00); |
1846 | evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3); | ||
1847 | evo_data(push, m->clock * 1000); | ||
1848 | evo_data(push, 0x00200000); /* ??? */ | ||
1849 | evo_data(push, m->clock * 1000); | ||
1105 | } | 1850 | } |
1851 | evo_kick(push, core); | ||
1852 | } | ||
1853 | } | ||
1106 | 1854 | ||
1107 | evo_kick(push, mast); | 1855 | static void |
1856 | nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh) | ||
1857 | { | ||
1858 | struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base; | ||
1859 | u32 *push; | ||
1860 | if ((push = evo_wait(core, 10))) { | ||
1861 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) { | ||
1862 | evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1); | ||
1863 | evo_data(push, 0x00000000); | ||
1864 | evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1); | ||
1865 | evo_data(push, (asyh->view.iH << 16) | asyh->view.iW); | ||
1866 | evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2); | ||
1867 | evo_data(push, (asyh->view.oH << 16) | asyh->view.oW); | ||
1868 | evo_data(push, (asyh->view.oH << 16) | asyh->view.oW); | ||
1869 | } else { | ||
1870 | evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1); | ||
1871 | evo_data(push, 0x00000000); | ||
1872 | evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1); | ||
1873 | evo_data(push, (asyh->view.iH << 16) | asyh->view.iW); | ||
1874 | evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3); | ||
1875 | evo_data(push, (asyh->view.oH << 16) | asyh->view.oW); | ||
1876 | evo_data(push, (asyh->view.oH << 16) | asyh->view.oW); | ||
1877 | evo_data(push, (asyh->view.oH << 16) | asyh->view.oW); | ||
1878 | } | ||
1879 | evo_kick(push, core); | ||
1108 | } | 1880 | } |
1881 | } | ||
1109 | 1882 | ||
1110 | nv50_crtc_cursor_show_hide(nv_crtc, true, true); | 1883 | static void |
1111 | nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1); | 1884 | nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y) |
1885 | { | ||
1886 | if (asyh->clr.core && (!asyh->set.core || y)) | ||
1887 | nv50_head_lut_clr(head); | ||
1888 | if (asyh->clr.core && (!asyh->set.core || y)) | ||
1889 | nv50_head_core_clr(head); | ||
1890 | if (asyh->clr.curs && (!asyh->set.curs || y)) | ||
1891 | nv50_head_curs_clr(head); | ||
1112 | } | 1892 | } |
1113 | 1893 | ||
1114 | static bool | 1894 | static void |
1115 | nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode, | 1895 | nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh) |
1116 | struct drm_display_mode *adjusted_mode) | ||
1117 | { | 1896 | { |
1118 | drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V); | 1897 | if (asyh->set.view ) nv50_head_view (head, asyh); |
1119 | return true; | 1898 | if (asyh->set.mode ) nv50_head_mode (head, asyh); |
1899 | if (asyh->set.core ) nv50_head_lut_set (head, asyh); | ||
1900 | if (asyh->set.core ) nv50_head_core_set(head, asyh); | ||
1901 | if (asyh->set.curs ) nv50_head_curs_set(head, asyh); | ||
1902 | if (asyh->set.base ) nv50_head_base (head, asyh); | ||
1903 | if (asyh->set.ovly ) nv50_head_ovly (head, asyh); | ||
1904 | if (asyh->set.dither ) nv50_head_dither (head, asyh); | ||
1905 | if (asyh->set.procamp) nv50_head_procamp (head, asyh); | ||
1120 | } | 1906 | } |
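[annotation] nv50_head_flush_set() and nv50_head_flush_clr() only touch hardware state whose dirty bit is raised, so a commit pushes exactly the methods that changed. The set and clr members of nv50_head_atom are bitmask unions (the atomic_check code further down writes asyh->set.mask = ~0 to force a full reprogram); the real struct is defined earlier in nv50_display.c, outside this hunk, so the layout below is only an assumed illustration of the idea.

/* Hedged sketch of the dirty-bit idea behind asyh->set / asyh->clr.
 * Field layout is assumed; only the usage pattern matches the patch. */
#include <stdio.h>

struct example_head_atom {
	union {
		struct {
			unsigned view    : 1;
			unsigned mode    : 1;
			unsigned core    : 1;
			unsigned curs    : 1;
			unsigned base    : 1;
			unsigned ovly    : 1;
			unsigned dither  : 1;
			unsigned procamp : 1;
		};
		unsigned mask;	/* "anything pending at all?", cf. asyh->set.mask */
	} set, clr;
};

int main(void)
{
	struct example_head_atom asyh = { 0 };

	asyh.set.mode = 1;	/* atomic_check decided the mode changed       */
	asyh.set.core = 1;	/* ...and the core surface must follow it      */

	if (asyh.set.mask || asyh.clr.mask)
		printf("commit must touch the core channel (mask=0x%x)\n",
		       asyh.set.mask);
	return 0;
}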
1121 | 1907 | ||
1122 | static int | 1908 | static void |
1123 | nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb) | 1909 | nv50_head_atomic_check_procamp(struct nv50_head_atom *armh, |
1910 | struct nv50_head_atom *asyh, | ||
1911 | struct nouveau_conn_atom *asyc) | ||
1124 | { | 1912 | { |
1125 | struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb); | 1913 | const int vib = asyc->procamp.color_vibrance - 100; |
1126 | struct nv50_head *head = nv50_head(crtc); | 1914 | const int hue = asyc->procamp.vibrant_hue - 90; |
1127 | int ret; | 1915 | const int adj = (vib > 0) ? 50 : 0; |
1916 | asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff; | ||
1917 | asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff; | ||
1918 | asyh->set.procamp = true; | ||
1919 | } | ||
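[annotation] The procamp check converts the connector's vibrance and hue properties (centred at 100 and 90 respectively) into signed values masked to 12 bits for the hardware saturation sine/cosine pair. A standalone re-run of that arithmetic with example property values (the inputs are hypothetical, the math is the patch's):

/* Re-run of the nv50_head_atomic_check_procamp() arithmetic. */
#include <stdio.h>

static unsigned procamp_cos(int color_vibrance)
{
	const int vib = color_vibrance - 100;
	const int adj = (vib > 0) ? 50 : 0;
	return ((vib * 2047 + adj) / 100) & 0xfff;
}

static unsigned procamp_sin(int vibrant_hue)
{
	const int hue = vibrant_hue - 90;
	return ((hue * 2047) / 100) & 0xfff;
}

int main(void)
{
	/* Neutral settings: 100 and 90 give zero for both terms. */
	printf("neutral: cos=0x%03x sin=0x%03x\n", procamp_cos(100), procamp_sin(90));
	/* +50 vibrance: (50 * 2047 + 50) / 100 = 1024 -> 0x400. */
	printf("vivid  : cos=0x%03x\n", procamp_cos(150));
	/* -50 vibrance: -1023, masked to 12 bits -> 0xc01 (two's complement). */
	printf("muted  : cos=0x%03x\n", procamp_cos(50));
	return 0;
}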
1128 | 1920 | ||
1129 | ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true); | 1921 | static void |
1130 | if (ret == 0) { | 1922 | nv50_head_atomic_check_dither(struct nv50_head_atom *armh, |
1131 | if (head->image) | 1923 | struct nv50_head_atom *asyh, |
1132 | nouveau_bo_unpin(head->image); | 1924 | struct nouveau_conn_atom *asyc) |
1133 | nouveau_bo_ref(nvfb->nvbo, &head->image); | 1925 | { |
1926 | struct drm_connector *connector = asyc->state.connector; | ||
1927 | u32 mode = 0x00; | ||
1928 | |||
1929 | if (asyc->dither.mode == DITHERING_MODE_AUTO) { | ||
1930 | if (asyh->base.depth > connector->display_info.bpc * 3) | ||
1931 | mode = DITHERING_MODE_DYNAMIC2X2; | ||
1932 | } else { | ||
1933 | mode = asyc->dither.mode; | ||
1134 | } | 1934 | } |
1135 | 1935 | ||
1136 | return ret; | 1936 | if (asyc->dither.depth == DITHERING_DEPTH_AUTO) { |
1937 | if (connector->display_info.bpc >= 8) | ||
1938 | mode |= DITHERING_DEPTH_8BPC; | ||
1939 | } else { | ||
1940 | mode |= asyc->dither.depth; | ||
1941 | } | ||
1942 | |||
1943 | asyh->dither.enable = mode; | ||
1944 | asyh->dither.bits = mode >> 1; | ||
1945 | asyh->dither.mode = mode >> 3; | ||
1946 | asyh->set.dither = true; | ||
1137 | } | 1947 | } |
1138 | 1948 | ||
1139 | static int | 1949 | static void |
1140 | nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode, | 1950 | nv50_head_atomic_check_view(struct nv50_head_atom *armh, |
1141 | struct drm_display_mode *mode, int x, int y, | 1951 | struct nv50_head_atom *asyh, |
1142 | struct drm_framebuffer *old_fb) | 1952 | struct nouveau_conn_atom *asyc) |
1143 | { | 1953 | { |
1144 | struct nv50_mast *mast = nv50_mast(crtc->dev); | 1954 | struct drm_connector *connector = asyc->state.connector; |
1145 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 1955 | struct drm_display_mode *omode = &asyh->state.adjusted_mode; |
1146 | struct nouveau_connector *nv_connector; | 1956 | struct drm_display_mode *umode = &asyh->state.mode; |
1147 | u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1; | 1957 | int mode = asyc->scaler.mode; |
1148 | u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1; | 1958 | struct edid *edid; |
1149 | u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks; | 1959 | |
1150 | u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks; | 1960 | if (connector->edid_blob_ptr) |
1151 | u32 vblan2e = 0, vblan2s = 1, vblankus = 0; | 1961 | edid = (struct edid *)connector->edid_blob_ptr->data; |
1152 | u32 *push; | 1962 | else |
1153 | int ret; | 1963 | edid = NULL; |
1154 | |||
1155 | hactive = mode->htotal; | ||
1156 | hsynce = mode->hsync_end - mode->hsync_start - 1; | ||
1157 | hbackp = mode->htotal - mode->hsync_end; | ||
1158 | hblanke = hsynce + hbackp; | ||
1159 | hfrontp = mode->hsync_start - mode->hdisplay; | ||
1160 | hblanks = mode->htotal - hfrontp - 1; | ||
1161 | |||
1162 | vactive = mode->vtotal * vscan / ilace; | ||
1163 | vsynce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1; | ||
1164 | vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace; | ||
1165 | vblanke = vsynce + vbackp; | ||
1166 | vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace; | ||
1167 | vblanks = vactive - vfrontp - 1; | ||
1168 | /* XXX: Safe underestimate, even "0" works */ | ||
1169 | vblankus = (vactive - mode->vdisplay - 2) * hactive; | ||
1170 | vblankus *= 1000; | ||
1171 | vblankus /= mode->clock; | ||
1172 | 1964 | ||
1173 | if (mode->flags & DRM_MODE_FLAG_INTERLACE) { | 1965 | if (!asyc->scaler.full) { |
1174 | vblan2e = vactive + vsynce + vbackp; | 1966 | if (mode == DRM_MODE_SCALE_NONE) |
1175 | vblan2s = vblan2e + (mode->vdisplay * vscan / ilace); | 1967 | omode = umode; |
1176 | vactive = (vactive * 2) + 1; | 1968 | } else { |
1969 | /* Non-EDID LVDS/eDP mode. */ | ||
1970 | mode = DRM_MODE_SCALE_FULLSCREEN; | ||
1177 | } | 1971 | } |
1178 | 1972 | ||
1179 | ret = nv50_crtc_swap_fbs(crtc, old_fb); | 1973 | asyh->view.iW = umode->hdisplay; |
1180 | if (ret) | 1974 | asyh->view.iH = umode->vdisplay; |
1181 | return ret; | 1975 | asyh->view.oW = omode->hdisplay; |
1976 | asyh->view.oH = omode->vdisplay; | ||
1977 | if (omode->flags & DRM_MODE_FLAG_DBLSCAN) | ||
1978 | asyh->view.oH *= 2; | ||
1182 | 1979 | ||
1183 | push = evo_wait(mast, 64); | 1980 | /* Add overscan compensation if necessary, will keep the aspect |
1184 | if (push) { | 1981 | * ratio the same as the backend mode unless overridden by the |
1185 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 1982 | * user setting both hborder and vborder properties. |
1186 | evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2); | 1983 | */ |
1187 | evo_data(push, 0x00800000 | mode->clock); | 1984 | if ((asyc->scaler.underscan.mode == UNDERSCAN_ON || |
1188 | evo_data(push, (ilace == 2) ? 2 : 0); | 1985 | (asyc->scaler.underscan.mode == UNDERSCAN_AUTO && |
1189 | evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6); | 1986 | drm_detect_hdmi_monitor(edid)))) { |
1190 | evo_data(push, 0x00000000); | 1987 | u32 bX = asyc->scaler.underscan.hborder; |
1191 | evo_data(push, (vactive << 16) | hactive); | 1988 | u32 bY = asyc->scaler.underscan.vborder; |
1192 | evo_data(push, ( vsynce << 16) | hsynce); | 1989 | u32 r = (asyh->view.oH << 19) / asyh->view.oW; |
1193 | evo_data(push, (vblanke << 16) | hblanke); | 1990 | |
1194 | evo_data(push, (vblanks << 16) | hblanks); | 1991 | if (bX) { |
1195 | evo_data(push, (vblan2e << 16) | vblan2s); | 1992 | asyh->view.oW -= (bX * 2); |
1196 | evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1); | 1993 | if (bY) asyh->view.oH -= (bY * 2); |
1197 | evo_data(push, 0x00000000); | 1994 | else asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19; |
1198 | evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2); | ||
1199 | evo_data(push, 0x00000311); | ||
1200 | evo_data(push, 0x00000100); | ||
1201 | } else { | 1995 | } else { |
1202 | evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6); | 1996 | asyh->view.oW -= (asyh->view.oW >> 4) + 32; |
1203 | evo_data(push, 0x00000000); | 1997 | if (bY) asyh->view.oH -= (bY * 2); |
1204 | evo_data(push, (vactive << 16) | hactive); | 1998 | else asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19; |
1205 | evo_data(push, ( vsynce << 16) | hsynce); | ||
1206 | evo_data(push, (vblanke << 16) | hblanke); | ||
1207 | evo_data(push, (vblanks << 16) | hblanks); | ||
1208 | evo_data(push, (vblan2e << 16) | vblan2s); | ||
1209 | evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1); | ||
1210 | evo_data(push, 0x00000000); /* ??? */ | ||
1211 | evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3); | ||
1212 | evo_data(push, mode->clock * 1000); | ||
1213 | evo_data(push, 0x00200000); /* ??? */ | ||
1214 | evo_data(push, mode->clock * 1000); | ||
1215 | evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2); | ||
1216 | evo_data(push, 0x00000311); | ||
1217 | evo_data(push, 0x00000100); | ||
1218 | } | 1999 | } |
2000 | } | ||
1219 | 2001 | ||
1220 | evo_kick(push, mast); | 2002 | /* Handle CENTER/ASPECT scaling, taking into account the areas |
2003 | * removed already for overscan compensation. | ||
2004 | */ | ||
2005 | switch (mode) { | ||
2006 | case DRM_MODE_SCALE_CENTER: | ||
2007 | asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW); | ||
2008 | asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH); | ||
2009 | /* fall-through */ | ||
2010 | case DRM_MODE_SCALE_ASPECT: | ||
2011 | if (asyh->view.oH < asyh->view.oW) { | ||
2012 | u32 r = (asyh->view.iW << 19) / asyh->view.iH; | ||
2013 | asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19; | ||
2014 | } else { | ||
2015 | u32 r = (asyh->view.iH << 19) / asyh->view.iW; | ||
2016 | asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19; | ||
2017 | } | ||
2018 | break; | ||
2019 | default: | ||
2020 | break; | ||
1221 | } | 2021 | } |
1222 | 2022 | ||
1223 | nv_connector = nouveau_crtc_connector_get(nv_crtc); | 2023 | asyh->set.view = true; |
1224 | nv50_crtc_set_dither(nv_crtc, false); | 2024 | } |
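[annotation] For CENTER/ASPECT scaling the check works in an unsigned 19-bit fixed-point ratio (shift by 19, rounded with + r/2). As a sanity check of that arithmetic outside the driver, here is the ASPECT branch applied to a hypothetical 1024x768 user mode on a 1920x1200 native panel mode; the result is a 1600x1200 viewport that preserves the 4:3 ratio.

/* Standalone re-run of the DRM_MODE_SCALE_ASPECT arithmetic from
 * nv50_head_atomic_check_view(), with hypothetical mode sizes. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint32_t iW = 1024, iH = 768;	/* user (input) mode    */
	uint32_t oW = 1920, oH = 1200;	/* native (output) mode */

	if (oH < oW) {
		uint32_t r = (iW << 19) / iH;	/* 4:3 in 19-bit fixed point */
		oW = ((oH * r) + (r / 2)) >> 19;
	} else {
		uint32_t r = (iH << 19) / iW;
		oH = ((oW * r) + (r / 2)) >> 19;
	}

	printf("viewport: %ux%u\n", (unsigned)oW, (unsigned)oH);	/* 1600x1200 */
	return 0;
}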
1225 | nv50_crtc_set_scale(nv_crtc, false); | 2025 | |
2026 | static void | ||
2027 | nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh) | ||
2028 | { | ||
2029 | struct drm_display_mode *mode = &asyh->state.adjusted_mode; | ||
2030 | u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1; | ||
2031 | u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1; | ||
2032 | u32 hbackp = mode->htotal - mode->hsync_end; | ||
2033 | u32 vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace; | ||
2034 | u32 hfrontp = mode->hsync_start - mode->hdisplay; | ||
2035 | u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace; | ||
2036 | struct nv50_head_mode *m = &asyh->mode; | ||
2037 | |||
2038 | m->h.active = mode->htotal; | ||
2039 | m->h.synce = mode->hsync_end - mode->hsync_start - 1; | ||
2040 | m->h.blanke = m->h.synce + hbackp; | ||
2041 | m->h.blanks = mode->htotal - hfrontp - 1; | ||
2042 | |||
2043 | m->v.active = mode->vtotal * vscan / ilace; | ||
2044 | m->v.synce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1; | ||
2045 | m->v.blanke = m->v.synce + vbackp; | ||
2046 | m->v.blanks = m->v.active - vfrontp - 1; | ||
2047 | |||
2048 | /*XXX: Safe underestimate, even "0" works */ | ||
2049 | m->v.blankus = (m->v.active - mode->vdisplay - 2) * m->h.active; | ||
2050 | m->v.blankus *= 1000; | ||
2051 | m->v.blankus /= mode->clock; | ||
1226 | 2052 | ||
1227 | /* G94 only accepts this after setting scale */ | 2053 | if (mode->flags & DRM_MODE_FLAG_INTERLACE) { |
1228 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) | 2054 | m->v.blank2e = m->v.active + m->v.synce + vbackp; |
1229 | nv50_crtc_set_raster_vblank_dmi(nv_crtc, vblankus); | 2055 | m->v.blank2s = m->v.blank2e + (mode->vdisplay * vscan / ilace); |
2056 | m->v.active = (m->v.active * 2) + 1; | ||
2057 | m->interlace = true; | ||
2058 | } else { | ||
2059 | m->v.blank2e = 0; | ||
2060 | m->v.blank2s = 1; | ||
2061 | m->interlace = false; | ||
2062 | } | ||
2063 | m->clock = mode->clock; | ||
1230 | 2064 | ||
1231 | nv50_crtc_set_color_vibrance(nv_crtc, false); | 2065 | drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V); |
1232 | nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false); | 2066 | asyh->set.mode = true; |
1233 | return 0; | ||
1234 | } | 2067 | } |
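[annotation] nv50_head_atomic_check_mode() turns the DRM mode into the blank/sync positions the core-channel methods above expect (counted from the start of sync), plus a rough vblank duration in microseconds that the pre-GF110 path feeds to the raster vblank DMI method. Running the same arithmetic over a standard CEA 1920x1080@60 timing (clock 148500 kHz, htotal 2200, vtotal 1125) gives h.synce 43, h.blanke 191, h.blanks 2111, v.synce 4, v.blanke 40, v.blanks 1120 and a blankus of about 637 us. A standalone version of the calculation:

/* Re-run of nv50_head_atomic_check_mode() for a progressive,
 * non-doublescan 1080p60 timing (hypothetical input, same arithmetic). */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint32_t clock = 148500;	/* kHz */
	uint32_t hdisplay = 1920, hsync_start = 2008, hsync_end = 2052, htotal = 2200;
	uint32_t vdisplay = 1080, vsync_start = 1084, vsync_end = 1089, vtotal = 1125;
	uint32_t ilace = 1, vscan = 1;	/* progressive, no doublescan */

	uint32_t hbackp  = htotal - hsync_end;
	uint32_t vbackp  = (vtotal - vsync_end) * vscan / ilace;
	uint32_t hfrontp = hsync_start - hdisplay;
	uint32_t vfrontp = (vsync_start - vdisplay) * vscan / ilace;

	uint32_t h_active = htotal;
	uint32_t h_synce  = hsync_end - hsync_start - 1;
	uint32_t h_blanke = h_synce + hbackp;
	uint32_t h_blanks = htotal - hfrontp - 1;

	uint32_t v_active = vtotal * vscan / ilace;
	uint32_t v_synce  = ((vsync_end - vsync_start) * vscan / ilace) - 1;
	uint32_t v_blanke = v_synce + vbackp;
	uint32_t v_blanks = v_active - vfrontp - 1;

	/* Safe underestimate of the vblank period, as the code comments note. */
	uint32_t v_blankus = (v_active - vdisplay - 2) * h_active * 1000 / clock;

	printf("h: active %u synce %u blanke %u blanks %u\n",
	       h_active, h_synce, h_blanke, h_blanks);
	printf("v: active %u synce %u blanke %u blanks %u blankus %u\n",
	       v_active, v_synce, v_blanke, v_blanks, v_blankus);
	return 0;
}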
1235 | 2068 | ||
1236 | static int | 2069 | static int |
1237 | nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y, | 2070 | nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state) |
1238 | struct drm_framebuffer *old_fb) | ||
1239 | { | 2071 | { |
1240 | struct nouveau_drm *drm = nouveau_drm(crtc->dev); | 2072 | struct nouveau_drm *drm = nouveau_drm(crtc->dev); |
1241 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 2073 | struct nv50_disp *disp = nv50_disp(crtc->dev); |
1242 | int ret; | 2074 | struct nv50_head *head = nv50_head(crtc); |
2075 | struct nv50_head_atom *armh = nv50_head_atom(crtc->state); | ||
2076 | struct nv50_head_atom *asyh = nv50_head_atom(state); | ||
2077 | struct nouveau_conn_atom *asyc = NULL; | ||
2078 | struct drm_connector_state *conns; | ||
2079 | struct drm_connector *conn; | ||
2080 | int i; | ||
1243 | 2081 | ||
1244 | if (!crtc->primary->fb) { | 2082 | NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active); |
1245 | NV_DEBUG(drm, "No FB bound\n"); | 2083 | if (asyh->state.active) { |
1246 | return 0; | 2084 | for_each_connector_in_state(asyh->state.state, conn, conns, i) { |
2085 | if (conns->crtc == crtc) { | ||
2086 | asyc = nouveau_conn_atom(conns); | ||
2087 | break; | ||
2088 | } | ||
2089 | } | ||
2090 | |||
2091 | if (armh->state.active) { | ||
2092 | if (asyc) { | ||
2093 | if (asyh->state.mode_changed) | ||
2094 | asyc->set.scaler = true; | ||
2095 | if (armh->base.depth != asyh->base.depth) | ||
2096 | asyc->set.dither = true; | ||
2097 | } | ||
2098 | } else { | ||
2099 | asyc->set.mask = ~0; | ||
2100 | asyh->set.mask = ~0; | ||
2101 | } | ||
2102 | |||
2103 | if (asyh->state.mode_changed) | ||
2104 | nv50_head_atomic_check_mode(head, asyh); | ||
2105 | |||
2106 | if (asyc) { | ||
2107 | if (asyc->set.scaler) | ||
2108 | nv50_head_atomic_check_view(armh, asyh, asyc); | ||
2109 | if (asyc->set.dither) | ||
2110 | nv50_head_atomic_check_dither(armh, asyh, asyc); | ||
2111 | if (asyc->set.procamp) | ||
2112 | nv50_head_atomic_check_procamp(armh, asyh, asyc); | ||
2113 | } | ||
2114 | |||
2115 | if ((asyh->core.visible = (asyh->base.cpp != 0))) { | ||
2116 | asyh->core.x = asyh->base.x; | ||
2117 | asyh->core.y = asyh->base.y; | ||
2118 | asyh->core.w = asyh->base.w; | ||
2119 | asyh->core.h = asyh->base.h; | ||
2120 | } else | ||
2121 | if ((asyh->core.visible = asyh->curs.visible)) { | ||
2122 | /*XXX: We need to either find some way of having the | ||
2123 | * primary base layer appear black, while still | ||
2124 | * being able to display the other layers, or we | ||
2125 | * need to allocate a dummy black surface here. | ||
2126 | */ | ||
2127 | asyh->core.x = 0; | ||
2128 | asyh->core.y = 0; | ||
2129 | asyh->core.w = asyh->state.mode.hdisplay; | ||
2130 | asyh->core.h = asyh->state.mode.vdisplay; | ||
2131 | } | ||
2132 | asyh->core.handle = disp->mast.base.vram.handle; | ||
2133 | asyh->core.offset = 0; | ||
2134 | asyh->core.format = 0xcf; | ||
2135 | asyh->core.kind = 0; | ||
2136 | asyh->core.layout = 1; | ||
2137 | asyh->core.block = 0; | ||
2138 | asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4; | ||
2139 | asyh->lut.handle = disp->mast.base.vram.handle; | ||
2140 | asyh->lut.offset = head->base.lut.nvbo->bo.offset; | ||
2141 | asyh->set.base = armh->base.cpp != asyh->base.cpp; | ||
2142 | asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp; | ||
2143 | } else { | ||
2144 | asyh->core.visible = false; | ||
2145 | asyh->curs.visible = false; | ||
2146 | asyh->base.cpp = 0; | ||
2147 | asyh->ovly.cpp = 0; | ||
1247 | } | 2148 | } |
1248 | 2149 | ||
1249 | ret = nv50_crtc_swap_fbs(crtc, old_fb); | 2150 | if (!drm_atomic_crtc_needs_modeset(&asyh->state)) { |
1250 | if (ret) | 2151 | if (asyh->core.visible) { |
1251 | return ret; | 2152 | if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core))) |
2153 | asyh->set.core = true; | ||
2154 | } else | ||
2155 | if (armh->core.visible) { | ||
2156 | asyh->clr.core = true; | ||
2157 | } | ||
1252 | 2158 | ||
1253 | nv50_display_flip_stop(crtc); | 2159 | if (asyh->curs.visible) { |
1254 | nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true); | 2160 | if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs))) |
1255 | nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1); | 2161 | asyh->set.curs = true; |
1256 | return 0; | 2162 | } else |
1257 | } | 2163 | if (armh->curs.visible) { |
2164 | asyh->clr.curs = true; | ||
2165 | } | ||
2166 | } else { | ||
2167 | asyh->clr.core = armh->core.visible; | ||
2168 | asyh->clr.curs = armh->curs.visible; | ||
2169 | asyh->set.core = asyh->core.visible; | ||
2170 | asyh->set.curs = asyh->curs.visible; | ||
2171 | } | ||
1258 | 2172 | ||
1259 | static int | 2173 | if (asyh->clr.mask || asyh->set.mask) |
1260 | nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc, | 2174 | nv50_atom(asyh->state.state)->lock_core = true; |
1261 | struct drm_framebuffer *fb, int x, int y, | ||
1262 | enum mode_set_atomic state) | ||
1263 | { | ||
1264 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | ||
1265 | nv50_display_flip_stop(crtc); | ||
1266 | nv50_crtc_set_image(nv_crtc, fb, x, y, true); | ||
1267 | return 0; | 2175 | return 0; |
1268 | } | 2176 | } |
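[annotation] Throughout the new code "armh" is the armed state (what is currently programmed on the hardware, i.e. crtc->state) and "asyh" is the assembly state being built for this commit, mirroring the arm/asy naming used elsewhere in nouveau's display code. When no full modeset is required, each block is compared against its armed copy and flagged set (changed while visible) or clr (was visible, now is not). A compact standalone restatement of that decision, with a hypothetical block type:

/* Generic illustration of the armed-vs-assembly comparison that drives
 * the asyh->set / asyh->clr flags above; the block type is made up. */
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

struct block { bool visible; unsigned handle, offset; };

int main(void)
{
	struct block armed, assy;
	bool set = false, clr = false;

	memset(&armed, 0, sizeof(armed));
	memset(&assy, 0, sizeof(assy));
	armed.visible = true; armed.handle = 1; armed.offset = 0x1000;
	assy.visible  = true; assy.handle  = 1; assy.offset  = 0x2000;

	if (assy.visible) {
		/* Re-program only if something in the block changed. */
		if (memcmp(&armed, &assy, sizeof(assy)))
			set = true;
	} else if (armed.visible) {
		/* Was visible before, not any more: detach it. */
		clr = true;
	}

	printf("set=%d clr=%d\n", set, clr);
	return 0;
}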
1269 | 2177 | ||
1270 | static void | 2178 | static void |
1271 | nv50_crtc_lut_load(struct drm_crtc *crtc) | 2179 | nv50_head_lut_load(struct drm_crtc *crtc) |
1272 | { | 2180 | { |
1273 | struct nv50_disp *disp = nv50_disp(crtc->dev); | 2181 | struct nv50_disp *disp = nv50_disp(crtc->dev); |
1274 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 2182 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); |
@@ -1292,64 +2200,95 @@ nv50_crtc_lut_load(struct drm_crtc *crtc) | |||
1292 | } | 2200 | } |
1293 | } | 2201 | } |
1294 | 2202 | ||
1295 | static void | 2203 | static int |
1296 | nv50_crtc_disable(struct drm_crtc *crtc) | 2204 | nv50_head_mode_set_base_atomic(struct drm_crtc *crtc, |
2205 | struct drm_framebuffer *fb, int x, int y, | ||
2206 | enum mode_set_atomic state) | ||
1297 | { | 2207 | { |
1298 | struct nv50_head *head = nv50_head(crtc); | 2208 | WARN_ON(1); |
1299 | evo_sync(crtc->dev); | 2209 | return 0; |
1300 | if (head->image) | ||
1301 | nouveau_bo_unpin(head->image); | ||
1302 | nouveau_bo_ref(NULL, &head->image); | ||
1303 | } | 2210 | } |
1304 | 2211 | ||
2212 | static const struct drm_crtc_helper_funcs | ||
2213 | nv50_head_help = { | ||
2214 | .mode_set_base_atomic = nv50_head_mode_set_base_atomic, | ||
2215 | .load_lut = nv50_head_lut_load, | ||
2216 | .atomic_check = nv50_head_atomic_check, | ||
2217 | }; | ||
2218 | |||
2219 | /* This is identical to the version in the atomic helpers, except that | ||
2220 | * it supports non-vblanked ("async") page flips. | ||
2221 | */ | ||
1305 | static int | 2222 | static int |
1306 | nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv, | 2223 | nv50_head_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb, |
1307 | uint32_t handle, uint32_t width, uint32_t height) | 2224 | struct drm_pending_vblank_event *event, u32 flags) |
1308 | { | 2225 | { |
1309 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 2226 | struct drm_plane *plane = crtc->primary; |
1310 | struct drm_gem_object *gem = NULL; | 2227 | struct drm_atomic_state *state; |
1311 | struct nouveau_bo *nvbo = NULL; | 2228 | struct drm_plane_state *plane_state; |
2229 | struct drm_crtc_state *crtc_state; | ||
1312 | int ret = 0; | 2230 | int ret = 0; |
1313 | 2231 | ||
1314 | if (handle) { | 2232 | state = drm_atomic_state_alloc(plane->dev); |
1315 | if (width != 64 || height != 64) | 2233 | if (!state) |
1316 | return -EINVAL; | 2234 | return -ENOMEM; |
1317 | 2235 | ||
1318 | gem = drm_gem_object_lookup(file_priv, handle); | 2236 | state->acquire_ctx = drm_modeset_legacy_acquire_ctx(crtc); |
1319 | if (unlikely(!gem)) | 2237 | retry: |
1320 | return -ENOENT; | 2238 | crtc_state = drm_atomic_get_crtc_state(state, crtc); |
1321 | nvbo = nouveau_gem_object(gem); | 2239 | if (IS_ERR(crtc_state)) { |
2240 | ret = PTR_ERR(crtc_state); | ||
2241 | goto fail; | ||
2242 | } | ||
2243 | crtc_state->event = event; | ||
1322 | 2244 | ||
1323 | ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true); | 2245 | plane_state = drm_atomic_get_plane_state(state, plane); |
2246 | if (IS_ERR(plane_state)) { | ||
2247 | ret = PTR_ERR(plane_state); | ||
2248 | goto fail; | ||
1324 | } | 2249 | } |
1325 | 2250 | ||
1326 | if (ret == 0) { | 2251 | ret = drm_atomic_set_crtc_for_plane(plane_state, crtc); |
1327 | if (nv_crtc->cursor.nvbo) | 2252 | if (ret != 0) |
1328 | nouveau_bo_unpin(nv_crtc->cursor.nvbo); | 2253 | goto fail; |
1329 | nouveau_bo_ref(nvbo, &nv_crtc->cursor.nvbo); | 2254 | drm_atomic_set_fb_for_plane(plane_state, fb); |
2255 | |||
2256 | /* Make sure we don't accidentally do a full modeset. */ | ||
2257 | state->allow_modeset = false; | ||
2258 | if (!crtc_state->active) { | ||
2259 | DRM_DEBUG_ATOMIC("[CRTC:%d] disabled, rejecting legacy flip\n", | ||
2260 | crtc->base.id); | ||
2261 | ret = -EINVAL; | ||
2262 | goto fail; | ||
1330 | } | 2263 | } |
1331 | drm_gem_object_unreference_unlocked(gem); | ||
1332 | 2264 | ||
1333 | nv50_crtc_cursor_show_hide(nv_crtc, true, true); | 2265 | if (flags & DRM_MODE_PAGE_FLIP_ASYNC) |
2266 | nv50_wndw_atom(plane_state)->interval = 0; | ||
2267 | |||
2268 | ret = drm_atomic_nonblocking_commit(state); | ||
2269 | fail: | ||
2270 | if (ret == -EDEADLK) | ||
2271 | goto backoff; | ||
2272 | |||
2273 | drm_atomic_state_put(state); | ||
1334 | return ret; | 2274 | return ret; |
1335 | } | ||
1336 | 2275 | ||
1337 | static int | 2276 | backoff: |
1338 | nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y) | 2277 | drm_atomic_state_clear(state); |
1339 | { | 2278 | drm_atomic_legacy_backoff(state); |
1340 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | ||
1341 | struct nv50_curs *curs = nv50_curs(crtc); | ||
1342 | struct nv50_chan *chan = nv50_chan(curs); | ||
1343 | nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff)); | ||
1344 | nvif_wr32(&chan->user, 0x0080, 0x00000000); | ||
1345 | 2279 | ||
1346 | nv_crtc->cursor_saved_x = x; | 2280 | /* |
1347 | nv_crtc->cursor_saved_y = y; | 2281 | * Someone might have exchanged the framebuffer while we dropped locks |
1348 | return 0; | 2282 | * in the backoff code. We need to fix up the fb refcount tracking the |
2283 | * core does for us. | ||
2284 | */ | ||
2285 | plane->old_fb = plane->fb; | ||
2286 | |||
2287 | goto retry; | ||
1349 | } | 2288 | } |
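[annotation] With the legacy .page_flip hook now routed through a nonblocking atomic commit, DRM_MODE_PAGE_FLIP_ASYNC simply becomes "interval 0" on the window plane's atomic state, and flips on an inactive CRTC are rejected instead of being queued. From userspace nothing changes; a client still requests the flip through the usual libdrm call, for example (assuming a DRM fd plus crtc and framebuffer IDs obtained elsewhere):

/* Minimal userspace sketch: request an asynchronous (non-vblank-synced)
 * page flip through the legacy ioctl this hook now implements on top of
 * atomic.  fd, crtc_id and fb_id are assumed to come from the usual
 * drmModeGetResources()/drmModeAddFB() setup. */
#include <stdio.h>
#include <stdint.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

int request_async_flip(int fd, uint32_t crtc_id, uint32_t fb_id, void *user_data)
{
	int ret = drmModePageFlip(fd, crtc_id, fb_id,
				  DRM_MODE_PAGE_FLIP_EVENT |
				  DRM_MODE_PAGE_FLIP_ASYNC,
				  user_data);
	if (ret)
		fprintf(stderr, "flip failed: %d\n", ret);
	return ret;	/* completion arrives as a DRM event on fd */
}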
1350 | 2289 | ||
1351 | static int | 2290 | static int |
1352 | nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b, | 2291 | nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b, |
1353 | uint32_t size) | 2292 | uint32_t size) |
1354 | { | 2293 | { |
1355 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 2294 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); |
@@ -1361,47 +2300,71 @@ nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b, | |||
1361 | nv_crtc->lut.b[i] = b[i]; | 2300 | nv_crtc->lut.b[i] = b[i]; |
1362 | } | 2301 | } |
1363 | 2302 | ||
1364 | nv50_crtc_lut_load(crtc); | 2303 | nv50_head_lut_load(crtc); |
1365 | |||
1366 | return 0; | 2304 | return 0; |
1367 | } | 2305 | } |
1368 | 2306 | ||
1369 | static void | 2307 | static void |
1370 | nv50_crtc_cursor_restore(struct nouveau_crtc *nv_crtc, int x, int y) | 2308 | nv50_head_atomic_destroy_state(struct drm_crtc *crtc, |
2309 | struct drm_crtc_state *state) | ||
1371 | { | 2310 | { |
1372 | nv50_crtc_cursor_move(&nv_crtc->base, x, y); | 2311 | struct nv50_head_atom *asyh = nv50_head_atom(state); |
2312 | __drm_atomic_helper_crtc_destroy_state(&asyh->state); | ||
2313 | kfree(asyh); | ||
2314 | } | ||
1373 | 2315 | ||
1374 | nv50_crtc_cursor_show_hide(nv_crtc, true, true); | 2316 | static struct drm_crtc_state * |
2317 | nv50_head_atomic_duplicate_state(struct drm_crtc *crtc) | ||
2318 | { | ||
2319 | struct nv50_head_atom *armh = nv50_head_atom(crtc->state); | ||
2320 | struct nv50_head_atom *asyh; | ||
2321 | if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL))) | ||
2322 | return NULL; | ||
2323 | __drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state); | ||
2324 | asyh->view = armh->view; | ||
2325 | asyh->mode = armh->mode; | ||
2326 | asyh->lut = armh->lut; | ||
2327 | asyh->core = armh->core; | ||
2328 | asyh->curs = armh->curs; | ||
2329 | asyh->base = armh->base; | ||
2330 | asyh->ovly = armh->ovly; | ||
2331 | asyh->dither = armh->dither; | ||
2332 | asyh->procamp = armh->procamp; | ||
2333 | asyh->clr.mask = 0; | ||
2334 | asyh->set.mask = 0; | ||
2335 | return &asyh->state; | ||
2336 | } | ||
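[annotation] The duplicate/destroy pair follows the standard recipe for drivers that subclass drm_crtc_state: let __drm_atomic_helper_crtc_duplicate_state() copy the core fields, then copy the driver's private members by hand and clear the dirty masks so the new state starts clean. A generic sketch of the same pattern for a hypothetical driver "foo" (names and private fields are invented; only the shape matches this patch):

/* Hedged kernel-style sketch of subclassing drm_crtc_state. */
#include <linux/kernel.h>
#include <linux/slab.h>
#include <drm/drm_crtc.h>
#include <drm/drm_atomic_helper.h>

struct foo_crtc_state {
	struct drm_crtc_state base;
	u32  gamma_dirty;		/* hypothetical private members */
	bool needs_full_flush;
};

#define to_foo_crtc_state(s) container_of((s), struct foo_crtc_state, base)

static struct drm_crtc_state *
foo_crtc_duplicate_state(struct drm_crtc *crtc)
{
	struct foo_crtc_state *cur = to_foo_crtc_state(crtc->state);
	struct foo_crtc_state *new = kmalloc(sizeof(*new), GFP_KERNEL);

	if (!new)
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &new->base);
	new->gamma_dirty = cur->gamma_dirty;	/* carry private state over   */
	new->needs_full_flush = false;		/* ...and reset dirty tracking */
	return &new->base;
}

static void
foo_crtc_destroy_state(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	__drm_atomic_helper_crtc_destroy_state(state);
	kfree(to_foo_crtc_state(state));
}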
2337 | |||
2338 | static void | ||
2339 | __drm_atomic_helper_crtc_reset(struct drm_crtc *crtc, | ||
2340 | struct drm_crtc_state *state) | ||
2341 | { | ||
2342 | if (crtc->state) | ||
2343 | crtc->funcs->atomic_destroy_state(crtc, crtc->state); | ||
2344 | crtc->state = state; | ||
2345 | crtc->state->crtc = crtc; | ||
1375 | } | 2346 | } |
1376 | 2347 | ||
1377 | static void | 2348 | static void |
1378 | nv50_crtc_destroy(struct drm_crtc *crtc) | 2349 | nv50_head_reset(struct drm_crtc *crtc) |
2350 | { | ||
2351 | struct nv50_head_atom *asyh; | ||
2352 | |||
2353 | if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL)))) | ||
2354 | return; | ||
2355 | |||
2356 | __drm_atomic_helper_crtc_reset(crtc, &asyh->state); | ||
2357 | } | ||
2358 | |||
2359 | static void | ||
2360 | nv50_head_destroy(struct drm_crtc *crtc) | ||
1379 | { | 2361 | { |
1380 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); | 2362 | struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); |
1381 | struct nv50_disp *disp = nv50_disp(crtc->dev); | 2363 | struct nv50_disp *disp = nv50_disp(crtc->dev); |
1382 | struct nv50_head *head = nv50_head(crtc); | 2364 | struct nv50_head *head = nv50_head(crtc); |
1383 | struct nv50_fbdma *fbdma; | ||
1384 | |||
1385 | list_for_each_entry(fbdma, &disp->fbdma, head) { | ||
1386 | nvif_object_fini(&fbdma->base[nv_crtc->index]); | ||
1387 | } | ||
1388 | 2365 | ||
1389 | nv50_dmac_destroy(&head->ovly.base, disp->disp); | 2366 | nv50_dmac_destroy(&head->ovly.base, disp->disp); |
1390 | nv50_pioc_destroy(&head->oimm.base); | 2367 | nv50_pioc_destroy(&head->oimm.base); |
1391 | nv50_dmac_destroy(&head->sync.base, disp->disp); | ||
1392 | nv50_pioc_destroy(&head->curs.base); | ||
1393 | |||
1394 | /*XXX: this shouldn't be necessary, but the core doesn't call | ||
1395 | * disconnect() during the cleanup paths | ||
1396 | */ | ||
1397 | if (head->image) | ||
1398 | nouveau_bo_unpin(head->image); | ||
1399 | nouveau_bo_ref(NULL, &head->image); | ||
1400 | |||
1401 | /*XXX: ditto */ | ||
1402 | if (nv_crtc->cursor.nvbo) | ||
1403 | nouveau_bo_unpin(nv_crtc->cursor.nvbo); | ||
1404 | nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo); | ||
1405 | 2368 | ||
1406 | nouveau_bo_unmap(nv_crtc->lut.nvbo); | 2369 | nouveau_bo_unmap(nv_crtc->lut.nvbo); |
1407 | if (nv_crtc->lut.nvbo) | 2370 | if (nv_crtc->lut.nvbo) |
@@ -1412,34 +2375,27 @@ nv50_crtc_destroy(struct drm_crtc *crtc) | |||
1412 | kfree(crtc); | 2375 | kfree(crtc); |
1413 | } | 2376 | } |
1414 | 2377 | ||
1415 | static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = { | 2378 | static const struct drm_crtc_funcs |
1416 | .dpms = nv50_crtc_dpms, | 2379 | nv50_head_func = { |
1417 | .prepare = nv50_crtc_prepare, | 2380 | .reset = nv50_head_reset, |
1418 | .commit = nv50_crtc_commit, | 2381 | .gamma_set = nv50_head_gamma_set, |
1419 | .mode_fixup = nv50_crtc_mode_fixup, | 2382 | .destroy = nv50_head_destroy, |
1420 | .mode_set = nv50_crtc_mode_set, | 2383 | .set_config = drm_atomic_helper_set_config, |
1421 | .mode_set_base = nv50_crtc_mode_set_base, | 2384 | .page_flip = nv50_head_page_flip, |
1422 | .mode_set_base_atomic = nv50_crtc_mode_set_base_atomic, | 2385 | .set_property = drm_atomic_helper_crtc_set_property, |
1423 | .load_lut = nv50_crtc_lut_load, | 2386 | .atomic_duplicate_state = nv50_head_atomic_duplicate_state, |
1424 | .disable = nv50_crtc_disable, | 2387 | .atomic_destroy_state = nv50_head_atomic_destroy_state, |
1425 | }; | ||
1426 | |||
1427 | static const struct drm_crtc_funcs nv50_crtc_func = { | ||
1428 | .cursor_set = nv50_crtc_cursor_set, | ||
1429 | .cursor_move = nv50_crtc_cursor_move, | ||
1430 | .gamma_set = nv50_crtc_gamma_set, | ||
1431 | .set_config = nouveau_crtc_set_config, | ||
1432 | .destroy = nv50_crtc_destroy, | ||
1433 | .page_flip = nouveau_crtc_page_flip, | ||
1434 | }; | 2388 | }; |
1435 | 2389 | ||
1436 | static int | 2390 | static int |
1437 | nv50_crtc_create(struct drm_device *dev, int index) | 2391 | nv50_head_create(struct drm_device *dev, int index) |
1438 | { | 2392 | { |
1439 | struct nouveau_drm *drm = nouveau_drm(dev); | 2393 | struct nouveau_drm *drm = nouveau_drm(dev); |
1440 | struct nvif_device *device = &drm->device; | 2394 | struct nvif_device *device = &drm->device; |
1441 | struct nv50_disp *disp = nv50_disp(dev); | 2395 | struct nv50_disp *disp = nv50_disp(dev); |
1442 | struct nv50_head *head; | 2396 | struct nv50_head *head; |
2397 | struct nv50_base *base; | ||
2398 | struct nv50_curs *curs; | ||
1443 | struct drm_crtc *crtc; | 2399 | struct drm_crtc *crtc; |
1444 | int ret, i; | 2400 | int ret, i; |
1445 | 2401 | ||
@@ -1448,21 +2404,25 @@ nv50_crtc_create(struct drm_device *dev, int index) | |||
1448 | return -ENOMEM; | 2404 | return -ENOMEM; |
1449 | 2405 | ||
1450 | head->base.index = index; | 2406 | head->base.index = index; |
1451 | head->base.set_dither = nv50_crtc_set_dither; | ||
1452 | head->base.set_scale = nv50_crtc_set_scale; | ||
1453 | head->base.set_color_vibrance = nv50_crtc_set_color_vibrance; | ||
1454 | head->base.color_vibrance = 50; | ||
1455 | head->base.vibrant_hue = 0; | ||
1456 | head->base.cursor.set_pos = nv50_crtc_cursor_restore; | ||
1457 | for (i = 0; i < 256; i++) { | 2407 | for (i = 0; i < 256; i++) { |
1458 | head->base.lut.r[i] = i << 8; | 2408 | head->base.lut.r[i] = i << 8; |
1459 | head->base.lut.g[i] = i << 8; | 2409 | head->base.lut.g[i] = i << 8; |
1460 | head->base.lut.b[i] = i << 8; | 2410 | head->base.lut.b[i] = i << 8; |
1461 | } | 2411 | } |
1462 | 2412 | ||
2413 | ret = nv50_base_new(drm, head, &base); | ||
2414 | if (ret == 0) | ||
2415 | ret = nv50_curs_new(drm, head, &curs); | ||
2416 | if (ret) { | ||
2417 | kfree(head); | ||
2418 | return ret; | ||
2419 | } | ||
2420 | |||
1463 | crtc = &head->base.base; | 2421 | crtc = &head->base.base; |
1464 | drm_crtc_init(dev, crtc, &nv50_crtc_func); | 2422 | drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane, |
1465 | drm_crtc_helper_add(crtc, &nv50_crtc_hfunc); | 2423 | &curs->wndw.plane, &nv50_head_func, |
2424 | "head-%d", head->base.index); | ||
2425 | drm_crtc_helper_add(crtc, &nv50_head_help); | ||
1466 | drm_mode_crtc_set_gamma_size(crtc, 256); | 2426 | drm_mode_crtc_set_gamma_size(crtc, 256); |
1467 | 2427 | ||
1468 | ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM, | 2428 | ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM, |
@@ -1481,20 +2441,6 @@ nv50_crtc_create(struct drm_device *dev, int index) | |||
1481 | if (ret) | 2441 | if (ret) |
1482 | goto out; | 2442 | goto out; |
1483 | 2443 | ||
1484 | /* allocate cursor resources */ | ||
1485 | ret = nv50_curs_create(device, disp->disp, index, &head->curs); | ||
1486 | if (ret) | ||
1487 | goto out; | ||
1488 | |||
1489 | /* allocate page flip / sync resources */ | ||
1490 | ret = nv50_base_create(device, disp->disp, index, disp->sync->bo.offset, | ||
1491 | &head->sync); | ||
1492 | if (ret) | ||
1493 | goto out; | ||
1494 | |||
1495 | head->sync.addr = EVO_FLIP_SEM0(index); | ||
1496 | head->sync.data = 0x00000000; | ||
1497 | |||
1498 | /* allocate overlay resources */ | 2444 | /* allocate overlay resources */ |
1499 | ret = nv50_oimm_create(device, disp->disp, index, &head->oimm); | 2445 | ret = nv50_oimm_create(device, disp->disp, index, &head->oimm); |
1500 | if (ret) | 2446 | if (ret) |
@@ -1507,43 +2453,64 @@ nv50_crtc_create(struct drm_device *dev, int index) | |||
1507 | 2453 | ||
1508 | out: | 2454 | out: |
1509 | if (ret) | 2455 | if (ret) |
1510 | nv50_crtc_destroy(crtc); | 2456 | nv50_head_destroy(crtc); |
1511 | return ret; | 2457 | return ret; |
1512 | } | 2458 | } |
1513 | 2459 | ||
1514 | /****************************************************************************** | 2460 | /****************************************************************************** |
1515 | * Encoder helpers | 2461 | * Output path helpers |
1516 | *****************************************************************************/ | 2462 | *****************************************************************************/ |
1517 | static bool | 2463 | static int |
1518 | nv50_encoder_mode_fixup(struct drm_encoder *encoder, | 2464 | nv50_outp_atomic_check_view(struct drm_encoder *encoder, |
1519 | const struct drm_display_mode *mode, | 2465 | struct drm_crtc_state *crtc_state, |
1520 | struct drm_display_mode *adjusted_mode) | 2466 | struct drm_connector_state *conn_state, |
2467 | struct drm_display_mode *native_mode) | ||
1521 | { | 2468 | { |
1522 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 2469 | struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode; |
1523 | struct nouveau_connector *nv_connector; | 2470 | struct drm_display_mode *mode = &crtc_state->mode; |
2471 | struct drm_connector *connector = conn_state->connector; | ||
2472 | struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state); | ||
2473 | struct nouveau_drm *drm = nouveau_drm(encoder->dev); | ||
2474 | |||
2475 | NV_ATOMIC(drm, "%s atomic_check\n", encoder->name); | ||
2476 | asyc->scaler.full = false; | ||
2477 | if (!native_mode) | ||
2478 | return 0; | ||
1524 | 2479 | ||
1525 | nv_connector = nouveau_encoder_connector_get(nv_encoder); | 2480 | if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) { |
1526 | if (nv_connector && nv_connector->native_mode) { | 2481 | switch (connector->connector_type) { |
1527 | nv_connector->scaling_full = false; | 2482 | case DRM_MODE_CONNECTOR_LVDS: |
1528 | if (nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) { | 2483 | case DRM_MODE_CONNECTOR_eDP: |
1529 | switch (nv_connector->type) { | 2484 | /* Force use of scaler for non-EDID modes. */ |
1530 | case DCB_CONNECTOR_LVDS: | 2485 | if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER) |
1531 | case DCB_CONNECTOR_LVDS_SPWG: | ||
1532 | case DCB_CONNECTOR_eDP: | ||
1533 | /* force use of scaler for non-edid modes */ | ||
1534 | if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER) | ||
1535 | return true; | ||
1536 | nv_connector->scaling_full = true; | ||
1537 | break; | 2486 | break; |
1538 | default: | 2487 | mode = native_mode; |
1539 | return true; | 2488 | asyc->scaler.full = true; |
1540 | } | 2489 | break; |
2490 | default: | ||
2491 | break; | ||
1541 | } | 2492 | } |
2493 | } else { | ||
2494 | mode = native_mode; | ||
2495 | } | ||
1542 | 2496 | ||
1543 | drm_mode_copy(adjusted_mode, nv_connector->native_mode); | 2497 | if (!drm_mode_equal(adjusted_mode, mode)) { |
2498 | drm_mode_copy(adjusted_mode, mode); | ||
2499 | crtc_state->mode_changed = true; | ||
1544 | } | 2500 | } |
1545 | 2501 | ||
1546 | return true; | 2502 | return 0; |
2503 | } | ||
2504 | |||
2505 | static int | ||
2506 | nv50_outp_atomic_check(struct drm_encoder *encoder, | ||
2507 | struct drm_crtc_state *crtc_state, | ||
2508 | struct drm_connector_state *conn_state) | ||
2509 | { | ||
2510 | struct nouveau_connector *nv_connector = | ||
2511 | nouveau_connector(conn_state->connector); | ||
2512 | return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state, | ||
2513 | nv_connector->native_mode); | ||
1547 | } | 2514 | } |
1548 | 2515 | ||
1549 | /****************************************************************************** | 2516 | /****************************************************************************** |
@@ -1574,21 +2541,39 @@ nv50_dac_dpms(struct drm_encoder *encoder, int mode) | |||
1574 | } | 2541 | } |
1575 | 2542 | ||
1576 | static void | 2543 | static void |
1577 | nv50_dac_commit(struct drm_encoder *encoder) | 2544 | nv50_dac_disable(struct drm_encoder *encoder) |
1578 | { | 2545 | { |
2546 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | ||
2547 | struct nv50_mast *mast = nv50_mast(encoder->dev); | ||
2548 | const int or = nv_encoder->or; | ||
2549 | u32 *push; | ||
2550 | |||
2551 | if (nv_encoder->crtc) { | ||
2552 | push = evo_wait(mast, 4); | ||
2553 | if (push) { | ||
2554 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | ||
2555 | evo_mthd(push, 0x0400 + (or * 0x080), 1); | ||
2556 | evo_data(push, 0x00000000); | ||
2557 | } else { | ||
2558 | evo_mthd(push, 0x0180 + (or * 0x020), 1); | ||
2559 | evo_data(push, 0x00000000); | ||
2560 | } | ||
2561 | evo_kick(push, mast); | ||
2562 | } | ||
2563 | } | ||
2564 | |||
2565 | nv_encoder->crtc = NULL; | ||
1579 | } | 2566 | } |
1580 | 2567 | ||
1581 | static void | 2568 | static void |
1582 | nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | 2569 | nv50_dac_enable(struct drm_encoder *encoder) |
1583 | struct drm_display_mode *adjusted_mode) | ||
1584 | { | 2570 | { |
1585 | struct nv50_mast *mast = nv50_mast(encoder->dev); | 2571 | struct nv50_mast *mast = nv50_mast(encoder->dev); |
1586 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 2572 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); |
1587 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); | 2573 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); |
2574 | struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode; | ||
1588 | u32 *push; | 2575 | u32 *push; |
1589 | 2576 | ||
1590 | nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON); | ||
1591 | |||
1592 | push = evo_wait(mast, 8); | 2577 | push = evo_wait(mast, 8); |
1593 | if (push) { | 2578 | if (push) { |
1594 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 2579 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { |
@@ -1627,33 +2612,6 @@ nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | |||
1627 | nv_encoder->crtc = encoder->crtc; | 2612 | nv_encoder->crtc = encoder->crtc; |
1628 | } | 2613 | } |
1629 | 2614 | ||
1630 | static void | ||
1631 | nv50_dac_disconnect(struct drm_encoder *encoder) | ||
1632 | { | ||
1633 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | ||
1634 | struct nv50_mast *mast = nv50_mast(encoder->dev); | ||
1635 | const int or = nv_encoder->or; | ||
1636 | u32 *push; | ||
1637 | |||
1638 | if (nv_encoder->crtc) { | ||
1639 | nv50_crtc_prepare(nv_encoder->crtc); | ||
1640 | |||
1641 | push = evo_wait(mast, 4); | ||
1642 | if (push) { | ||
1643 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | ||
1644 | evo_mthd(push, 0x0400 + (or * 0x080), 1); | ||
1645 | evo_data(push, 0x00000000); | ||
1646 | } else { | ||
1647 | evo_mthd(push, 0x0180 + (or * 0x020), 1); | ||
1648 | evo_data(push, 0x00000000); | ||
1649 | } | ||
1650 | evo_kick(push, mast); | ||
1651 | } | ||
1652 | } | ||
1653 | |||
1654 | nv_encoder->crtc = NULL; | ||
1655 | } | ||
1656 | |||
1657 | static enum drm_connector_status | 2615 | static enum drm_connector_status |
1658 | nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector) | 2616 | nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector) |
1659 | { | 2617 | { |
@@ -1681,6 +2639,15 @@ nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector) | |||
1681 | return connector_status_connected; | 2639 | return connector_status_connected; |
1682 | } | 2640 | } |
1683 | 2641 | ||
2642 | static const struct drm_encoder_helper_funcs | ||
2643 | nv50_dac_help = { | ||
2644 | .dpms = nv50_dac_dpms, | ||
2645 | .atomic_check = nv50_outp_atomic_check, | ||
2646 | .enable = nv50_dac_enable, | ||
2647 | .disable = nv50_dac_disable, | ||
2648 | .detect = nv50_dac_detect | ||
2649 | }; | ||
2650 | |||
1684 | static void | 2651 | static void |
1685 | nv50_dac_destroy(struct drm_encoder *encoder) | 2652 | nv50_dac_destroy(struct drm_encoder *encoder) |
1686 | { | 2653 | { |
@@ -1688,18 +2655,8 @@ nv50_dac_destroy(struct drm_encoder *encoder) | |||
1688 | kfree(encoder); | 2655 | kfree(encoder); |
1689 | } | 2656 | } |
1690 | 2657 | ||
1691 | static const struct drm_encoder_helper_funcs nv50_dac_hfunc = { | 2658 | static const struct drm_encoder_funcs |
1692 | .dpms = nv50_dac_dpms, | 2659 | nv50_dac_func = { |
1693 | .mode_fixup = nv50_encoder_mode_fixup, | ||
1694 | .prepare = nv50_dac_disconnect, | ||
1695 | .commit = nv50_dac_commit, | ||
1696 | .mode_set = nv50_dac_mode_set, | ||
1697 | .disable = nv50_dac_disconnect, | ||
1698 | .get_crtc = nv50_display_crtc_get, | ||
1699 | .detect = nv50_dac_detect | ||
1700 | }; | ||
1701 | |||
1702 | static const struct drm_encoder_funcs nv50_dac_func = { | ||
1703 | .destroy = nv50_dac_destroy, | 2660 | .destroy = nv50_dac_destroy, |
1704 | }; | 2661 | }; |
1705 | 2662 | ||
@@ -1726,8 +2683,9 @@ nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe) | |||
1726 | encoder = to_drm_encoder(nv_encoder); | 2683 | encoder = to_drm_encoder(nv_encoder); |
1727 | encoder->possible_crtcs = dcbe->heads; | 2684 | encoder->possible_crtcs = dcbe->heads; |
1728 | encoder->possible_clones = 0; | 2685 | encoder->possible_clones = 0; |
1729 | drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type, NULL); | 2686 | drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type, |
1730 | drm_encoder_helper_add(encoder, &nv50_dac_hfunc); | 2687 | "dac-%04x-%04x", dcbe->hasht, dcbe->hashm); |
2688 | drm_encoder_helper_add(encoder, &nv50_dac_help); | ||
1731 | 2689 | ||
1732 | drm_mode_connector_attach_encoder(connector, encoder); | 2690 | drm_mode_connector_attach_encoder(connector, encoder); |
1733 | return 0; | 2691 | return 0; |
@@ -1737,7 +2695,26 @@ nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe) | |||
1737 | * Audio | 2695 | * Audio |
1738 | *****************************************************************************/ | 2696 | *****************************************************************************/ |
1739 | static void | 2697 | static void |
1740 | nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode) | 2698 | nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc) |
2699 | { | ||
2700 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | ||
2701 | struct nv50_disp *disp = nv50_disp(encoder->dev); | ||
2702 | struct { | ||
2703 | struct nv50_disp_mthd_v1 base; | ||
2704 | struct nv50_disp_sor_hda_eld_v0 eld; | ||
2705 | } args = { | ||
2706 | .base.version = 1, | ||
2707 | .base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD, | ||
2708 | .base.hasht = nv_encoder->dcb->hasht, | ||
2709 | .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) | | ||
2710 | (0x0100 << nv_crtc->index), | ||
2711 | }; | ||
2712 | |||
2713 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); | ||
2714 | } | ||
2715 | |||
2716 | static void | ||
2717 | nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode) | ||
1741 | { | 2718 | { |
1742 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 2719 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); |
1743 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); | 2720 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); |
@@ -1768,30 +2745,30 @@ nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode) | |||
1768 | sizeof(args.base) + drm_eld_size(args.data)); | 2745 | sizeof(args.base) + drm_eld_size(args.data)); |
1769 | } | 2746 | } |
1770 | 2747 | ||
2748 | /****************************************************************************** | ||
2749 | * HDMI | ||
2750 | *****************************************************************************/ | ||
1771 | static void | 2751 | static void |
1772 | nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc) | 2752 | nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc) |
1773 | { | 2753 | { |
1774 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 2754 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); |
1775 | struct nv50_disp *disp = nv50_disp(encoder->dev); | 2755 | struct nv50_disp *disp = nv50_disp(encoder->dev); |
1776 | struct { | 2756 | struct { |
1777 | struct nv50_disp_mthd_v1 base; | 2757 | struct nv50_disp_mthd_v1 base; |
1778 | struct nv50_disp_sor_hda_eld_v0 eld; | 2758 | struct nv50_disp_sor_hdmi_pwr_v0 pwr; |
1779 | } args = { | 2759 | } args = { |
1780 | .base.version = 1, | 2760 | .base.version = 1, |
1781 | .base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD, | 2761 | .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR, |
1782 | .base.hasht = nv_encoder->dcb->hasht, | 2762 | .base.hasht = nv_encoder->dcb->hasht, |
1783 | .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) | | 2763 | .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) | |
1784 | (0x0100 << nv_crtc->index), | 2764 | (0x0100 << nv_crtc->index), |
1785 | }; | 2765 | }; |
1786 | 2766 | ||
1787 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); | 2767 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); |
1788 | } | 2768 | } |
1789 | 2769 | ||
1790 | /****************************************************************************** | ||
1791 | * HDMI | ||
1792 | *****************************************************************************/ | ||
1793 | static void | 2770 | static void |
1794 | nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode) | 2771 | nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode) |
1795 | { | 2772 | { |
1796 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 2773 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); |
1797 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); | 2774 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); |
@@ -1821,26 +2798,632 @@ nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode) | |||
1821 | args.pwr.max_ac_packet = max_ac_packet / 32; | 2798 | args.pwr.max_ac_packet = max_ac_packet / 32; |
1822 | 2799 | ||
1823 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); | 2800 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); |
1824 | nv50_audio_mode_set(encoder, mode); | 2801 | nv50_audio_enable(encoder, mode); |
2802 | } | ||
2803 | |||
2804 | /****************************************************************************** | ||
2805 | * MST | ||
2806 | *****************************************************************************/ | ||
2807 | #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr) | ||
2808 | #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector) | ||
2809 | #define nv50_msto(p) container_of((p), struct nv50_msto, encoder) | ||
2810 | |||
2811 | struct nv50_mstm { | ||
2812 | struct nouveau_encoder *outp; | ||
2813 | |||
2814 | struct drm_dp_mst_topology_mgr mgr; | ||
2815 | struct nv50_msto *msto[4]; | ||
2816 | |||
2817 | bool modified; | ||
2818 | }; | ||
2819 | |||
2820 | struct nv50_mstc { | ||
2821 | struct nv50_mstm *mstm; | ||
2822 | struct drm_dp_mst_port *port; | ||
2823 | struct drm_connector connector; | ||
2824 | |||
2825 | struct drm_display_mode *native; | ||
2826 | struct edid *edid; | ||
2827 | |||
2828 | int pbn; | ||
2829 | }; | ||
2830 | |||
2831 | struct nv50_msto { | ||
2832 | struct drm_encoder encoder; | ||
2833 | |||
2834 | struct nv50_head *head; | ||
2835 | struct nv50_mstc *mstc; | ||
2836 | bool disabled; | ||
2837 | }; | ||
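[annotation] The MST support introduces three objects: nv50_mstm wraps the DP MST topology manager for one physical output, nv50_mstc embeds a drm_connector for each branch-device port, and nv50_msto is a software-only encoder tying one head to a stream (hence msto[4], one per head). The nv50_mstm()/nv50_mstc()/nv50_msto() macros above recover the wrapper from the embedded DRM object with container_of; a tiny userspace re-creation of that idiom, with hypothetical types:

/* container_of() as used by the nv50_mstc()/nv50_msto() macros,
 * re-created in plain C for illustration. */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct fake_connector { int id; };

struct fake_mstc {
	int pbn;				/* hypothetical payload size */
	struct fake_connector connector;	/* embedded DRM object       */
};

#define fake_mstc(p) container_of((p), struct fake_mstc, connector)

int main(void)
{
	struct fake_mstc mstc = { .pbn = 540, .connector = { .id = 42 } };
	struct fake_connector *conn = &mstc.connector;

	/* Given only the embedded connector, recover the wrapper. */
	printf("connector %d belongs to mstc with pbn %d\n",
	       conn->id, fake_mstc(conn)->pbn);
	return 0;
}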
2838 | |||
2839 | static struct drm_dp_payload * | ||
2840 | nv50_msto_payload(struct nv50_msto *msto) | ||
2841 | { | ||
2842 | struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); | ||
2843 | struct nv50_mstc *mstc = msto->mstc; | ||
2844 | struct nv50_mstm *mstm = mstc->mstm; | ||
2845 | int vcpi = mstc->port->vcpi.vcpi, i; | ||
2846 | |||
2847 | NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi); | ||
2848 | for (i = 0; i < mstm->mgr.max_payloads; i++) { | ||
2849 | struct drm_dp_payload *payload = &mstm->mgr.payloads[i]; | ||
2850 | NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n", | ||
2851 | mstm->outp->base.base.name, i, payload->vcpi, | ||
2852 | payload->start_slot, payload->num_slots); | ||
2853 | } | ||
2854 | |||
2855 | for (i = 0; i < mstm->mgr.max_payloads; i++) { | ||
2856 | struct drm_dp_payload *payload = &mstm->mgr.payloads[i]; | ||
2857 | if (payload->vcpi == vcpi) | ||
2858 | return payload; | ||
2859 | } | ||
2860 | |||
2861 | return NULL; | ||
1825 | } | 2862 | } |
1826 | 2863 | ||
1827 | static void | 2864 | static void |
1828 | nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc) | 2865 | nv50_msto_cleanup(struct nv50_msto *msto) |
1829 | { | 2866 | { |
1830 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 2867 | struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); |
1831 | struct nv50_disp *disp = nv50_disp(encoder->dev); | 2868 | struct nv50_mstc *mstc = msto->mstc; |
2869 | struct nv50_mstm *mstm = mstc->mstm; | ||
2870 | |||
2871 | NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name); | ||
2872 | if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto)) | ||
2873 | drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port); | ||
2874 | if (msto->disabled) { | ||
2875 | msto->mstc = NULL; | ||
2876 | msto->head = NULL; | ||
2877 | msto->disabled = false; | ||
2878 | } | ||
2879 | } | ||
2880 | |||
2881 | static void | ||
2882 | nv50_msto_prepare(struct nv50_msto *msto) | ||
2883 | { | ||
2884 | struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); | ||
2885 | struct nv50_mstc *mstc = msto->mstc; | ||
2886 | struct nv50_mstm *mstm = mstc->mstm; | ||
1832 | struct { | 2887 | struct { |
1833 | struct nv50_disp_mthd_v1 base; | 2888 | struct nv50_disp_mthd_v1 base; |
1834 | struct nv50_disp_sor_hdmi_pwr_v0 pwr; | 2889 | struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi; |
1835 | } args = { | 2890 | } args = { |
1836 | .base.version = 1, | 2891 | .base.version = 1, |
1837 | .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR, | 2892 | .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI, |
1838 | .base.hasht = nv_encoder->dcb->hasht, | 2893 | .base.hasht = mstm->outp->dcb->hasht, |
1839 | .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) | | 2894 | .base.hashm = (0xf0ff & mstm->outp->dcb->hashm) | |
1840 | (0x0100 << nv_crtc->index), | 2895 | (0x0100 << msto->head->base.index), |
1841 | }; | 2896 | }; |
1842 | 2897 | ||
1843 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); | 2898 | NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name); |
2899 | if (mstc->port && mstc->port->vcpi.vcpi > 0) { | ||
2900 | struct drm_dp_payload *payload = nv50_msto_payload(msto); | ||
2901 | if (payload) { | ||
2902 | args.vcpi.start_slot = payload->start_slot; | ||
2903 | args.vcpi.num_slots = payload->num_slots; | ||
2904 | args.vcpi.pbn = mstc->port->vcpi.pbn; | ||
2905 | args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn; | ||
2906 | } | ||
2907 | } | ||
2908 | |||
2909 | NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n", | ||
2910 | msto->encoder.name, msto->head->base.base.name, | ||
2911 | args.vcpi.start_slot, args.vcpi.num_slots, | ||
2912 | args.vcpi.pbn, args.vcpi.aligned_pbn); | ||
2913 | nvif_mthd(&drm->display->disp, 0, &args, sizeof(args)); | ||
2914 | } | ||
2915 | |||
2916 | static int | ||
2917 | nv50_msto_atomic_check(struct drm_encoder *encoder, | ||
2918 | struct drm_crtc_state *crtc_state, | ||
2919 | struct drm_connector_state *conn_state) | ||
2920 | { | ||
2921 | struct nv50_mstc *mstc = nv50_mstc(conn_state->connector); | ||
2922 | struct nv50_mstm *mstm = mstc->mstm; | ||
2923 | int bpp = conn_state->connector->display_info.bpc * 3; | ||
2924 | int slots; | ||
2925 | |||
2926 | mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp); | ||
2927 | |||
2928 | slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn); | ||
2929 | if (slots < 0) | ||
2930 | return slots; | ||
2931 | |||
2932 | return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state, | ||
2933 | mstc->native); | ||
2934 | } | ||
2935 | |||
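The atomic_check above turns the adjusted mode into a DisplayPort bandwidth figure: drm_dp_calc_pbn_mode() takes the pixel clock in kHz and the connector bpp (bpc * 3, per the line above) and returns the payload size in PBN units (one PBN is 54/64 MB/s; the helper also folds in a roughly 0.6% spread-spectrum margin), and drm_dp_find_vcpi_slots() then reports how many link timeslots that payload needs, failing if more are required than remain available. A rough worked example, assuming a 1920x1080@60 mode (148.5 MHz pixel clock) on an 8 bpc sink:

	bpp  = 8 * 3 = 24
	rate = 148500 kHz * 24 bit / 8 = 445500 kB/s ~= 445.5 MB/s
	pbn  ~= 445.5 * 64 / 54 ~= 528   (before the helper's ~0.6% margin)

so this connector would ask the topology manager for roughly 530 PBN worth of VCPI slots.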
2936 | static void | ||
2937 | nv50_msto_enable(struct drm_encoder *encoder) | ||
2938 | { | ||
2939 | struct nv50_head *head = nv50_head(encoder->crtc); | ||
2940 | struct nv50_msto *msto = nv50_msto(encoder); | ||
2941 | struct nv50_mstc *mstc = NULL; | ||
2942 | struct nv50_mstm *mstm = NULL; | ||
2943 | struct drm_connector *connector; | ||
2944 | u8 proto, depth; | ||
2945 | int slots; | ||
2946 | bool r; | ||
2947 | |||
2948 | drm_for_each_connector(connector, encoder->dev) { | ||
2949 | if (connector->state->best_encoder == &msto->encoder) { | ||
2950 | mstc = nv50_mstc(connector); | ||
2951 | mstm = mstc->mstm; | ||
2952 | break; | ||
2953 | } | ||
2954 | } | ||
2955 | |||
2956 | if (WARN_ON(!mstc)) | ||
2957 | return; | ||
2958 | |||
2959 | r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, &slots); | ||
2960 | WARN_ON(!r); | ||
2961 | |||
2962 | if (mstm->outp->dcb->sorconf.link & 1) | ||
2963 | proto = 0x8; | ||
2964 | else | ||
2965 | proto = 0x9; | ||
2966 | |||
2967 | switch (mstc->connector.display_info.bpc) { | ||
2968 | case 6: depth = 0x2; break; | ||
2969 | case 8: depth = 0x5; break; | ||
2970 | case 10: | ||
2971 | default: depth = 0x6; break; | ||
2972 | } | ||
2973 | |||
2974 | mstm->outp->update(mstm->outp, head->base.index, | ||
2975 | &head->base.base.state->adjusted_mode, proto, depth); | ||
2976 | |||
2977 | msto->head = head; | ||
2978 | msto->mstc = mstc; | ||
2979 | mstm->modified = true; | ||
2980 | } | ||
2981 | |||
2982 | static void | ||
2983 | nv50_msto_disable(struct drm_encoder *encoder) | ||
2984 | { | ||
2985 | struct nv50_msto *msto = nv50_msto(encoder); | ||
2986 | struct nv50_mstc *mstc = msto->mstc; | ||
2987 | struct nv50_mstm *mstm = mstc->mstm; | ||
2988 | |||
2989 | if (mstc->port) | ||
2990 | drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port); | ||
2991 | |||
2992 | mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0); | ||
2993 | mstm->modified = true; | ||
2994 | msto->disabled = true; | ||
2995 | } | ||
2996 | |||
2997 | static const struct drm_encoder_helper_funcs | ||
2998 | nv50_msto_help = { | ||
2999 | .disable = nv50_msto_disable, | ||
3000 | .enable = nv50_msto_enable, | ||
3001 | .atomic_check = nv50_msto_atomic_check, | ||
3002 | }; | ||
3003 | |||
3004 | static void | ||
3005 | nv50_msto_destroy(struct drm_encoder *encoder) | ||
3006 | { | ||
3007 | struct nv50_msto *msto = nv50_msto(encoder); | ||
3008 | drm_encoder_cleanup(&msto->encoder); | ||
3009 | kfree(msto); | ||
3010 | } | ||
3011 | |||
3012 | static const struct drm_encoder_funcs | ||
3013 | nv50_msto = { | ||
3014 | .destroy = nv50_msto_destroy, | ||
3015 | }; | ||
3016 | |||
3017 | static int | ||
3018 | nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id, | ||
3019 | struct nv50_msto **pmsto) | ||
3020 | { | ||
3021 | struct nv50_msto *msto; | ||
3022 | int ret; | ||
3023 | |||
3024 | if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL))) | ||
3025 | return -ENOMEM; | ||
3026 | |||
3027 | ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto, | ||
3028 | DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id); | ||
3029 | if (ret) { | ||
3030 | kfree(*pmsto); | ||
3031 | *pmsto = NULL; | ||
3032 | return ret; | ||
3033 | } | ||
3034 | |||
3035 | drm_encoder_helper_add(&msto->encoder, &nv50_msto_help); | ||
3036 | msto->encoder.possible_crtcs = heads; | ||
3037 | return 0; | ||
3038 | } | ||
3039 | |||
3040 | static struct drm_encoder * | ||
3041 | nv50_mstc_atomic_best_encoder(struct drm_connector *connector, | ||
3042 | struct drm_connector_state *connector_state) | ||
3043 | { | ||
3044 | struct nv50_head *head = nv50_head(connector_state->crtc); | ||
3045 | struct nv50_mstc *mstc = nv50_mstc(connector); | ||
3046 | if (mstc->port) { | ||
3047 | struct nv50_mstm *mstm = mstc->mstm; | ||
3048 | return &mstm->msto[head->base.index]->encoder; | ||
3049 | } | ||
3050 | return NULL; | ||
3051 | } | ||
3052 | |||
3053 | static struct drm_encoder * | ||
3054 | nv50_mstc_best_encoder(struct drm_connector *connector) | ||
3055 | { | ||
3056 | struct nv50_mstc *mstc = nv50_mstc(connector); | ||
3057 | if (mstc->port) { | ||
3058 | struct nv50_mstm *mstm = mstc->mstm; | ||
3059 | return &mstm->msto[0]->encoder; | ||
3060 | } | ||
3061 | return NULL; | ||
3062 | } | ||
3063 | |||
3064 | static enum drm_mode_status | ||
3065 | nv50_mstc_mode_valid(struct drm_connector *connector, | ||
3066 | struct drm_display_mode *mode) | ||
3067 | { | ||
3068 | return MODE_OK; | ||
3069 | } | ||
3070 | |||
3071 | static int | ||
3072 | nv50_mstc_get_modes(struct drm_connector *connector) | ||
3073 | { | ||
3074 | struct nv50_mstc *mstc = nv50_mstc(connector); | ||
3075 | int ret = 0; | ||
3076 | |||
3077 | mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port); | ||
3078 | drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid); | ||
3079 | if (mstc->edid) { | ||
3080 | ret = drm_add_edid_modes(&mstc->connector, mstc->edid); | ||
3081 | drm_edid_to_eld(&mstc->connector, mstc->edid); | ||
3082 | } | ||
3083 | |||
3084 | if (!mstc->connector.display_info.bpc) | ||
3085 | mstc->connector.display_info.bpc = 8; | ||
3086 | |||
3087 | if (mstc->native) | ||
3088 | drm_mode_destroy(mstc->connector.dev, mstc->native); | ||
3089 | mstc->native = nouveau_conn_native_mode(&mstc->connector); | ||
3090 | return ret; | ||
3091 | } | ||
3092 | |||
3093 | static const struct drm_connector_helper_funcs | ||
3094 | nv50_mstc_help = { | ||
3095 | .get_modes = nv50_mstc_get_modes, | ||
3096 | .mode_valid = nv50_mstc_mode_valid, | ||
3097 | .best_encoder = nv50_mstc_best_encoder, | ||
3098 | .atomic_best_encoder = nv50_mstc_atomic_best_encoder, | ||
3099 | }; | ||
3100 | |||
3101 | static enum drm_connector_status | ||
3102 | nv50_mstc_detect(struct drm_connector *connector, bool force) | ||
3103 | { | ||
3104 | struct nv50_mstc *mstc = nv50_mstc(connector); | ||
3105 | if (!mstc->port) | ||
3106 | return connector_status_disconnected; | ||
3107 | return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port); | ||
3108 | } | ||
3109 | |||
3110 | static void | ||
3111 | nv50_mstc_destroy(struct drm_connector *connector) | ||
3112 | { | ||
3113 | struct nv50_mstc *mstc = nv50_mstc(connector); | ||
3114 | drm_connector_cleanup(&mstc->connector); | ||
3115 | kfree(mstc); | ||
3116 | } | ||
3117 | |||
3118 | static const struct drm_connector_funcs | ||
3119 | nv50_mstc = { | ||
3120 | .dpms = drm_atomic_helper_connector_dpms, | ||
3121 | .reset = nouveau_conn_reset, | ||
3122 | .detect = nv50_mstc_detect, | ||
3123 | .fill_modes = drm_helper_probe_single_connector_modes, | ||
3124 | .set_property = drm_atomic_helper_connector_set_property, | ||
3125 | .destroy = nv50_mstc_destroy, | ||
3126 | .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state, | ||
3127 | .atomic_destroy_state = nouveau_conn_atomic_destroy_state, | ||
3128 | .atomic_set_property = nouveau_conn_atomic_set_property, | ||
3129 | .atomic_get_property = nouveau_conn_atomic_get_property, | ||
3130 | }; | ||
3131 | |||
3132 | static int | ||
3133 | nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port, | ||
3134 | const char *path, struct nv50_mstc **pmstc) | ||
3135 | { | ||
3136 | struct drm_device *dev = mstm->outp->base.base.dev; | ||
3137 | struct nv50_mstc *mstc; | ||
3138 | int ret, i; | ||
3139 | |||
3140 | if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL))) | ||
3141 | return -ENOMEM; | ||
3142 | mstc->mstm = mstm; | ||
3143 | mstc->port = port; | ||
3144 | |||
3145 | ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc, | ||
3146 | DRM_MODE_CONNECTOR_DisplayPort); | ||
3147 | if (ret) { | ||
3148 | kfree(*pmstc); | ||
3149 | *pmstc = NULL; | ||
3150 | return ret; | ||
3151 | } | ||
3152 | |||
3153 | drm_connector_helper_add(&mstc->connector, &nv50_mstc_help); | ||
3154 | |||
3155 | mstc->connector.funcs->reset(&mstc->connector); | ||
3156 | nouveau_conn_attach_properties(&mstc->connector); | ||
3157 | |||
3158 | for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++) | ||
3159 | drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder); | ||
3160 | |||
3161 | drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0); | ||
3162 | drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0); | ||
3163 | drm_mode_connector_set_path_property(&mstc->connector, path); | ||
3164 | return 0; | ||
3165 | } | ||
3166 | |||
3167 | static void | ||
3168 | nv50_mstm_cleanup(struct nv50_mstm *mstm) | ||
3169 | { | ||
3170 | struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); | ||
3171 | struct drm_encoder *encoder; | ||
3172 | int ret; | ||
3173 | |||
3174 | NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name); | ||
3175 | ret = drm_dp_check_act_status(&mstm->mgr); | ||
3176 | |||
3177 | ret = drm_dp_update_payload_part2(&mstm->mgr); | ||
3178 | |||
3179 | drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { | ||
3180 | if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { | ||
3181 | struct nv50_msto *msto = nv50_msto(encoder); | ||
3182 | struct nv50_mstc *mstc = msto->mstc; | ||
3183 | if (mstc && mstc->mstm == mstm) | ||
3184 | nv50_msto_cleanup(msto); | ||
3185 | } | ||
3186 | } | ||
3187 | |||
3188 | mstm->modified = false; | ||
3189 | } | ||
3190 | |||
3191 | static void | ||
3192 | nv50_mstm_prepare(struct nv50_mstm *mstm) | ||
3193 | { | ||
3194 | struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); | ||
3195 | struct drm_encoder *encoder; | ||
3196 | int ret; | ||
3197 | |||
3198 | NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name); | ||
3199 | ret = drm_dp_update_payload_part1(&mstm->mgr); | ||
3200 | |||
3201 | drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { | ||
3202 | if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { | ||
3203 | struct nv50_msto *msto = nv50_msto(encoder); | ||
3204 | struct nv50_mstc *mstc = msto->mstc; | ||
3205 | if (mstc && mstc->mstm == mstm) | ||
3206 | nv50_msto_prepare(msto); | ||
3207 | } | ||
3208 | } | ||
3209 | } | ||
3210 | |||
3211 | static void | ||
3212 | nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr) | ||
3213 | { | ||
3214 | struct nv50_mstm *mstm = nv50_mstm(mgr); | ||
3215 | drm_kms_helper_hotplug_event(mstm->outp->base.base.dev); | ||
3216 | } | ||
3217 | |||
3218 | static void | ||
3219 | nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr, | ||
3220 | struct drm_connector *connector) | ||
3221 | { | ||
3222 | struct nouveau_drm *drm = nouveau_drm(connector->dev); | ||
3223 | struct nv50_mstc *mstc = nv50_mstc(connector); | ||
3224 | |||
3225 | drm_connector_unregister(&mstc->connector); | ||
3226 | |||
3227 | drm_modeset_lock_all(drm->dev); | ||
3228 | drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector); | ||
3229 | mstc->port = NULL; | ||
3230 | drm_modeset_unlock_all(drm->dev); | ||
3231 | |||
3232 | drm_connector_unreference(&mstc->connector); | ||
3233 | } | ||
3234 | |||
3235 | static void | ||
3236 | nv50_mstm_register_connector(struct drm_connector *connector) | ||
3237 | { | ||
3238 | struct nouveau_drm *drm = nouveau_drm(connector->dev); | ||
3239 | |||
3240 | drm_modeset_lock_all(drm->dev); | ||
3241 | drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector); | ||
3242 | drm_modeset_unlock_all(drm->dev); | ||
3243 | |||
3244 | drm_connector_register(connector); | ||
3245 | } | ||
3246 | |||
3247 | static struct drm_connector * | ||
3248 | nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr, | ||
3249 | struct drm_dp_mst_port *port, const char *path) | ||
3250 | { | ||
3251 | struct nv50_mstm *mstm = nv50_mstm(mgr); | ||
3252 | struct nv50_mstc *mstc; | ||
3253 | int ret; | ||
3254 | |||
3255 | ret = nv50_mstc_new(mstm, port, path, &mstc); | ||
3256 | if (ret) { | ||
3257 | if (mstc) | ||
3258 | mstc->connector.funcs->destroy(&mstc->connector); | ||
3259 | return NULL; | ||
3260 | } | ||
3261 | |||
3262 | return &mstc->connector; | ||
3263 | } | ||
3264 | |||
3265 | static const struct drm_dp_mst_topology_cbs | ||
3266 | nv50_mstm = { | ||
3267 | .add_connector = nv50_mstm_add_connector, | ||
3268 | .register_connector = nv50_mstm_register_connector, | ||
3269 | .destroy_connector = nv50_mstm_destroy_connector, | ||
3270 | .hotplug = nv50_mstm_hotplug, | ||
3271 | }; | ||
3272 | |||
3273 | void | ||
3274 | nv50_mstm_service(struct nv50_mstm *mstm) | ||
3275 | { | ||
3276 | struct drm_dp_aux *aux = mstm->mgr.aux; | ||
3277 | bool handled = true; | ||
3278 | int ret; | ||
3279 | u8 esi[8] = {}; | ||
3280 | |||
3281 | while (handled) { | ||
3282 | ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8); | ||
3283 | if (ret != 8) { | ||
3284 | drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false); | ||
3285 | return; | ||
3286 | } | ||
3287 | |||
3288 | drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled); | ||
3289 | if (!handled) | ||
3290 | break; | ||
3291 | |||
3292 | drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3); | ||
3293 | } | ||
3294 | } | ||
3295 | |||
3296 | void | ||
3297 | nv50_mstm_remove(struct nv50_mstm *mstm) | ||
3298 | { | ||
3299 | if (mstm) | ||
3300 | drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false); | ||
3301 | } | ||
3302 | |||
3303 | static int | ||
3304 | nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state) | ||
3305 | { | ||
3306 | struct nouveau_encoder *outp = mstm->outp; | ||
3307 | struct { | ||
3308 | struct nv50_disp_mthd_v1 base; | ||
3309 | struct nv50_disp_sor_dp_mst_link_v0 mst; | ||
3310 | } args = { | ||
3311 | .base.version = 1, | ||
3312 | .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK, | ||
3313 | .base.hasht = outp->dcb->hasht, | ||
3314 | .base.hashm = outp->dcb->hashm, | ||
3315 | .mst.state = state, | ||
3316 | }; | ||
3317 | struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev); | ||
3318 | struct nvif_object *disp = &drm->display->disp; | ||
3319 | int ret; | ||
3320 | |||
3321 | if (dpcd >= 0x12) { | ||
3322 | ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd); | ||
3323 | if (ret < 0) | ||
3324 | return ret; | ||
3325 | |||
3326 | dpcd &= ~DP_MST_EN; | ||
3327 | if (state) | ||
3328 | dpcd |= DP_MST_EN; | ||
3329 | |||
3330 | ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd); | ||
3331 | if (ret < 0) | ||
3332 | return ret; | ||
3333 | } | ||
3334 | |||
3335 | return nvif_mthd(disp, 0, &args, sizeof(args)); | ||
3336 | } | ||
3337 | |||
3338 | int | ||
3339 | nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow) | ||
3340 | { | ||
3341 | int ret, state = 0; | ||
3342 | |||
3343 | if (!mstm) | ||
3344 | return 0; | ||
3345 | |||
3346 | if (dpcd[0] >= 0x12 && allow) { | ||
3347 | ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]); | ||
3348 | if (ret < 0) | ||
3349 | return ret; | ||
3350 | |||
3351 | state = dpcd[1] & DP_MST_CAP; | ||
3352 | } | ||
3353 | |||
3354 | ret = nv50_mstm_enable(mstm, dpcd[0], state); | ||
3355 | if (ret) | ||
3356 | return ret; | ||
3357 | |||
3358 | ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state); | ||
3359 | if (ret) | ||
3360 | return nv50_mstm_enable(mstm, dpcd[0], 0); | ||
3361 | |||
3362 | return mstm->mgr.mst_state; | ||
3363 | } | ||
3364 | |||
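nv50_mstm_detect() is the hook a connector detect path can use to negotiate MST: given the first eight bytes of the sink's DPCD and an "allow" flag, it probes DP_MSTM_CAP, programs both the sink and the SOR through nv50_mstm_enable(), and brings the topology manager up or down to match, returning the resulting mst_state. A minimal, hypothetical caller sketch (the surrounding names and control flow are illustrative only, not the driver's actual detect code):

	u8 dpcd[8];
	int ret;

	/* Read DPCD 0x000..0x007, then let the MST code decide. */
	ret = drm_dp_dpcd_read(&nv_connector->aux, DP_DPCD_REV, dpcd, sizeof(dpcd));
	if (ret == sizeof(dpcd)) {
		ret = nv50_mstm_detect(nv_encoder->dp.mstm, dpcd, 1 /* allow */);
		if (ret > 0)
			return; /* MST active; branch devices appear as their own connectors */
	}
	/* otherwise fall back to single-stream detection */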
3365 | static void | ||
3366 | nv50_mstm_fini(struct nv50_mstm *mstm) | ||
3367 | { | ||
3368 | if (mstm && mstm->mgr.mst_state) | ||
3369 | drm_dp_mst_topology_mgr_suspend(&mstm->mgr); | ||
3370 | } | ||
3371 | |||
3372 | static void | ||
3373 | nv50_mstm_init(struct nv50_mstm *mstm) | ||
3374 | { | ||
3375 | if (mstm && mstm->mgr.mst_state) | ||
3376 | drm_dp_mst_topology_mgr_resume(&mstm->mgr); | ||
3377 | } | ||
3378 | |||
3379 | static void | ||
3380 | nv50_mstm_del(struct nv50_mstm **pmstm) | ||
3381 | { | ||
3382 | struct nv50_mstm *mstm = *pmstm; | ||
3383 | if (mstm) { | ||
3384 | kfree(*pmstm); | ||
3385 | *pmstm = NULL; | ||
3386 | } | ||
3387 | } | ||
3388 | |||
3389 | static int | ||
3390 | nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max, | ||
3391 | int conn_base_id, struct nv50_mstm **pmstm) | ||
3392 | { | ||
3393 | const int max_payloads = hweight8(outp->dcb->heads); | ||
3394 | struct drm_device *dev = outp->base.base.dev; | ||
3395 | struct nv50_mstm *mstm; | ||
3396 | int ret, i; | ||
3397 | u8 dpcd; | ||
3398 | |||
3399 | /* This is a workaround for some monitors not functioning | ||
3400 | * correctly in MST mode on initial module load. I think | ||
3401 | * some bad interaction with the VBIOS may be responsible. | ||
3402 | * | ||
3403 | * A good ol' off and on again seems to work here ;) | ||
3404 | */ | ||
3405 | ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd); | ||
3406 | if (ret >= 0 && dpcd >= 0x12) | ||
3407 | drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0); | ||
3408 | |||
3409 | if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL))) | ||
3410 | return -ENOMEM; | ||
3411 | mstm->outp = outp; | ||
3412 | mstm->mgr.cbs = &nv50_mstm; | ||
3413 | |||
3414 | ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev->dev, aux, aux_max, | ||
3415 | max_payloads, conn_base_id); | ||
3416 | if (ret) | ||
3417 | return ret; | ||
3418 | |||
3419 | for (i = 0; i < max_payloads; i++) { | ||
3420 | ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name, | ||
3421 | i, &mstm->msto[i]); | ||
3422 | if (ret) | ||
3423 | return ret; | ||
3424 | } | ||
3425 | |||
3426 | return 0; | ||
1844 | } | 3427 | } |
1845 | 3428 | ||
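The "off and on again" workaround at the top of nv50_mstm_new() clears the sink's MST control register before the topology manager is even initialised, so a monitor left in MST mode by the VBIOS (or a previous driver instance) starts from a clean single-stream state. For reference, the DPCD locations involved (values as defined by the DisplayPort spec and drm_dp_helper.h):

	DP_DPCD_REV   (0x000) -- 0x12 means DPCD r1.2, the first revision to define MST
	DP_MSTM_CAP   (0x021) -- bit 0 (DP_MST_CAP): the sink supports multi-stream
	DP_MSTM_CTRL  (0x111) -- bit 0 (DP_MST_EN): writing 0 here switches the sink back to SST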
1846 | /****************************************************************************** | 3429 | /****************************************************************************** |
@@ -1861,89 +3444,91 @@ nv50_sor_dpms(struct drm_encoder *encoder, int mode) | |||
1861 | .base.hashm = nv_encoder->dcb->hashm, | 3444 | .base.hashm = nv_encoder->dcb->hashm, |
1862 | .pwr.state = mode == DRM_MODE_DPMS_ON, | 3445 | .pwr.state = mode == DRM_MODE_DPMS_ON, |
1863 | }; | 3446 | }; |
1864 | struct { | ||
1865 | struct nv50_disp_mthd_v1 base; | ||
1866 | struct nv50_disp_sor_dp_pwr_v0 pwr; | ||
1867 | } link = { | ||
1868 | .base.version = 1, | ||
1869 | .base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR, | ||
1870 | .base.hasht = nv_encoder->dcb->hasht, | ||
1871 | .base.hashm = nv_encoder->dcb->hashm, | ||
1872 | .pwr.state = mode == DRM_MODE_DPMS_ON, | ||
1873 | }; | ||
1874 | struct drm_device *dev = encoder->dev; | ||
1875 | struct drm_encoder *partner; | ||
1876 | 3447 | ||
1877 | nv_encoder->last_dpms = mode; | 3448 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); |
1878 | 3449 | } | |
1879 | list_for_each_entry(partner, &dev->mode_config.encoder_list, head) { | ||
1880 | struct nouveau_encoder *nv_partner = nouveau_encoder(partner); | ||
1881 | |||
1882 | if (partner->encoder_type != DRM_MODE_ENCODER_TMDS) | ||
1883 | continue; | ||
1884 | 3450 | ||
1885 | if (nv_partner != nv_encoder && | 3451 | static void |
1886 | nv_partner->dcb->or == nv_encoder->dcb->or) { | 3452 | nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head, |
1887 | if (nv_partner->last_dpms == DRM_MODE_DPMS_ON) | 3453 | struct drm_display_mode *mode, u8 proto, u8 depth) |
1888 | return; | 3454 | { |
1889 | break; | 3455 | struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base; |
1890 | } | 3456 | u32 *push; |
1891 | } | ||
1892 | 3457 | ||
1893 | if (nv_encoder->dcb->type == DCB_OUTPUT_DP) { | 3458 | if (!mode) { |
1894 | args.pwr.state = 1; | 3459 | nv_encoder->ctrl &= ~BIT(head); |
1895 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); | 3460 | if (!(nv_encoder->ctrl & 0x0000000f)) |
1896 | nvif_mthd(disp->disp, 0, &link, sizeof(link)); | 3461 | nv_encoder->ctrl = 0; |
1897 | } else { | 3462 | } else { |
1898 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); | 3463 | nv_encoder->ctrl |= proto << 8; |
3464 | nv_encoder->ctrl |= BIT(head); | ||
1899 | } | 3465 | } |
1900 | } | ||
1901 | 3466 | ||
1902 | static void | 3467 | if ((push = evo_wait(core, 6))) { |
1903 | nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data) | 3468 | if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) { |
1904 | { | 3469 | if (mode) { |
1905 | struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev); | 3470 | if (mode->flags & DRM_MODE_FLAG_NHSYNC) |
1906 | u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push; | 3471 | nv_encoder->ctrl |= 0x00001000; |
1907 | if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) { | 3472 | if (mode->flags & DRM_MODE_FLAG_NVSYNC) |
1908 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 3473 | nv_encoder->ctrl |= 0x00002000; |
3474 | nv_encoder->ctrl |= depth << 16; | ||
3475 | } | ||
1909 | evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1); | 3476 | evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1); |
1910 | evo_data(push, (nv_encoder->ctrl = temp)); | ||
1911 | } else { | 3477 | } else { |
3478 | if (mode) { | ||
3479 | u32 magic = 0x31ec6000 | (head << 25); | ||
3480 | u32 syncs = 0x00000001; | ||
3481 | if (mode->flags & DRM_MODE_FLAG_NHSYNC) | ||
3482 | syncs |= 0x00000008; | ||
3483 | if (mode->flags & DRM_MODE_FLAG_NVSYNC) | ||
3484 | syncs |= 0x00000010; | ||
3485 | if (mode->flags & DRM_MODE_FLAG_INTERLACE) | ||
3486 | magic |= 0x00000001; | ||
3487 | |||
3488 | evo_mthd(push, 0x0404 + (head * 0x300), 2); | ||
3489 | evo_data(push, syncs | (depth << 6)); | ||
3490 | evo_data(push, magic); | ||
3491 | } | ||
1912 | evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1); | 3492 | evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1); |
1913 | evo_data(push, (nv_encoder->ctrl = temp)); | ||
1914 | } | 3493 | } |
1915 | evo_kick(push, mast); | 3494 | evo_data(push, nv_encoder->ctrl); |
3495 | evo_kick(push, core); | ||
1916 | } | 3496 | } |
1917 | } | 3497 | } |
1918 | 3498 | ||
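As nv50_sor_update() above shows, the control word cached in nv_encoder->ctrl packs an owner mask in bits 3:0 (BIT(head) per attached head) and the protocol in bits 11:8; on the pre-GF110 core channel the sync-polarity flags (0x1000/0x2000) and the depth (shifted left by 16) are folded into the same word, while GF110+ programs sync and depth through the per-head 0x0404 method instead. A worked example for the older path, assuming head 0, DP on sublink A (proto 0x8), an 8 bpc panel (depth 0x5) and positive sync polarity:

	ctrl = BIT(0) | (0x8 << 8) | (0x5 << 16) = 0x00050801

which is the value pushed through EVO method 0x0600 + (or * 0x40).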
1919 | static void | 3499 | static void |
1920 | nv50_sor_disconnect(struct drm_encoder *encoder) | 3500 | nv50_sor_disable(struct drm_encoder *encoder) |
1921 | { | 3501 | { |
1922 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 3502 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); |
1923 | struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc); | 3503 | struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc); |
1924 | 3504 | ||
1925 | nv_encoder->last_dpms = DRM_MODE_DPMS_OFF; | ||
1926 | nv_encoder->crtc = NULL; | 3505 | nv_encoder->crtc = NULL; |
1927 | 3506 | ||
1928 | if (nv_crtc) { | 3507 | if (nv_crtc) { |
1929 | nv50_crtc_prepare(&nv_crtc->base); | 3508 | struct nvkm_i2c_aux *aux = nv_encoder->aux; |
1930 | nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0); | 3509 | u8 pwr; |
1931 | nv50_audio_disconnect(encoder, nv_crtc); | ||
1932 | nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc); | ||
1933 | } | ||
1934 | } | ||
1935 | 3510 | ||
1936 | static void | 3511 | if (aux) { |
1937 | nv50_sor_commit(struct drm_encoder *encoder) | 3512 | int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1); |
1938 | { | 3513 | if (ret == 0) { |
3514 | pwr &= ~DP_SET_POWER_MASK; | ||
3515 | pwr |= DP_SET_POWER_D3; | ||
3516 | nvkm_wraux(aux, DP_SET_POWER, &pwr, 1); | ||
3517 | } | ||
3518 | } | ||
3519 | |||
3520 | nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0); | ||
3521 | nv50_audio_disable(encoder, nv_crtc); | ||
3522 | nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc); | ||
3523 | } | ||
1939 | } | 3524 | } |
1940 | 3525 | ||
1941 | static void | 3526 | static void |
1942 | nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode, | 3527 | nv50_sor_enable(struct drm_encoder *encoder) |
1943 | struct drm_display_mode *mode) | ||
1944 | { | 3528 | { |
1945 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 3529 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); |
1946 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); | 3530 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); |
3531 | struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode; | ||
1947 | struct { | 3532 | struct { |
1948 | struct nv50_disp_mthd_v1 base; | 3533 | struct nv50_disp_mthd_v1 base; |
1949 | struct nv50_disp_sor_lvds_script_v0 lvds; | 3534 | struct nv50_disp_sor_lvds_script_v0 lvds; |
@@ -1954,13 +3539,10 @@ nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode, | |||
1954 | .base.hashm = nv_encoder->dcb->hashm, | 3539 | .base.hashm = nv_encoder->dcb->hashm, |
1955 | }; | 3540 | }; |
1956 | struct nv50_disp *disp = nv50_disp(encoder->dev); | 3541 | struct nv50_disp *disp = nv50_disp(encoder->dev); |
1957 | struct nv50_mast *mast = nv50_mast(encoder->dev); | ||
1958 | struct drm_device *dev = encoder->dev; | 3542 | struct drm_device *dev = encoder->dev; |
1959 | struct nouveau_drm *drm = nouveau_drm(dev); | 3543 | struct nouveau_drm *drm = nouveau_drm(dev); |
1960 | struct nouveau_connector *nv_connector; | 3544 | struct nouveau_connector *nv_connector; |
1961 | struct nvbios *bios = &drm->vbios; | 3545 | struct nvbios *bios = &drm->vbios; |
1962 | u32 mask, ctrl; | ||
1963 | u8 owner = 1 << nv_crtc->index; | ||
1964 | u8 proto = 0xf; | 3546 | u8 proto = 0xf; |
1965 | u8 depth = 0x0; | 3547 | u8 depth = 0x0; |
1966 | 3548 | ||
@@ -1985,7 +3567,7 @@ nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode, | |||
1985 | proto = 0x2; | 3567 | proto = 0x2; |
1986 | } | 3568 | } |
1987 | 3569 | ||
1988 | nv50_hdmi_mode_set(&nv_encoder->base.base, mode); | 3570 | nv50_hdmi_enable(&nv_encoder->base.base, mode); |
1989 | break; | 3571 | break; |
1990 | case DCB_OUTPUT_LVDS: | 3572 | case DCB_OUTPUT_LVDS: |
1991 | proto = 0x0; | 3573 | proto = 0x0; |
@@ -2019,94 +3601,60 @@ nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode, | |||
2019 | nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds)); | 3601 | nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds)); |
2020 | break; | 3602 | break; |
2021 | case DCB_OUTPUT_DP: | 3603 | case DCB_OUTPUT_DP: |
2022 | if (nv_connector->base.display_info.bpc == 6) { | 3604 | if (nv_connector->base.display_info.bpc == 6) |
2023 | nv_encoder->dp.datarate = mode->clock * 18 / 8; | ||
2024 | depth = 0x2; | 3605 | depth = 0x2; |
2025 | } else | 3606 | else |
2026 | if (nv_connector->base.display_info.bpc == 8) { | 3607 | if (nv_connector->base.display_info.bpc == 8) |
2027 | nv_encoder->dp.datarate = mode->clock * 24 / 8; | ||
2028 | depth = 0x5; | 3608 | depth = 0x5; |
2029 | } else { | 3609 | else |
2030 | nv_encoder->dp.datarate = mode->clock * 30 / 8; | ||
2031 | depth = 0x6; | 3610 | depth = 0x6; |
2032 | } | ||
2033 | 3611 | ||
2034 | if (nv_encoder->dcb->sorconf.link & 1) | 3612 | if (nv_encoder->dcb->sorconf.link & 1) |
2035 | proto = 0x8; | 3613 | proto = 0x8; |
2036 | else | 3614 | else |
2037 | proto = 0x9; | 3615 | proto = 0x9; |
2038 | nv50_audio_mode_set(encoder, mode); | 3616 | |
3617 | nv50_audio_enable(encoder, mode); | ||
2039 | break; | 3618 | break; |
2040 | default: | 3619 | default: |
2041 | BUG_ON(1); | 3620 | BUG_ON(1); |
2042 | break; | 3621 | break; |
2043 | } | 3622 | } |
2044 | 3623 | ||
2045 | nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON); | 3624 | nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth); |
2046 | |||
2047 | if (nv50_vers(mast) >= GF110_DISP) { | ||
2048 | u32 *push = evo_wait(mast, 3); | ||
2049 | if (push) { | ||
2050 | u32 magic = 0x31ec6000 | (nv_crtc->index << 25); | ||
2051 | u32 syncs = 0x00000001; | ||
2052 | |||
2053 | if (mode->flags & DRM_MODE_FLAG_NHSYNC) | ||
2054 | syncs |= 0x00000008; | ||
2055 | if (mode->flags & DRM_MODE_FLAG_NVSYNC) | ||
2056 | syncs |= 0x00000010; | ||
2057 | |||
2058 | if (mode->flags & DRM_MODE_FLAG_INTERLACE) | ||
2059 | magic |= 0x00000001; | ||
2060 | |||
2061 | evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2); | ||
2062 | evo_data(push, syncs | (depth << 6)); | ||
2063 | evo_data(push, magic); | ||
2064 | evo_kick(push, mast); | ||
2065 | } | ||
2066 | |||
2067 | ctrl = proto << 8; | ||
2068 | mask = 0x00000f00; | ||
2069 | } else { | ||
2070 | ctrl = (depth << 16) | (proto << 8); | ||
2071 | if (mode->flags & DRM_MODE_FLAG_NHSYNC) | ||
2072 | ctrl |= 0x00001000; | ||
2073 | if (mode->flags & DRM_MODE_FLAG_NVSYNC) | ||
2074 | ctrl |= 0x00002000; | ||
2075 | mask = 0x000f3f00; | ||
2076 | } | ||
2077 | |||
2078 | nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner); | ||
2079 | } | 3625 | } |
2080 | 3626 | ||
3627 | static const struct drm_encoder_helper_funcs | ||
3628 | nv50_sor_help = { | ||
3629 | .dpms = nv50_sor_dpms, | ||
3630 | .atomic_check = nv50_outp_atomic_check, | ||
3631 | .enable = nv50_sor_enable, | ||
3632 | .disable = nv50_sor_disable, | ||
3633 | }; | ||
3634 | |||
2081 | static void | 3635 | static void |
2082 | nv50_sor_destroy(struct drm_encoder *encoder) | 3636 | nv50_sor_destroy(struct drm_encoder *encoder) |
2083 | { | 3637 | { |
3638 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | ||
3639 | nv50_mstm_del(&nv_encoder->dp.mstm); | ||
2084 | drm_encoder_cleanup(encoder); | 3640 | drm_encoder_cleanup(encoder); |
2085 | kfree(encoder); | 3641 | kfree(encoder); |
2086 | } | 3642 | } |
2087 | 3643 | ||
2088 | static const struct drm_encoder_helper_funcs nv50_sor_hfunc = { | 3644 | static const struct drm_encoder_funcs |
2089 | .dpms = nv50_sor_dpms, | 3645 | nv50_sor_func = { |
2090 | .mode_fixup = nv50_encoder_mode_fixup, | ||
2091 | .prepare = nv50_sor_disconnect, | ||
2092 | .commit = nv50_sor_commit, | ||
2093 | .mode_set = nv50_sor_mode_set, | ||
2094 | .disable = nv50_sor_disconnect, | ||
2095 | .get_crtc = nv50_display_crtc_get, | ||
2096 | }; | ||
2097 | |||
2098 | static const struct drm_encoder_funcs nv50_sor_func = { | ||
2099 | .destroy = nv50_sor_destroy, | 3646 | .destroy = nv50_sor_destroy, |
2100 | }; | 3647 | }; |
2101 | 3648 | ||
2102 | static int | 3649 | static int |
2103 | nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe) | 3650 | nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe) |
2104 | { | 3651 | { |
3652 | struct nouveau_connector *nv_connector = nouveau_connector(connector); | ||
2105 | struct nouveau_drm *drm = nouveau_drm(connector->dev); | 3653 | struct nouveau_drm *drm = nouveau_drm(connector->dev); |
2106 | struct nvkm_i2c *i2c = nvxx_i2c(&drm->device); | 3654 | struct nvkm_i2c *i2c = nvxx_i2c(&drm->device); |
2107 | struct nouveau_encoder *nv_encoder; | 3655 | struct nouveau_encoder *nv_encoder; |
2108 | struct drm_encoder *encoder; | 3656 | struct drm_encoder *encoder; |
2109 | int type; | 3657 | int type, ret; |
2110 | 3658 | ||
2111 | switch (dcbe->type) { | 3659 | switch (dcbe->type) { |
2112 | case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break; | 3660 | case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break; |
@@ -2122,7 +3670,16 @@ nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe) | |||
2122 | return -ENOMEM; | 3670 | return -ENOMEM; |
2123 | nv_encoder->dcb = dcbe; | 3671 | nv_encoder->dcb = dcbe; |
2124 | nv_encoder->or = ffs(dcbe->or) - 1; | 3672 | nv_encoder->or = ffs(dcbe->or) - 1; |
2125 | nv_encoder->last_dpms = DRM_MODE_DPMS_OFF; | 3673 | nv_encoder->update = nv50_sor_update; |
3674 | |||
3675 | encoder = to_drm_encoder(nv_encoder); | ||
3676 | encoder->possible_crtcs = dcbe->heads; | ||
3677 | encoder->possible_clones = 0; | ||
3678 | drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, | ||
3679 | "sor-%04x-%04x", dcbe->hasht, dcbe->hashm); | ||
3680 | drm_encoder_helper_add(encoder, &nv50_sor_help); | ||
3681 | |||
3682 | drm_mode_connector_attach_encoder(connector, encoder); | ||
2126 | 3683 | ||
2127 | if (dcbe->type == DCB_OUTPUT_DP) { | 3684 | if (dcbe->type == DCB_OUTPUT_DP) { |
2128 | struct nvkm_i2c_aux *aux = | 3685 | struct nvkm_i2c_aux *aux = |
@@ -2131,6 +3688,15 @@ nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe) | |||
2131 | nv_encoder->i2c = &aux->i2c; | 3688 | nv_encoder->i2c = &aux->i2c; |
2132 | nv_encoder->aux = aux; | 3689 | nv_encoder->aux = aux; |
2133 | } | 3690 | } |
3691 | |||
3692 | /*TODO: Use DP Info Table to check for support. */ | ||
3693 | if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) { | ||
3694 | ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16, | ||
3695 | nv_connector->base.base.id, | ||
3696 | &nv_encoder->dp.mstm); | ||
3697 | if (ret) | ||
3698 | return ret; | ||
3699 | } | ||
2134 | } else { | 3700 | } else { |
2135 | struct nvkm_i2c_bus *bus = | 3701 | struct nvkm_i2c_bus *bus = |
2136 | nvkm_i2c_bus_find(i2c, dcbe->i2c_index); | 3702 | nvkm_i2c_bus_find(i2c, dcbe->i2c_index); |
@@ -2138,20 +3704,12 @@ nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe) | |||
2138 | nv_encoder->i2c = &bus->i2c; | 3704 | nv_encoder->i2c = &bus->i2c; |
2139 | } | 3705 | } |
2140 | 3706 | ||
2141 | encoder = to_drm_encoder(nv_encoder); | ||
2142 | encoder->possible_crtcs = dcbe->heads; | ||
2143 | encoder->possible_clones = 0; | ||
2144 | drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, NULL); | ||
2145 | drm_encoder_helper_add(encoder, &nv50_sor_hfunc); | ||
2146 | |||
2147 | drm_mode_connector_attach_encoder(connector, encoder); | ||
2148 | return 0; | 3707 | return 0; |
2149 | } | 3708 | } |
2150 | 3709 | ||
2151 | /****************************************************************************** | 3710 | /****************************************************************************** |
2152 | * PIOR | 3711 | * PIOR |
2153 | *****************************************************************************/ | 3712 | *****************************************************************************/ |
2154 | |||
2155 | static void | 3713 | static void |
2156 | nv50_pior_dpms(struct drm_encoder *encoder, int mode) | 3714 | nv50_pior_dpms(struct drm_encoder *encoder, int mode) |
2157 | { | 3715 | { |
@@ -2172,30 +3730,48 @@ nv50_pior_dpms(struct drm_encoder *encoder, int mode) | |||
2172 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); | 3730 | nvif_mthd(disp->disp, 0, &args, sizeof(args)); |
2173 | } | 3731 | } |
2174 | 3732 | ||
2175 | static bool | 3733 | static int |
2176 | nv50_pior_mode_fixup(struct drm_encoder *encoder, | 3734 | nv50_pior_atomic_check(struct drm_encoder *encoder, |
2177 | const struct drm_display_mode *mode, | 3735 | struct drm_crtc_state *crtc_state, |
2178 | struct drm_display_mode *adjusted_mode) | 3736 | struct drm_connector_state *conn_state) |
2179 | { | 3737 | { |
2180 | if (!nv50_encoder_mode_fixup(encoder, mode, adjusted_mode)) | 3738 | int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state); |
2181 | return false; | 3739 | if (ret) |
2182 | adjusted_mode->clock *= 2; | 3740 | return ret; |
2183 | return true; | 3741 | crtc_state->adjusted_mode.clock *= 2; |
3742 | return 0; | ||
2184 | } | 3743 | } |
2185 | 3744 | ||
2186 | static void | 3745 | static void |
2187 | nv50_pior_commit(struct drm_encoder *encoder) | 3746 | nv50_pior_disable(struct drm_encoder *encoder) |
2188 | { | 3747 | { |
3748 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | ||
3749 | struct nv50_mast *mast = nv50_mast(encoder->dev); | ||
3750 | const int or = nv_encoder->or; | ||
3751 | u32 *push; | ||
3752 | |||
3753 | if (nv_encoder->crtc) { | ||
3754 | push = evo_wait(mast, 4); | ||
3755 | if (push) { | ||
3756 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | ||
3757 | evo_mthd(push, 0x0700 + (or * 0x040), 1); | ||
3758 | evo_data(push, 0x00000000); | ||
3759 | } | ||
3760 | evo_kick(push, mast); | ||
3761 | } | ||
3762 | } | ||
3763 | |||
3764 | nv_encoder->crtc = NULL; | ||
2189 | } | 3765 | } |
2190 | 3766 | ||
2191 | static void | 3767 | static void |
2192 | nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | 3768 | nv50_pior_enable(struct drm_encoder *encoder) |
2193 | struct drm_display_mode *adjusted_mode) | ||
2194 | { | 3769 | { |
2195 | struct nv50_mast *mast = nv50_mast(encoder->dev); | 3770 | struct nv50_mast *mast = nv50_mast(encoder->dev); |
2196 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 3771 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); |
2197 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); | 3772 | struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); |
2198 | struct nouveau_connector *nv_connector; | 3773 | struct nouveau_connector *nv_connector; |
3774 | struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode; | ||
2199 | u8 owner = 1 << nv_crtc->index; | 3775 | u8 owner = 1 << nv_crtc->index; |
2200 | u8 proto, depth; | 3776 | u8 proto, depth; |
2201 | u32 *push; | 3777 | u32 *push; |
@@ -2218,8 +3794,6 @@ nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | |||
2218 | break; | 3794 | break; |
2219 | } | 3795 | } |
2220 | 3796 | ||
2221 | nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON); | ||
2222 | |||
2223 | push = evo_wait(mast, 8); | 3797 | push = evo_wait(mast, 8); |
2224 | if (push) { | 3798 | if (push) { |
2225 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | 3799 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { |
@@ -2238,29 +3812,13 @@ nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, | |||
2238 | nv_encoder->crtc = encoder->crtc; | 3812 | nv_encoder->crtc = encoder->crtc; |
2239 | } | 3813 | } |
2240 | 3814 | ||
2241 | static void | 3815 | static const struct drm_encoder_helper_funcs |
2242 | nv50_pior_disconnect(struct drm_encoder *encoder) | 3816 | nv50_pior_help = { |
2243 | { | 3817 | .dpms = nv50_pior_dpms, |
2244 | struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); | 3818 | .atomic_check = nv50_pior_atomic_check, |
2245 | struct nv50_mast *mast = nv50_mast(encoder->dev); | 3819 | .enable = nv50_pior_enable, |
2246 | const int or = nv_encoder->or; | 3820 | .disable = nv50_pior_disable, |
2247 | u32 *push; | 3821 | }; |
2248 | |||
2249 | if (nv_encoder->crtc) { | ||
2250 | nv50_crtc_prepare(nv_encoder->crtc); | ||
2251 | |||
2252 | push = evo_wait(mast, 4); | ||
2253 | if (push) { | ||
2254 | if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { | ||
2255 | evo_mthd(push, 0x0700 + (or * 0x040), 1); | ||
2256 | evo_data(push, 0x00000000); | ||
2257 | } | ||
2258 | evo_kick(push, mast); | ||
2259 | } | ||
2260 | } | ||
2261 | |||
2262 | nv_encoder->crtc = NULL; | ||
2263 | } | ||
2264 | 3822 | ||
2265 | static void | 3823 | static void |
2266 | nv50_pior_destroy(struct drm_encoder *encoder) | 3824 | nv50_pior_destroy(struct drm_encoder *encoder) |
@@ -2269,17 +3827,8 @@ nv50_pior_destroy(struct drm_encoder *encoder) | |||
2269 | kfree(encoder); | 3827 | kfree(encoder); |
2270 | } | 3828 | } |
2271 | 3829 | ||
2272 | static const struct drm_encoder_helper_funcs nv50_pior_hfunc = { | 3830 | static const struct drm_encoder_funcs |
2273 | .dpms = nv50_pior_dpms, | 3831 | nv50_pior_func = { |
2274 | .mode_fixup = nv50_pior_mode_fixup, | ||
2275 | .prepare = nv50_pior_disconnect, | ||
2276 | .commit = nv50_pior_commit, | ||
2277 | .mode_set = nv50_pior_mode_set, | ||
2278 | .disable = nv50_pior_disconnect, | ||
2279 | .get_crtc = nv50_display_crtc_get, | ||
2280 | }; | ||
2281 | |||
2282 | static const struct drm_encoder_funcs nv50_pior_func = { | ||
2283 | .destroy = nv50_pior_destroy, | 3832 | .destroy = nv50_pior_destroy, |
2284 | }; | 3833 | }; |
2285 | 3834 | ||
@@ -2321,149 +3870,462 @@ nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe) | |||
2321 | encoder = to_drm_encoder(nv_encoder); | 3870 | encoder = to_drm_encoder(nv_encoder); |
2322 | encoder->possible_crtcs = dcbe->heads; | 3871 | encoder->possible_crtcs = dcbe->heads; |
2323 | encoder->possible_clones = 0; | 3872 | encoder->possible_clones = 0; |
2324 | drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, NULL); | 3873 | drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, |
2325 | drm_encoder_helper_add(encoder, &nv50_pior_hfunc); | 3874 | "pior-%04x-%04x", dcbe->hasht, dcbe->hashm); |
3875 | drm_encoder_helper_add(encoder, &nv50_pior_help); | ||
2326 | 3876 | ||
2327 | drm_mode_connector_attach_encoder(connector, encoder); | 3877 | drm_mode_connector_attach_encoder(connector, encoder); |
2328 | return 0; | 3878 | return 0; |
2329 | } | 3879 | } |
2330 | 3880 | ||
2331 | /****************************************************************************** | 3881 | /****************************************************************************** |
2332 | * Framebuffer | 3882 | * Atomic |
2333 | *****************************************************************************/ | 3883 | *****************************************************************************/ |
2334 | 3884 | ||
2335 | static void | 3885 | static void |
2336 | nv50_fbdma_fini(struct nv50_fbdma *fbdma) | 3886 | nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock) |
2337 | { | 3887 | { |
2338 | int i; | 3888 | struct nv50_disp *disp = nv50_disp(drm->dev); |
2339 | for (i = 0; i < ARRAY_SIZE(fbdma->base); i++) | 3889 | struct nv50_dmac *core = &disp->mast.base; |
2340 | nvif_object_fini(&fbdma->base[i]); | 3890 | struct nv50_mstm *mstm; |
2341 | nvif_object_fini(&fbdma->core); | 3891 | struct drm_encoder *encoder; |
2342 | list_del(&fbdma->head); | 3892 | u32 *push; |
2343 | kfree(fbdma); | 3893 | |
3894 | NV_ATOMIC(drm, "commit core %08x\n", interlock); | ||
3895 | |||
3896 | drm_for_each_encoder(encoder, drm->dev) { | ||
3897 | if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { | ||
3898 | mstm = nouveau_encoder(encoder)->dp.mstm; | ||
3899 | if (mstm && mstm->modified) | ||
3900 | nv50_mstm_prepare(mstm); | ||
3901 | } | ||
3902 | } | ||
3903 | |||
3904 | if ((push = evo_wait(core, 5))) { | ||
3905 | evo_mthd(push, 0x0084, 1); | ||
3906 | evo_data(push, 0x80000000); | ||
3907 | evo_mthd(push, 0x0080, 2); | ||
3908 | evo_data(push, interlock); | ||
3909 | evo_data(push, 0x00000000); | ||
3910 | nouveau_bo_wr32(disp->sync, 0, 0x00000000); | ||
3911 | evo_kick(push, core); | ||
3912 | if (nvif_msec(&drm->device, 2000ULL, | ||
3913 | if (nouveau_bo_rd32(disp->sync, 0)) | ||
3914 | break; | ||
3915 | usleep_range(1, 2); | ||
3916 | ) < 0) | ||
3917 | NV_ERROR(drm, "EVO timeout\n"); | ||
3918 | } | ||
3919 | |||
3920 | drm_for_each_encoder(encoder, drm->dev) { | ||
3921 | if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { | ||
3922 | mstm = nouveau_encoder(encoder)->dp.mstm; | ||
3923 | if (mstm && mstm->modified) | ||
3924 | nv50_mstm_cleanup(mstm); | ||
3925 | } | ||
3926 | } | ||
2344 | } | 3927 | } |
2345 | 3928 | ||
2346 | static int | 3929 | static void |
2347 | nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind) | 3930 | nv50_disp_atomic_commit_tail(struct drm_atomic_state *state) |
2348 | { | 3931 | { |
3932 | struct drm_device *dev = state->dev; | ||
3933 | struct drm_crtc_state *crtc_state; | ||
3934 | struct drm_crtc *crtc; | ||
3935 | struct drm_plane_state *plane_state; | ||
3936 | struct drm_plane *plane; | ||
2349 | struct nouveau_drm *drm = nouveau_drm(dev); | 3937 | struct nouveau_drm *drm = nouveau_drm(dev); |
2350 | struct nv50_disp *disp = nv50_disp(dev); | 3938 | struct nv50_disp *disp = nv50_disp(dev); |
2351 | struct nv50_mast *mast = nv50_mast(dev); | 3939 | struct nv50_atom *atom = nv50_atom(state); |
2352 | struct __attribute__ ((packed)) { | 3940 | struct nv50_outp_atom *outp, *outt; |
2353 | struct nv_dma_v0 base; | 3941 | u32 interlock_core = 0; |
2354 | union { | 3942 | u32 interlock_chan = 0; |
2355 | struct nv50_dma_v0 nv50; | 3943 | int i; |
2356 | struct gf100_dma_v0 gf100; | 3944 | |
2357 | struct gf119_dma_v0 gf119; | 3945 | NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable); |
2358 | }; | 3946 | drm_atomic_helper_wait_for_fences(dev, state, false); |
2359 | } args = {}; | 3947 | drm_atomic_helper_wait_for_dependencies(state); |
2360 | struct nv50_fbdma *fbdma; | 3948 | drm_atomic_helper_update_legacy_modeset_state(dev, state); |
2361 | struct drm_crtc *crtc; | ||
2362 | u32 size = sizeof(args.base); | ||
2363 | int ret; | ||
2364 | 3949 | ||
2365 | list_for_each_entry(fbdma, &disp->fbdma, head) { | 3950 | if (atom->lock_core) |
2366 | if (fbdma->core.handle == name) | 3951 | mutex_lock(&disp->mutex); |
2367 | return 0; | 3952 | |
3953 | /* Disable head(s). */ | ||
3954 | for_each_crtc_in_state(state, crtc, crtc_state, i) { | ||
3955 | struct nv50_head_atom *asyh = nv50_head_atom(crtc->state); | ||
3956 | struct nv50_head *head = nv50_head(crtc); | ||
3957 | |||
3958 | NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name, | ||
3959 | asyh->clr.mask, asyh->set.mask); | ||
3960 | |||
3961 | if (asyh->clr.mask) { | ||
3962 | nv50_head_flush_clr(head, asyh, atom->flush_disable); | ||
3963 | interlock_core |= 1; | ||
3964 | } | ||
2368 | } | 3965 | } |
2369 | 3966 | ||
2370 | fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL); | 3967 | /* Disable plane(s). */ |
2371 | if (!fbdma) | 3968 | for_each_plane_in_state(state, plane, plane_state, i) { |
2372 | return -ENOMEM; | 3969 | struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state); |
2373 | list_add(&fbdma->head, &disp->fbdma); | 3970 | struct nv50_wndw *wndw = nv50_wndw(plane); |
2374 | 3971 | ||
2375 | args.base.target = NV_DMA_V0_TARGET_VRAM; | 3972 | NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name, |
2376 | args.base.access = NV_DMA_V0_ACCESS_RDWR; | 3973 | asyw->clr.mask, asyw->set.mask); |
2377 | args.base.start = offset; | 3974 | if (!asyw->clr.mask) |
2378 | args.base.limit = offset + length - 1; | 3975 | continue; |
2379 | 3976 | ||
2380 | if (drm->device.info.chipset < 0x80) { | 3977 | interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core, |
2381 | args.nv50.part = NV50_DMA_V0_PART_256; | 3978 | atom->flush_disable, |
2382 | size += sizeof(args.nv50); | 3979 | asyw); |
2383 | } else | 3980 | } |
2384 | if (drm->device.info.chipset < 0xc0) { | 3981 | |
2385 | args.nv50.part = NV50_DMA_V0_PART_256; | 3982 | /* Disable output path(s). */ |
2386 | args.nv50.kind = kind; | 3983 | list_for_each_entry(outp, &atom->outp, head) { |
2387 | size += sizeof(args.nv50); | 3984 | const struct drm_encoder_helper_funcs *help; |
2388 | } else | 3985 | struct drm_encoder *encoder; |
2389 | if (drm->device.info.chipset < 0xd0) { | 3986 | |
2390 | args.gf100.kind = kind; | 3987 | encoder = outp->encoder; |
2391 | size += sizeof(args.gf100); | 3988 | help = encoder->helper_private; |
2392 | } else { | 3989 | |
2393 | args.gf119.page = GF119_DMA_V0_PAGE_LP; | 3990 | NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name, |
2394 | args.gf119.kind = kind; | 3991 | outp->clr.mask, outp->set.mask); |
2395 | size += sizeof(args.gf119); | 3992 | |
3993 | if (outp->clr.mask) { | ||
3994 | help->disable(encoder); | ||
3995 | interlock_core |= 1; | ||
3996 | if (outp->flush_disable) { | ||
3997 | nv50_disp_atomic_commit_core(drm, interlock_chan); | ||
3998 | interlock_core = 0; | ||
3999 | interlock_chan = 0; | ||
4000 | } | ||
4001 | } | ||
4002 | } | ||
4003 | |||
4004 | /* Flush disable. */ | ||
4005 | if (interlock_core) { | ||
4006 | if (atom->flush_disable) { | ||
4007 | nv50_disp_atomic_commit_core(drm, interlock_chan); | ||
4008 | interlock_core = 0; | ||
4009 | interlock_chan = 0; | ||
4010 | } | ||
2396 | } | 4011 | } |
2397 | 4012 | ||
2398 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { | 4013 | /* Update output path(s). */ |
4014 | list_for_each_entry_safe(outp, outt, &atom->outp, head) { | ||
4015 | const struct drm_encoder_helper_funcs *help; | ||
4016 | struct drm_encoder *encoder; | ||
4017 | |||
4018 | encoder = outp->encoder; | ||
4019 | help = encoder->helper_private; | ||
4020 | |||
4021 | NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name, | ||
4022 | outp->set.mask, outp->clr.mask); | ||
4023 | |||
4024 | if (outp->set.mask) { | ||
4025 | help->enable(encoder); | ||
4026 | interlock_core = 1; | ||
4027 | } | ||
4028 | |||
4029 | list_del(&outp->head); | ||
4030 | kfree(outp); | ||
4031 | } | ||
4032 | |||
4033 | /* Update head(s). */ | ||
4034 | for_each_crtc_in_state(state, crtc, crtc_state, i) { | ||
4035 | struct nv50_head_atom *asyh = nv50_head_atom(crtc->state); | ||
2399 | struct nv50_head *head = nv50_head(crtc); | 4036 | struct nv50_head *head = nv50_head(crtc); |
2400 | int ret = nvif_object_init(&head->sync.base.base.user, name, | 4037 | |
2401 | NV_DMA_IN_MEMORY, &args, size, | 4038 | NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, |
2402 | &fbdma->base[head->base.index]); | 4039 | asyh->set.mask, asyh->clr.mask); |
2403 | if (ret) { | 4040 | |
2404 | nv50_fbdma_fini(fbdma); | 4041 | if (asyh->set.mask) { |
2405 | return ret; | 4042 | nv50_head_flush_set(head, asyh); |
4043 | interlock_core = 1; | ||
2406 | } | 4044 | } |
2407 | } | 4045 | } |
2408 | 4046 | ||
2409 | ret = nvif_object_init(&mast->base.base.user, name, NV_DMA_IN_MEMORY, | 4047 | /* Update plane(s). */ |
2410 | &args, size, &fbdma->core); | 4048 | for_each_plane_in_state(state, plane, plane_state, i) { |
2411 | if (ret) { | 4049 | struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state); |
2412 | nv50_fbdma_fini(fbdma); | 4050 | struct nv50_wndw *wndw = nv50_wndw(plane); |
4051 | |||
4052 | NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name, | ||
4053 | asyw->set.mask, asyw->clr.mask); | ||
4054 | if ( !asyw->set.mask && | ||
4055 | (!asyw->clr.mask || atom->flush_disable)) | ||
4056 | continue; | ||
4057 | |||
4058 | interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw); | ||
4059 | } | ||
4060 | |||
4061 | /* Flush update. */ | ||
4062 | if (interlock_core) { | ||
4063 | if (!interlock_chan && atom->state.legacy_cursor_update) { | ||
4064 | u32 *push = evo_wait(&disp->mast, 2); | ||
4065 | if (push) { | ||
4066 | evo_mthd(push, 0x0080, 1); | ||
4067 | evo_data(push, 0x00000000); | ||
4068 | evo_kick(push, &disp->mast); | ||
4069 | } | ||
4070 | } else { | ||
4071 | nv50_disp_atomic_commit_core(drm, interlock_chan); | ||
4072 | } | ||
4073 | } | ||
4074 | |||
4075 | if (atom->lock_core) | ||
4076 | mutex_unlock(&disp->mutex); | ||
4077 | |||
4078 | /* Wait for HW to signal completion. */ | ||
4079 | for_each_plane_in_state(state, plane, plane_state, i) { | ||
4080 | struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state); | ||
4081 | struct nv50_wndw *wndw = nv50_wndw(plane); | ||
4082 | int ret = nv50_wndw_wait_armed(wndw, asyw); | ||
4083 | if (ret) | ||
4084 | NV_ERROR(drm, "%s: timeout\n", plane->name); | ||
4085 | } | ||
4086 | |||
4087 | for_each_crtc_in_state(state, crtc, crtc_state, i) { | ||
4088 | if (crtc->state->event) { | ||
4089 | unsigned long flags; | ||
4090 | spin_lock_irqsave(&crtc->dev->event_lock, flags); | ||
4091 | drm_crtc_send_vblank_event(crtc, crtc->state->event); | ||
4092 | spin_unlock_irqrestore(&crtc->dev->event_lock, flags); | ||
4093 | crtc->state->event = NULL; | ||
4094 | } | ||
4095 | } | ||
4096 | |||
4097 | drm_atomic_helper_commit_hw_done(state); | ||
4098 | drm_atomic_helper_cleanup_planes(dev, state); | ||
4099 | drm_atomic_helper_commit_cleanup_done(state); | ||
4100 | drm_atomic_state_put(state); | ||
4101 | } | ||
4102 | |||
4103 | static void | ||
4104 | nv50_disp_atomic_commit_work(struct work_struct *work) | ||
4105 | { | ||
4106 | struct drm_atomic_state *state = | ||
4107 | container_of(work, typeof(*state), commit_work); | ||
4108 | nv50_disp_atomic_commit_tail(state); | ||
4109 | } | ||
4110 | |||
4111 | static int | ||
4112 | nv50_disp_atomic_commit(struct drm_device *dev, | ||
4113 | struct drm_atomic_state *state, bool nonblock) | ||
4114 | { | ||
4115 | struct nouveau_drm *drm = nouveau_drm(dev); | ||
4116 | struct nv50_disp *disp = nv50_disp(dev); | ||
4117 | struct drm_plane_state *plane_state; | ||
4118 | struct drm_plane *plane; | ||
4119 | struct drm_crtc *crtc; | ||
4120 | bool active = false; | ||
4121 | int ret, i; | ||
4122 | |||
4123 | ret = pm_runtime_get_sync(dev->dev); | ||
4124 | if (ret < 0 && ret != -EACCES) | ||
2413 | return ret; | 4125 | return ret; |
4126 | |||
4127 | ret = drm_atomic_helper_setup_commit(state, nonblock); | ||
4128 | if (ret) | ||
4129 | goto done; | ||
4130 | |||
4131 | INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work); | ||
4132 | |||
4133 | ret = drm_atomic_helper_prepare_planes(dev, state); | ||
4134 | if (ret) | ||
4135 | goto done; | ||
4136 | |||
4137 | if (!nonblock) { | ||
4138 | ret = drm_atomic_helper_wait_for_fences(dev, state, true); | ||
4139 | if (ret) | ||
4140 | goto done; | ||
4141 | } | ||
4142 | |||
4143 | for_each_plane_in_state(state, plane, plane_state, i) { | ||
4144 | struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state); | ||
4145 | struct nv50_wndw *wndw = nv50_wndw(plane); | ||
4146 | if (asyw->set.image) { | ||
4147 | asyw->ntfy.handle = wndw->dmac->sync.handle; | ||
4148 | asyw->ntfy.offset = wndw->ntfy; | ||
4149 | asyw->ntfy.awaken = false; | ||
4150 | asyw->set.ntfy = true; | ||
4151 | nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000); | ||
4152 | wndw->ntfy ^= 0x10; | ||
4153 | } | ||
4154 | } | ||
4155 | |||
4156 | drm_atomic_helper_swap_state(state, true); | ||
4157 | drm_atomic_state_get(state); | ||
4158 | |||
4159 | if (nonblock) | ||
4160 | queue_work(system_unbound_wq, &state->commit_work); | ||
4161 | else | ||
4162 | nv50_disp_atomic_commit_tail(state); | ||
4163 | |||
4164 | drm_for_each_crtc(crtc, dev) { | ||
4165 | if (crtc->state->enable) { | ||
4166 | if (!drm->have_disp_power_ref) { | ||
4167 | drm->have_disp_power_ref = true; | ||
4168 | return ret; | ||
4169 | } | ||
4170 | active = true; | ||
4171 | break; | ||
4172 | } | ||
4173 | } | ||
4174 | |||
4175 | if (!active && drm->have_disp_power_ref) { | ||
4176 | pm_runtime_put_autosuspend(dev->dev); | ||
4177 | drm->have_disp_power_ref = false; | ||
4178 | } | ||
4179 | |||
4180 | done: | ||
4181 | pm_runtime_put_autosuspend(dev->dev); | ||
4182 | return ret; | ||
4183 | } | ||
4184 | |||
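nv50_disp_atomic_commit() is the driver's atomic_commit hook. It takes a runtime-PM reference, runs the usual setup_commit/prepare_planes/wait_for_fences sequence, then arms a completion notifier for every plane whose image changes: each window owns two notifier slots 16 bytes apart in the shared sync buffer, and wndw->ntfy ^= 0x10 flips between them before the slot is cleared. State is then swapped and the tail either queued on system_unbound_wq (nonblocking) or run inline. The runtime-PM juggling at the end keeps one extra reference alive for as long as any head is enabled (have_disp_power_ref), returning early so the reference taken at the top is not dropped, and releases it once the last head is disabled.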
4185 | static struct nv50_outp_atom * | ||
4186 | nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder) | ||
4187 | { | ||
4188 | struct nv50_outp_atom *outp; | ||
4189 | |||
4190 | list_for_each_entry(outp, &atom->outp, head) { | ||
4191 | if (outp->encoder == encoder) | ||
4192 | return outp; | ||
4193 | } | ||
4194 | |||
4195 | outp = kzalloc(sizeof(*outp), GFP_KERNEL); | ||
4196 | if (!outp) | ||
4197 | return ERR_PTR(-ENOMEM); | ||
4198 | |||
4199 | list_add(&outp->head, &atom->outp); | ||
4200 | outp->encoder = encoder; | ||
4201 | return outp; | ||
4202 | } | ||
4203 | |||
4204 | static int | ||
4205 | nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom, | ||
4206 | struct drm_connector *connector) | ||
4207 | { | ||
4208 | struct drm_encoder *encoder = connector->state->best_encoder; | ||
4209 | struct drm_crtc_state *crtc_state; | ||
4210 | struct drm_crtc *crtc; | ||
4211 | struct nv50_outp_atom *outp; | ||
4212 | |||
4213 | if (!(crtc = connector->state->crtc)) | ||
4214 | return 0; | ||
4215 | |||
4216 | crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc); | ||
4217 | if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) { | ||
4218 | outp = nv50_disp_outp_atomic_add(atom, encoder); | ||
4219 | if (IS_ERR(outp)) | ||
4220 | return PTR_ERR(outp); | ||
4221 | |||
4222 | if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { | ||
4223 | outp->flush_disable = true; | ||
4224 | atom->flush_disable = true; | ||
4225 | } | ||
4226 | outp->clr.ctrl = true; | ||
4227 | atom->lock_core = true; | ||
2414 | } | 4228 | } |
2415 | 4229 | ||
2416 | return 0; | 4230 | return 0; |
2417 | } | 4231 | } |
2418 | 4232 | ||
2419 | static void | 4233 | static int |
2420 | nv50_fb_dtor(struct drm_framebuffer *fb) | 4234 | nv50_disp_outp_atomic_check_set(struct nv50_atom *atom, |
4235 | struct drm_connector_state *connector_state) | ||
2421 | { | 4236 | { |
4237 | struct drm_encoder *encoder = connector_state->best_encoder; | ||
4238 | struct drm_crtc_state *crtc_state; | ||
4239 | struct drm_crtc *crtc; | ||
4240 | struct nv50_outp_atom *outp; | ||
4241 | |||
4242 | if (!(crtc = connector_state->crtc)) | ||
4243 | return 0; | ||
4244 | |||
4245 | crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc); | ||
4246 | if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) { | ||
4247 | outp = nv50_disp_outp_atomic_add(atom, encoder); | ||
4248 | if (IS_ERR(outp)) | ||
4249 | return PTR_ERR(outp); | ||
4250 | |||
4251 | outp->set.ctrl = true; | ||
4252 | atom->lock_core = true; | ||
4253 | } | ||
4254 | |||
4255 | return 0; | ||
2422 | } | 4256 | } |
2423 | 4257 | ||
2424 | static int | 4258 | static int |
2425 | nv50_fb_ctor(struct drm_framebuffer *fb) | 4259 | nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state) |
2426 | { | 4260 | { |
2427 | struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb); | 4261 | struct nv50_atom *atom = nv50_atom(state); |
2428 | struct nouveau_drm *drm = nouveau_drm(fb->dev); | 4262 | struct drm_connector_state *connector_state; |
2429 | struct nouveau_bo *nvbo = nv_fb->nvbo; | 4263 | struct drm_connector *connector; |
2430 | struct nv50_disp *disp = nv50_disp(fb->dev); | 4264 | int ret, i; |
2431 | u8 kind = nouveau_bo_tile_layout(nvbo) >> 8; | 4265 | |
2432 | u8 tile = nvbo->tile_mode; | 4266 | ret = drm_atomic_helper_check(dev, state); |
2433 | 4267 | if (ret) | |
2434 | if (drm->device.info.chipset >= 0xc0) | 4268 | return ret; |
2435 | tile >>= 4; /* yep.. */ | 4269 | |
2436 | 4270 | for_each_connector_in_state(state, connector, connector_state, i) { | |
2437 | switch (fb->depth) { | 4271 | ret = nv50_disp_outp_atomic_check_clr(atom, connector); |
2438 | case 8: nv_fb->r_format = 0x1e00; break; | 4272 | if (ret) |
2439 | case 15: nv_fb->r_format = 0xe900; break; | 4273 | return ret; |
2440 | case 16: nv_fb->r_format = 0xe800; break; | 4274 | |
2441 | case 24: | 4275 | ret = nv50_disp_outp_atomic_check_set(atom, connector_state); |
2442 | case 32: nv_fb->r_format = 0xcf00; break; | 4276 | if (ret) |
2443 | case 30: nv_fb->r_format = 0xd100; break; | 4277 | return ret; |
2444 | default: | ||
2445 | NV_ERROR(drm, "unknown depth %d\n", fb->depth); | ||
2446 | return -EINVAL; | ||
2447 | } | 4278 | } |
2448 | 4279 | ||
2449 | if (disp->disp->oclass < G82_DISP) { | 4280 | return 0; |
2450 | nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : | 4281 | } |
2451 | (fb->pitches[0] | 0x00100000); | 4282 | |
2452 | nv_fb->r_format |= kind << 16; | 4283 | static void |
2453 | } else | 4284 | nv50_disp_atomic_state_clear(struct drm_atomic_state *state) |
2454 | if (disp->disp->oclass < GF110_DISP) { | 4285 | { |
2455 | nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : | 4286 | struct nv50_atom *atom = nv50_atom(state); |
2456 | (fb->pitches[0] | 0x00100000); | 4287 | struct nv50_outp_atom *outp, *outt; |
2457 | } else { | 4288 | |
2458 | nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : | 4289 | list_for_each_entry_safe(outp, outt, &atom->outp, head) { |
2459 | (fb->pitches[0] | 0x01000000); | 4290 | list_del(&outp->head); |
4291 | kfree(outp); | ||
2460 | } | 4292 | } |
2461 | nv_fb->r_handle = 0xffff0000 | kind; | ||
2462 | 4293 | ||
2463 | return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0, | 4294 | drm_atomic_state_default_clear(state); |
2464 | drm->device.info.ram_user, kind); | 4295 | } |
4296 | |||
4297 | static void | ||
4298 | nv50_disp_atomic_state_free(struct drm_atomic_state *state) | ||
4299 | { | ||
4300 | struct nv50_atom *atom = nv50_atom(state); | ||
4301 | drm_atomic_state_default_release(&atom->state); | ||
4302 | kfree(atom); | ||
2465 | } | 4303 | } |
2466 | 4304 | ||
4305 | static struct drm_atomic_state * | ||
4306 | nv50_disp_atomic_state_alloc(struct drm_device *dev) | ||
4307 | { | ||
4308 | struct nv50_atom *atom; | ||
4309 | if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) || | ||
4310 | drm_atomic_state_init(dev, &atom->state) < 0) { | ||
4311 | kfree(atom); | ||
4312 | return NULL; | ||
4313 | } | ||
4314 | INIT_LIST_HEAD(&atom->outp); | ||
4315 | return &atom->state; | ||
4316 | } | ||
4317 | |||
4318 | static const struct drm_mode_config_funcs | ||
4319 | nv50_disp_func = { | ||
4320 | .fb_create = nouveau_user_framebuffer_create, | ||
4321 | .output_poll_changed = nouveau_fbcon_output_poll_changed, | ||
4322 | .atomic_check = nv50_disp_atomic_check, | ||
4323 | .atomic_commit = nv50_disp_atomic_commit, | ||
4324 | .atomic_state_alloc = nv50_disp_atomic_state_alloc, | ||
4325 | .atomic_state_clear = nv50_disp_atomic_state_clear, | ||
4326 | .atomic_state_free = nv50_disp_atomic_state_free, | ||
4327 | }; | ||
4328 | |||
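The atomic_state_alloc/clear/free trio subclasses drm_atomic_state so each commit can carry driver-private data. The structures they rely on are defined earlier in the patch, outside these hunks; a sketch of their layout, reconstructed here and therefore to be checked against the full patch rather than taken as authoritative:

        struct nv50_outp_atom {
                struct list_head head;          /* linked on nv50_atom.outp */
                struct drm_encoder *encoder;
                bool flush_disable;
                union { struct { bool ctrl:1; }; u8 mask; } set;
                union { struct { bool ctrl:1; }; u8 mask; } clr;
        };

        struct nv50_atom {
                struct drm_atomic_state state;  /* base DRM atomic state */
                struct list_head outp;          /* encoders needing ctrl updates */
                bool lock_core;                 /* take disp->mutex around the commit */
                bool flush_disable;             /* force a disable flush (MST teardown) */
        };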
2467 | /****************************************************************************** | 4329 | /****************************************************************************** |
2468 | * Init | 4330 | * Init |
2469 | *****************************************************************************/ | 4331 | *****************************************************************************/ |
@@ -2471,12 +4333,30 @@ nv50_fb_ctor(struct drm_framebuffer *fb) | |||
2471 | void | 4333 | void |
2472 | nv50_display_fini(struct drm_device *dev) | 4334 | nv50_display_fini(struct drm_device *dev) |
2473 | { | 4335 | { |
4336 | struct nouveau_encoder *nv_encoder; | ||
4337 | struct drm_encoder *encoder; | ||
4338 | struct drm_plane *plane; | ||
4339 | |||
4340 | drm_for_each_plane(plane, dev) { | ||
4341 | struct nv50_wndw *wndw = nv50_wndw(plane); | ||
4342 | if (plane->funcs != &nv50_wndw) | ||
4343 | continue; | ||
4344 | nv50_wndw_fini(wndw); | ||
4345 | } | ||
4346 | |||
4347 | list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { | ||
4348 | if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { | ||
4349 | nv_encoder = nouveau_encoder(encoder); | ||
4350 | nv50_mstm_fini(nv_encoder->dp.mstm); | ||
4351 | } | ||
4352 | } | ||
2474 | } | 4353 | } |
2475 | 4354 | ||
2476 | int | 4355 | int |
2477 | nv50_display_init(struct drm_device *dev) | 4356 | nv50_display_init(struct drm_device *dev) |
2478 | { | 4357 | { |
2479 | struct nv50_disp *disp = nv50_disp(dev); | 4358 | struct drm_encoder *encoder; |
4359 | struct drm_plane *plane; | ||
2480 | struct drm_crtc *crtc; | 4360 | struct drm_crtc *crtc; |
2481 | u32 *push; | 4361 | u32 *push; |
2482 | 4362 | ||
@@ -2484,16 +4364,35 @@ nv50_display_init(struct drm_device *dev) | |||
2484 | if (!push) | 4364 | if (!push) |
2485 | return -EBUSY; | 4365 | return -EBUSY; |
2486 | 4366 | ||
2487 | list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { | ||
2488 | struct nv50_sync *sync = nv50_sync(crtc); | ||
2489 | |||
2490 | nv50_crtc_lut_load(crtc); | ||
2491 | nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data); | ||
2492 | } | ||
2493 | |||
2494 | evo_mthd(push, 0x0088, 1); | 4367 | evo_mthd(push, 0x0088, 1); |
2495 | evo_data(push, nv50_mast(dev)->base.sync.handle); | 4368 | evo_data(push, nv50_mast(dev)->base.sync.handle); |
2496 | evo_kick(push, nv50_mast(dev)); | 4369 | evo_kick(push, nv50_mast(dev)); |
4370 | |||
4371 | list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { | ||
4372 | if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { | ||
4373 | const struct drm_encoder_helper_funcs *help; | ||
4374 | struct nouveau_encoder *nv_encoder; | ||
4375 | |||
4376 | nv_encoder = nouveau_encoder(encoder); | ||
4377 | help = encoder->helper_private; | ||
4378 | if (help && help->dpms) | ||
4379 | help->dpms(encoder, DRM_MODE_DPMS_ON); | ||
4380 | |||
4381 | nv50_mstm_init(nv_encoder->dp.mstm); | ||
4382 | } | ||
4383 | } | ||
4384 | |||
4385 | drm_for_each_crtc(crtc, dev) { | ||
4386 | nv50_head_lut_load(crtc); | ||
4387 | } | ||
4388 | |||
4389 | drm_for_each_plane(plane, dev) { | ||
4390 | struct nv50_wndw *wndw = nv50_wndw(plane); | ||
4391 | if (plane->funcs != &nv50_wndw) | ||
4392 | continue; | ||
4393 | nv50_wndw_init(wndw); | ||
4394 | } | ||
4395 | |||
2497 | return 0; | 4396 | return 0; |
2498 | } | 4397 | } |
2499 | 4398 | ||
@@ -2501,11 +4400,6 @@ void | |||
2501 | nv50_display_destroy(struct drm_device *dev) | 4400 | nv50_display_destroy(struct drm_device *dev) |
2502 | { | 4401 | { |
2503 | struct nv50_disp *disp = nv50_disp(dev); | 4402 | struct nv50_disp *disp = nv50_disp(dev); |
2504 | struct nv50_fbdma *fbdma, *fbtmp; | ||
2505 | |||
2506 | list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) { | ||
2507 | nv50_fbdma_fini(fbdma); | ||
2508 | } | ||
2509 | 4403 | ||
2510 | nv50_dmac_destroy(&disp->mast.base, disp->disp); | 4404 | nv50_dmac_destroy(&disp->mast.base, disp->disp); |
2511 | 4405 | ||
@@ -2518,6 +4412,10 @@ nv50_display_destroy(struct drm_device *dev) | |||
2518 | kfree(disp); | 4412 | kfree(disp); |
2519 | } | 4413 | } |
2520 | 4414 | ||
4415 | MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)"); | ||
4416 | static int nouveau_atomic = 0; | ||
4417 | module_param_named(atomic, nouveau_atomic, int, 0400); | ||
4418 | |||
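This is the ioctl gate mentioned in the merge description: the DRM core only exposes the atomic ioctl when DRIVER_ATOMIC is set, and nv50_display_create() below only sets that flag when the module was loaded with atomic=1 (for example nouveau.atomic=1 on the kernel command line, or an "options nouveau atomic=1" line under /etc/modprobe.d). The 0400 permission makes the parameter read-only after load. Internally the driver always runs the atomic code paths; the option merely controls whether userspace may drive them directly.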
2521 | int | 4419 | int |
2522 | nv50_display_create(struct drm_device *dev) | 4420 | nv50_display_create(struct drm_device *dev) |
2523 | { | 4421 | { |
@@ -2532,15 +4430,17 @@ nv50_display_create(struct drm_device *dev) | |||
2532 | disp = kzalloc(sizeof(*disp), GFP_KERNEL); | 4430 | disp = kzalloc(sizeof(*disp), GFP_KERNEL); |
2533 | if (!disp) | 4431 | if (!disp) |
2534 | return -ENOMEM; | 4432 | return -ENOMEM; |
2535 | INIT_LIST_HEAD(&disp->fbdma); | 4433 | |
4434 | mutex_init(&disp->mutex); | ||
2536 | 4435 | ||
2537 | nouveau_display(dev)->priv = disp; | 4436 | nouveau_display(dev)->priv = disp; |
2538 | nouveau_display(dev)->dtor = nv50_display_destroy; | 4437 | nouveau_display(dev)->dtor = nv50_display_destroy; |
2539 | nouveau_display(dev)->init = nv50_display_init; | 4438 | nouveau_display(dev)->init = nv50_display_init; |
2540 | nouveau_display(dev)->fini = nv50_display_fini; | 4439 | nouveau_display(dev)->fini = nv50_display_fini; |
2541 | nouveau_display(dev)->fb_ctor = nv50_fb_ctor; | ||
2542 | nouveau_display(dev)->fb_dtor = nv50_fb_dtor; | ||
2543 | disp->disp = &nouveau_display(dev)->disp; | 4440 | disp->disp = &nouveau_display(dev)->disp; |
4441 | dev->mode_config.funcs = &nv50_disp_func; | ||
4442 | if (nouveau_atomic) | ||
4443 | dev->driver->driver_features |= DRIVER_ATOMIC; | ||
2544 | 4444 | ||
2545 | /* small shared memory area we use for notifiers and semaphores */ | 4445 | /* small shared memory area we use for notifiers and semaphores */ |
2546 | ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM, | 4446 | ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM, |
@@ -2572,7 +4472,7 @@ nv50_display_create(struct drm_device *dev) | |||
2572 | crtcs = 2; | 4472 | crtcs = 2; |
2573 | 4473 | ||
2574 | for (i = 0; i < crtcs; i++) { | 4474 | for (i = 0; i < crtcs; i++) { |
2575 | ret = nv50_crtc_create(dev, i); | 4475 | ret = nv50_head_create(dev, i); |
2576 | if (ret) | 4476 | if (ret) |
2577 | goto out; | 4477 | goto out; |
2578 | } | 4478 | } |
diff --git a/drivers/gpu/drm/nouveau/nv50_display.h b/drivers/gpu/drm/nouveau/nv50_display.h index 70da347aa8c5..918187cee84b 100644 --- a/drivers/gpu/drm/nouveau/nv50_display.h +++ b/drivers/gpu/drm/nouveau/nv50_display.h | |||
@@ -35,11 +35,4 @@ int nv50_display_create(struct drm_device *); | |||
35 | void nv50_display_destroy(struct drm_device *); | 35 | void nv50_display_destroy(struct drm_device *); |
36 | int nv50_display_init(struct drm_device *); | 36 | int nv50_display_init(struct drm_device *); |
37 | void nv50_display_fini(struct drm_device *); | 37 | void nv50_display_fini(struct drm_device *); |
38 | |||
39 | void nv50_display_flip_stop(struct drm_crtc *); | ||
40 | int nv50_display_flip_next(struct drm_crtc *, struct drm_framebuffer *, | ||
41 | struct nouveau_channel *, u32 swap_interval); | ||
42 | |||
43 | struct nouveau_bo *nv50_display_crtc_sema(struct drm_device *, int head); | ||
44 | |||
45 | #endif /* __NV50_DISPLAY_H__ */ | 38 | #endif /* __NV50_DISPLAY_H__ */ |
diff --git a/drivers/gpu/drm/nouveau/nv50_fbcon.c b/drivers/gpu/drm/nouveau/nv50_fbcon.c index af3d3c49411a..327dcd7901ed 100644 --- a/drivers/gpu/drm/nouveau/nv50_fbcon.c +++ b/drivers/gpu/drm/nouveau/nv50_fbcon.c | |||
@@ -30,7 +30,7 @@ int | |||
30 | nv50_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) | 30 | nv50_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) |
31 | { | 31 | { |
32 | struct nouveau_fbdev *nfbdev = info->par; | 32 | struct nouveau_fbdev *nfbdev = info->par; |
33 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 33 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
34 | struct nouveau_channel *chan = drm->channel; | 34 | struct nouveau_channel *chan = drm->channel; |
35 | int ret; | 35 | int ret; |
36 | 36 | ||
@@ -65,7 +65,7 @@ int | |||
65 | nv50_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *region) | 65 | nv50_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *region) |
66 | { | 66 | { |
67 | struct nouveau_fbdev *nfbdev = info->par; | 67 | struct nouveau_fbdev *nfbdev = info->par; |
68 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 68 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
69 | struct nouveau_channel *chan = drm->channel; | 69 | struct nouveau_channel *chan = drm->channel; |
70 | int ret; | 70 | int ret; |
71 | 71 | ||
@@ -93,7 +93,7 @@ int | |||
93 | nv50_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) | 93 | nv50_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) |
94 | { | 94 | { |
95 | struct nouveau_fbdev *nfbdev = info->par; | 95 | struct nouveau_fbdev *nfbdev = info->par; |
96 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 96 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
97 | struct nouveau_channel *chan = drm->channel; | 97 | struct nouveau_channel *chan = drm->channel; |
98 | uint32_t dwords, *data = (uint32_t *)image->data; | 98 | uint32_t dwords, *data = (uint32_t *)image->data; |
99 | uint32_t mask = ~(~0 >> (32 - info->var.bits_per_pixel)); | 99 | uint32_t mask = ~(~0 >> (32 - info->var.bits_per_pixel)); |
@@ -148,8 +148,8 @@ int | |||
148 | nv50_fbcon_accel_init(struct fb_info *info) | 148 | nv50_fbcon_accel_init(struct fb_info *info) |
149 | { | 149 | { |
150 | struct nouveau_fbdev *nfbdev = info->par; | 150 | struct nouveau_fbdev *nfbdev = info->par; |
151 | struct nouveau_framebuffer *fb = &nfbdev->nouveau_fb; | 151 | struct nouveau_framebuffer *fb = nouveau_framebuffer(nfbdev->helper.fb); |
152 | struct drm_device *dev = nfbdev->dev; | 152 | struct drm_device *dev = nfbdev->helper.dev; |
153 | struct nouveau_drm *drm = nouveau_drm(dev); | 153 | struct nouveau_drm *drm = nouveau_drm(dev); |
154 | struct nouveau_channel *chan = drm->channel; | 154 | struct nouveau_channel *chan = drm->channel; |
155 | int ret, format; | 155 | int ret, format; |
diff --git a/drivers/gpu/drm/nouveau/nv50_fence.c b/drivers/gpu/drm/nouveau/nv50_fence.c index 8c5295414578..f68c7054fd53 100644 --- a/drivers/gpu/drm/nouveau/nv50_fence.c +++ b/drivers/gpu/drm/nouveau/nv50_fence.c | |||
@@ -35,13 +35,12 @@ | |||
35 | static int | 35 | static int |
36 | nv50_fence_context_new(struct nouveau_channel *chan) | 36 | nv50_fence_context_new(struct nouveau_channel *chan) |
37 | { | 37 | { |
38 | struct drm_device *dev = chan->drm->dev; | ||
39 | struct nv10_fence_priv *priv = chan->drm->fence; | 38 | struct nv10_fence_priv *priv = chan->drm->fence; |
40 | struct nv10_fence_chan *fctx; | 39 | struct nv10_fence_chan *fctx; |
41 | struct ttm_mem_reg *mem = &priv->bo->bo.mem; | 40 | struct ttm_mem_reg *mem = &priv->bo->bo.mem; |
42 | u32 start = mem->start * PAGE_SIZE; | 41 | u32 start = mem->start * PAGE_SIZE; |
43 | u32 limit = start + mem->size - 1; | 42 | u32 limit = start + mem->size - 1; |
44 | int ret, i; | 43 | int ret; |
45 | 44 | ||
46 | fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); | 45 | fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); |
47 | if (!fctx) | 46 | if (!fctx) |
@@ -60,23 +59,6 @@ nv50_fence_context_new(struct nouveau_channel *chan) | |||
60 | .limit = limit, | 59 | .limit = limit, |
61 | }, sizeof(struct nv_dma_v0), | 60 | }, sizeof(struct nv_dma_v0), |
62 | &fctx->sema); | 61 | &fctx->sema); |
63 | |||
64 | /* dma objects for display sync channel semaphore blocks */ | ||
65 | for (i = 0; !ret && i < dev->mode_config.num_crtc; i++) { | ||
66 | struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); | ||
67 | u32 start = bo->bo.mem.start * PAGE_SIZE; | ||
68 | u32 limit = start + bo->bo.mem.size - 1; | ||
69 | |||
70 | ret = nvif_object_init(&chan->user, NvEvoSema0 + i, | ||
71 | NV_DMA_IN_MEMORY, &(struct nv_dma_v0) { | ||
72 | .target = NV_DMA_V0_TARGET_VRAM, | ||
73 | .access = NV_DMA_V0_ACCESS_RDWR, | ||
74 | .start = start, | ||
75 | .limit = limit, | ||
76 | }, sizeof(struct nv_dma_v0), | ||
77 | &fctx->head[i]); | ||
78 | } | ||
79 | |||
80 | if (ret) | 62 | if (ret) |
81 | nv10_fence_context_del(chan); | 63 | nv10_fence_context_del(chan); |
82 | return ret; | 64 | return ret; |
diff --git a/drivers/gpu/drm/nouveau/nv84_fence.c b/drivers/gpu/drm/nouveau/nv84_fence.c index 23ef04b4e0b2..52b87ae83e7b 100644 --- a/drivers/gpu/drm/nouveau/nv84_fence.c +++ b/drivers/gpu/drm/nouveau/nv84_fence.c | |||
@@ -28,13 +28,6 @@ | |||
28 | 28 | ||
29 | #include "nv50_display.h" | 29 | #include "nv50_display.h" |
30 | 30 | ||
31 | u64 | ||
32 | nv84_fence_crtc(struct nouveau_channel *chan, int crtc) | ||
33 | { | ||
34 | struct nv84_fence_chan *fctx = chan->fence; | ||
35 | return fctx->dispc_vma[crtc].offset; | ||
36 | } | ||
37 | |||
38 | static int | 31 | static int |
39 | nv84_fence_emit32(struct nouveau_channel *chan, u64 virtual, u32 sequence) | 32 | nv84_fence_emit32(struct nouveau_channel *chan, u64 virtual, u32 sequence) |
40 | { | 33 | { |
@@ -110,15 +103,8 @@ nv84_fence_read(struct nouveau_channel *chan) | |||
110 | static void | 103 | static void |
111 | nv84_fence_context_del(struct nouveau_channel *chan) | 104 | nv84_fence_context_del(struct nouveau_channel *chan) |
112 | { | 105 | { |
113 | struct drm_device *dev = chan->drm->dev; | ||
114 | struct nv84_fence_priv *priv = chan->drm->fence; | 106 | struct nv84_fence_priv *priv = chan->drm->fence; |
115 | struct nv84_fence_chan *fctx = chan->fence; | 107 | struct nv84_fence_chan *fctx = chan->fence; |
116 | int i; | ||
117 | |||
118 | for (i = 0; i < dev->mode_config.num_crtc; i++) { | ||
119 | struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); | ||
120 | nouveau_bo_vma_del(bo, &fctx->dispc_vma[i]); | ||
121 | } | ||
122 | 108 | ||
123 | nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence); | 109 | nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence); |
124 | nouveau_bo_vma_del(priv->bo, &fctx->vma_gart); | 110 | nouveau_bo_vma_del(priv->bo, &fctx->vma_gart); |
@@ -134,7 +120,7 @@ nv84_fence_context_new(struct nouveau_channel *chan) | |||
134 | struct nouveau_cli *cli = (void *)chan->user.client; | 120 | struct nouveau_cli *cli = (void *)chan->user.client; |
135 | struct nv84_fence_priv *priv = chan->drm->fence; | 121 | struct nv84_fence_priv *priv = chan->drm->fence; |
136 | struct nv84_fence_chan *fctx; | 122 | struct nv84_fence_chan *fctx; |
137 | int ret, i; | 123 | int ret; |
138 | 124 | ||
139 | fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); | 125 | fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); |
140 | if (!fctx) | 126 | if (!fctx) |
@@ -154,12 +140,6 @@ nv84_fence_context_new(struct nouveau_channel *chan) | |||
154 | &fctx->vma_gart); | 140 | &fctx->vma_gart); |
155 | } | 141 | } |
156 | 142 | ||
157 | /* map display semaphore buffers into channel's vm */ | ||
158 | for (i = 0; !ret && i < chan->drm->dev->mode_config.num_crtc; i++) { | ||
159 | struct nouveau_bo *bo = nv50_display_crtc_sema(chan->drm->dev, i); | ||
160 | ret = nouveau_bo_vma_add(bo, cli->vm, &fctx->dispc_vma[i]); | ||
161 | } | ||
162 | |||
163 | if (ret) | 143 | if (ret) |
164 | nv84_fence_context_del(chan); | 144 | nv84_fence_context_del(chan); |
165 | return ret; | 145 | return ret; |
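With page flips now handled by the atomic window code, the display no longer synchronizes scanout through per-CRTC semaphore buffers. That is why nv50_display_crtc_sema() and the flip helpers disappeared from nv50_display.h above, why nv50_fence_context_new() stops creating a DMA object per head, and why nv84_fence no longer maps those buffers into each channel's VM (nv84_fence_crtc() had no remaining users).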
diff --git a/drivers/gpu/drm/nouveau/nvc0_fbcon.c b/drivers/gpu/drm/nouveau/nvc0_fbcon.c index 054b6a056d99..90f27bfa381f 100644 --- a/drivers/gpu/drm/nouveau/nvc0_fbcon.c +++ b/drivers/gpu/drm/nouveau/nvc0_fbcon.c | |||
@@ -30,7 +30,7 @@ int | |||
30 | nvc0_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) | 30 | nvc0_fbcon_fillrect(struct fb_info *info, const struct fb_fillrect *rect) |
31 | { | 31 | { |
32 | struct nouveau_fbdev *nfbdev = info->par; | 32 | struct nouveau_fbdev *nfbdev = info->par; |
33 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 33 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
34 | struct nouveau_channel *chan = drm->channel; | 34 | struct nouveau_channel *chan = drm->channel; |
35 | int ret; | 35 | int ret; |
36 | 36 | ||
@@ -65,7 +65,7 @@ int | |||
65 | nvc0_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *region) | 65 | nvc0_fbcon_copyarea(struct fb_info *info, const struct fb_copyarea *region) |
66 | { | 66 | { |
67 | struct nouveau_fbdev *nfbdev = info->par; | 67 | struct nouveau_fbdev *nfbdev = info->par; |
68 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 68 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
69 | struct nouveau_channel *chan = drm->channel; | 69 | struct nouveau_channel *chan = drm->channel; |
70 | int ret; | 70 | int ret; |
71 | 71 | ||
@@ -93,7 +93,7 @@ int | |||
93 | nvc0_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) | 93 | nvc0_fbcon_imageblit(struct fb_info *info, const struct fb_image *image) |
94 | { | 94 | { |
95 | struct nouveau_fbdev *nfbdev = info->par; | 95 | struct nouveau_fbdev *nfbdev = info->par; |
96 | struct nouveau_drm *drm = nouveau_drm(nfbdev->dev); | 96 | struct nouveau_drm *drm = nouveau_drm(nfbdev->helper.dev); |
97 | struct nouveau_channel *chan = drm->channel; | 97 | struct nouveau_channel *chan = drm->channel; |
98 | uint32_t dwords, *data = (uint32_t *)image->data; | 98 | uint32_t dwords, *data = (uint32_t *)image->data; |
99 | uint32_t mask = ~(~0 >> (32 - info->var.bits_per_pixel)); | 99 | uint32_t mask = ~(~0 >> (32 - info->var.bits_per_pixel)); |
@@ -148,8 +148,8 @@ int | |||
148 | nvc0_fbcon_accel_init(struct fb_info *info) | 148 | nvc0_fbcon_accel_init(struct fb_info *info) |
149 | { | 149 | { |
150 | struct nouveau_fbdev *nfbdev = info->par; | 150 | struct nouveau_fbdev *nfbdev = info->par; |
151 | struct drm_device *dev = nfbdev->dev; | 151 | struct drm_device *dev = nfbdev->helper.dev; |
152 | struct nouveau_framebuffer *fb = &nfbdev->nouveau_fb; | 152 | struct nouveau_framebuffer *fb = nouveau_framebuffer(nfbdev->helper.fb); |
153 | struct nouveau_drm *drm = nouveau_drm(dev); | 153 | struct nouveau_drm *drm = nouveau_drm(dev); |
154 | struct nouveau_channel *chan = drm->channel; | 154 | struct nouveau_channel *chan = drm->channel; |
155 | int ret, format; | 155 | int ret, format; |
diff --git a/drivers/gpu/drm/nouveau/nvif/client.c b/drivers/gpu/drm/nouveau/nvif/client.c index 1ee9294eca2e..29c20dfd894d 100644 --- a/drivers/gpu/drm/nouveau/nvif/client.c +++ b/drivers/gpu/drm/nouveau/nvif/client.c | |||
@@ -55,7 +55,7 @@ nvif_client_fini(struct nvif_client *client) | |||
55 | } | 55 | } |
56 | } | 56 | } |
57 | 57 | ||
58 | const struct nvif_driver * | 58 | static const struct nvif_driver * |
59 | nvif_drivers[] = { | 59 | nvif_drivers[] = { |
60 | #ifdef __KERNEL__ | 60 | #ifdef __KERNEL__ |
61 | &nvif_driver_nvkm, | 61 | &nvif_driver_nvkm, |
diff --git a/drivers/gpu/drm/nouveau/nvif/notify.c b/drivers/gpu/drm/nouveau/nvif/notify.c index b0787ff833ef..278b3933dc96 100644 --- a/drivers/gpu/drm/nouveau/nvif/notify.c +++ b/drivers/gpu/drm/nouveau/nvif/notify.c | |||
@@ -155,10 +155,8 @@ nvif_notify_fini(struct nvif_notify *notify) | |||
155 | int ret = nvif_notify_put(notify); | 155 | int ret = nvif_notify_put(notify); |
156 | if (ret >= 0 && object) { | 156 | if (ret >= 0 && object) { |
157 | ret = nvif_object_ioctl(object, &args, sizeof(args), NULL); | 157 | ret = nvif_object_ioctl(object, &args, sizeof(args), NULL); |
158 | if (ret == 0) { | 158 | notify->object = NULL; |
159 | notify->object = NULL; | 159 | kfree((void *)notify->data); |
160 | kfree((void *)notify->data); | ||
161 | } | ||
162 | } | 160 | } |
163 | return ret; | 161 | return ret; |
164 | } | 162 | } |
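The nvif_notify_fini() change makes teardown unconditional: notify->object is cleared and notify->data freed whenever the parent object still exists and the notify was successfully put, rather than only when the NTFY_DEL ioctl reports success. Previously a failing ioctl, presumably when the server-side object was already gone, leaked the request buffer.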
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gf100.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gf100.fuc3.h index 05bb65608dfe..d9ca9636a3e3 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gf100.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gf100.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gf100_ce_data[] = { | 1 | static uint32_t gf100_ce_data[] = { |
2 | /* 0x0000: ctx_object */ | 2 | /* 0x0000: ctx_object */ |
3 | 0x00000000, | 3 | 0x00000000, |
4 | /* 0x0004: ctx_query_address_high */ | 4 | /* 0x0004: ctx_query_address_high */ |
@@ -171,7 +171,7 @@ uint32_t gf100_ce_data[] = { | |||
171 | 0x00000800, | 171 | 0x00000800, |
172 | }; | 172 | }; |
173 | 173 | ||
174 | uint32_t gf100_ce_code[] = { | 174 | static uint32_t gf100_ce_code[] = { |
175 | /* 0x0000: main */ | 175 | /* 0x0000: main */ |
176 | 0x04fe04bd, | 176 | 0x04fe04bd, |
177 | 0x3517f000, | 177 | 0x3517f000, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gt215.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gt215.fuc3.h index 972281d10f38..f0a1cf31c7ca 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gt215.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/gt215.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gt215_ce_data[] = { | 1 | static uint32_t gt215_ce_data[] = { |
2 | /* 0x0000: ctx_object */ | 2 | /* 0x0000: ctx_object */ |
3 | 0x00000000, | 3 | 0x00000000, |
4 | /* 0x0004: ctx_dma */ | 4 | /* 0x0004: ctx_dma */ |
@@ -183,7 +183,7 @@ uint32_t gt215_ce_data[] = { | |||
183 | 0x00000800, | 183 | 0x00000800, |
184 | }; | 184 | }; |
185 | 185 | ||
186 | uint32_t gt215_ce_code[] = { | 186 | static uint32_t gt215_ce_code[] = { |
187 | /* 0x0000: main */ | 187 | /* 0x0000: main */ |
188 | 0x04fe04bd, | 188 | 0x04fe04bd, |
189 | 0x3517f000, | 189 | 0x3517f000, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/device/base.c b/drivers/gpu/drm/nouveau/nvkm/engine/device/base.c index 53d171729353..bd22526edb0b 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/device/base.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/device/base.c | |||
@@ -1852,7 +1852,7 @@ nvf1_chipset = { | |||
1852 | .fb = gk104_fb_new, | 1852 | .fb = gk104_fb_new, |
1853 | .fuse = gf100_fuse_new, | 1853 | .fuse = gf100_fuse_new, |
1854 | .gpio = gk104_gpio_new, | 1854 | .gpio = gk104_gpio_new, |
1855 | .i2c = gf119_i2c_new, | 1855 | .i2c = gk104_i2c_new, |
1856 | .ibus = gk104_ibus_new, | 1856 | .ibus = gk104_ibus_new, |
1857 | .iccsense = gf100_iccsense_new, | 1857 | .iccsense = gf100_iccsense_new, |
1858 | .imem = nv50_instmem_new, | 1858 | .imem = nv50_instmem_new, |
@@ -1966,7 +1966,7 @@ nv117_chipset = { | |||
1966 | .fb = gm107_fb_new, | 1966 | .fb = gm107_fb_new, |
1967 | .fuse = gm107_fuse_new, | 1967 | .fuse = gm107_fuse_new, |
1968 | .gpio = gk104_gpio_new, | 1968 | .gpio = gk104_gpio_new, |
1969 | .i2c = gf119_i2c_new, | 1969 | .i2c = gk104_i2c_new, |
1970 | .ibus = gk104_ibus_new, | 1970 | .ibus = gk104_ibus_new, |
1971 | .iccsense = gf100_iccsense_new, | 1971 | .iccsense = gf100_iccsense_new, |
1972 | .imem = nv50_instmem_new, | 1972 | .imem = nv50_instmem_new, |
@@ -2000,7 +2000,7 @@ nv118_chipset = { | |||
2000 | .fb = gm107_fb_new, | 2000 | .fb = gm107_fb_new, |
2001 | .fuse = gm107_fuse_new, | 2001 | .fuse = gm107_fuse_new, |
2002 | .gpio = gk104_gpio_new, | 2002 | .gpio = gk104_gpio_new, |
2003 | .i2c = gf119_i2c_new, | 2003 | .i2c = gk104_i2c_new, |
2004 | .ibus = gk104_ibus_new, | 2004 | .ibus = gk104_ibus_new, |
2005 | .iccsense = gf100_iccsense_new, | 2005 | .iccsense = gf100_iccsense_new, |
2006 | .imem = nv50_instmem_new, | 2006 | .imem = nv50_instmem_new, |
@@ -2131,7 +2131,7 @@ nv12b_chipset = { | |||
2131 | .bar = gk20a_bar_new, | 2131 | .bar = gk20a_bar_new, |
2132 | .bus = gf100_bus_new, | 2132 | .bus = gf100_bus_new, |
2133 | .clk = gm20b_clk_new, | 2133 | .clk = gm20b_clk_new, |
2134 | .fb = gk20a_fb_new, | 2134 | .fb = gm20b_fb_new, |
2135 | .fuse = gm107_fuse_new, | 2135 | .fuse = gm107_fuse_new, |
2136 | .ibus = gk20a_ibus_new, | 2136 | .ibus = gk20a_ibus_new, |
2137 | .imem = gk20a_instmem_new, | 2137 | .imem = gk20a_instmem_new, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c b/drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c index 9b638bd905ff..f2bc0b7d9b93 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c | |||
@@ -102,7 +102,7 @@ nvkm_device_tegra_probe_iommu(struct nvkm_device_tegra *tdev) | |||
102 | 102 | ||
103 | if (iommu_present(&platform_bus_type)) { | 103 | if (iommu_present(&platform_bus_type)) { |
104 | tdev->iommu.domain = iommu_domain_alloc(&platform_bus_type); | 104 | tdev->iommu.domain = iommu_domain_alloc(&platform_bus_type); |
105 | if (IS_ERR(tdev->iommu.domain)) | 105 | if (!tdev->iommu.domain) |
106 | goto error; | 106 | goto error; |
107 | 107 | ||
108 | /* | 108 | /* |
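iommu_domain_alloc() returns NULL on failure rather than an ERR_PTR, so the old IS_ERR() test could never fire and a failed allocation would have been dereferenced later. Restated with a comment (same logic as the hunk above):

        tdev->iommu.domain = iommu_domain_alloc(&platform_bus_type);
        if (!tdev->iommu.domain)        /* NULL, not ERR_PTR, signals failure */
                goto error;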
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/device/user.c b/drivers/gpu/drm/nouveau/nvkm/engine/device/user.c index 79a8f71cf788..513ee6b79553 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/device/user.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/device/user.c | |||
@@ -326,7 +326,7 @@ nvkm_udevice = { | |||
326 | .sclass = nvkm_udevice_child_get, | 326 | .sclass = nvkm_udevice_child_get, |
327 | }; | 327 | }; |
328 | 328 | ||
329 | int | 329 | static int |
330 | nvkm_udevice_new(const struct nvkm_oclass *oclass, void *data, u32 size, | 330 | nvkm_udevice_new(const struct nvkm_oclass *oclass, void *data, u32 size, |
331 | struct nvkm_object **pobject) | 331 | struct nvkm_object **pobject) |
332 | { | 332 | { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/channv50.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/channv50.c index dd2953bc9264..26990d44ae75 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/channv50.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/channv50.c | |||
@@ -153,7 +153,7 @@ nv50_disp_chan_uevent = { | |||
153 | .fini = nv50_disp_chan_uevent_fini, | 153 | .fini = nv50_disp_chan_uevent_fini, |
154 | }; | 154 | }; |
155 | 155 | ||
156 | int | 156 | static int |
157 | nv50_disp_chan_rd32(struct nvkm_object *object, u64 addr, u32 *data) | 157 | nv50_disp_chan_rd32(struct nvkm_object *object, u64 addr, u32 *data) |
158 | { | 158 | { |
159 | struct nv50_disp_chan *chan = nv50_disp_chan(object); | 159 | struct nv50_disp_chan *chan = nv50_disp_chan(object); |
@@ -163,7 +163,7 @@ nv50_disp_chan_rd32(struct nvkm_object *object, u64 addr, u32 *data) | |||
163 | return 0; | 163 | return 0; |
164 | } | 164 | } |
165 | 165 | ||
166 | int | 166 | static int |
167 | nv50_disp_chan_wr32(struct nvkm_object *object, u64 addr, u32 data) | 167 | nv50_disp_chan_wr32(struct nvkm_object *object, u64 addr, u32 data) |
168 | { | 168 | { |
169 | struct nv50_disp_chan *chan = nv50_disp_chan(object); | 169 | struct nv50_disp_chan *chan = nv50_disp_chan(object); |
@@ -173,7 +173,7 @@ nv50_disp_chan_wr32(struct nvkm_object *object, u64 addr, u32 data) | |||
173 | return 0; | 173 | return 0; |
174 | } | 174 | } |
175 | 175 | ||
176 | int | 176 | static int |
177 | nv50_disp_chan_ntfy(struct nvkm_object *object, u32 type, | 177 | nv50_disp_chan_ntfy(struct nvkm_object *object, u32 type, |
178 | struct nvkm_event **pevent) | 178 | struct nvkm_event **pevent) |
179 | { | 179 | { |
@@ -189,7 +189,7 @@ nv50_disp_chan_ntfy(struct nvkm_object *object, u32 type, | |||
189 | return -EINVAL; | 189 | return -EINVAL; |
190 | } | 190 | } |
191 | 191 | ||
192 | int | 192 | static int |
193 | nv50_disp_chan_map(struct nvkm_object *object, u64 *addr, u32 *size) | 193 | nv50_disp_chan_map(struct nvkm_object *object, u64 *addr, u32 *size) |
194 | { | 194 | { |
195 | struct nv50_disp_chan *chan = nv50_disp_chan(object); | 195 | struct nv50_disp_chan *chan = nv50_disp_chan(object); |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/coreg94.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/coreg94.c index 019379a3a01c..c65c9f3ff69f 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/coreg94.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/coreg94.c | |||
@@ -26,7 +26,7 @@ | |||
26 | 26 | ||
27 | #include <nvif/class.h> | 27 | #include <nvif/class.h> |
28 | 28 | ||
29 | const struct nv50_disp_mthd_list | 29 | static const struct nv50_disp_mthd_list |
30 | g94_disp_core_mthd_sor = { | 30 | g94_disp_core_mthd_sor = { |
31 | .mthd = 0x0040, | 31 | .mthd = 0x0040, |
32 | .addr = 0x000008, | 32 | .addr = 0x000008, |
@@ -43,8 +43,8 @@ g94_disp_core_chan_mthd = { | |||
43 | .prev = 0x000004, | 43 | .prev = 0x000004, |
44 | .data = { | 44 | .data = { |
45 | { "Global", 1, &nv50_disp_core_mthd_base }, | 45 | { "Global", 1, &nv50_disp_core_mthd_base }, |
46 | { "DAC", 3, &g84_disp_core_mthd_dac }, | 46 | { "DAC", 3, &g84_disp_core_mthd_dac }, |
47 | { "SOR", 4, &g94_disp_core_mthd_sor }, | 47 | { "SOR", 4, &g94_disp_core_mthd_sor }, |
48 | { "PIOR", 3, &nv50_disp_core_mthd_pior }, | 48 | { "PIOR", 3, &nv50_disp_core_mthd_pior }, |
49 | { "HEAD", 2, &g84_disp_core_mthd_head }, | 49 | { "HEAD", 2, &g84_disp_core_mthd_head }, |
50 | {} | 50 | {} |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/coregp104.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/coregp104.c index 6922f4007b61..e356f87fbe60 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/coregp104.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/coregp104.c | |||
@@ -59,7 +59,7 @@ gp104_disp_core_init(struct nv50_disp_dmac *chan) | |||
59 | return 0; | 59 | return 0; |
60 | } | 60 | } |
61 | 61 | ||
62 | const struct nv50_disp_dmac_func | 62 | static const struct nv50_disp_dmac_func |
63 | gp104_disp_core_func = { | 63 | gp104_disp_core_func = { |
64 | .init = gp104_disp_core_init, | 64 | .init = gp104_disp_core_init, |
65 | .fini = gf119_disp_core_fini, | 65 | .fini = gf119_disp_core_fini, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.c index 9688970eca47..4a93ceb850ac 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.c | |||
@@ -319,9 +319,8 @@ static const struct dp_rates { | |||
319 | }; | 319 | }; |
320 | 320 | ||
321 | void | 321 | void |
322 | nvkm_dp_train(struct work_struct *w) | 322 | nvkm_dp_train(struct nvkm_output_dp *outp) |
323 | { | 323 | { |
324 | struct nvkm_output_dp *outp = container_of(w, typeof(*outp), lt.work); | ||
325 | struct nv50_disp *disp = nv50_disp(outp->base.disp); | 324 | struct nv50_disp *disp = nv50_disp(outp->base.disp); |
326 | const struct dp_rates *cfg = nvkm_dp_rates; | 325 | const struct dp_rates *cfg = nvkm_dp_rates; |
327 | struct dp_state _dp = { | 326 | struct dp_state _dp = { |
@@ -353,9 +352,6 @@ nvkm_dp_train(struct work_struct *w) | |||
353 | } | 352 | } |
354 | cfg--; | 353 | cfg--; |
355 | 354 | ||
356 | /* disable link interrupt handling during link training */ | ||
357 | nvkm_notify_put(&outp->irq); | ||
358 | |||
359 | /* ensure sink is not in a low-power state */ | 355 | /* ensure sink is not in a low-power state */ |
360 | if (!nvkm_rdaux(outp->aux, DPCD_SC00, &pwr, 1)) { | 356 | if (!nvkm_rdaux(outp->aux, DPCD_SC00, &pwr, 1)) { |
361 | if ((pwr & DPCD_SC00_SET_POWER) != DPCD_SC00_SET_POWER_D0) { | 357 | if ((pwr & DPCD_SC00_SET_POWER) != DPCD_SC00_SET_POWER_D0) { |
@@ -400,9 +396,6 @@ nvkm_dp_train(struct work_struct *w) | |||
400 | 396 | ||
401 | dp_link_train_fini(dp); | 397 | dp_link_train_fini(dp); |
402 | 398 | ||
403 | /* signal completion and enable link interrupt handling */ | ||
404 | OUTP_DBG(&outp->base, "training complete"); | 399 | OUTP_DBG(&outp->base, "training complete"); |
405 | atomic_set(&outp->lt.done, 1); | 400 | atomic_set(&outp->lt.done, 1); |
406 | wake_up(&outp->lt.wait); | ||
407 | nvkm_notify_get(&outp->irq); | ||
408 | } | 401 | } |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.h b/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.h index 6e10c5e0ef11..baf1dd9ff975 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/dport.h | |||
@@ -1,6 +1,6 @@ | |||
1 | #ifndef __NVKM_DISP_DPORT_H__ | 1 | #ifndef __NVKM_DISP_DPORT_H__ |
2 | #define __NVKM_DISP_DPORT_H__ | 2 | #define __NVKM_DISP_DPORT_H__ |
3 | #include <core/os.h> | 3 | struct nvkm_output_dp; |
4 | 4 | ||
5 | /* DPCD Receiver Capabilities */ | 5 | /* DPCD Receiver Capabilities */ |
6 | #define DPCD_RC00_DPCD_REV 0x00000 | 6 | #define DPCD_RC00_DPCD_REV 0x00000 |
@@ -77,5 +77,5 @@ | |||
77 | #define DPCD_SC00_SET_POWER_D0 0x01 | 77 | #define DPCD_SC00_SET_POWER_D0 0x01 |
78 | #define DPCD_SC00_SET_POWER_D3 0x03 | 78 | #define DPCD_SC00_SET_POWER_D3 0x03 |
79 | 79 | ||
80 | void nvkm_dp_train(struct work_struct *); | 80 | void nvkm_dp_train(struct nvkm_output_dp *); |
81 | #endif | 81 | #endif |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/gf119.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/gf119.c index 29e84b241cca..7b346ccc38b7 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/gf119.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/gf119.c | |||
@@ -203,17 +203,20 @@ gf119_disp_intr_unk2_0(struct nv50_disp *disp, int head) | |||
203 | /* see note in nv50_disp_intr_unk20_0() */ | 203 | /* see note in nv50_disp_intr_unk20_0() */ |
204 | if (outp && outp->info.type == DCB_OUTPUT_DP) { | 204 | if (outp && outp->info.type == DCB_OUTPUT_DP) { |
205 | struct nvkm_output_dp *outpdp = nvkm_output_dp(outp); | 205 | struct nvkm_output_dp *outpdp = nvkm_output_dp(outp); |
206 | struct nvbios_init init = { | 206 | if (!outpdp->lt.mst) { |
207 | .subdev = subdev, | 207 | struct nvbios_init init = { |
208 | .bios = subdev->device->bios, | 208 | .subdev = subdev, |
209 | .outp = &outp->info, | 209 | .bios = subdev->device->bios, |
210 | .crtc = head, | 210 | .outp = &outp->info, |
211 | .offset = outpdp->info.script[4], | 211 | .crtc = head, |
212 | .execute = 1, | 212 | .offset = outpdp->info.script[4], |
213 | }; | 213 | .execute = 1, |
214 | }; | ||
214 | 215 | ||
215 | nvbios_exec(&init); | 216 | nvkm_notify_put(&outpdp->irq); |
216 | atomic_set(&outpdp->lt.done, 0); | 217 | nvbios_exec(&init); |
218 | atomic_set(&outpdp->lt.done, 0); | ||
219 | } | ||
217 | } | 220 | } |
218 | } | 221 | } |
219 | 222 | ||
@@ -314,7 +317,7 @@ gf119_disp_intr_unk2_2(struct nv50_disp *disp, int head) | |||
314 | break; | 317 | break; |
315 | } | 318 | } |
316 | 319 | ||
317 | if (nvkm_output_dp_train(outp, pclk, true)) | 320 | if (nvkm_output_dp_train(outp, pclk)) |
318 | OUTP_ERR(outp, "link not trained before attach"); | 321 | OUTP_ERR(outp, "link not trained before attach"); |
319 | } else { | 322 | } else { |
320 | if (disp->func->sor.magic) | 323 | if (disp->func->sor.magic) |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c index fbb8c7dc18fd..567466f93cd5 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c | |||
@@ -590,6 +590,7 @@ nv50_disp_intr_unk20_0(struct nv50_disp *disp, int head) | |||
590 | .execute = 1, | 590 | .execute = 1, |
591 | }; | 591 | }; |
592 | 592 | ||
593 | nvkm_notify_put(&outpdp->irq); | ||
593 | nvbios_exec(&init); | 594 | nvbios_exec(&init); |
594 | atomic_set(&outpdp->lt.done, 0); | 595 | atomic_set(&outpdp->lt.done, 0); |
595 | } | 596 | } |
@@ -779,7 +780,7 @@ nv50_disp_intr_unk20_2(struct nv50_disp *disp, int head) | |||
779 | break; | 780 | break; |
780 | } | 781 | } |
781 | 782 | ||
782 | if (nvkm_output_dp_train(outp, datarate / soff, true)) | 783 | if (nvkm_output_dp_train(outp, datarate / soff)) |
783 | OUTP_ERR(outp, "link not trained before attach"); | 784 | OUTP_ERR(outp, "link not trained before attach"); |
784 | } | 785 | } |
785 | 786 | ||
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.c index 3b7a9e7a1ea8..de36f73b14dc 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.c | |||
@@ -31,7 +31,7 @@ | |||
31 | #include <nvif/event.h> | 31 | #include <nvif/event.h> |
32 | 32 | ||
33 | int | 33 | int |
34 | nvkm_output_dp_train(struct nvkm_output *base, u32 datarate, bool wait) | 34 | nvkm_output_dp_train(struct nvkm_output *base, u32 datarate) |
35 | { | 35 | { |
36 | struct nvkm_output_dp *outp = nvkm_output_dp(base); | 36 | struct nvkm_output_dp *outp = nvkm_output_dp(base); |
37 | bool retrain = true; | 37 | bool retrain = true; |
@@ -39,6 +39,8 @@ nvkm_output_dp_train(struct nvkm_output *base, u32 datarate, bool wait) | |||
39 | u32 linkrate; | 39 | u32 linkrate; |
40 | int ret, i; | 40 | int ret, i; |
41 | 41 | ||
42 | mutex_lock(&outp->mutex); | ||
43 | |||
42 | /* check that the link is trained at a high enough rate */ | 44 | /* check that the link is trained at a high enough rate */ |
43 | ret = nvkm_rdaux(outp->aux, DPCD_LC00_LINK_BW_SET, link, 2); | 45 | ret = nvkm_rdaux(outp->aux, DPCD_LC00_LINK_BW_SET, link, 2); |
44 | if (ret) { | 46 | if (ret) { |
@@ -88,19 +90,10 @@ done: | |||
88 | outp->dpcd[DPCD_RC02] = | 90 | outp->dpcd[DPCD_RC02] = |
89 | outp->base.info.dpconf.link_nr; | 91 | outp->base.info.dpconf.link_nr; |
90 | } | 92 | } |
91 | atomic_set(&outp->lt.done, 0); | 93 | nvkm_dp_train(outp); |
92 | schedule_work(&outp->lt.work); | ||
93 | } else { | ||
94 | nvkm_notify_get(&outp->irq); | ||
95 | } | ||
96 | |||
97 | if (wait) { | ||
98 | if (!wait_event_timeout(outp->lt.wait, | ||
99 | atomic_read(&outp->lt.done), | ||
100 | msecs_to_jiffies(2000))) | ||
101 | ret = -ETIMEDOUT; | ||
102 | } | 94 | } |
103 | 95 | ||
96 | mutex_unlock(&outp->mutex); | ||
104 | return ret; | 97 | return ret; |
105 | } | 98 | } |
106 | 99 | ||
@@ -118,7 +111,7 @@ nvkm_output_dp_enable(struct nvkm_output_dp *outp, bool enable) | |||
118 | 111 | ||
119 | if (!nvkm_rdaux(aux, DPCD_RC00_DPCD_REV, outp->dpcd, | 112 | if (!nvkm_rdaux(aux, DPCD_RC00_DPCD_REV, outp->dpcd, |
120 | sizeof(outp->dpcd))) { | 113 | sizeof(outp->dpcd))) { |
121 | nvkm_output_dp_train(&outp->base, 0, true); | 114 | nvkm_output_dp_train(&outp->base, 0); |
122 | return; | 115 | return; |
123 | } | 116 | } |
124 | } | 117 | } |
@@ -165,10 +158,10 @@ nvkm_output_dp_irq(struct nvkm_notify *notify) | |||
165 | }; | 158 | }; |
166 | 159 | ||
167 | OUTP_DBG(&outp->base, "IRQ: %d", line->mask); | 160 | OUTP_DBG(&outp->base, "IRQ: %d", line->mask); |
168 | nvkm_output_dp_train(&outp->base, 0, true); | 161 | nvkm_output_dp_train(&outp->base, 0); |
169 | 162 | ||
170 | nvkm_event_send(&disp->hpd, rep.mask, conn->index, &rep, sizeof(rep)); | 163 | nvkm_event_send(&disp->hpd, rep.mask, conn->index, &rep, sizeof(rep)); |
171 | return NVKM_NOTIFY_DROP; | 164 | return NVKM_NOTIFY_KEEP; |
172 | } | 165 | } |
173 | 166 | ||
174 | static void | 167 | static void |
@@ -177,7 +170,6 @@ nvkm_output_dp_fini(struct nvkm_output *base) | |||
177 | struct nvkm_output_dp *outp = nvkm_output_dp(base); | 170 | struct nvkm_output_dp *outp = nvkm_output_dp(base); |
178 | nvkm_notify_put(&outp->hpd); | 171 | nvkm_notify_put(&outp->hpd); |
179 | nvkm_notify_put(&outp->irq); | 172 | nvkm_notify_put(&outp->irq); |
180 | flush_work(&outp->lt.work); | ||
181 | nvkm_output_dp_enable(outp, false); | 173 | nvkm_output_dp_enable(outp, false); |
182 | } | 174 | } |
183 | 175 | ||
@@ -187,6 +179,7 @@ nvkm_output_dp_init(struct nvkm_output *base) | |||
187 | struct nvkm_output_dp *outp = nvkm_output_dp(base); | 179 | struct nvkm_output_dp *outp = nvkm_output_dp(base); |
188 | nvkm_notify_put(&outp->base.conn->hpd); | 180 | nvkm_notify_put(&outp->base.conn->hpd); |
189 | nvkm_output_dp_enable(outp, true); | 181 | nvkm_output_dp_enable(outp, true); |
182 | nvkm_notify_get(&outp->irq); | ||
190 | nvkm_notify_get(&outp->hpd); | 183 | nvkm_notify_get(&outp->hpd); |
191 | } | 184 | } |
192 | 185 | ||
@@ -238,11 +231,6 @@ nvkm_output_dp_ctor(const struct nvkm_output_dp_func *func, | |||
238 | OUTP_DBG(&outp->base, "bios dp %02x %02x %02x %02x", | 231 | OUTP_DBG(&outp->base, "bios dp %02x %02x %02x %02x", |
239 | outp->version, hdr, cnt, len); | 232 | outp->version, hdr, cnt, len); |
240 | 233 | ||
241 | /* link training */ | ||
242 | INIT_WORK(&outp->lt.work, nvkm_dp_train); | ||
243 | init_waitqueue_head(&outp->lt.wait); | ||
244 | atomic_set(&outp->lt.done, 0); | ||
245 | |||
246 | /* link maintenance */ | 234 | /* link maintenance */ |
247 | ret = nvkm_notify_init(NULL, &i2c->event, nvkm_output_dp_irq, true, | 235 | ret = nvkm_notify_init(NULL, &i2c->event, nvkm_output_dp_irq, true, |
248 | &(struct nvkm_i2c_ntfy_req) { | 236 | &(struct nvkm_i2c_ntfy_req) { |
@@ -257,6 +245,9 @@ nvkm_output_dp_ctor(const struct nvkm_output_dp_func *func, | |||
257 | return ret; | 245 | return ret; |
258 | } | 246 | } |
259 | 247 | ||
248 | mutex_init(&outp->mutex); | ||
249 | atomic_set(&outp->lt.done, 0); | ||
250 | |||
260 | /* hotplug detect, replaces gpio-based mechanism with aux events */ | 251 | /* hotplug detect, replaces gpio-based mechanism with aux events */ |
261 | ret = nvkm_notify_init(NULL, &i2c->event, nvkm_output_dp_hpd, true, | 252 | ret = nvkm_notify_init(NULL, &i2c->event, nvkm_output_dp_hpd, true, |
262 | &(struct nvkm_i2c_ntfy_req) { | 253 | &(struct nvkm_i2c_ntfy_req) { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.h b/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.h index 4e983f6d7032..3c83a561cd88 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/outpdp.h | |||
@@ -29,10 +29,10 @@ struct nvkm_output_dp { | |||
29 | bool present; | 29 | bool present; |
30 | u8 dpcd[16]; | 30 | u8 dpcd[16]; |
31 | 31 | ||
32 | struct mutex mutex; | ||
32 | struct { | 33 | struct { |
33 | struct work_struct work; | ||
34 | wait_queue_head_t wait; | ||
35 | atomic_t done; | 34 | atomic_t done; |
35 | bool mst; | ||
36 | } lt; | 36 | } lt; |
37 | }; | 37 | }; |
38 | 38 | ||
@@ -41,9 +41,11 @@ struct nvkm_output_dp_func { | |||
41 | int (*lnk_pwr)(struct nvkm_output_dp *, int nr); | 41 | int (*lnk_pwr)(struct nvkm_output_dp *, int nr); |
42 | int (*lnk_ctl)(struct nvkm_output_dp *, int nr, int bw, bool ef); | 42 | int (*lnk_ctl)(struct nvkm_output_dp *, int nr, int bw, bool ef); |
43 | int (*drv_ctl)(struct nvkm_output_dp *, int ln, int vs, int pe, int pc); | 43 | int (*drv_ctl)(struct nvkm_output_dp *, int ln, int vs, int pe, int pc); |
44 | void (*vcpi)(struct nvkm_output_dp *, int head, u8 start_slot, | ||
45 | u8 num_slots, u16 pbn, u16 aligned_pbn); | ||
44 | }; | 46 | }; |
45 | 47 | ||
46 | int nvkm_output_dp_train(struct nvkm_output *, u32 rate, bool wait); | 48 | int nvkm_output_dp_train(struct nvkm_output *, u32 rate); |
47 | 49 | ||
48 | int nvkm_output_dp_ctor(const struct nvkm_output_dp_func *, struct nvkm_disp *, | 50 | int nvkm_output_dp_ctor(const struct nvkm_output_dp_func *, struct nvkm_disp *, |
49 | int index, struct dcb_output *, struct nvkm_i2c_aux *, | 51 | int index, struct dcb_output *, struct nvkm_i2c_aux *, |
@@ -63,6 +65,7 @@ int gf119_sor_dp_new(struct nvkm_disp *, int, struct dcb_output *, | |||
63 | struct nvkm_output **); | 65 | struct nvkm_output **); |
64 | int gf119_sor_dp_lnk_ctl(struct nvkm_output_dp *, int, int, bool); | 66 | int gf119_sor_dp_lnk_ctl(struct nvkm_output_dp *, int, int, bool); |
65 | int gf119_sor_dp_drv_ctl(struct nvkm_output_dp *, int, int, int, int); | 67 | int gf119_sor_dp_drv_ctl(struct nvkm_output_dp *, int, int, int, int); |
68 | void gf119_sor_dp_vcpi(struct nvkm_output_dp *, int, u8, u8, u16, u16); | ||
66 | 69 | ||
67 | int gm107_sor_dp_new(struct nvkm_disp *, int, struct dcb_output *, | 70 | int gm107_sor_dp_new(struct nvkm_disp *, int, struct dcb_output *, |
68 | struct nvkm_output **); | 71 | struct nvkm_output **); |
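Taken together, the outpdp.c/outpdp.h changes turn link training from a deferred job into a synchronous, mutex-serialized operation: the work_struct and waitqueue are gone, nvkm_output_dp_train() calls nvkm_dp_train() directly while holding the new outp->mutex, and the IRQ notifier stays armed across retrains (NVKM_NOTIFY_KEEP) instead of being dropped and re-acquired around each training pass; the remaining notify_put calls moved into the supervisor paths in nv50.c/gf119.c above. The new lt.mst flag and the vcpi() hook added to nvkm_output_dp_func are what the MST methods below hang off.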
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c index 2f9cecd81d04..c1158b22a721 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c | |||
@@ -66,7 +66,7 @@ nv50_disp_root_scanoutpos(NV50_DISP_MTHD_V0) | |||
66 | return 0; | 66 | return 0; |
67 | } | 67 | } |
68 | 68 | ||
69 | int | 69 | static int |
70 | nv50_disp_root_mthd_(struct nvkm_object *object, u32 mthd, void *data, u32 size) | 70 | nv50_disp_root_mthd_(struct nvkm_object *object, u32 mthd, void *data, u32 size) |
71 | { | 71 | { |
72 | union { | 72 | union { |
@@ -173,13 +173,56 @@ nv50_disp_root_mthd_(struct nvkm_object *object, u32 mthd, void *data, u32 size) | |||
173 | return 0; | 173 | return 0; |
174 | } else | 174 | } else |
175 | if (args->v0.state != 0) { | 175 | if (args->v0.state != 0) { |
176 | nvkm_output_dp_train(&outpdp->base, 0, true); | 176 | nvkm_output_dp_train(&outpdp->base, 0); |
177 | return 0; | 177 | return 0; |
178 | } | 178 | } |
179 | } else | 179 | } else |
180 | return ret; | 180 | return ret; |
181 | } | 181 | } |
182 | break; | 182 | break; |
183 | case NV50_DISP_MTHD_V1_SOR_DP_MST_LINK: { | ||
184 | struct nvkm_output_dp *outpdp = nvkm_output_dp(outp); | ||
185 | union { | ||
186 | struct nv50_disp_sor_dp_mst_link_v0 v0; | ||
187 | } *args = data; | ||
188 | int ret = -ENOSYS; | ||
189 | nvif_ioctl(object, "disp sor dp mst link size %d\n", size); | ||
190 | if (!(ret = nvif_unpack(ret, &data, &size, args->v0, 0, 0, false))) { | ||
191 | nvif_ioctl(object, "disp sor dp mst link vers %d state %d\n", | ||
192 | args->v0.version, args->v0.state); | ||
193 | if (outpdp->lt.mst != !!args->v0.state) { | ||
194 | outpdp->lt.mst = !!args->v0.state; | ||
195 | atomic_set(&outpdp->lt.done, 0); | ||
196 | nvkm_output_dp_train(&outpdp->base, 0); | ||
197 | } | ||
198 | return 0; | ||
199 | } else | ||
200 | return ret; | ||
201 | } | ||
202 | break; | ||
203 | case NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI: { | ||
204 | struct nvkm_output_dp *outpdp = nvkm_output_dp(outp); | ||
205 | union { | ||
206 | struct nv50_disp_sor_dp_mst_vcpi_v0 v0; | ||
207 | } *args = data; | ||
208 | int ret = -ENOSYS; | ||
209 | nvif_ioctl(object, "disp sor dp mst vcpi size %d\n", size); | ||
210 | if (!(ret = nvif_unpack(ret, &data, &size, args->v0, 0, 0, false))) { | ||
211 | nvif_ioctl(object, "disp sor dp mst vcpi vers %d " | ||
212 | "slot %02x/%02x pbn %04x/%04x\n", | ||
213 | args->v0.version, args->v0.start_slot, | ||
214 | args->v0.num_slots, args->v0.pbn, | ||
215 | args->v0.aligned_pbn); | ||
216 | if (!outpdp->func->vcpi) | ||
217 | return -ENODEV; | ||
218 | outpdp->func->vcpi(outpdp, head, args->v0.start_slot, | ||
219 | args->v0.num_slots, args->v0.pbn, | ||
220 | args->v0.aligned_pbn); | ||
221 | return 0; | ||
222 | } else | ||
223 | return ret; | ||
224 | } | ||
225 | break; | ||
183 | case NV50_DISP_MTHD_V1_PIOR_PWR: | 226 | case NV50_DISP_MTHD_V1_PIOR_PWR: |
184 | if (!func->pior.power) | 227 | if (!func->pior.power) |
185 | return -ENODEV; | 228 | return -ENODEV; |
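These two root methods are the kernel-internal nvif ABI the DRM side uses for MST: MST_LINK flips the SOR into or out of multi-stream mode and forces a retrain, while MST_VCPI programs a head's timeslot/PBN allocation through the new vcpi() hook. A hedged sketch of a caller, not nouveau's actual code — the wrapper name and parameters are illustrative, and only the struct layout and method number come from cl5070.h and the handler above:

        static int
        example_mst_vcpi(struct nvif_object *disp, u16 hasht, u16 hashm,
                         u8 start_slot, u8 num_slots, u16 pbn, u16 aligned_pbn)
        {
                struct {
                        struct nv50_disp_mthd_v1 base;
                        struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
                } args = {
                        .base.version     = 1,
                        .base.method      = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
                        .base.hasht       = hasht,  /* DCB hash identifying the SOR */
                        .base.hashm       = hashm,
                        .vcpi.start_slot  = start_slot,
                        .vcpi.num_slots   = num_slots,
                        .vcpi.pbn         = pbn,
                        .vcpi.aligned_pbn = aligned_pbn,
                };
                return nvif_mthd(disp, 0, &args, sizeof(args));
        }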
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c index 49bd5da194e1..6ffdaa65aa77 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c | |||
@@ -56,11 +56,13 @@ gf119_sor_dp_lnk_ctl(struct nvkm_output_dp *outp, int nr, int bw, bool ef) | |||
56 | 56 | ||
57 | clksor |= bw << 18; | 57 | clksor |= bw << 18; |
58 | dpctrl |= ((1 << nr) - 1) << 16; | 58 | dpctrl |= ((1 << nr) - 1) << 16; |
59 | if (outp->lt.mst) | ||
60 | dpctrl |= 0x40000000; | ||
59 | if (ef) | 61 | if (ef) |
60 | dpctrl |= 0x00004000; | 62 | dpctrl |= 0x00004000; |
61 | 63 | ||
62 | nvkm_mask(device, 0x612300 + soff, 0x007c0000, clksor); | 64 | nvkm_mask(device, 0x612300 + soff, 0x007c0000, clksor); |
63 | nvkm_mask(device, 0x61c10c + loff, 0x001f4000, dpctrl); | 65 | nvkm_mask(device, 0x61c10c + loff, 0x401f4000, dpctrl); |
64 | return 0; | 66 | return 0; |
65 | } | 67 | } |
66 | 68 | ||
@@ -101,12 +103,24 @@ gf119_sor_dp_drv_ctl(struct nvkm_output_dp *outp, | |||
101 | return 0; | 103 | return 0; |
102 | } | 104 | } |
103 | 105 | ||
106 | void | ||
107 | gf119_sor_dp_vcpi(struct nvkm_output_dp *outp, int head, u8 slot, | ||
108 | u8 slot_nr, u16 pbn, u16 aligned) | ||
109 | { | ||
110 | struct nvkm_device *device = outp->base.disp->engine.subdev.device; | ||
111 | const u32 hoff = head * 0x800; | ||
112 | |||
113 | nvkm_mask(device, 0x616588 + hoff, 0x00003f3f, (slot_nr << 8) | slot); | ||
114 | nvkm_mask(device, 0x61658c + hoff, 0xffffffff, (aligned << 16) | pbn); | ||
115 | } | ||
116 | |||
104 | static const struct nvkm_output_dp_func | 117 | static const struct nvkm_output_dp_func |
105 | gf119_sor_dp_func = { | 118 | gf119_sor_dp_func = { |
106 | .pattern = gf119_sor_dp_pattern, | 119 | .pattern = gf119_sor_dp_pattern, |
107 | .lnk_pwr = g94_sor_dp_lnk_pwr, | 120 | .lnk_pwr = g94_sor_dp_lnk_pwr, |
108 | .lnk_ctl = gf119_sor_dp_lnk_ctl, | 121 | .lnk_ctl = gf119_sor_dp_lnk_ctl, |
109 | .drv_ctl = gf119_sor_dp_drv_ctl, | 122 | .drv_ctl = gf119_sor_dp_drv_ctl, |
123 | .vcpi = gf119_sor_dp_vcpi, | ||
110 | }; | 124 | }; |
111 | 125 | ||
112 | int | 126 | int |
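gf119_sor_dp_vcpi() is the hardware end of the VCPI method: per-head registers at 0x616588/0x61658c take the start slot and slot count in the low two bytes of the first register and the PBN / aligned-PBN pair in the low and high halves of the second, and gf119_sor_dp_lnk_ctl() now also raises bit 30 of the SOR DP control word when lt.mst is set so the link runs in multi-stream mode. The gm107/gm200 function tables below simply reuse the gf119 implementation.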
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm107.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm107.c index 37790b2617c5..4cf8ad4d18ab 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm107.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm107.c | |||
@@ -43,6 +43,7 @@ gm107_sor_dp_func = { | |||
43 | .lnk_pwr = g94_sor_dp_lnk_pwr, | 43 | .lnk_pwr = g94_sor_dp_lnk_pwr, |
44 | .lnk_ctl = gf119_sor_dp_lnk_ctl, | 44 | .lnk_ctl = gf119_sor_dp_lnk_ctl, |
45 | .drv_ctl = gf119_sor_dp_drv_ctl, | 45 | .drv_ctl = gf119_sor_dp_drv_ctl, |
46 | .vcpi = gf119_sor_dp_vcpi, | ||
46 | }; | 47 | }; |
47 | 48 | ||
48 | int | 49 | int |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm200.c b/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm200.c index c44fa7ea672a..81b788fa61be 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm200.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm200.c | |||
@@ -120,6 +120,7 @@ gm200_sor_dp_func = { | |||
120 | .lnk_pwr = gm200_sor_dp_lnk_pwr, | 120 | .lnk_pwr = gm200_sor_dp_lnk_pwr, |
121 | .lnk_ctl = gf119_sor_dp_lnk_ctl, | 121 | .lnk_ctl = gf119_sor_dp_lnk_ctl, |
122 | .drv_ctl = gm200_sor_dp_drv_ctl, | 122 | .drv_ctl = gm200_sor_dp_drv_ctl, |
123 | .vcpi = gf119_sor_dp_vcpi, | ||
123 | }; | 124 | }; |
124 | 125 | ||
125 | int | 126 | int |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c b/drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c index aeb3387a3fb0..15a992b3580a 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c | |||
@@ -129,7 +129,7 @@ g84_fifo_chan_engine_fini(struct nvkm_fifo_chan *base, | |||
129 | } | 129 | } |
130 | 130 | ||
131 | 131 | ||
132 | int | 132 | static int |
133 | g84_fifo_chan_engine_init(struct nvkm_fifo_chan *base, | 133 | g84_fifo_chan_engine_init(struct nvkm_fifo_chan *base, |
134 | struct nvkm_engine *engine) | 134 | struct nvkm_engine *engine) |
135 | { | 135 | { |
@@ -170,7 +170,7 @@ g84_fifo_chan_engine_ctor(struct nvkm_fifo_chan *base, | |||
170 | return nvkm_object_bind(object, NULL, 0, &chan->engn[engn]); | 170 | return nvkm_object_bind(object, NULL, 0, &chan->engn[engn]); |
171 | } | 171 | } |
172 | 172 | ||
173 | int | 173 | static int |
174 | g84_fifo_chan_object_ctor(struct nvkm_fifo_chan *base, | 174 | g84_fifo_chan_object_ctor(struct nvkm_fifo_chan *base, |
175 | struct nvkm_object *object) | 175 | struct nvkm_object *object) |
176 | { | 176 | { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c b/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c index cbc67f262322..12d964260a29 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c | |||
@@ -60,6 +60,7 @@ gf100_fifo_gpfifo_engine_fini(struct nvkm_fifo_chan *base, | |||
60 | struct nvkm_gpuobj *inst = chan->base.inst; | 60 | struct nvkm_gpuobj *inst = chan->base.inst; |
61 | int ret = 0; | 61 | int ret = 0; |
62 | 62 | ||
63 | mutex_lock(&subdev->mutex); | ||
63 | nvkm_wr32(device, 0x002634, chan->base.chid); | 64 | nvkm_wr32(device, 0x002634, chan->base.chid); |
64 | if (nvkm_msec(device, 2000, | 65 | if (nvkm_msec(device, 2000, |
65 | if (nvkm_rd32(device, 0x002634) == chan->base.chid) | 66 | if (nvkm_rd32(device, 0x002634) == chan->base.chid) |
@@ -67,10 +68,12 @@ gf100_fifo_gpfifo_engine_fini(struct nvkm_fifo_chan *base, | |||
67 | ) < 0) { | 68 | ) < 0) { |
68 | nvkm_error(subdev, "channel %d [%s] kick timeout\n", | 69 | nvkm_error(subdev, "channel %d [%s] kick timeout\n", |
69 | chan->base.chid, chan->base.object.client->name); | 70 | chan->base.chid, chan->base.object.client->name); |
70 | ret = -EBUSY; | 71 | ret = -ETIMEDOUT; |
71 | if (suspend) | ||
72 | return ret; | ||
73 | } | 72 | } |
73 | mutex_unlock(&subdev->mutex); | ||
74 | |||
75 | if (ret && suspend) | ||
76 | return ret; | ||
74 | 77 | ||
75 | if (offset) { | 78 | if (offset) { |
76 | nvkm_kmap(inst); | 79 | nvkm_kmap(inst); |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c b/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c index ed4351032ed6..a2df4f3e7763 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c | |||
@@ -40,7 +40,9 @@ gk104_fifo_gpfifo_kick(struct gk104_fifo_chan *chan) | |||
40 | struct nvkm_subdev *subdev = &fifo->base.engine.subdev; | 40 | struct nvkm_subdev *subdev = &fifo->base.engine.subdev; |
41 | struct nvkm_device *device = subdev->device; | 41 | struct nvkm_device *device = subdev->device; |
42 | struct nvkm_client *client = chan->base.object.client; | 42 | struct nvkm_client *client = chan->base.object.client; |
43 | int ret = 0; | ||
43 | 44 | ||
45 | mutex_lock(&subdev->mutex); | ||
44 | nvkm_wr32(device, 0x002634, chan->base.chid); | 46 | nvkm_wr32(device, 0x002634, chan->base.chid); |
45 | if (nvkm_msec(device, 2000, | 47 | if (nvkm_msec(device, 2000, |
46 | if (!(nvkm_rd32(device, 0x002634) & 0x00100000)) | 48 | if (!(nvkm_rd32(device, 0x002634) & 0x00100000)) |
@@ -48,10 +50,10 @@ gk104_fifo_gpfifo_kick(struct gk104_fifo_chan *chan) | |||
48 | ) < 0) { | 50 | ) < 0) { |
49 | nvkm_error(subdev, "channel %d [%s] kick timeout\n", | 51 | nvkm_error(subdev, "channel %d [%s] kick timeout\n", |
50 | chan->base.chid, client->name); | 52 | chan->base.chid, client->name); |
51 | return -EBUSY; | 53 | ret = -ETIMEDOUT; |
52 | } | 54 | } |
53 | 55 | mutex_unlock(&subdev->mutex); | |
54 | return 0; | 56 | return ret; |
55 | } | 57 | } |
56 | 58 | ||
57 | static u32 | 59 | static u32 |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c b/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c index c925ade5880e..74a64e3fd59a 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf117.c | |||
@@ -218,7 +218,7 @@ gf117_grctx_generate_attrib(struct gf100_grctx *info) | |||
218 | } | 218 | } |
219 | } | 219 | } |
220 | 220 | ||
221 | void | 221 | static void |
222 | gf117_grctx_generate_main(struct gf100_gr *gr, struct gf100_grctx *info) | 222 | gf117_grctx_generate_main(struct gf100_gr *gr, struct gf100_grctx *info) |
223 | { | 223 | { |
224 | struct nvkm_device *device = gr->base.engine.subdev.device; | 224 | struct nvkm_device *device = gr->base.engine.subdev.device; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c b/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c index 6d3c5011e18c..4c4b5ab6e46d 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm107.c | |||
@@ -933,7 +933,7 @@ gm107_grctx_generate_attrib(struct gf100_grctx *info) | |||
933 | } | 933 | } |
934 | } | 934 | } |
935 | 935 | ||
936 | void | 936 | static void |
937 | gm107_grctx_generate_tpcid(struct gf100_gr *gr) | 937 | gm107_grctx_generate_tpcid(struct gf100_gr *gr) |
938 | { | 938 | { |
939 | struct nvkm_device *device = gr->base.engine.subdev.device; | 939 | struct nvkm_device *device = gr->base.engine.subdev.device; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c b/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c index 1e13278cf306..c8bb9191f9a2 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c | |||
@@ -106,6 +106,7 @@ | |||
106 | #define CP_SEEK_2 0x00c800ff | 106 | #define CP_SEEK_2 0x00c800ff |
107 | 107 | ||
108 | #include "ctxnv40.h" | 108 | #include "ctxnv40.h" |
109 | #include "nv50.h" | ||
109 | 110 | ||
110 | #include <subdev/fb.h> | 111 | #include <subdev/fb.h> |
111 | 112 | ||
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf100.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf100.fuc3.h index 8cb240b65ec2..12a703fe355d 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf100.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf100.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gf100_grgpc_data[] = { | 1 | static uint32_t gf100_grgpc_data[] = { |
2 | /* 0x0000: gpc_mmio_list_head */ | 2 | /* 0x0000: gpc_mmio_list_head */ |
3 | 0x00000064, | 3 | 0x00000064, |
4 | /* 0x0004: gpc_mmio_list_tail */ | 4 | /* 0x0004: gpc_mmio_list_tail */ |
@@ -36,7 +36,7 @@ uint32_t gf100_grgpc_data[] = { | |||
36 | 0x00000000, | 36 | 0x00000000, |
37 | }; | 37 | }; |
38 | 38 | ||
39 | uint32_t gf100_grgpc_code[] = { | 39 | static uint32_t gf100_grgpc_code[] = { |
40 | 0x03a10ef5, | 40 | 0x03a10ef5, |
41 | /* 0x0004: queue_put */ | 41 | /* 0x0004: queue_put */ |
42 | 0x9800d898, | 42 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf117.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf117.fuc3.h index 550d6ba0933b..ffbfc51200f1 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf117.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgf117.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gf117_grgpc_data[] = { | 1 | static uint32_t gf117_grgpc_data[] = { |
2 | /* 0x0000: gpc_mmio_list_head */ | 2 | /* 0x0000: gpc_mmio_list_head */ |
3 | 0x0000006c, | 3 | 0x0000006c, |
4 | /* 0x0004: gpc_mmio_list_tail */ | 4 | /* 0x0004: gpc_mmio_list_tail */ |
@@ -40,7 +40,7 @@ uint32_t gf117_grgpc_data[] = { | |||
40 | 0x00000000, | 40 | 0x00000000, |
41 | }; | 41 | }; |
42 | 42 | ||
43 | uint32_t gf117_grgpc_code[] = { | 43 | static uint32_t gf117_grgpc_code[] = { |
44 | 0x03a10ef5, | 44 | 0x03a10ef5, |
45 | /* 0x0004: queue_put */ | 45 | /* 0x0004: queue_put */ |
46 | 0x9800d898, | 46 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk104.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk104.fuc3.h index 271b59d365e5..357f662de571 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk104.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk104.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gk104_grgpc_data[] = { | 1 | static uint32_t gk104_grgpc_data[] = { |
2 | /* 0x0000: gpc_mmio_list_head */ | 2 | /* 0x0000: gpc_mmio_list_head */ |
3 | 0x0000006c, | 3 | 0x0000006c, |
4 | /* 0x0004: gpc_mmio_list_tail */ | 4 | /* 0x0004: gpc_mmio_list_tail */ |
@@ -40,7 +40,7 @@ uint32_t gk104_grgpc_data[] = { | |||
40 | 0x00000000, | 40 | 0x00000000, |
41 | }; | 41 | }; |
42 | 42 | ||
43 | uint32_t gk104_grgpc_code[] = { | 43 | static uint32_t gk104_grgpc_code[] = { |
44 | 0x03a10ef5, | 44 | 0x03a10ef5, |
45 | /* 0x0004: queue_put */ | 45 | /* 0x0004: queue_put */ |
46 | 0x9800d898, | 46 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk110.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk110.fuc3.h index 73b4a32c5d29..4ffc8212a85c 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk110.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk110.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gk110_grgpc_data[] = { | 1 | static uint32_t gk110_grgpc_data[] = { |
2 | /* 0x0000: gpc_mmio_list_head */ | 2 | /* 0x0000: gpc_mmio_list_head */ |
3 | 0x0000006c, | 3 | 0x0000006c, |
4 | /* 0x0004: gpc_mmio_list_tail */ | 4 | /* 0x0004: gpc_mmio_list_tail */ |
@@ -40,7 +40,7 @@ uint32_t gk110_grgpc_data[] = { | |||
40 | 0x00000000, | 40 | 0x00000000, |
41 | }; | 41 | }; |
42 | 42 | ||
43 | uint32_t gk110_grgpc_code[] = { | 43 | static uint32_t gk110_grgpc_code[] = { |
44 | 0x03a10ef5, | 44 | 0x03a10ef5, |
45 | /* 0x0004: queue_put */ | 45 | /* 0x0004: queue_put */ |
46 | 0x9800d898, | 46 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk208.fuc5.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk208.fuc5.h index 018169818317..09196206c9bc 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk208.fuc5.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgk208.fuc5.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gk208_grgpc_data[] = { | 1 | static uint32_t gk208_grgpc_data[] = { |
2 | /* 0x0000: gpc_mmio_list_head */ | 2 | /* 0x0000: gpc_mmio_list_head */ |
3 | 0x0000006c, | 3 | 0x0000006c, |
4 | /* 0x0004: gpc_mmio_list_tail */ | 4 | /* 0x0004: gpc_mmio_list_tail */ |
@@ -40,7 +40,7 @@ uint32_t gk208_grgpc_data[] = { | |||
40 | 0x00000000, | 40 | 0x00000000, |
41 | }; | 41 | }; |
42 | 42 | ||
43 | uint32_t gk208_grgpc_code[] = { | 43 | static uint32_t gk208_grgpc_code[] = { |
44 | 0x03140ef5, | 44 | 0x03140ef5, |
45 | /* 0x0004: queue_put */ | 45 | /* 0x0004: queue_put */ |
46 | 0x9800d898, | 46 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgm107.fuc5.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgm107.fuc5.h index eca007f03fa9..6d7d004363d9 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgm107.fuc5.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/gpcgm107.fuc5.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gm107_grgpc_data[] = { | 1 | static uint32_t gm107_grgpc_data[] = { |
2 | /* 0x0000: gpc_mmio_list_head */ | 2 | /* 0x0000: gpc_mmio_list_head */ |
3 | 0x0000006c, | 3 | 0x0000006c, |
4 | /* 0x0004: gpc_mmio_list_tail */ | 4 | /* 0x0004: gpc_mmio_list_tail */ |
@@ -40,7 +40,7 @@ uint32_t gm107_grgpc_data[] = { | |||
40 | 0x00000000, | 40 | 0x00000000, |
41 | }; | 41 | }; |
42 | 42 | ||
43 | uint32_t gm107_grgpc_code[] = { | 43 | static uint32_t gm107_grgpc_code[] = { |
44 | 0x03410ef5, | 44 | 0x03410ef5, |
45 | /* 0x0004: queue_put */ | 45 | /* 0x0004: queue_put */ |
46 | 0x9800d898, | 46 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf100.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf100.fuc3.h index 8015b40a61d6..7538404b8b13 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf100.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf100.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gf100_grhub_data[] = { | 1 | static uint32_t gf100_grhub_data[] = { |
2 | /* 0x0000: hub_mmio_list_head */ | 2 | /* 0x0000: hub_mmio_list_head */ |
3 | 0x00000300, | 3 | 0x00000300, |
4 | /* 0x0004: hub_mmio_list_tail */ | 4 | /* 0x0004: hub_mmio_list_tail */ |
@@ -205,7 +205,7 @@ uint32_t gf100_grhub_data[] = { | |||
205 | 0x0417e91c, | 205 | 0x0417e91c, |
206 | }; | 206 | }; |
207 | 207 | ||
208 | uint32_t gf100_grhub_code[] = { | 208 | static uint32_t gf100_grhub_code[] = { |
209 | 0x039b0ef5, | 209 | 0x039b0ef5, |
210 | /* 0x0004: queue_put */ | 210 | /* 0x0004: queue_put */ |
211 | 0x9800d898, | 211 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf117.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf117.fuc3.h index 2af90ec6852a..ce000a47ec6d 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf117.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgf117.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gf117_grhub_data[] = { | 1 | static uint32_t gf117_grhub_data[] = { |
2 | /* 0x0000: hub_mmio_list_head */ | 2 | /* 0x0000: hub_mmio_list_head */ |
3 | 0x00000300, | 3 | 0x00000300, |
4 | /* 0x0004: hub_mmio_list_tail */ | 4 | /* 0x0004: hub_mmio_list_tail */ |
@@ -205,7 +205,7 @@ uint32_t gf117_grhub_data[] = { | |||
205 | 0x0417e91c, | 205 | 0x0417e91c, |
206 | }; | 206 | }; |
207 | 207 | ||
208 | uint32_t gf117_grhub_code[] = { | 208 | static uint32_t gf117_grhub_code[] = { |
209 | 0x039b0ef5, | 209 | 0x039b0ef5, |
210 | /* 0x0004: queue_put */ | 210 | /* 0x0004: queue_put */ |
211 | 0x9800d898, | 211 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk104.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk104.fuc3.h index e8b8c1c94700..1f26cb6a233c 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk104.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk104.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gk104_grhub_data[] = { | 1 | static uint32_t gk104_grhub_data[] = { |
2 | /* 0x0000: hub_mmio_list_head */ | 2 | /* 0x0000: hub_mmio_list_head */ |
3 | 0x00000300, | 3 | 0x00000300, |
4 | /* 0x0004: hub_mmio_list_tail */ | 4 | /* 0x0004: hub_mmio_list_tail */ |
@@ -205,7 +205,7 @@ uint32_t gk104_grhub_data[] = { | |||
205 | 0x0417e91c, | 205 | 0x0417e91c, |
206 | }; | 206 | }; |
207 | 207 | ||
208 | uint32_t gk104_grhub_code[] = { | 208 | static uint32_t gk104_grhub_code[] = { |
209 | 0x039b0ef5, | 209 | 0x039b0ef5, |
210 | /* 0x0004: queue_put */ | 210 | /* 0x0004: queue_put */ |
211 | 0x9800d898, | 211 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk110.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk110.fuc3.h index f4ed2fb6f714..70436d93efe3 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk110.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk110.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gk110_grhub_data[] = { | 1 | static uint32_t gk110_grhub_data[] = { |
2 | /* 0x0000: hub_mmio_list_head */ | 2 | /* 0x0000: hub_mmio_list_head */ |
3 | 0x00000300, | 3 | 0x00000300, |
4 | /* 0x0004: hub_mmio_list_tail */ | 4 | /* 0x0004: hub_mmio_list_tail */ |
@@ -205,7 +205,7 @@ uint32_t gk110_grhub_data[] = { | |||
205 | 0x0417e91c, | 205 | 0x0417e91c, |
206 | }; | 206 | }; |
207 | 207 | ||
208 | uint32_t gk110_grhub_code[] = { | 208 | static uint32_t gk110_grhub_code[] = { |
209 | 0x039b0ef5, | 209 | 0x039b0ef5, |
210 | /* 0x0004: queue_put */ | 210 | /* 0x0004: queue_put */ |
211 | 0x9800d898, | 211 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk208.fuc5.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk208.fuc5.h index ed488973c117..e0933a07426a 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk208.fuc5.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgk208.fuc5.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gk208_grhub_data[] = { | 1 | static uint32_t gk208_grhub_data[] = { |
2 | /* 0x0000: hub_mmio_list_head */ | 2 | /* 0x0000: hub_mmio_list_head */ |
3 | 0x00000300, | 3 | 0x00000300, |
4 | /* 0x0004: hub_mmio_list_tail */ | 4 | /* 0x0004: hub_mmio_list_tail */ |
@@ -205,7 +205,7 @@ uint32_t gk208_grhub_data[] = { | |||
205 | 0x0417e91c, | 205 | 0x0417e91c, |
206 | }; | 206 | }; |
207 | 207 | ||
208 | uint32_t gk208_grhub_code[] = { | 208 | static uint32_t gk208_grhub_code[] = { |
209 | 0x030e0ef5, | 209 | 0x030e0ef5, |
210 | /* 0x0004: queue_put */ | 210 | /* 0x0004: queue_put */ |
211 | 0x9800d898, | 211 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgm107.fuc5.h b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgm107.fuc5.h index 5c9051839557..9b432823bcbe 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgm107.fuc5.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/hubgm107.fuc5.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gm107_grhub_data[] = { | 1 | static uint32_t gm107_grhub_data[] = { |
2 | /* 0x0000: hub_mmio_list_head */ | 2 | /* 0x0000: hub_mmio_list_head */ |
3 | 0x00000300, | 3 | 0x00000300, |
4 | /* 0x0004: hub_mmio_list_tail */ | 4 | /* 0x0004: hub_mmio_list_tail */ |
@@ -205,7 +205,7 @@ uint32_t gm107_grhub_data[] = { | |||
205 | 0x0417e91c, | 205 | 0x0417e91c, |
206 | }; | 206 | }; |
207 | 207 | ||
208 | uint32_t gm107_grhub_code[] = { | 208 | static uint32_t gm107_grhub_code[] = { |
209 | 0x030e0ef5, | 209 | 0x030e0ef5, |
210 | /* 0x0004: queue_put */ | 210 | /* 0x0004: queue_put */ |
211 | 0x9800d898, | 211 | 0x9800d898, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c b/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c index 157919c788e6..60a1b5c8214b 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c | |||
@@ -1384,7 +1384,7 @@ gf100_gr_intr(struct nvkm_gr *base) | |||
1384 | nvkm_fifo_chan_put(device->fifo, flags, &chan); | 1384 | nvkm_fifo_chan_put(device->fifo, flags, &chan); |
1385 | } | 1385 | } |
1386 | 1386 | ||
1387 | void | 1387 | static void |
1388 | gf100_gr_init_fw(struct gf100_gr *gr, u32 fuc_base, | 1388 | gf100_gr_init_fw(struct gf100_gr *gr, u32 fuc_base, |
1389 | struct gf100_gr_fuc *code, struct gf100_gr_fuc *data) | 1389 | struct gf100_gr_fuc *code, struct gf100_gr_fuc *data) |
1390 | { | 1390 | { |
@@ -1701,7 +1701,7 @@ gf100_gr_oneinit(struct nvkm_gr *base) | |||
1701 | return 0; | 1701 | return 0; |
1702 | } | 1702 | } |
1703 | 1703 | ||
1704 | int | 1704 | static int |
1705 | gf100_gr_init_(struct nvkm_gr *base) | 1705 | gf100_gr_init_(struct nvkm_gr *base) |
1706 | { | 1706 | { |
1707 | struct gf100_gr *gr = gf100_gr(base); | 1707 | struct gf100_gr *gr = gf100_gr(base); |
@@ -1756,6 +1756,50 @@ gf100_gr_ = { | |||
1756 | }; | 1756 | }; |
1757 | 1757 | ||
1758 | int | 1758 | int |
1759 | gf100_gr_ctor_fw_legacy(struct gf100_gr *gr, const char *fwname, | ||
1760 | struct gf100_gr_fuc *fuc, int ret) | ||
1761 | { | ||
1762 | struct nvkm_subdev *subdev = &gr->base.engine.subdev; | ||
1763 | struct nvkm_device *device = subdev->device; | ||
1764 | const struct firmware *fw; | ||
1765 | char f[32]; | ||
1766 | |||
1767 | /* see if this firmware has a legacy path */ | ||
1768 | if (!strcmp(fwname, "fecs_inst")) | ||
1769 | fwname = "fuc409c"; | ||
1770 | else if (!strcmp(fwname, "fecs_data")) | ||
1771 | fwname = "fuc409d"; | ||
1772 | else if (!strcmp(fwname, "gpccs_inst")) | ||
1773 | fwname = "fuc41ac"; | ||
1774 | else if (!strcmp(fwname, "gpccs_data")) | ||
1775 | fwname = "fuc41ad"; | ||
1776 | else { | ||
1777 | /* nope, let's just return the error we got */ | ||
1778 | nvkm_error(subdev, "failed to load %s\n", fwname); | ||
1779 | return ret; | ||
1780 | } | ||
1781 | |||
1782 | /* yes, try to load from the legacy path */ | ||
1783 | nvkm_debug(subdev, "%s: falling back to legacy path\n", fwname); | ||
1784 | |||
1785 | snprintf(f, sizeof(f), "nouveau/nv%02x_%s", device->chipset, fwname); | ||
1786 | ret = request_firmware(&fw, f, device->dev); | ||
1787 | if (ret) { | ||
1788 | snprintf(f, sizeof(f), "nouveau/%s", fwname); | ||
1789 | ret = request_firmware(&fw, f, device->dev); | ||
1790 | if (ret) { | ||
1791 | nvkm_error(subdev, "failed to load %s\n", fwname); | ||
1792 | return ret; | ||
1793 | } | ||
1794 | } | ||
1795 | |||
1796 | fuc->size = fw->size; | ||
1797 | fuc->data = kmemdup(fw->data, fuc->size, GFP_KERNEL); | ||
1798 | release_firmware(fw); | ||
1799 | return (fuc->data != NULL) ? 0 : -ENOMEM; | ||
1800 | } | ||
1801 | |||
1802 | int | ||
1759 | gf100_gr_ctor_fw(struct gf100_gr *gr, const char *fwname, | 1803 | gf100_gr_ctor_fw(struct gf100_gr *gr, const char *fwname, |
1760 | struct gf100_gr_fuc *fuc) | 1804 | struct gf100_gr_fuc *fuc) |
1761 | { | 1805 | { |
@@ -1765,10 +1809,8 @@ gf100_gr_ctor_fw(struct gf100_gr *gr, const char *fwname, | |||
1765 | int ret; | 1809 | int ret; |
1766 | 1810 | ||
1767 | ret = nvkm_firmware_get(device, fwname, &fw); | 1811 | ret = nvkm_firmware_get(device, fwname, &fw); |
1768 | if (ret) { | 1812 | if (ret) |
1769 | nvkm_error(subdev, "failed to load %s\n", fwname); | 1813 | return gf100_gr_ctor_fw_legacy(gr, fwname, fuc, ret); |
1770 | return ret; | ||
1771 | } | ||
1772 | 1814 | ||
1773 | fuc->size = fw->size; | 1815 | fuc->size = fw->size; |
1774 | fuc->data = kmemdup(fw->data, fuc->size, GFP_KERNEL); | 1816 | fuc->data = kmemdup(fw->data, fuc->size, GFP_KERNEL); |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf117.c b/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf117.c index 70335f65c51e..0124e468086e 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf117.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/gf117.c | |||
@@ -102,7 +102,7 @@ gf117_gr_pack_mmio[] = { | |||
102 | 102 | ||
103 | #include "fuc/hubgf117.fuc3.h" | 103 | #include "fuc/hubgf117.fuc3.h" |
104 | 104 | ||
105 | struct gf100_gr_ucode | 105 | static struct gf100_gr_ucode |
106 | gf117_gr_fecs_ucode = { | 106 | gf117_gr_fecs_ucode = { |
107 | .code.data = gf117_grhub_code, | 107 | .code.data = gf117_grhub_code, |
108 | .code.size = sizeof(gf117_grhub_code), | 108 | .code.size = sizeof(gf117_grhub_code), |
@@ -112,7 +112,7 @@ gf117_gr_fecs_ucode = { | |||
112 | 112 | ||
113 | #include "fuc/gpcgf117.fuc3.h" | 113 | #include "fuc/gpcgf117.fuc3.h" |
114 | 114 | ||
115 | struct gf100_gr_ucode | 115 | static struct gf100_gr_ucode |
116 | gf117_gr_gpccs_ucode = { | 116 | gf117_gr_gpccs_ucode = { |
117 | .code.data = gf117_grgpc_code, | 117 | .code.data = gf117_grgpc_code, |
118 | .code.size = sizeof(gf117_grgpc_code), | 118 | .code.size = sizeof(gf117_grgpc_code), |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/gr/gm107.c b/drivers/gpu/drm/nouveau/nvkm/engine/gr/gm107.c index 45f965f608a7..2c67fac576d1 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/gr/gm107.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/gr/gm107.c | |||
@@ -308,7 +308,7 @@ gm107_gr_init_bios(struct gf100_gr *gr) | |||
308 | } | 308 | } |
309 | } | 309 | } |
310 | 310 | ||
311 | int | 311 | static int |
312 | gm107_gr_init(struct gf100_gr *gr) | 312 | gm107_gr_init(struct gf100_gr *gr) |
313 | { | 313 | { |
314 | struct nvkm_device *device = gr->base.engine.subdev.device; | 314 | struct nvkm_device *device = gr->base.engine.subdev.device; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c b/drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c index 8616636ad7b4..dde89a4a0f5b 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/pm/base.c | |||
@@ -71,7 +71,7 @@ nvkm_perfdom_find(struct nvkm_pm *pm, int di) | |||
71 | return NULL; | 71 | return NULL; |
72 | } | 72 | } |
73 | 73 | ||
74 | struct nvkm_perfsig * | 74 | static struct nvkm_perfsig * |
75 | nvkm_perfsig_find(struct nvkm_pm *pm, u8 di, u8 si, struct nvkm_perfdom **pdom) | 75 | nvkm_perfsig_find(struct nvkm_pm *pm, u8 di, u8 si, struct nvkm_perfdom **pdom) |
76 | { | 76 | { |
77 | struct nvkm_perfdom *dom = *pdom; | 77 | struct nvkm_perfdom *dom = *pdom; |
@@ -699,7 +699,7 @@ nvkm_pm_oclass_get(struct nvkm_oclass *oclass, int index, | |||
699 | return 1; | 699 | return 1; |
700 | } | 700 | } |
701 | 701 | ||
702 | int | 702 | static int |
703 | nvkm_perfsrc_new(struct nvkm_pm *pm, struct nvkm_perfsig *sig, | 703 | nvkm_perfsrc_new(struct nvkm_pm *pm, struct nvkm_perfsig *sig, |
704 | const struct nvkm_specsrc *spec) | 704 | const struct nvkm_specsrc *spec) |
705 | { | 705 | { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c b/drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c index d2901e9a7808..fe2532ee4145 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c +++ b/drivers/gpu/drm/nouveau/nvkm/engine/pm/gf100.c | |||
@@ -102,7 +102,7 @@ gf100_pm_gpc[] = { | |||
102 | {} | 102 | {} |
103 | }; | 103 | }; |
104 | 104 | ||
105 | const struct nvkm_specdom | 105 | static const struct nvkm_specdom |
106 | gf100_pm_part[] = { | 106 | gf100_pm_part[] = { |
107 | { 0xe0, (const struct nvkm_specsig[]) { | 107 | { 0xe0, (const struct nvkm_specsig[]) { |
108 | { 0x0f, "part00_pbfb_00", gf100_pbfb_sources }, | 108 | { 0x0f, "part00_pbfb_00", gf100_pbfb_sources }, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/engine/sec/fuc/g98.fuc0s.h b/drivers/gpu/drm/nouveau/nvkm/engine/sec/fuc/g98.fuc0s.h index eca62221f299..4b57f8814560 100644 --- a/drivers/gpu/drm/nouveau/nvkm/engine/sec/fuc/g98.fuc0s.h +++ b/drivers/gpu/drm/nouveau/nvkm/engine/sec/fuc/g98.fuc0s.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t g98_sec_data[] = { | 1 | static uint32_t g98_sec_data[] = { |
2 | /* 0x0000: ctx_dma */ | 2 | /* 0x0000: ctx_dma */ |
3 | /* 0x0000: ctx_dma_query */ | 3 | /* 0x0000: ctx_dma_query */ |
4 | 0x00000000, | 4 | 0x00000000, |
@@ -150,7 +150,7 @@ uint32_t g98_sec_data[] = { | |||
150 | 0x00000000, | 150 | 0x00000000, |
151 | }; | 151 | }; |
152 | 152 | ||
153 | uint32_t g98_sec_code[] = { | 153 | static uint32_t g98_sec_code[] = { |
154 | 0x17f004bd, | 154 | 0x17f004bd, |
155 | 0x0010fe35, | 155 | 0x0010fe35, |
156 | 0xf10004fe, | 156 | 0xf10004fe, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/fan.c b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/fan.c index 80fed7e78dcb..e2905815049b 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/fan.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/fan.c | |||
@@ -25,7 +25,7 @@ | |||
25 | #include <subdev/bios/bit.h> | 25 | #include <subdev/bios/bit.h> |
26 | #include <subdev/bios/fan.h> | 26 | #include <subdev/bios/fan.h> |
27 | 27 | ||
28 | u16 | 28 | static u16 |
29 | nvbios_fan_table(struct nvkm_bios *bios, u8 *ver, u8 *hdr, u8 *cnt, u8 *len) | 29 | nvbios_fan_table(struct nvkm_bios *bios, u8 *ver, u8 *hdr, u8 *cnt, u8 *len) |
30 | { | 30 | { |
31 | struct bit_entry bit_P; | 31 | struct bit_entry bit_P; |
@@ -52,7 +52,7 @@ nvbios_fan_table(struct nvkm_bios *bios, u8 *ver, u8 *hdr, u8 *cnt, u8 *len) | |||
52 | return 0x0000; | 52 | return 0x0000; |
53 | } | 53 | } |
54 | 54 | ||
55 | u16 | 55 | static u16 |
56 | nvbios_fan_entry(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr, | 56 | nvbios_fan_entry(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr, |
57 | u8 *cnt, u8 *len) | 57 | u8 *cnt, u8 *len) |
58 | { | 58 | { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/priv.h b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/priv.h index 212800ecdce9..7d1d3c6b4b72 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/priv.h +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/priv.h | |||
@@ -12,6 +12,7 @@ struct nvbios_source { | |||
12 | bool rw; | 12 | bool rw; |
13 | bool ignore_checksum; | 13 | bool ignore_checksum; |
14 | bool no_pcir; | 14 | bool no_pcir; |
15 | bool require_checksum; | ||
15 | }; | 16 | }; |
16 | 17 | ||
17 | int nvbios_extend(struct nvkm_bios *, u32 length); | 18 | int nvbios_extend(struct nvkm_bios *, u32 length); |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadow.c b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadow.c index b2557e87afdd..7deb81b6dbac 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadow.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadow.c | |||
@@ -86,9 +86,12 @@ shadow_image(struct nvkm_bios *bios, int idx, u32 offset, struct shadow *mthd) | |||
86 | nvbios_checksum(&bios->data[image.base], image.size)) { | 86 | nvbios_checksum(&bios->data[image.base], image.size)) { |
87 | nvkm_debug(subdev, "%08x: checksum failed\n", | 87 | nvkm_debug(subdev, "%08x: checksum failed\n", |
88 | image.base); | 88 | image.base); |
89 | if (mthd->func->rw) | 89 | if (!mthd->func->require_checksum) { |
90 | if (mthd->func->rw) | ||
91 | score += 1; | ||
90 | score += 1; | 92 | score += 1; |
91 | score += 1; | 93 | } else |
94 | return 0; | ||
92 | } else { | 95 | } else { |
93 | score += 3; | 96 | score += 3; |
94 | } | 97 | } |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadowacpi.c b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadowacpi.c index 8fecb5ff22a0..06572f8ce914 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadowacpi.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/bios/shadowacpi.c | |||
@@ -99,6 +99,7 @@ nvbios_acpi_fast = { | |||
99 | .init = acpi_init, | 99 | .init = acpi_init, |
100 | .read = acpi_read_fast, | 100 | .read = acpi_read_fast, |
101 | .rw = false, | 101 | .rw = false, |
102 | .require_checksum = true, | ||
102 | }; | 103 | }; |
103 | 104 | ||
104 | const struct nvbios_source | 105 | const struct nvbios_source |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c b/drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c index 056702ef69aa..96e0941c8edd 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c | |||
@@ -180,7 +180,7 @@ gt215_clk_read(struct nvkm_clk *base, enum nv_clk_src src) | |||
180 | return 0; | 180 | return 0; |
181 | } | 181 | } |
182 | 182 | ||
183 | int | 183 | static int |
184 | gt215_clk_info(struct nvkm_clk *base, int idx, u32 khz, | 184 | gt215_clk_info(struct nvkm_clk *base, int idx, u32 khz, |
185 | struct gt215_clk_info *info) | 185 | struct gt215_clk_info *info) |
186 | { | 186 | { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/Kbuild b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/Kbuild index edcc157e6ac8..ef47d57fcb87 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/Kbuild +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/Kbuild | |||
@@ -24,6 +24,7 @@ nvkm-y += nvkm/subdev/fb/gk104.o | |||
24 | nvkm-y += nvkm/subdev/fb/gk20a.o | 24 | nvkm-y += nvkm/subdev/fb/gk20a.o |
25 | nvkm-y += nvkm/subdev/fb/gm107.o | 25 | nvkm-y += nvkm/subdev/fb/gm107.o |
26 | nvkm-y += nvkm/subdev/fb/gm200.o | 26 | nvkm-y += nvkm/subdev/fb/gm200.o |
27 | nvkm-y += nvkm/subdev/fb/gm20b.o | ||
27 | nvkm-y += nvkm/subdev/fb/gp100.o | 28 | nvkm-y += nvkm/subdev/fb/gp100.o |
28 | nvkm-y += nvkm/subdev/fb/gp104.o | 29 | nvkm-y += nvkm/subdev/fb/gp104.o |
29 | 30 | ||
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gf100.h b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gf100.h index 449f431644b3..412eb89834e8 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gf100.h +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gf100.h | |||
@@ -16,4 +16,8 @@ void gf100_fb_init(struct nvkm_fb *); | |||
16 | void gf100_fb_intr(struct nvkm_fb *); | 16 | void gf100_fb_intr(struct nvkm_fb *); |
17 | 17 | ||
18 | void gp100_fb_init(struct nvkm_fb *); | 18 | void gp100_fb_init(struct nvkm_fb *); |
19 | |||
20 | void gm200_fb_init_page(struct nvkm_fb *fb); | ||
21 | void gm200_fb_init(struct nvkm_fb *base); | ||
22 | |||
19 | #endif | 23 | #endif |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gk20a.c b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gk20a.c index f815fe2bbf08..5d34d6136616 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gk20a.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gk20a.c | |||
@@ -1,5 +1,5 @@ | |||
1 | /* | 1 | /* |
2 | * Copyright (c) 2014, NVIDIA CORPORATION. All rights reserved. | 2 | * Copyright (c) 2014-2016, NVIDIA CORPORATION. All rights reserved. |
3 | * | 3 | * |
4 | * Permission is hereby granted, free of charge, to any person obtaining a | 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
5 | * copy of this software and associated documentation files (the "Software"), | 5 | * copy of this software and associated documentation files (the "Software"), |
@@ -20,27 +20,21 @@ | |||
20 | * DEALINGS IN THE SOFTWARE. | 20 | * DEALINGS IN THE SOFTWARE. |
21 | */ | 21 | */ |
22 | #include "priv.h" | 22 | #include "priv.h" |
23 | #include "gf100.h" | ||
23 | 24 | ||
24 | #include <core/memory.h> | 25 | /* GK20A's FB is similar to GF100's, but without the ability to allocate VRAM */ |
25 | |||
26 | static void | ||
27 | gk20a_fb_init(struct nvkm_fb *fb) | ||
28 | { | ||
29 | struct nvkm_device *device = fb->subdev.device; | ||
30 | nvkm_wr32(device, 0x100cc8, nvkm_memory_addr(fb->mmu_wr) >> 8); | ||
31 | nvkm_wr32(device, 0x100ccc, nvkm_memory_addr(fb->mmu_rd) >> 8); | ||
32 | } | ||
33 | |||
34 | static const struct nvkm_fb_func | 26 | static const struct nvkm_fb_func |
35 | gk20a_fb = { | 27 | gk20a_fb = { |
28 | .dtor = gf100_fb_dtor, | ||
36 | .oneinit = gf100_fb_oneinit, | 29 | .oneinit = gf100_fb_oneinit, |
37 | .init = gk20a_fb_init, | 30 | .init = gf100_fb_init, |
38 | .init_page = gf100_fb_init_page, | 31 | .init_page = gf100_fb_init_page, |
32 | .intr = gf100_fb_intr, | ||
39 | .memtype_valid = gf100_fb_memtype_valid, | 33 | .memtype_valid = gf100_fb_memtype_valid, |
40 | }; | 34 | }; |
41 | 35 | ||
42 | int | 36 | int |
43 | gk20a_fb_new(struct nvkm_device *device, int index, struct nvkm_fb **pfb) | 37 | gk20a_fb_new(struct nvkm_device *device, int index, struct nvkm_fb **pfb) |
44 | { | 38 | { |
45 | return nvkm_fb_new_(&gk20a_fb, device, index, pfb); | 39 | return gf100_fb_new_(&gk20a_fb, device, index, pfb); |
46 | } | 40 | } |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gm200.c b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gm200.c index 62f653240be3..fe5886013ac0 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gm200.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gm200.c | |||
@@ -44,7 +44,7 @@ gm200_fb_init_page(struct nvkm_fb *fb) | |||
44 | } | 44 | } |
45 | } | 45 | } |
46 | 46 | ||
47 | static void | 47 | void |
48 | gm200_fb_init(struct nvkm_fb *base) | 48 | gm200_fb_init(struct nvkm_fb *base) |
49 | { | 49 | { |
50 | struct gf100_fb *fb = gf100_fb(base); | 50 | struct gf100_fb *fb = gf100_fb(base); |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gm20b.c b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gm20b.c new file mode 100644 index 000000000000..b87c233bcd6d --- /dev/null +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/gm20b.c | |||
@@ -0,0 +1,40 @@ | |||
1 | /* | ||
2 | * Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved. | ||
3 | * | ||
4 | * Permission is hereby granted, free of charge, to any person obtaining a | ||
5 | * copy of this software and associated documentation files (the "Software"), | ||
6 | * to deal in the Software without restriction, including without limitation | ||
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | ||
8 | * and/or sell copies of the Software, and to permit persons to whom the | ||
9 | * Software is furnished to do so, subject to the following conditions: | ||
10 | * | ||
11 | * The above copyright notice and this permission notice shall be included in | ||
12 | * all copies or substantial portions of the Software. | ||
13 | * | ||
14 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
15 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
16 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | ||
17 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
18 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | ||
19 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER | ||
20 | * DEALINGS IN THE SOFTWARE. | ||
21 | */ | ||
22 | #include "priv.h" | ||
23 | #include "gf100.h" | ||
24 | |||
25 | /* GM20B's FB is similar to GM200, but without the ability to allocate VRAM */ | ||
26 | static const struct nvkm_fb_func | ||
27 | gm20b_fb = { | ||
28 | .dtor = gf100_fb_dtor, | ||
29 | .oneinit = gf100_fb_oneinit, | ||
30 | .init = gm200_fb_init, | ||
31 | .init_page = gm200_fb_init_page, | ||
32 | .intr = gf100_fb_intr, | ||
33 | .memtype_valid = gf100_fb_memtype_valid, | ||
34 | }; | ||
35 | |||
36 | int | ||
37 | gm20b_fb_new(struct nvkm_device *device, int index, struct nvkm_fb **pfb) | ||
38 | { | ||
39 | return gf100_fb_new_(&gm20b_fb, device, index, pfb); | ||
40 | } | ||
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgf100.c b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgf100.c index 772425ca5a9e..093223d1df4f 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgf100.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgf100.c | |||
@@ -420,8 +420,6 @@ gf100_ram_tidy(struct nvkm_ram *base) | |||
420 | ram_exec(&ram->fuc, false); | 420 | ram_exec(&ram->fuc, false); |
421 | } | 421 | } |
422 | 422 | ||
423 | extern const u8 gf100_pte_storage_type_map[256]; | ||
424 | |||
425 | void | 423 | void |
426 | gf100_ram_put(struct nvkm_ram *ram, struct nvkm_mem **pmem) | 424 | gf100_ram_put(struct nvkm_ram *ram, struct nvkm_mem **pmem) |
427 | { | 425 | { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c index d15ea886df27..f10664372161 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c | |||
@@ -95,7 +95,7 @@ struct gt215_ram { | |||
95 | struct gt215_ltrain ltrain; | 95 | struct gt215_ltrain ltrain; |
96 | }; | 96 | }; |
97 | 97 | ||
98 | void | 98 | static void |
99 | gt215_link_train_calc(u32 *vals, struct gt215_ltrain *train) | 99 | gt215_link_train_calc(u32 *vals, struct gt215_ltrain *train) |
100 | { | 100 | { |
101 | int i, lo, hi; | 101 | int i, lo, hi; |
@@ -149,7 +149,7 @@ gt215_link_train_calc(u32 *vals, struct gt215_ltrain *train) | |||
149 | /* | 149 | /* |
150 | * Link training for (at least) DDR3 | 150 | * Link training for (at least) DDR3 |
151 | */ | 151 | */ |
152 | int | 152 | static int |
153 | gt215_link_train(struct gt215_ram *ram) | 153 | gt215_link_train(struct gt215_ram *ram) |
154 | { | 154 | { |
155 | struct gt215_ltrain *train = &ram->ltrain; | 155 | struct gt215_ltrain *train = &ram->ltrain; |
@@ -267,7 +267,7 @@ out: | |||
267 | return ret; | 267 | return ret; |
268 | } | 268 | } |
269 | 269 | ||
270 | int | 270 | static int |
271 | gt215_link_train_init(struct gt215_ram *ram) | 271 | gt215_link_train_init(struct gt215_ram *ram) |
272 | { | 272 | { |
273 | static const u32 pattern[16] = { | 273 | static const u32 pattern[16] = { |
@@ -333,7 +333,7 @@ gt215_link_train_init(struct gt215_ram *ram) | |||
333 | return 0; | 333 | return 0; |
334 | } | 334 | } |
335 | 335 | ||
336 | void | 336 | static void |
337 | gt215_link_train_fini(struct gt215_ram *ram) | 337 | gt215_link_train_fini(struct gt215_ram *ram) |
338 | { | 338 | { |
339 | if (ram->ltrain.mem) | 339 | if (ram->ltrain.mem) |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c index b9f1ffdfc602..4dcd8742f2da 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c | |||
@@ -23,6 +23,7 @@ | |||
23 | * Ben Skeggs | 23 | * Ben Skeggs |
24 | */ | 24 | */ |
25 | #include "priv.h" | 25 | #include "priv.h" |
26 | #include "ram.h" | ||
26 | 27 | ||
27 | struct ramxlat { | 28 | struct ramxlat { |
28 | int id; | 29 | int id; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c index 26900333b1d6..eca8a445eab3 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c | |||
@@ -23,6 +23,7 @@ | |||
23 | * Roy Spliet <rspliet@eclipso.eu> | 23 | * Roy Spliet <rspliet@eclipso.eu> |
24 | */ | 24 | */ |
25 | #include "priv.h" | 25 | #include "priv.h" |
26 | #include "ram.h" | ||
26 | 27 | ||
27 | struct ramxlat { | 28 | struct ramxlat { |
28 | int id; | 29 | int id; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/gk104.c b/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/gk104.c index 3f45afd17d5a..2ead515b8530 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/gk104.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/gk104.c | |||
@@ -37,7 +37,7 @@ gk104_gpio_intr_stat(struct nvkm_gpio *gpio, u32 *hi, u32 *lo) | |||
37 | nvkm_wr32(device, 0x00dc80, intr1); | 37 | nvkm_wr32(device, 0x00dc80, intr1); |
38 | } | 38 | } |
39 | 39 | ||
40 | void | 40 | static void |
41 | gk104_gpio_intr_mask(struct nvkm_gpio *gpio, u32 type, u32 mask, u32 data) | 41 | gk104_gpio_intr_mask(struct nvkm_gpio *gpio, u32 type, u32 mask, u32 data) |
42 | { | 42 | { |
43 | struct nvkm_device *device = gpio->subdev.device; | 43 | struct nvkm_device *device = gpio->subdev.device; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/aux.c b/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/aux.c index f0851d57df2f..01d5c5a56e2e 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/aux.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/aux.c | |||
@@ -74,7 +74,7 @@ nvkm_i2c_aux_i2c_func(struct i2c_adapter *adap) | |||
74 | return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL; | 74 | return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL; |
75 | } | 75 | } |
76 | 76 | ||
77 | const struct i2c_algorithm | 77 | static const struct i2c_algorithm |
78 | nvkm_i2c_aux_i2c_algo = { | 78 | nvkm_i2c_aux_i2c_algo = { |
79 | .master_xfer = nvkm_i2c_aux_i2c_xfer, | 79 | .master_xfer = nvkm_i2c_aux_i2c_xfer, |
80 | .functionality = nvkm_i2c_aux_i2c_func | 80 | .functionality = nvkm_i2c_aux_i2c_func |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxg94.c b/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxg94.c index 954f5b76bfcf..b80236a4eeac 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxg94.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxg94.c | |||
@@ -79,7 +79,7 @@ g94_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, | |||
79 | struct g94_i2c_aux *aux = g94_i2c_aux(obj); | 79 | struct g94_i2c_aux *aux = g94_i2c_aux(obj); |
80 | struct nvkm_device *device = aux->base.pad->i2c->subdev.device; | 80 | struct nvkm_device *device = aux->base.pad->i2c->subdev.device; |
81 | const u32 base = aux->ch * 0x50; | 81 | const u32 base = aux->ch * 0x50; |
82 | u32 ctrl, stat, timeout, retries; | 82 | u32 ctrl, stat, timeout, retries = 0; |
83 | u32 xbuf[4] = {}; | 83 | u32 xbuf[4] = {}; |
84 | int ret, i; | 84 | int ret, i; |
85 | 85 | ||
@@ -111,7 +111,7 @@ g94_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, | |||
111 | nvkm_wr32(device, 0x00e4e0 + base, addr); | 111 | nvkm_wr32(device, 0x00e4e0 + base, addr); |
112 | 112 | ||
113 | /* (maybe) retry transaction a number of times on failure... */ | 113 | /* (maybe) retry transaction a number of times on failure... */ |
114 | for (retries = 0; !ret && retries < 32; retries++) { | 114 | do { |
115 | /* reset, and delay a while if this is a retry */ | 115 | /* reset, and delay a while if this is a retry */ |
116 | nvkm_wr32(device, 0x00e4e4 + base, 0x80000000 | ctrl); | 116 | nvkm_wr32(device, 0x00e4e4 + base, 0x80000000 | ctrl); |
117 | nvkm_wr32(device, 0x00e4e4 + base, 0x00000000 | ctrl); | 117 | nvkm_wr32(device, 0x00e4e4 + base, 0x00000000 | ctrl); |
@@ -131,20 +131,20 @@ g94_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, | |||
131 | goto out; | 131 | goto out; |
132 | } | 132 | } |
133 | } while (ctrl & 0x00010000); | 133 | } while (ctrl & 0x00010000); |
134 | ret = 1; | 134 | ret = 0; |
135 | 135 | ||
136 | /* read status, and check if transaction completed ok */ | 136 | /* read status, and check if transaction completed ok */ |
137 | stat = nvkm_mask(device, 0x00e4e8 + base, 0, 0); | 137 | stat = nvkm_mask(device, 0x00e4e8 + base, 0, 0); |
138 | if ((stat & 0x000f0000) == 0x00080000 || | 138 | if ((stat & 0x000f0000) == 0x00080000 || |
139 | (stat & 0x000f0000) == 0x00020000) | 139 | (stat & 0x000f0000) == 0x00020000) |
140 | ret = retry ? 0 : 1; | 140 | ret = 1; |
141 | if ((stat & 0x00000100)) | 141 | if ((stat & 0x00000100)) |
142 | ret = -ETIMEDOUT; | 142 | ret = -ETIMEDOUT; |
143 | if ((stat & 0x00000e00)) | 143 | if ((stat & 0x00000e00)) |
144 | ret = -EIO; | 144 | ret = -EIO; |
145 | 145 | ||
146 | AUX_TRACE(&aux->base, "%02d %08x %08x", retries, ctrl, stat); | 146 | AUX_TRACE(&aux->base, "%02d %08x %08x", retries, ctrl, stat); |
147 | } | 147 | } while (ret && retry && retries++ < 32); |
148 | 148 | ||
149 | if (type & 1) { | 149 | if (type & 1) { |
150 | for (i = 0; i < 16; i += 4) { | 150 | for (i = 0; i < 16; i += 4) { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxgm200.c b/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxgm200.c index 61d729b82c69..ed458c7f056b 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxgm200.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/auxgm200.c | |||
@@ -79,7 +79,7 @@ gm200_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, | |||
79 | struct gm200_i2c_aux *aux = gm200_i2c_aux(obj); | 79 | struct gm200_i2c_aux *aux = gm200_i2c_aux(obj); |
80 | struct nvkm_device *device = aux->base.pad->i2c->subdev.device; | 80 | struct nvkm_device *device = aux->base.pad->i2c->subdev.device; |
81 | const u32 base = aux->ch * 0x50; | 81 | const u32 base = aux->ch * 0x50; |
82 | u32 ctrl, stat, timeout, retries; | 82 | u32 ctrl, stat, timeout, retries = 0; |
83 | u32 xbuf[4] = {}; | 83 | u32 xbuf[4] = {}; |
84 | int ret, i; | 84 | int ret, i; |
85 | 85 | ||
@@ -111,7 +111,7 @@ gm200_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, | |||
111 | nvkm_wr32(device, 0x00d950 + base, addr); | 111 | nvkm_wr32(device, 0x00d950 + base, addr); |
112 | 112 | ||
113 | /* (maybe) retry transaction a number of times on failure... */ | 113 | /* (maybe) retry transaction a number of times on failure... */ |
114 | for (retries = 0; !ret && retries < 32; retries++) { | 114 | do { |
115 | /* reset, and delay a while if this is a retry */ | 115 | /* reset, and delay a while if this is a retry */ |
116 | nvkm_wr32(device, 0x00d954 + base, 0x80000000 | ctrl); | 116 | nvkm_wr32(device, 0x00d954 + base, 0x80000000 | ctrl); |
117 | nvkm_wr32(device, 0x00d954 + base, 0x00000000 | ctrl); | 117 | nvkm_wr32(device, 0x00d954 + base, 0x00000000 | ctrl); |
@@ -131,20 +131,20 @@ gm200_i2c_aux_xfer(struct nvkm_i2c_aux *obj, bool retry, | |||
131 | goto out; | 131 | goto out; |
132 | } | 132 | } |
133 | } while (ctrl & 0x00010000); | 133 | } while (ctrl & 0x00010000); |
134 | ret = 1; | 134 | ret = 0; |
135 | 135 | ||
136 | /* read status, and check if transaction completed ok */ | 136 | /* read status, and check if transaction completed ok */ |
137 | stat = nvkm_mask(device, 0x00d958 + base, 0, 0); | 137 | stat = nvkm_mask(device, 0x00d958 + base, 0, 0); |
138 | if ((stat & 0x000f0000) == 0x00080000 || | 138 | if ((stat & 0x000f0000) == 0x00080000 || |
139 | (stat & 0x000f0000) == 0x00020000) | 139 | (stat & 0x000f0000) == 0x00020000) |
140 | ret = retry ? 0 : 1; | 140 | ret = 1; |
141 | if ((stat & 0x00000100)) | 141 | if ((stat & 0x00000100)) |
142 | ret = -ETIMEDOUT; | 142 | ret = -ETIMEDOUT; |
143 | if ((stat & 0x00000e00)) | 143 | if ((stat & 0x00000e00)) |
144 | ret = -EIO; | 144 | ret = -EIO; |
145 | 145 | ||
146 | AUX_TRACE(&aux->base, "%02d %08x %08x", retries, ctrl, stat); | 146 | AUX_TRACE(&aux->base, "%02d %08x %08x", retries, ctrl, stat); |
147 | } | 147 | } while (ret && retry && retries++ < 32); |
148 | 148 | ||
149 | if (type & 1) { | 149 | if (type & 1) { |
150 | for (i = 0; i < 16; i += 4) { | 150 | for (i = 0; i < 16; i += 4) { |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/iccsense/base.c b/drivers/gpu/drm/nouveau/nvkm/subdev/iccsense/base.c index 658355fc9354..f0af2a381eea 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/iccsense/base.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/iccsense/base.c | |||
@@ -288,7 +288,8 @@ nvkm_iccsense_init(struct nvkm_subdev *subdev) | |||
288 | return 0; | 288 | return 0; |
289 | } | 289 | } |
290 | 290 | ||
291 | struct nvkm_subdev_func iccsense_func = { | 291 | static const struct nvkm_subdev_func |
292 | iccsense_func = { | ||
292 | .oneinit = nvkm_iccsense_oneinit, | 293 | .oneinit = nvkm_iccsense_oneinit, |
293 | .init = nvkm_iccsense_init, | 294 | .init = nvkm_iccsense_init, |
294 | .dtor = nvkm_iccsense_dtor, | 295 | .dtor = nvkm_iccsense_dtor, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c b/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c index 8ed8f65ff664..10c987a654ec 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c | |||
@@ -104,7 +104,7 @@ nvkm_instobj_dtor(struct nvkm_memory *memory) | |||
104 | return iobj; | 104 | return iobj; |
105 | } | 105 | } |
106 | 106 | ||
107 | const struct nvkm_memory_func | 107 | static const struct nvkm_memory_func |
108 | nvkm_instobj_func = { | 108 | nvkm_instobj_func = { |
109 | .dtor = nvkm_instobj_dtor, | 109 | .dtor = nvkm_instobj_dtor, |
110 | .target = nvkm_instobj_target, | 110 | .target = nvkm_instobj_target, |
@@ -156,7 +156,7 @@ nvkm_instobj_wr32_slow(struct nvkm_memory *memory, u64 offset, u32 data) | |||
156 | return nvkm_wo32(iobj->parent, offset, data); | 156 | return nvkm_wo32(iobj->parent, offset, data); |
157 | } | 157 | } |
158 | 158 | ||
159 | const struct nvkm_memory_func | 159 | static const struct nvkm_memory_func |
160 | nvkm_instobj_func_slow = { | 160 | nvkm_instobj_func_slow = { |
161 | .dtor = nvkm_instobj_dtor, | 161 | .dtor = nvkm_instobj_dtor, |
162 | .target = nvkm_instobj_target, | 162 | .target = nvkm_instobj_target, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/mc/g84.c b/drivers/gpu/drm/nouveau/nvkm/subdev/mc/g84.c index c3d66ef5dc12..430a61c3df44 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/mc/g84.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/mc/g84.c | |||
@@ -34,7 +34,7 @@ g84_mc_reset[] = { | |||
34 | {} | 34 | {} |
35 | }; | 35 | }; |
36 | 36 | ||
37 | const struct nvkm_mc_map | 37 | static const struct nvkm_mc_map |
38 | g84_mc_intr[] = { | 38 | g84_mc_intr[] = { |
39 | { 0x04000000, NVKM_ENGINE_DISP }, | 39 | { 0x04000000, NVKM_ENGINE_DISP }, |
40 | { 0x00020000, NVKM_ENGINE_VP }, | 40 | { 0x00020000, NVKM_ENGINE_VP }, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c b/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c index 21b65ee254e4..e3e2f5e83815 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/base.c | |||
@@ -250,6 +250,10 @@ nvkm_mxm_new_(struct nvkm_device *device, int index, struct nvkm_mxm **pmxm) | |||
250 | } | 250 | } |
251 | 251 | ||
252 | nvkm_info(&mxm->subdev, "BIOS version %d.%d\n", ver >> 4, ver & 0x0f); | 252 | nvkm_info(&mxm->subdev, "BIOS version %d.%d\n", ver >> 4, ver & 0x0f); |
253 | nvkm_debug(&mxm->subdev, "module flags: %02x\n", | ||
254 | nvbios_rd08(bios, data + 0x01)); | ||
255 | nvkm_debug(&mxm->subdev, "config flags: %02x\n", | ||
256 | nvbios_rd08(bios, data + 0x02)); | ||
253 | 257 | ||
254 | if (mxm_shadow(mxm, ver)) { | 258 | if (mxm_shadow(mxm, ver)) { |
255 | nvkm_warn(&mxm->subdev, "failed to locate valid SIS\n"); | 259 | nvkm_warn(&mxm->subdev, "failed to locate valid SIS\n"); |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf100.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf100.fuc3.h index e2faccffee6f..0bcf0b307a61 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf100.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf100.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gf100_pmu_data[] = { | 1 | static uint32_t gf100_pmu_data[] = { |
2 | /* 0x0000: proc_kern */ | 2 | /* 0x0000: proc_kern */ |
3 | 0x52544e49, | 3 | 0x52544e49, |
4 | 0x00000000, | 4 | 0x00000000, |
@@ -916,7 +916,7 @@ uint32_t gf100_pmu_data[] = { | |||
916 | 0x00000000, | 916 | 0x00000000, |
917 | }; | 917 | }; |
918 | 918 | ||
919 | uint32_t gf100_pmu_code[] = { | 919 | static uint32_t gf100_pmu_code[] = { |
920 | 0x03920ef5, | 920 | 0x03920ef5, |
921 | /* 0x0004: rd32 */ | 921 | /* 0x0004: rd32 */ |
922 | 0x07a007f1, | 922 | 0x07a007f1, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf119.fuc4.h b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf119.fuc4.h index 2d5bdc539697..fe8905666c67 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf119.fuc4.h +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gf119.fuc4.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gf119_pmu_data[] = { | 1 | static uint32_t gf119_pmu_data[] = { |
2 | /* 0x0000: proc_kern */ | 2 | /* 0x0000: proc_kern */ |
3 | 0x52544e49, | 3 | 0x52544e49, |
4 | 0x00000000, | 4 | 0x00000000, |
@@ -915,7 +915,7 @@ uint32_t gf119_pmu_data[] = { | |||
915 | 0x00000000, | 915 | 0x00000000, |
916 | }; | 916 | }; |
917 | 917 | ||
918 | uint32_t gf119_pmu_code[] = { | 918 | static uint32_t gf119_pmu_code[] = { |
919 | 0x03410ef5, | 919 | 0x03410ef5, |
920 | /* 0x0004: rd32 */ | 920 | /* 0x0004: rd32 */ |
921 | 0x07a007f1, | 921 | 0x07a007f1, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gk208.fuc5.h b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gk208.fuc5.h index 3c731ff12871..9cf4e6fc724e 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gk208.fuc5.h +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gk208.fuc5.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gk208_pmu_data[] = { | 1 | static uint32_t gk208_pmu_data[] = { |
2 | /* 0x0000: proc_kern */ | 2 | /* 0x0000: proc_kern */ |
3 | 0x52544e49, | 3 | 0x52544e49, |
4 | 0x00000000, | 4 | 0x00000000, |
@@ -915,7 +915,7 @@ uint32_t gk208_pmu_data[] = { | |||
915 | 0x00000000, | 915 | 0x00000000, |
916 | }; | 916 | }; |
917 | 917 | ||
918 | uint32_t gk208_pmu_code[] = { | 918 | static uint32_t gk208_pmu_code[] = { |
919 | 0x02f90ef5, | 919 | 0x02f90ef5, |
920 | /* 0x0004: rd32 */ | 920 | /* 0x0004: rd32 */ |
921 | 0xf607a040, | 921 | 0xf607a040, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gt215.fuc3.h b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gt215.fuc3.h index e83341815ec6..5d692425b190 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gt215.fuc3.h +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/gt215.fuc3.h | |||
@@ -1,4 +1,4 @@ | |||
1 | uint32_t gt215_pmu_data[] = { | 1 | static uint32_t gt215_pmu_data[] = { |
2 | /* 0x0000: proc_kern */ | 2 | /* 0x0000: proc_kern */ |
3 | 0x52544e49, | 3 | 0x52544e49, |
4 | 0x00000000, | 4 | 0x00000000, |
@@ -916,7 +916,7 @@ uint32_t gt215_pmu_data[] = { | |||
916 | 0x00000000, | 916 | 0x00000000, |
917 | }; | 917 | }; |
918 | 918 | ||
919 | uint32_t gt215_pmu_code[] = { | 919 | static uint32_t gt215_pmu_code[] = { |
920 | 0x03920ef5, | 920 | 0x03920ef5, |
921 | /* 0x0004: rd32 */ | 921 | /* 0x0004: rd32 */ |
922 | 0x07a007f1, | 922 | 0x07a007f1, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/gt215.c b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/gt215.c index 8ba7fa4ca75b..dcf9eaf274aa 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/gt215.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/gt215.c | |||
@@ -24,17 +24,8 @@ | |||
24 | #include "priv.h" | 24 | #include "priv.h" |
25 | #include "fuc/gt215.fuc3.h" | 25 | #include "fuc/gt215.fuc3.h" |
26 | 26 | ||
27 | static void | ||
28 | gt215_pmu_reset(struct nvkm_pmu *pmu) | ||
29 | { | ||
30 | struct nvkm_device *device = pmu->subdev.device; | ||
31 | nvkm_mask(device, 0x022210, 0x00000001, 0x00000000); | ||
32 | nvkm_mask(device, 0x022210, 0x00000001, 0x00000001); | ||
33 | } | ||
34 | |||
35 | static const struct nvkm_pmu_func | 27 | static const struct nvkm_pmu_func |
36 | gt215_pmu = { | 28 | gt215_pmu = { |
37 | .reset = gt215_pmu_reset, | ||
38 | .code.data = gt215_pmu_code, | 29 | .code.data = gt215_pmu_code, |
39 | .code.size = sizeof(gt215_pmu_code), | 30 | .code.size = sizeof(gt215_pmu_code), |
40 | .data.data = gt215_pmu_data, | 31 | .data.data = gt215_pmu_data, |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/priv.h b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/priv.h index f38c88fae3d6..73b811ccc2d5 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/priv.h +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/priv.h | |||
@@ -8,8 +8,6 @@ int nvkm_pmu_new_(const struct nvkm_pmu_func *, struct nvkm_device *, | |||
8 | int index, struct nvkm_pmu **); | 8 | int index, struct nvkm_pmu **); |
9 | 9 | ||
10 | struct nvkm_pmu_func { | 10 | struct nvkm_pmu_func { |
11 | void (*reset)(struct nvkm_pmu *); | ||
12 | |||
13 | struct { | 11 | struct { |
14 | u32 *data; | 12 | u32 *data; |
15 | u32 size; | 13 | u32 size; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c b/drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c index f1e2dc914366..ec48e4ace37a 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c | |||
@@ -1364,7 +1364,7 @@ gm200_secboot_init(struct nvkm_secboot *sb) | |||
1364 | return 0; | 1364 | return 0; |
1365 | } | 1365 | } |
1366 | 1366 | ||
1367 | int | 1367 | static int |
1368 | gm200_secboot_fini(struct nvkm_secboot *sb, bool suspend) | 1368 | gm200_secboot_fini(struct nvkm_secboot *sb, bool suspend) |
1369 | { | 1369 | { |
1370 | struct gm200_secboot *gsb = gm200_secboot(sb); | 1370 | struct gm200_secboot *gsb = gm200_secboot(sb); |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gk104.c b/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gk104.c index b2c5d1166a13..1c744e029454 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gk104.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gk104.c | |||
@@ -35,7 +35,7 @@ struct gk104_volt { | |||
35 | struct nvbios_volt bios; | 35 | struct nvbios_volt bios; |
36 | }; | 36 | }; |
37 | 37 | ||
38 | int | 38 | static int |
39 | gk104_volt_get(struct nvkm_volt *base) | 39 | gk104_volt_get(struct nvkm_volt *base) |
40 | { | 40 | { |
41 | struct nvbios_volt *bios = &gk104_volt(base)->bios; | 41 | struct nvbios_volt *bios = &gk104_volt(base)->bios; |
@@ -48,7 +48,7 @@ gk104_volt_get(struct nvkm_volt *base) | |||
48 | return bios->base + bios->pwm_range * duty / div; | 48 | return bios->base + bios->pwm_range * duty / div; |
49 | } | 49 | } |
50 | 50 | ||
51 | int | 51 | static int |
52 | gk104_volt_set(struct nvkm_volt *base, u32 uv) | 52 | gk104_volt_set(struct nvkm_volt *base, u32 uv) |
53 | { | 53 | { |
54 | struct nvbios_volt *bios = &gk104_volt(base)->bios; | 54 | struct nvbios_volt *bios = &gk104_volt(base)->bios; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gm20b.c b/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gm20b.c index 74db4d28930f..2925b9cae681 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gm20b.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gm20b.c | |||
@@ -25,7 +25,7 @@ | |||
25 | 25 | ||
26 | #include <core/tegra.h> | 26 | #include <core/tegra.h> |
27 | 27 | ||
28 | const struct cvb_coef gm20b_cvb_coef[] = { | 28 | static const struct cvb_coef gm20b_cvb_coef[] = { |
29 | /* KHz, c0, c1, c2 */ | 29 | /* KHz, c0, c1, c2 */ |
30 | /* 76800 */ { 1786666, -85625, 1632 }, | 30 | /* 76800 */ { 1786666, -85625, 1632 }, |
31 | /* 153600 */ { 1846729, -87525, 1632 }, | 31 | /* 153600 */ { 1846729, -87525, 1632 }, |
@@ -58,7 +58,7 @@ static const struct cvb_coef gm20b_na_cvb_coef[] = { | |||
58 | /* 998400 */ { 1316991, 8144, -940, 808, -21583, 226 }, | 58 | /* 998400 */ { 1316991, 8144, -940, 808, -21583, 226 }, |
59 | }; | 59 | }; |
60 | 60 | ||
61 | const u32 speedo_to_vmin[] = { | 61 | static const u32 speedo_to_vmin[] = { |
62 | /* 0, 1, 2, 3, 4, */ | 62 | /* 0, 1, 2, 3, 4, */ |
63 | 950000, 840000, 818750, 840000, 810000, | 63 | 950000, 840000, 818750, 840000, 810000, |
64 | }; | 64 | }; |
diff --git a/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gpio.c b/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gpio.c index d2bac1d77819..443c031b966b 100644 --- a/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gpio.c +++ b/drivers/gpu/drm/nouveau/nvkm/subdev/volt/gpio.c | |||
@@ -25,6 +25,7 @@ | |||
25 | #include <subdev/bios.h> | 25 | #include <subdev/bios.h> |
26 | #include <subdev/bios/gpio.h> | 26 | #include <subdev/bios/gpio.h> |
27 | #include <subdev/gpio.h> | 27 | #include <subdev/gpio.h> |
28 | #include "priv.h" | ||
28 | 29 | ||
29 | static const u8 tags[] = { | 30 | static const u8 tags[] = { |
30 | DCB_GPIO_VID0, DCB_GPIO_VID1, DCB_GPIO_VID2, DCB_GPIO_VID3, | 31 | DCB_GPIO_VID0, DCB_GPIO_VID1, DCB_GPIO_VID2, DCB_GPIO_VID3, |