author     Dave Airlie <airlied@redhat.com>    2018-06-21 22:56:48 -0400
committer  Dave Airlie <airlied@redhat.com>    2018-06-21 22:58:08 -0400
commit     f4366e44efeb895c358fddd11f9ecee81bdad06b (patch)
tree       1f4bfe33e8d2f93f3a654dedc58e0eaaa3165003
parent     3069290d9d6a9afa93661c299419089eea57164b (diff)
parent     c612ae0503af753c062594dcd14aecea121fa414 (diff)
Merge tag 'drm-misc-next-2018-06-21' of git://anongit.freedesktop.org/drm/drm-misc into drm-next
drm-misc-next for 4.19:

UAPI Changes:
- Add writeback connector (Brian Starkey/Liviu Dudau)
- Add "content type" property to HDMI connectors (Stanislav Lisovskiy)

Cross-subsystem Changes:
- some devicetree Docs update
- fix compile breakage on ION due to the dma-buf cleanups (Christian König)

Core Changes:
- Reject over-sized allocation requests early (Chris Wilson)
- gem-fb-helper: Always do implicit sync (Daniel Vetter)
- dma-buf cleanups (Christian König)

Driver Changes:
- Fixes for the otm8009a panel driver (Philippe Cornu)
- Add Innolux TV123WAM panel driver support (Sandeep Panda)
- Move GEM BO to drm_framebuffer in few drivers (Daniel Stone)
- i915 pinning improvements (Chris Wilson)
- Stop consulting plane->fb/crtc in a few drivers (Ville Syrjälä)

Signed-off-by: Dave Airlie <airlied@redhat.com>
Link: https://patchwork.freedesktop.org/patch/msgid/20180621105428.GA20795@juma
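The two UAPI additions called out above (the writeback connector and the HDMI
"content type" property) are exposed to userspace as regular KMS connector
properties. The following is a minimal, illustrative libdrm sketch of how a
compositor might drive them; the property names ("WRITEBACK_FB_ID",
"WRITEBACK_OUT_FENCE_PTR", "content type") come from the patches in this pull,
while the prop_id() helper, the object IDs and the missing error handling are
assumptions made purely for the example and are not part of this series.

/*
 * Illustrative sketch only: queue a one-shot writeback via libdrm's atomic
 * API. Object IDs are placeholders; a real compositor discovers them through
 * drmModeGetResources() and also has to set up a CRTC and primary plane.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

/* Hypothetical helper: look up a connector property ID by name. */
static uint32_t prop_id(int fd, uint32_t conn_id, const char *name)
{
	drmModeObjectProperties *props =
		drmModeObjectGetProperties(fd, conn_id, DRM_MODE_OBJECT_CONNECTOR);
	uint32_t id = 0;

	for (uint32_t i = 0; props && i < props->count_props; i++) {
		drmModePropertyRes *p = drmModeGetProperty(fd, props->props[i]);

		if (p && !strcmp(p->name, name))
			id = p->prop_id;
		drmModeFreeProperty(p);
	}
	drmModeFreeObjectProperties(props);
	return id;
}

static int queue_writeback(int fd, uint32_t wb_conn, uint32_t crtc_id, uint32_t fb_id)
{
	drmModeAtomicReq *req = drmModeAtomicAlloc();
	int32_t out_fence = -1;
	int ret;

	/* Bind the writeback connector to an active CRTC, attach the target
	 * framebuffer and ask for an out-fence that signals once the
	 * write-out has completed. The framebuffer is one-shot: the property
	 * reads back as 0 after the commit has been processed. */
	drmModeAtomicAddProperty(req, wb_conn, prop_id(fd, wb_conn, "CRTC_ID"), crtc_id);
	drmModeAtomicAddProperty(req, wb_conn, prop_id(fd, wb_conn, "WRITEBACK_FB_ID"), fb_id);
	drmModeAtomicAddProperty(req, wb_conn, prop_id(fd, wb_conn, "WRITEBACK_OUT_FENCE_PTR"),
				 (uint64_t)(uintptr_t)&out_fence);

	/* An HDMI connector would be tagged the same way, e.g.
	 * prop_id(fd, hdmi_conn, "content type") set to one of the
	 * DRM_MODE_CONTENT_TYPE_* values this series adds to drm_mode.h. */

	ret = drmModeAtomicCommit(fd, req, DRM_MODE_ATOMIC_ALLOW_MODESET, NULL);
	drmModeAtomicFree(req);
	if (!ret)
		printf("writeback queued, out-fence fd %d\n", (int)out_fence);
	return ret;
}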
-rw-r--r--Documentation/devicetree/bindings/display/panel/auo,g070vvn01.txt29
-rw-r--r--Documentation/devicetree/bindings/display/panel/innolux,tv123wam.txt20
-rw-r--r--Documentation/gpu/drm-kms.rst15
-rw-r--r--Documentation/gpu/kms-properties.csv1
-rw-r--r--drivers/dma-buf/dma-buf.c56
-rw-r--r--drivers/gpu/drm/Makefile2
-rw-r--r--drivers/gpu/drm/amd/amdgpu/amdgpu_prime.c5
-rw-r--r--drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c2
-rw-r--r--drivers/gpu/drm/arc/arcpgu_crtc.c3
-rw-r--r--drivers/gpu/drm/armada/armada_fb.c23
-rw-r--r--drivers/gpu/drm/armada/armada_fb.h3
-rw-r--r--drivers/gpu/drm/armada/armada_gem.c2
-rw-r--r--drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_dc.c1
-rw-r--r--drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c19
-rw-r--r--drivers/gpu/drm/bridge/Kconfig4
-rw-r--r--drivers/gpu/drm/bridge/cdns-dsi.c4
-rw-r--r--drivers/gpu/drm/cirrus/cirrus_drv.h10
-rw-r--r--drivers/gpu/drm/cirrus/cirrus_fbdev.c20
-rw-r--r--drivers/gpu/drm/cirrus/cirrus_main.c43
-rw-r--r--drivers/gpu/drm/cirrus/cirrus_mode.c12
-rw-r--r--drivers/gpu/drm/drm_atomic.c391
-rw-r--r--drivers/gpu/drm/drm_atomic_helper.c40
-rw-r--r--drivers/gpu/drm/drm_connector.c121
-rw-r--r--drivers/gpu/drm/drm_crtc.c35
-rw-r--r--drivers/gpu/drm/drm_crtc_internal.h3
-rw-r--r--drivers/gpu/drm/drm_edid.c279
-rw-r--r--drivers/gpu/drm/drm_fb_helper.c9
-rw-r--r--drivers/gpu/drm/drm_framebuffer.c5
-rw-r--r--drivers/gpu/drm/drm_gem_framebuffer_helper.c2
-rw-r--r--drivers/gpu/drm/drm_ioctl.c7
-rw-r--r--drivers/gpu/drm/drm_mm.c91
-rw-r--r--drivers/gpu/drm/drm_mode_config.c5
-rw-r--r--drivers/gpu/drm/drm_modes.c2
-rw-r--r--drivers/gpu/drm/drm_panel.c16
-rw-r--r--drivers/gpu/drm/drm_plane.c41
-rw-r--r--drivers/gpu/drm/drm_plane_helper.c4
-rw-r--r--drivers/gpu/drm/drm_prime.c34
-rw-r--r--drivers/gpu/drm/drm_vm.c10
-rw-r--r--drivers/gpu/drm/drm_writeback.c350
-rw-r--r--drivers/gpu/drm/exynos/exynos_drm_plane.c2
-rw-r--r--drivers/gpu/drm/gma500/accel_2d.c2
-rw-r--r--drivers/gpu/drm/gma500/framebuffer.c62
-rw-r--r--drivers/gpu/drm/gma500/framebuffer.h1
-rw-r--r--drivers/gpu/drm/gma500/gma_display.c10
-rw-r--r--drivers/gpu/drm/gma500/gtt.h2
-rw-r--r--drivers/gpu/drm/gma500/mdfld_intel_display.c2
-rw-r--r--drivers/gpu/drm/gma500/oaktrail_crtc.c3
-rw-r--r--drivers/gpu/drm/gma500/psb_intel_sdvo.c11
-rw-r--r--drivers/gpu/drm/i2c/tda998x_drv.c13
-rw-r--r--drivers/gpu/drm/i915/i915_gem_dmabuf.c11
-rw-r--r--drivers/gpu/drm/i915/i915_gem_gtt.c11
-rw-r--r--drivers/gpu/drm/i915/intel_atomic.c1
-rw-r--r--drivers/gpu/drm/i915/intel_atomic_plane.c12
-rw-r--r--drivers/gpu/drm/i915/intel_display.c123
-rw-r--r--drivers/gpu/drm/i915/intel_drv.h1
-rw-r--r--drivers/gpu/drm/i915/intel_hdmi.c17
-rw-r--r--drivers/gpu/drm/i915/intel_ringbuffer.c2
-rw-r--r--drivers/gpu/drm/i915/intel_sprite.c127
-rw-r--r--drivers/gpu/drm/i915/selftests/mock_dmabuf.c14
-rw-r--r--drivers/gpu/drm/mediatek/mtk_drm_fb.c76
-rw-r--r--drivers/gpu/drm/mediatek/mtk_drm_fb.h1
-rw-r--r--drivers/gpu/drm/mediatek/mtk_drm_plane.c7
-rw-r--r--drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c3
-rw-r--r--drivers/gpu/drm/msm/disp/mdp4/mdp4_plane.c2
-rw-r--r--drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c1
-rw-r--r--drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c4
-rw-r--r--drivers/gpu/drm/msm/msm_fb.c54
-rw-r--r--drivers/gpu/drm/omapdrm/omap_fb.c109
-rw-r--r--drivers/gpu/drm/omapdrm/omap_fb.h2
-rw-r--r--drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c2
-rw-r--r--drivers/gpu/drm/panel/panel-innolux-p079zca.c1
-rw-r--r--drivers/gpu/drm/panel/panel-jdi-lt070me05000.c1
-rw-r--r--drivers/gpu/drm/panel/panel-lvds.c1
-rw-r--r--drivers/gpu/drm/panel/panel-orisetech-otm8009a.c58
-rw-r--r--drivers/gpu/drm/panel/panel-panasonic-vvx10f034n00.c1
-rw-r--r--drivers/gpu/drm/panel/panel-seiko-43wvf1g.c1
-rw-r--r--drivers/gpu/drm/panel/panel-sharp-lq101r1sx01.c1
-rw-r--r--drivers/gpu/drm/panel/panel-sharp-ls043t1le01.c1
-rw-r--r--drivers/gpu/drm/panel/panel-simple.c65
-rw-r--r--drivers/gpu/drm/panel/panel-sitronix-st7789v.c1
-rw-r--r--drivers/gpu/drm/rockchip/cdn-dp-reg.c16
-rw-r--r--drivers/gpu/drm/rockchip/rockchip_drm_fb.c86
-rw-r--r--drivers/gpu/drm/rockchip/rockchip_drm_fb.h3
-rw-r--r--drivers/gpu/drm/rockchip/rockchip_drm_vop.c73
-rw-r--r--drivers/gpu/drm/rockchip/rockchip_lvds.c6
-rw-r--r--drivers/gpu/drm/selftests/drm_mm_selftests.h2
-rw-r--r--drivers/gpu/drm/selftests/test-drm_mm.c71
-rw-r--r--drivers/gpu/drm/sti/sti_gdp.c6
-rw-r--r--drivers/gpu/drm/sun4i/sun6i_mipi_dsi.c4
-rw-r--r--drivers/gpu/drm/tegra/gem.c14
-rw-r--r--drivers/gpu/drm/udl/udl_dmabuf.c18
-rw-r--r--drivers/gpu/drm/udl/udl_drv.h3
-rw-r--r--drivers/gpu/drm/udl/udl_gem.c15
-rw-r--r--drivers/gpu/drm/v3d/v3d_sched.c4
-rw-r--r--drivers/gpu/drm/vc4/vc4_crtc.c3
-rw-r--r--drivers/gpu/drm/vc4/vc4_plane.c96
-rw-r--r--drivers/gpu/drm/vc4/vc4_regs.h6
-rw-r--r--drivers/gpu/drm/vgem/vgem_drv.c5
-rw-r--r--drivers/gpu/drm/virtio/virtgpu_display.c32
-rw-r--r--drivers/gpu/drm/virtio/virtgpu_drv.h1
-rw-r--r--drivers/gpu/drm/virtio/virtgpu_fb.c8
-rw-r--r--drivers/gpu/drm/virtio/virtgpu_plane.c4
-rw-r--r--drivers/gpu/drm/vmwgfx/vmwgfx_fb.c25
-rw-r--r--drivers/gpu/drm/vmwgfx/vmwgfx_kms.c20
-rw-r--r--drivers/gpu/drm/vmwgfx/vmwgfx_prime.c14
-rw-r--r--drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c2
-rw-r--r--drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c5
-rw-r--r--drivers/gpu/drm/xen/xen_drm_front.c2
-rw-r--r--drivers/gpu/drm/xen/xen_drm_front.h4
-rw-r--r--drivers/gpu/drm/xen/xen_drm_front_shbuf.c2
-rw-r--r--drivers/media/common/videobuf2/videobuf2-dma-contig.c3
-rw-r--r--drivers/media/common/videobuf2/videobuf2-dma-sg.c3
-rw-r--r--drivers/media/common/videobuf2/videobuf2-vmalloc.c3
-rw-r--r--drivers/staging/android/ion/ion.c6
-rw-r--r--drivers/tee/tee_shm.c6
-rw-r--r--include/drm/drm_atomic.h14
-rw-r--r--include/drm/drm_bridge.h26
-rw-r--r--include/drm/drm_connector.h30
-rw-r--r--include/drm/drm_crtc.h15
-rw-r--r--include/drm/drm_file.h7
-rw-r--r--include/drm/drm_mm.h34
-rw-r--r--include/drm/drm_mode_config.h28
-rw-r--r--include/drm/drm_modeset_helper_vtables.h11
-rw-r--r--include/drm/drm_panel.h1
-rw-r--r--include/drm/drm_plane.h9
-rw-r--r--include/drm/drm_prime.h6
-rw-r--r--include/drm/drm_writeback.h130
-rw-r--r--include/linux/dma-buf.h21
-rw-r--r--include/uapi/drm/drm.h9
-rw-r--r--include/uapi/drm/drm_fourcc.h59
-rw-r--r--include/uapi/drm/drm_mode.h8
131 files changed, 2277 insertions, 1240 deletions
diff --git a/Documentation/devicetree/bindings/display/panel/auo,g070vvn01.txt b/Documentation/devicetree/bindings/display/panel/auo,g070vvn01.txt
new file mode 100644
index 000000000000..49e4105378f6
--- /dev/null
+++ b/Documentation/devicetree/bindings/display/panel/auo,g070vvn01.txt
@@ -0,0 +1,29 @@
+AU Optronics Corporation 7.0" FHD (800 x 480) TFT LCD panel
+
+Required properties:
+- compatible: should be "auo,g070vvn01"
+- backlight: phandle of the backlight device attached to the panel
+- power-supply: single regulator to provide the supply voltage
+
+Required nodes:
+- port: Parallel port mapping to connect this display
+
+This panel needs single power supply voltage. Its backlight is conntrolled
+via PWM signal.
+
+Example:
+--------
+
+Example device-tree definition when connected to iMX6Q based board
+
+	lcd_panel: lcd-panel {
+		compatible = "auo,g070vvn01";
+		backlight = <&backlight_lcd>;
+		power-supply = <&reg_display>;
+
+		port {
+			lcd_panel_in: endpoint {
+				remote-endpoint = <&lcd_display_out>;
+			};
+		};
+	};
diff --git a/Documentation/devicetree/bindings/display/panel/innolux,tv123wam.txt b/Documentation/devicetree/bindings/display/panel/innolux,tv123wam.txt
new file mode 100644
index 000000000000..a9b35265fa13
--- /dev/null
+++ b/Documentation/devicetree/bindings/display/panel/innolux,tv123wam.txt
@@ -0,0 +1,20 @@
+Innolux TV123WAM 12.3 inch eDP 2K display panel
+
+This binding is compatible with the simple-panel binding, which is specified
+in simple-panel.txt in this directory.
+
+Required properties:
+- compatible: should be "innolux,tv123wam"
+- power-supply: regulator to provide the supply voltage
+
+Optional properties:
+- enable-gpios: GPIO pin to enable or disable the panel
+- backlight: phandle of the backlight device attached to the panel
+
+Example:
+	panel_edp: panel-edp {
+		compatible = "innolux,tv123wam";
+		enable-gpios = <&msmgpio 31 GPIO_ACTIVE_LOW>;
+		power-supply = <&pm8916_l2>;
+		backlight = <&backlight>;
+	};
diff --git a/Documentation/gpu/drm-kms.rst b/Documentation/gpu/drm-kms.rst
index 1dffd1ac4cd4..4f6f113a7f5d 100644
--- a/Documentation/gpu/drm-kms.rst
+++ b/Documentation/gpu/drm-kms.rst
@@ -373,6 +373,15 @@ Connector Functions Reference
 .. kernel-doc:: drivers/gpu/drm/drm_connector.c
    :export:
 
+Writeback Connectors
+--------------------
+
+.. kernel-doc:: drivers/gpu/drm/drm_writeback.c
+   :doc: overview
+
+.. kernel-doc:: drivers/gpu/drm/drm_writeback.c
+   :export:
+
 Encoder Abstraction
 ===================
 
@@ -517,6 +526,12 @@ Standard Connector Properties
 .. kernel-doc:: drivers/gpu/drm/drm_connector.c
    :doc: standard connector properties
 
+HDMI Specific Connector Properties
+-----------------------------
+
+.. kernel-doc:: drivers/gpu/drm/drm_connector.c
+   :doc: HDMI connector properties
+
 Plane Composition Properties
 ----------------------------
 
diff --git a/Documentation/gpu/kms-properties.csv b/Documentation/gpu/kms-properties.csv
index 07ed22ea3bd6..bfde04eddd14 100644
--- a/Documentation/gpu/kms-properties.csv
+++ b/Documentation/gpu/kms-properties.csv
@@ -17,6 +17,7 @@ Owner Module/Drivers,Group,Property Name,Type,Property Values,Object attached,De
 ,Virtual GPU,“suggested X”,RANGE,"Min=0, Max=0xffffffff",Connector,property to suggest an X offset for a connector
 ,,“suggested Y”,RANGE,"Min=0, Max=0xffffffff",Connector,property to suggest an Y offset for a connector
 ,Optional,"""aspect ratio""",ENUM,"{ ""None"", ""4:3"", ""16:9"" }",Connector,TDB
+,Optional,"""content type""",ENUM,"{ ""No Data"", ""Graphics"", ""Photo"", ""Cinema"", ""Game"" }",Connector,TBD
 i915,Generic,"""Broadcast RGB""",ENUM,"{ ""Automatic"", ""Full"", ""Limited 16:235"" }",Connector,"When this property is set to Limited 16:235 and CTM is set, the hardware will be programmed with the result of the multiplication of CTM by the limited range matrix to ensure the pixels normaly in the range 0..1.0 are remapped to the range 16/255..235/255."
 ,,“audio”,ENUM,"{ ""force-dvi"", ""off"", ""auto"", ""on"" }",Connector,TBD
 ,SDVO-TV,“mode”,ENUM,"{ ""NTSC_M"", ""NTSC_J"", ""NTSC_443"", ""PAL_B"" } etc.",Connector,TBD
diff --git a/drivers/dma-buf/dma-buf.c b/drivers/dma-buf/dma-buf.c
index d78d5fc173dc..13884474d158 100644
--- a/drivers/dma-buf/dma-buf.c
+++ b/drivers/dma-buf/dma-buf.c
@@ -405,7 +405,6 @@ struct dma_buf *dma_buf_export(const struct dma_buf_export_info *exp_info)
 		  || !exp_info->ops->map_dma_buf
 		  || !exp_info->ops->unmap_dma_buf
 		  || !exp_info->ops->release
-		  || !exp_info->ops->map_atomic
 		  || !exp_info->ops->map
 		  || !exp_info->ops->mmap)) {
 		return ERR_PTR(-EINVAL);
@@ -568,7 +567,7 @@ struct dma_buf_attachment *dma_buf_attach(struct dma_buf *dmabuf,
 	mutex_lock(&dmabuf->lock);
 
 	if (dmabuf->ops->attach) {
-		ret = dmabuf->ops->attach(dmabuf, dev, attach);
+		ret = dmabuf->ops->attach(dmabuf, attach);
 		if (ret)
 			goto err_attach;
 	}
@@ -687,26 +686,14 @@ EXPORT_SYMBOL_GPL(dma_buf_unmap_attachment);
  *   void \*dma_buf_kmap(struct dma_buf \*, unsigned long);
  *   void dma_buf_kunmap(struct dma_buf \*, unsigned long, void \*);
  *
- * There are also atomic variants of these interfaces. Like for kmap they
- * facilitate non-blocking fast-paths. Neither the importer nor the exporter
- * (in the callback) is allowed to block when using these.
- *
- * Interfaces::
- *   void \*dma_buf_kmap_atomic(struct dma_buf \*, unsigned long);
- *   void dma_buf_kunmap_atomic(struct dma_buf \*, unsigned long, void \*);
- *
- * For importers all the restrictions of using kmap apply, like the limited
- * supply of kmap_atomic slots. Hence an importer shall only hold onto at
- * max 2 atomic dma_buf kmaps at the same time (in any given process context).
+ * Implementing the functions is optional for exporters and for importers all
+ * the restrictions of using kmap apply.
  *
  * dma_buf kmap calls outside of the range specified in begin_cpu_access are
  * undefined. If the range is not PAGE_SIZE aligned, kmap needs to succeed on
  * the partial chunks at the beginning and end but may return stale or bogus
  * data outside of the range (in these partial chunks).
  *
- * Note that these calls need to always succeed. The exporter needs to
- * complete any preparations that might fail in begin_cpu_access.
- *
  * For some cases the overhead of kmap can be too high, a vmap interface
  * is introduced. This interface should be used very carefully, as vmalloc
  * space is a limited resources on many architectures.
@@ -860,41 +847,6 @@ int dma_buf_end_cpu_access(struct dma_buf *dmabuf,
 EXPORT_SYMBOL_GPL(dma_buf_end_cpu_access);
 
 /**
- * dma_buf_kmap_atomic - Map a page of the buffer object into kernel address
- * space. The same restrictions as for kmap_atomic and friends apply.
- * @dmabuf:	[in]	buffer to map page from.
- * @page_num:	[in]	page in PAGE_SIZE units to map.
- *
- * This call must always succeed, any necessary preparations that might fail
- * need to be done in begin_cpu_access.
- */
-void *dma_buf_kmap_atomic(struct dma_buf *dmabuf, unsigned long page_num)
-{
-	WARN_ON(!dmabuf);
-
-	return dmabuf->ops->map_atomic(dmabuf, page_num);
-}
-EXPORT_SYMBOL_GPL(dma_buf_kmap_atomic);
-
-/**
- * dma_buf_kunmap_atomic - Unmap a page obtained by dma_buf_kmap_atomic.
- * @dmabuf:	[in]	buffer to unmap page from.
- * @page_num:	[in]	page in PAGE_SIZE units to unmap.
- * @vaddr:	[in]	kernel space pointer obtained from dma_buf_kmap_atomic.
- *
- * This call must always succeed.
- */
-void dma_buf_kunmap_atomic(struct dma_buf *dmabuf, unsigned long page_num,
-			   void *vaddr)
-{
-	WARN_ON(!dmabuf);
-
-	if (dmabuf->ops->unmap_atomic)
-		dmabuf->ops->unmap_atomic(dmabuf, page_num, vaddr);
-}
-EXPORT_SYMBOL_GPL(dma_buf_kunmap_atomic);
-
-/**
  * dma_buf_kmap - Map a page of the buffer object into kernel address space. The
  * same restrictions as for kmap and friends apply.
  * @dmabuf:	[in]	buffer to map page from.
@@ -907,6 +859,8 @@ void *dma_buf_kmap(struct dma_buf *dmabuf, unsigned long page_num)
 {
 	WARN_ON(!dmabuf);
 
+	if (!dmabuf->ops->map)
+		return NULL;
 	return dmabuf->ops->map(dmabuf, page_num);
 }
 EXPORT_SYMBOL_GPL(dma_buf_kmap);
diff --git a/drivers/gpu/drm/Makefile b/drivers/gpu/drm/Makefile
index ef9f3dab287f..69c13517ea3a 100644
--- a/drivers/gpu/drm/Makefile
+++ b/drivers/gpu/drm/Makefile
@@ -18,7 +18,7 @@ drm-y := drm_auth.o drm_bufs.o drm_cache.o \
 		drm_encoder.o drm_mode_object.o drm_property.o \
 		drm_plane.o drm_color_mgmt.o drm_print.o \
 		drm_dumb_buffers.o drm_mode_config.o drm_vblank.o \
-		drm_syncobj.o drm_lease.o
+		drm_syncobj.o drm_lease.o drm_writeback.o
 
 drm-$(CONFIG_DRM_LIB_RANDOM) += lib/drm_random.o
 drm-$(CONFIG_DRM_VM) += drm_vm.o
diff --git a/drivers/gpu/drm/amd/amdgpu/amdgpu_prime.c b/drivers/gpu/drm/amd/amdgpu/amdgpu_prime.c
index 4683626b065f..a156b3891a3f 100644
--- a/drivers/gpu/drm/amd/amdgpu/amdgpu_prime.c
+++ b/drivers/gpu/drm/amd/amdgpu/amdgpu_prime.c
@@ -133,7 +133,6 @@ error:
 }
 
 static int amdgpu_gem_map_attach(struct dma_buf *dma_buf,
-				 struct device *target_dev,
 				 struct dma_buf_attachment *attach)
 {
 	struct drm_gem_object *obj = dma_buf->priv;
@@ -141,7 +140,7 @@ static int amdgpu_gem_map_attach(struct dma_buf *dma_buf,
 	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);
 	long r;
 
-	r = drm_gem_map_attach(dma_buf, target_dev, attach);
+	r = drm_gem_map_attach(dma_buf, attach);
 	if (r)
 		return r;
 
@@ -245,9 +244,7 @@ static const struct dma_buf_ops amdgpu_dmabuf_ops = {
 	.release = drm_gem_dmabuf_release,
 	.begin_cpu_access = amdgpu_gem_begin_cpu_access,
 	.map = drm_gem_dmabuf_kmap,
-	.map_atomic = drm_gem_dmabuf_kmap_atomic,
 	.unmap = drm_gem_dmabuf_kunmap,
-	.unmap_atomic = drm_gem_dmabuf_kunmap_atomic,
 	.mmap = drm_gem_dmabuf_mmap,
 	.vmap = drm_gem_dmabuf_vmap,
 	.vunmap = drm_gem_dmabuf_vunmap,
diff --git a/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c b/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
index f9add85157e7..a1dd49545a5b 100644
--- a/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
+++ b/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
@@ -3914,8 +3914,6 @@ static void amdgpu_dm_do_flip(struct drm_crtc *crtc,
 
 	/* Flip */
 	spin_lock_irqsave(&crtc->dev->event_lock, flags);
-	/* update crtc fb */
-	crtc->primary->fb = fb;
 
 	WARN_ON(acrtc->pflip_status != AMDGPU_FLIP_NONE);
 	WARN_ON(!acrtc_state->stream);
diff --git a/drivers/gpu/drm/arc/arcpgu_crtc.c b/drivers/gpu/drm/arc/arcpgu_crtc.c
index 16903dc7fe0d..c3349b8fb58b 100644
--- a/drivers/gpu/drm/arc/arcpgu_crtc.c
+++ b/drivers/gpu/drm/arc/arcpgu_crtc.c
@@ -136,9 +136,6 @@ static void arc_pgu_crtc_atomic_disable(struct drm_crtc *crtc,
 {
 	struct arcpgu_drm_private *arcpgu = crtc_to_arcpgu_priv(crtc);
 
-	if (!crtc->primary->fb)
-		return;
-
 	clk_disable_unprepare(arcpgu->clk);
 	arc_pgu_write(arcpgu, ARCPGU_REG_CTRL,
 		      arc_pgu_read(arcpgu, ARCPGU_REG_CTRL) &
diff --git a/drivers/gpu/drm/armada/armada_fb.c b/drivers/gpu/drm/armada/armada_fb.c
index ac92bce07ecd..edd15126bde9 100644
--- a/drivers/gpu/drm/armada/armada_fb.c
+++ b/drivers/gpu/drm/armada/armada_fb.c
@@ -7,30 +7,15 @@
7 */ 7 */
8#include <drm/drm_crtc_helper.h> 8#include <drm/drm_crtc_helper.h>
9#include <drm/drm_fb_helper.h> 9#include <drm/drm_fb_helper.h>
10#include <drm/drm_gem_framebuffer_helper.h>
10#include "armada_drm.h" 11#include "armada_drm.h"
11#include "armada_fb.h" 12#include "armada_fb.h"
12#include "armada_gem.h" 13#include "armada_gem.h"
13#include "armada_hw.h" 14#include "armada_hw.h"
14 15
15static void armada_fb_destroy(struct drm_framebuffer *fb)
16{
17 struct armada_framebuffer *dfb = drm_fb_to_armada_fb(fb);
18
19 drm_framebuffer_cleanup(&dfb->fb);
20 drm_gem_object_put_unlocked(&dfb->obj->obj);
21 kfree(dfb);
22}
23
24static int armada_fb_create_handle(struct drm_framebuffer *fb,
25 struct drm_file *dfile, unsigned int *handle)
26{
27 struct armada_framebuffer *dfb = drm_fb_to_armada_fb(fb);
28 return drm_gem_handle_create(dfile, &dfb->obj->obj, handle);
29}
30
31static const struct drm_framebuffer_funcs armada_fb_funcs = { 16static const struct drm_framebuffer_funcs armada_fb_funcs = {
32 .destroy = armada_fb_destroy, 17 .destroy = drm_gem_fb_destroy,
33 .create_handle = armada_fb_create_handle, 18 .create_handle = drm_gem_fb_create_handle,
34}; 19};
35 20
36struct armada_framebuffer *armada_framebuffer_create(struct drm_device *dev, 21struct armada_framebuffer *armada_framebuffer_create(struct drm_device *dev,
@@ -78,7 +63,7 @@ struct armada_framebuffer *armada_framebuffer_create(struct drm_device *dev,
78 63
79 dfb->fmt = format; 64 dfb->fmt = format;
80 dfb->mod = config; 65 dfb->mod = config;
81 dfb->obj = obj; 66 dfb->fb.obj[0] = &obj->obj;
82 67
83 drm_helper_mode_fill_fb_struct(dev, &dfb->fb, mode); 68 drm_helper_mode_fill_fb_struct(dev, &dfb->fb, mode);
84 69
diff --git a/drivers/gpu/drm/armada/armada_fb.h b/drivers/gpu/drm/armada/armada_fb.h
index 48073c4f54d8..5c130ff5da77 100644
--- a/drivers/gpu/drm/armada/armada_fb.h
+++ b/drivers/gpu/drm/armada/armada_fb.h
@@ -10,13 +10,12 @@
10 10
11struct armada_framebuffer { 11struct armada_framebuffer {
12 struct drm_framebuffer fb; 12 struct drm_framebuffer fb;
13 struct armada_gem_object *obj;
14 uint8_t fmt; 13 uint8_t fmt;
15 uint8_t mod; 14 uint8_t mod;
16}; 15};
17#define drm_fb_to_armada_fb(dfb) \ 16#define drm_fb_to_armada_fb(dfb) \
18 container_of(dfb, struct armada_framebuffer, fb) 17 container_of(dfb, struct armada_framebuffer, fb)
19#define drm_fb_obj(fb) drm_fb_to_armada_fb(fb)->obj 18#define drm_fb_obj(fb) drm_to_armada_gem((fb)->obj[0])
20 19
21struct armada_framebuffer *armada_framebuffer_create(struct drm_device *, 20struct armada_framebuffer *armada_framebuffer_create(struct drm_device *,
22 const struct drm_mode_fb_cmd2 *, struct armada_gem_object *); 21 const struct drm_mode_fb_cmd2 *, struct armada_gem_object *);
diff --git a/drivers/gpu/drm/armada/armada_gem.c b/drivers/gpu/drm/armada/armada_gem.c
index a97f509743a5..3fb37c75c065 100644
--- a/drivers/gpu/drm/armada/armada_gem.c
+++ b/drivers/gpu/drm/armada/armada_gem.c
@@ -490,8 +490,6 @@ static const struct dma_buf_ops armada_gem_prime_dmabuf_ops = {
490 .map_dma_buf = armada_gem_prime_map_dma_buf, 490 .map_dma_buf = armada_gem_prime_map_dma_buf,
491 .unmap_dma_buf = armada_gem_prime_unmap_dma_buf, 491 .unmap_dma_buf = armada_gem_prime_unmap_dma_buf,
492 .release = drm_gem_dmabuf_release, 492 .release = drm_gem_dmabuf_release,
493 .map_atomic = armada_gem_dmabuf_no_kmap,
494 .unmap_atomic = armada_gem_dmabuf_no_kunmap,
495 .map = armada_gem_dmabuf_no_kmap, 493 .map = armada_gem_dmabuf_no_kmap,
496 .unmap = armada_gem_dmabuf_no_kunmap, 494 .unmap = armada_gem_dmabuf_no_kunmap,
497 .mmap = armada_gem_dmabuf_mmap, 495 .mmap = armada_gem_dmabuf_mmap,
diff --git a/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_dc.c b/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_dc.c
index c1ea5c36b006..843cac222e60 100644
--- a/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_dc.c
+++ b/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_dc.c
@@ -681,6 +681,7 @@ static void atmel_hlcdc_dc_unload(struct drm_device *dev)
681 drm_fb_cma_fbdev_fini(dev); 681 drm_fb_cma_fbdev_fini(dev);
682 flush_workqueue(dc->wq); 682 flush_workqueue(dc->wq);
683 drm_kms_helper_poll_fini(dev); 683 drm_kms_helper_poll_fini(dev);
684 drm_atomic_helper_shutdown(dev);
684 drm_mode_config_cleanup(dev); 685 drm_mode_config_cleanup(dev);
685 686
686 pm_runtime_get_sync(dev->dev); 687 pm_runtime_get_sync(dev->dev);
diff --git a/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c b/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c
index 73c875db45f4..1aecc74cc463 100644
--- a/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c
+++ b/drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c
@@ -412,9 +412,10 @@ static void atmel_hlcdc_plane_update_format(struct atmel_hlcdc_plane *plane,
412 ATMEL_HLCDC_LAYER_FORMAT_CFG, cfg); 412 ATMEL_HLCDC_LAYER_FORMAT_CFG, cfg);
413} 413}
414 414
415static void atmel_hlcdc_plane_update_clut(struct atmel_hlcdc_plane *plane) 415static void atmel_hlcdc_plane_update_clut(struct atmel_hlcdc_plane *plane,
416 struct atmel_hlcdc_plane_state *state)
416{ 417{
417 struct drm_crtc *crtc = plane->base.crtc; 418 struct drm_crtc *crtc = state->base.crtc;
418 struct drm_color_lut *lut; 419 struct drm_color_lut *lut;
419 int idx; 420 int idx;
420 421
@@ -779,7 +780,7 @@ static void atmel_hlcdc_plane_atomic_update(struct drm_plane *p,
779 atmel_hlcdc_plane_update_pos_and_size(plane, state); 780 atmel_hlcdc_plane_update_pos_and_size(plane, state);
780 atmel_hlcdc_plane_update_general_settings(plane, state); 781 atmel_hlcdc_plane_update_general_settings(plane, state);
781 atmel_hlcdc_plane_update_format(plane, state); 782 atmel_hlcdc_plane_update_format(plane, state);
782 atmel_hlcdc_plane_update_clut(plane); 783 atmel_hlcdc_plane_update_clut(plane, state);
783 atmel_hlcdc_plane_update_buffers(plane, state); 784 atmel_hlcdc_plane_update_buffers(plane, state);
784 atmel_hlcdc_plane_update_disc_area(plane, state); 785 atmel_hlcdc_plane_update_disc_area(plane, state);
785 786
@@ -816,16 +817,6 @@ static void atmel_hlcdc_plane_atomic_disable(struct drm_plane *p,
816 atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR); 817 atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR);
817} 818}
818 819
819static void atmel_hlcdc_plane_destroy(struct drm_plane *p)
820{
821 struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
822
823 if (plane->base.fb)
824 drm_framebuffer_put(plane->base.fb);
825
826 drm_plane_cleanup(p);
827}
828
829static int atmel_hlcdc_plane_init_properties(struct atmel_hlcdc_plane *plane) 820static int atmel_hlcdc_plane_init_properties(struct atmel_hlcdc_plane *plane)
830{ 821{
831 const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc; 822 const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
@@ -1002,7 +993,7 @@ static void atmel_hlcdc_plane_atomic_destroy_state(struct drm_plane *p,
1002static const struct drm_plane_funcs layer_plane_funcs = { 993static const struct drm_plane_funcs layer_plane_funcs = {
1003 .update_plane = drm_atomic_helper_update_plane, 994 .update_plane = drm_atomic_helper_update_plane,
1004 .disable_plane = drm_atomic_helper_disable_plane, 995 .disable_plane = drm_atomic_helper_disable_plane,
1005 .destroy = atmel_hlcdc_plane_destroy, 996 .destroy = drm_plane_cleanup,
1006 .reset = atmel_hlcdc_plane_reset, 997 .reset = atmel_hlcdc_plane_reset,
1007 .atomic_duplicate_state = atmel_hlcdc_plane_atomic_duplicate_state, 998 .atomic_duplicate_state = atmel_hlcdc_plane_atomic_duplicate_state,
1008 .atomic_destroy_state = atmel_hlcdc_plane_atomic_destroy_state, 999 .atomic_destroy_state = atmel_hlcdc_plane_atomic_destroy_state,
diff --git a/drivers/gpu/drm/bridge/Kconfig b/drivers/gpu/drm/bridge/Kconfig
index fa2c7997e2fd..bf6cad6c9178 100644
--- a/drivers/gpu/drm/bridge/Kconfig
+++ b/drivers/gpu/drm/bridge/Kconfig
@@ -82,9 +82,11 @@ config DRM_PARADE_PS8622
82 82
83config DRM_SIL_SII8620 83config DRM_SIL_SII8620
84 tristate "Silicon Image SII8620 HDMI/MHL bridge" 84 tristate "Silicon Image SII8620 HDMI/MHL bridge"
85 depends on OF && RC_CORE 85 depends on OF
86 select DRM_KMS_HELPER 86 select DRM_KMS_HELPER
87 imply EXTCON 87 imply EXTCON
88 select INPUT
89 select RC_CORE
88 help 90 help
89 Silicon Image SII8620 HDMI/MHL bridge chip driver. 91 Silicon Image SII8620 HDMI/MHL bridge chip driver.
90 92
diff --git a/drivers/gpu/drm/bridge/cdns-dsi.c b/drivers/gpu/drm/bridge/cdns-dsi.c
index c255fc3e1be5..f2d43f24acfb 100644
--- a/drivers/gpu/drm/bridge/cdns-dsi.c
+++ b/drivers/gpu/drm/bridge/cdns-dsi.c
@@ -1337,7 +1337,7 @@ static const struct mipi_dsi_host_ops cdns_dsi_ops = {
1337 .transfer = cdns_dsi_transfer, 1337 .transfer = cdns_dsi_transfer,
1338}; 1338};
1339 1339
1340static int cdns_dsi_resume(struct device *dev) 1340static int __maybe_unused cdns_dsi_resume(struct device *dev)
1341{ 1341{
1342 struct cdns_dsi *dsi = dev_get_drvdata(dev); 1342 struct cdns_dsi *dsi = dev_get_drvdata(dev);
1343 1343
@@ -1350,7 +1350,7 @@ static int cdns_dsi_resume(struct device *dev)
1350 return 0; 1350 return 0;
1351} 1351}
1352 1352
1353static int cdns_dsi_suspend(struct device *dev) 1353static int __maybe_unused cdns_dsi_suspend(struct device *dev)
1354{ 1354{
1355 struct cdns_dsi *dsi = dev_get_drvdata(dev); 1355 struct cdns_dsi *dsi = dev_get_drvdata(dev);
1356 1356
diff --git a/drivers/gpu/drm/cirrus/cirrus_drv.h b/drivers/gpu/drm/cirrus/cirrus_drv.h
index be2d7e488062..ce9db7aab225 100644
--- a/drivers/gpu/drm/cirrus/cirrus_drv.h
+++ b/drivers/gpu/drm/cirrus/cirrus_drv.h
@@ -92,7 +92,6 @@
92 92
93#define to_cirrus_crtc(x) container_of(x, struct cirrus_crtc, base) 93#define to_cirrus_crtc(x) container_of(x, struct cirrus_crtc, base)
94#define to_cirrus_encoder(x) container_of(x, struct cirrus_encoder, base) 94#define to_cirrus_encoder(x) container_of(x, struct cirrus_encoder, base)
95#define to_cirrus_framebuffer(x) container_of(x, struct cirrus_framebuffer, base)
96 95
97struct cirrus_crtc { 96struct cirrus_crtc {
98 struct drm_crtc base; 97 struct drm_crtc base;
@@ -117,11 +116,6 @@ struct cirrus_connector {
117 struct drm_connector base; 116 struct drm_connector base;
118}; 117};
119 118
120struct cirrus_framebuffer {
121 struct drm_framebuffer base;
122 struct drm_gem_object *obj;
123};
124
125struct cirrus_mc { 119struct cirrus_mc {
126 resource_size_t vram_size; 120 resource_size_t vram_size;
127 resource_size_t vram_base; 121 resource_size_t vram_base;
@@ -152,7 +146,7 @@ struct cirrus_device {
152 146
153struct cirrus_fbdev { 147struct cirrus_fbdev {
154 struct drm_fb_helper helper; 148 struct drm_fb_helper helper;
155 struct cirrus_framebuffer gfb; 149 struct drm_framebuffer gfb;
156 void *sysram; 150 void *sysram;
157 int size; 151 int size;
158 int x1, y1, x2, y2; /* dirty rect */ 152 int x1, y1, x2, y2; /* dirty rect */
@@ -198,7 +192,7 @@ int cirrus_dumb_create(struct drm_file *file,
198 struct drm_mode_create_dumb *args); 192 struct drm_mode_create_dumb *args);
199 193
200int cirrus_framebuffer_init(struct drm_device *dev, 194int cirrus_framebuffer_init(struct drm_device *dev,
201 struct cirrus_framebuffer *gfb, 195 struct drm_framebuffer *gfb,
202 const struct drm_mode_fb_cmd2 *mode_cmd, 196 const struct drm_mode_fb_cmd2 *mode_cmd,
203 struct drm_gem_object *obj); 197 struct drm_gem_object *obj);
204 198
diff --git a/drivers/gpu/drm/cirrus/cirrus_fbdev.c b/drivers/gpu/drm/cirrus/cirrus_fbdev.c
index 32fbfba2c623..b643ac92801c 100644
--- a/drivers/gpu/drm/cirrus/cirrus_fbdev.c
+++ b/drivers/gpu/drm/cirrus/cirrus_fbdev.c
@@ -22,14 +22,14 @@ static void cirrus_dirty_update(struct cirrus_fbdev *afbdev,
22 struct drm_gem_object *obj; 22 struct drm_gem_object *obj;
23 struct cirrus_bo *bo; 23 struct cirrus_bo *bo;
24 int src_offset, dst_offset; 24 int src_offset, dst_offset;
25 int bpp = afbdev->gfb.base.format->cpp[0]; 25 int bpp = afbdev->gfb.format->cpp[0];
26 int ret = -EBUSY; 26 int ret = -EBUSY;
27 bool unmap = false; 27 bool unmap = false;
28 bool store_for_later = false; 28 bool store_for_later = false;
29 int x2, y2; 29 int x2, y2;
30 unsigned long flags; 30 unsigned long flags;
31 31
32 obj = afbdev->gfb.obj; 32 obj = afbdev->gfb.obj[0];
33 bo = gem_to_cirrus_bo(obj); 33 bo = gem_to_cirrus_bo(obj);
34 34
35 /* 35 /*
@@ -82,7 +82,7 @@ static void cirrus_dirty_update(struct cirrus_fbdev *afbdev,
82 } 82 }
83 for (i = y; i < y + height; i++) { 83 for (i = y; i < y + height; i++) {
84 /* assume equal stride for now */ 84 /* assume equal stride for now */
85 src_offset = dst_offset = i * afbdev->gfb.base.pitches[0] + (x * bpp); 85 src_offset = dst_offset = i * afbdev->gfb.pitches[0] + (x * bpp);
86 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, width * bpp); 86 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, width * bpp);
87 87
88 } 88 }
@@ -204,7 +204,7 @@ static int cirrusfb_create(struct drm_fb_helper *helper,
204 gfbdev->sysram = sysram; 204 gfbdev->sysram = sysram;
205 gfbdev->size = size; 205 gfbdev->size = size;
206 206
207 fb = &gfbdev->gfb.base; 207 fb = &gfbdev->gfb;
208 if (!fb) { 208 if (!fb) {
209 DRM_INFO("fb is NULL\n"); 209 DRM_INFO("fb is NULL\n");
210 return -EINVAL; 210 return -EINVAL;
@@ -246,19 +246,19 @@ static int cirrusfb_create(struct drm_fb_helper *helper,
246static int cirrus_fbdev_destroy(struct drm_device *dev, 246static int cirrus_fbdev_destroy(struct drm_device *dev,
247 struct cirrus_fbdev *gfbdev) 247 struct cirrus_fbdev *gfbdev)
248{ 248{
249 struct cirrus_framebuffer *gfb = &gfbdev->gfb; 249 struct drm_framebuffer *gfb = &gfbdev->gfb;
250 250
251 drm_fb_helper_unregister_fbi(&gfbdev->helper); 251 drm_fb_helper_unregister_fbi(&gfbdev->helper);
252 252
253 if (gfb->obj) { 253 if (gfb->obj[0]) {
254 drm_gem_object_put_unlocked(gfb->obj); 254 drm_gem_object_put_unlocked(gfb->obj[0]);
255 gfb->obj = NULL; 255 gfb->obj[0] = NULL;
256 } 256 }
257 257
258 vfree(gfbdev->sysram); 258 vfree(gfbdev->sysram);
259 drm_fb_helper_fini(&gfbdev->helper); 259 drm_fb_helper_fini(&gfbdev->helper);
260 drm_framebuffer_unregister_private(&gfb->base); 260 drm_framebuffer_unregister_private(gfb);
261 drm_framebuffer_cleanup(&gfb->base); 261 drm_framebuffer_cleanup(gfb);
262 262
263 return 0; 263 return 0;
264} 264}
diff --git a/drivers/gpu/drm/cirrus/cirrus_main.c b/drivers/gpu/drm/cirrus/cirrus_main.c
index 26df1e8cd490..60d54e10a34d 100644
--- a/drivers/gpu/drm/cirrus/cirrus_main.c
+++ b/drivers/gpu/drm/cirrus/cirrus_main.c
@@ -10,42 +10,25 @@
10 */ 10 */
11#include <drm/drmP.h> 11#include <drm/drmP.h>
12#include <drm/drm_crtc_helper.h> 12#include <drm/drm_crtc_helper.h>
13#include <drm/drm_gem_framebuffer_helper.h>
13 14
14#include "cirrus_drv.h" 15#include "cirrus_drv.h"
15 16
16static int cirrus_create_handle(struct drm_framebuffer *fb,
17 struct drm_file* file_priv,
18 unsigned int* handle)
19{
20 struct cirrus_framebuffer *cirrus_fb = to_cirrus_framebuffer(fb);
21
22 return drm_gem_handle_create(file_priv, cirrus_fb->obj, handle);
23}
24
25static void cirrus_user_framebuffer_destroy(struct drm_framebuffer *fb)
26{
27 struct cirrus_framebuffer *cirrus_fb = to_cirrus_framebuffer(fb);
28
29 drm_gem_object_put_unlocked(cirrus_fb->obj);
30 drm_framebuffer_cleanup(fb);
31 kfree(fb);
32}
33
34static const struct drm_framebuffer_funcs cirrus_fb_funcs = { 17static const struct drm_framebuffer_funcs cirrus_fb_funcs = {
35 .create_handle = cirrus_create_handle, 18 .create_handle = drm_gem_fb_create_handle,
36 .destroy = cirrus_user_framebuffer_destroy, 19 .destroy = drm_gem_fb_destroy,
37}; 20};
38 21
39int cirrus_framebuffer_init(struct drm_device *dev, 22int cirrus_framebuffer_init(struct drm_device *dev,
40 struct cirrus_framebuffer *gfb, 23 struct drm_framebuffer *gfb,
41 const struct drm_mode_fb_cmd2 *mode_cmd, 24 const struct drm_mode_fb_cmd2 *mode_cmd,
42 struct drm_gem_object *obj) 25 struct drm_gem_object *obj)
43{ 26{
44 int ret; 27 int ret;
45 28
46 drm_helper_mode_fill_fb_struct(dev, &gfb->base, mode_cmd); 29 drm_helper_mode_fill_fb_struct(dev, gfb, mode_cmd);
47 gfb->obj = obj; 30 gfb->obj[0] = obj;
48 ret = drm_framebuffer_init(dev, &gfb->base, &cirrus_fb_funcs); 31 ret = drm_framebuffer_init(dev, gfb, &cirrus_fb_funcs);
49 if (ret) { 32 if (ret) {
50 DRM_ERROR("drm_framebuffer_init failed: %d\n", ret); 33 DRM_ERROR("drm_framebuffer_init failed: %d\n", ret);
51 return ret; 34 return ret;
@@ -60,7 +43,7 @@ cirrus_user_framebuffer_create(struct drm_device *dev,
60{ 43{
61 struct cirrus_device *cdev = dev->dev_private; 44 struct cirrus_device *cdev = dev->dev_private;
62 struct drm_gem_object *obj; 45 struct drm_gem_object *obj;
63 struct cirrus_framebuffer *cirrus_fb; 46 struct drm_framebuffer *fb;
64 u32 bpp; 47 u32 bpp;
65 int ret; 48 int ret;
66 49
@@ -74,19 +57,19 @@ cirrus_user_framebuffer_create(struct drm_device *dev,
74 if (obj == NULL) 57 if (obj == NULL)
75 return ERR_PTR(-ENOENT); 58 return ERR_PTR(-ENOENT);
76 59
77 cirrus_fb = kzalloc(sizeof(*cirrus_fb), GFP_KERNEL); 60 fb = kzalloc(sizeof(*fb), GFP_KERNEL);
78 if (!cirrus_fb) { 61 if (!fb) {
79 drm_gem_object_put_unlocked(obj); 62 drm_gem_object_put_unlocked(obj);
80 return ERR_PTR(-ENOMEM); 63 return ERR_PTR(-ENOMEM);
81 } 64 }
82 65
83 ret = cirrus_framebuffer_init(dev, cirrus_fb, mode_cmd, obj); 66 ret = cirrus_framebuffer_init(dev, fb, mode_cmd, obj);
84 if (ret) { 67 if (ret) {
85 drm_gem_object_put_unlocked(obj); 68 drm_gem_object_put_unlocked(obj);
86 kfree(cirrus_fb); 69 kfree(fb);
87 return ERR_PTR(ret); 70 return ERR_PTR(ret);
88 } 71 }
89 return &cirrus_fb->base; 72 return fb;
90} 73}
91 74
92static const struct drm_mode_config_funcs cirrus_mode_funcs = { 75static const struct drm_mode_config_funcs cirrus_mode_funcs = {
diff --git a/drivers/gpu/drm/cirrus/cirrus_mode.c b/drivers/gpu/drm/cirrus/cirrus_mode.c
index c91b9b054e3f..b529f8c8e2a6 100644
--- a/drivers/gpu/drm/cirrus/cirrus_mode.c
+++ b/drivers/gpu/drm/cirrus/cirrus_mode.c
@@ -101,17 +101,13 @@ static int cirrus_crtc_do_set_base(struct drm_crtc *crtc,
101 int x, int y, int atomic) 101 int x, int y, int atomic)
102{ 102{
103 struct cirrus_device *cdev = crtc->dev->dev_private; 103 struct cirrus_device *cdev = crtc->dev->dev_private;
104 struct drm_gem_object *obj;
105 struct cirrus_framebuffer *cirrus_fb;
106 struct cirrus_bo *bo; 104 struct cirrus_bo *bo;
107 int ret; 105 int ret;
108 u64 gpu_addr; 106 u64 gpu_addr;
109 107
110 /* push the previous fb to system ram */ 108 /* push the previous fb to system ram */
111 if (!atomic && fb) { 109 if (!atomic && fb) {
112 cirrus_fb = to_cirrus_framebuffer(fb); 110 bo = gem_to_cirrus_bo(fb->obj[0]);
113 obj = cirrus_fb->obj;
114 bo = gem_to_cirrus_bo(obj);
115 ret = cirrus_bo_reserve(bo, false); 111 ret = cirrus_bo_reserve(bo, false);
116 if (ret) 112 if (ret)
117 return ret; 113 return ret;
@@ -119,9 +115,7 @@ static int cirrus_crtc_do_set_base(struct drm_crtc *crtc,
119 cirrus_bo_unreserve(bo); 115 cirrus_bo_unreserve(bo);
120 } 116 }
121 117
122 cirrus_fb = to_cirrus_framebuffer(crtc->primary->fb); 118 bo = gem_to_cirrus_bo(crtc->primary->fb->obj[0]);
123 obj = cirrus_fb->obj;
124 bo = gem_to_cirrus_bo(obj);
125 119
126 ret = cirrus_bo_reserve(bo, false); 120 ret = cirrus_bo_reserve(bo, false);
127 if (ret) 121 if (ret)
@@ -133,7 +127,7 @@ static int cirrus_crtc_do_set_base(struct drm_crtc *crtc,
133 return ret; 127 return ret;
134 } 128 }
135 129
136 if (&cdev->mode_info.gfbdev->gfb == cirrus_fb) { 130 if (&cdev->mode_info.gfbdev->gfb == crtc->primary->fb) {
137 /* if pushing console in kmap it */ 131 /* if pushing console in kmap it */
138 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); 132 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap);
139 if (ret) 133 if (ret)
diff --git a/drivers/gpu/drm/drm_atomic.c b/drivers/gpu/drm/drm_atomic.c
index 895741e9cd7d..178842380f75 100644
--- a/drivers/gpu/drm/drm_atomic.c
+++ b/drivers/gpu/drm/drm_atomic.c
@@ -30,6 +30,7 @@
30#include <drm/drm_atomic.h> 30#include <drm/drm_atomic.h>
31#include <drm/drm_mode.h> 31#include <drm/drm_mode.h>
32#include <drm/drm_print.h> 32#include <drm/drm_print.h>
33#include <drm/drm_writeback.h>
33#include <linux/sync_file.h> 34#include <linux/sync_file.h>
34 35
35#include "drm_crtc_internal.h" 36#include "drm_crtc_internal.h"
@@ -325,6 +326,35 @@ static s32 __user *get_out_fence_for_crtc(struct drm_atomic_state *state,
325 return fence_ptr; 326 return fence_ptr;
326} 327}
327 328
329static int set_out_fence_for_connector(struct drm_atomic_state *state,
330 struct drm_connector *connector,
331 s32 __user *fence_ptr)
332{
333 unsigned int index = drm_connector_index(connector);
334
335 if (!fence_ptr)
336 return 0;
337
338 if (put_user(-1, fence_ptr))
339 return -EFAULT;
340
341 state->connectors[index].out_fence_ptr = fence_ptr;
342
343 return 0;
344}
345
346static s32 __user *get_out_fence_for_connector(struct drm_atomic_state *state,
347 struct drm_connector *connector)
348{
349 unsigned int index = drm_connector_index(connector);
350 s32 __user *fence_ptr;
351
352 fence_ptr = state->connectors[index].out_fence_ptr;
353 state->connectors[index].out_fence_ptr = NULL;
354
355 return fence_ptr;
356}
357
328/** 358/**
329 * drm_atomic_set_mode_for_crtc - set mode for CRTC 359 * drm_atomic_set_mode_for_crtc - set mode for CRTC
330 * @state: the CRTC whose incoming state to update 360 * @state: the CRTC whose incoming state to update
@@ -339,6 +369,7 @@ static s32 __user *get_out_fence_for_crtc(struct drm_atomic_state *state,
339int drm_atomic_set_mode_for_crtc(struct drm_crtc_state *state, 369int drm_atomic_set_mode_for_crtc(struct drm_crtc_state *state,
340 const struct drm_display_mode *mode) 370 const struct drm_display_mode *mode)
341{ 371{
372 struct drm_crtc *crtc = state->crtc;
342 struct drm_mode_modeinfo umode; 373 struct drm_mode_modeinfo umode;
343 374
344 /* Early return for no change. */ 375 /* Early return for no change. */
@@ -359,13 +390,13 @@ int drm_atomic_set_mode_for_crtc(struct drm_crtc_state *state,
359 390
360 drm_mode_copy(&state->mode, mode); 391 drm_mode_copy(&state->mode, mode);
361 state->enable = true; 392 state->enable = true;
362 DRM_DEBUG_ATOMIC("Set [MODE:%s] for CRTC state %p\n", 393 DRM_DEBUG_ATOMIC("Set [MODE:%s] for [CRTC:%d:%s] state %p\n",
363 mode->name, state); 394 mode->name, crtc->base.id, crtc->name, state);
364 } else { 395 } else {
365 memset(&state->mode, 0, sizeof(state->mode)); 396 memset(&state->mode, 0, sizeof(state->mode));
366 state->enable = false; 397 state->enable = false;
367 DRM_DEBUG_ATOMIC("Set [NOMODE] for CRTC state %p\n", 398 DRM_DEBUG_ATOMIC("Set [NOMODE] for [CRTC:%d:%s] state %p\n",
368 state); 399 crtc->base.id, crtc->name, state);
369 } 400 }
370 401
371 return 0; 402 return 0;
@@ -388,6 +419,8 @@ EXPORT_SYMBOL(drm_atomic_set_mode_for_crtc);
388int drm_atomic_set_mode_prop_for_crtc(struct drm_crtc_state *state, 419int drm_atomic_set_mode_prop_for_crtc(struct drm_crtc_state *state,
389 struct drm_property_blob *blob) 420 struct drm_property_blob *blob)
390{ 421{
422 struct drm_crtc *crtc = state->crtc;
423
391 if (blob == state->mode_blob) 424 if (blob == state->mode_blob)
392 return 0; 425 return 0;
393 426
@@ -397,19 +430,34 @@ int drm_atomic_set_mode_prop_for_crtc(struct drm_crtc_state *state,
397 memset(&state->mode, 0, sizeof(state->mode)); 430 memset(&state->mode, 0, sizeof(state->mode));
398 431
399 if (blob) { 432 if (blob) {
400 if (blob->length != sizeof(struct drm_mode_modeinfo) || 433 int ret;
401 drm_mode_convert_umode(state->crtc->dev, &state->mode, 434
402 blob->data)) 435 if (blob->length != sizeof(struct drm_mode_modeinfo)) {
436 DRM_DEBUG_ATOMIC("[CRTC:%d:%s] bad mode blob length: %zu\n",
437 crtc->base.id, crtc->name,
438 blob->length);
403 return -EINVAL; 439 return -EINVAL;
440 }
441
442 ret = drm_mode_convert_umode(crtc->dev,
443 &state->mode, blob->data);
444 if (ret) {
445 DRM_DEBUG_ATOMIC("[CRTC:%d:%s] invalid mode (ret=%d, status=%s):\n",
446 crtc->base.id, crtc->name,
447 ret, drm_get_mode_status_name(state->mode.status));
448 drm_mode_debug_printmodeline(&state->mode);
449 return -EINVAL;
450 }
404 451
405 state->mode_blob = drm_property_blob_get(blob); 452 state->mode_blob = drm_property_blob_get(blob);
406 state->enable = true; 453 state->enable = true;
407 DRM_DEBUG_ATOMIC("Set [MODE:%s] for CRTC state %p\n", 454 DRM_DEBUG_ATOMIC("Set [MODE:%s] for [CRTC:%d:%s] state %p\n",
408 state->mode.name, state); 455 state->mode.name, crtc->base.id, crtc->name,
456 state);
409 } else { 457 } else {
410 state->enable = false; 458 state->enable = false;
411 DRM_DEBUG_ATOMIC("Set [NOMODE] for CRTC state %p\n", 459 DRM_DEBUG_ATOMIC("Set [NOMODE] for [CRTC:%d:%s] state %p\n",
412 state); 460 crtc->base.id, crtc->name, state);
413 } 461 }
414 462
415 return 0; 463 return 0;
@@ -539,10 +587,14 @@ int drm_atomic_crtc_set_property(struct drm_crtc *crtc,
539 return -EFAULT; 587 return -EFAULT;
540 588
541 set_out_fence_for_crtc(state->state, crtc, fence_ptr); 589 set_out_fence_for_crtc(state->state, crtc, fence_ptr);
542 } else if (crtc->funcs->atomic_set_property) 590 } else if (crtc->funcs->atomic_set_property) {
543 return crtc->funcs->atomic_set_property(crtc, state, property, val); 591 return crtc->funcs->atomic_set_property(crtc, state, property, val);
544 else 592 } else {
593 DRM_DEBUG_ATOMIC("[CRTC:%d:%s] unknown property [PROP:%d:%s]]\n",
594 crtc->base.id, crtc->name,
595 property->base.id, property->name);
545 return -EINVAL; 596 return -EINVAL;
597 }
546 598
547 return 0; 599 return 0;
548} 600}
@@ -677,6 +729,51 @@ static void drm_atomic_crtc_print_state(struct drm_printer *p,
677} 729}
678 730
679/** 731/**
732 * drm_atomic_connector_check - check connector state
733 * @connector: connector to check
734 * @state: connector state to check
735 *
736 * Provides core sanity checks for connector state.
737 *
738 * RETURNS:
739 * Zero on success, error code on failure
740 */
741static int drm_atomic_connector_check(struct drm_connector *connector,
742 struct drm_connector_state *state)
743{
744 struct drm_crtc_state *crtc_state;
745 struct drm_writeback_job *writeback_job = state->writeback_job;
746
747 if ((connector->connector_type != DRM_MODE_CONNECTOR_WRITEBACK) || !writeback_job)
748 return 0;
749
750 if (writeback_job->fb && !state->crtc) {
751 DRM_DEBUG_ATOMIC("[CONNECTOR:%d:%s] framebuffer without CRTC\n",
752 connector->base.id, connector->name);
753 return -EINVAL;
754 }
755
756 if (state->crtc)
757 crtc_state = drm_atomic_get_existing_crtc_state(state->state,
758 state->crtc);
759
760 if (writeback_job->fb && !crtc_state->active) {
761 DRM_DEBUG_ATOMIC("[CONNECTOR:%d:%s] has framebuffer, but [CRTC:%d] is off\n",
762 connector->base.id, connector->name,
763 state->crtc->base.id);
764 return -EINVAL;
765 }
766
767 if (writeback_job->out_fence && !writeback_job->fb) {
768 DRM_DEBUG_ATOMIC("[CONNECTOR:%d:%s] requesting out-fence without framebuffer\n",
769 connector->base.id, connector->name);
770 return -EINVAL;
771 }
772
773 return 0;
774}
775
776/**
680 * drm_atomic_get_plane_state - get plane state 777 * drm_atomic_get_plane_state - get plane state
681 * @state: global atomic state object 778 * @state: global atomic state object
682 * @plane: plane to get state object for 779 * @plane: plane to get state object for
@@ -700,6 +797,11 @@ drm_atomic_get_plane_state(struct drm_atomic_state *state,
700 797
701 WARN_ON(!state->acquire_ctx); 798 WARN_ON(!state->acquire_ctx);
702 799
800 /* the legacy pointers should never be set */
801 WARN_ON(plane->fb);
802 WARN_ON(plane->old_fb);
803 WARN_ON(plane->crtc);
804
703 plane_state = drm_atomic_get_existing_plane_state(state, plane); 805 plane_state = drm_atomic_get_existing_plane_state(state, plane);
704 if (plane_state) 806 if (plane_state)
705 return plane_state; 807 return plane_state;
@@ -794,8 +896,11 @@ static int drm_atomic_plane_set_property(struct drm_plane *plane,
794 } else if (property == plane->alpha_property) { 896 } else if (property == plane->alpha_property) {
795 state->alpha = val; 897 state->alpha = val;
796 } else if (property == plane->rotation_property) { 898 } else if (property == plane->rotation_property) {
797 if (!is_power_of_2(val & DRM_MODE_ROTATE_MASK)) 899 if (!is_power_of_2(val & DRM_MODE_ROTATE_MASK)) {
900 DRM_DEBUG_ATOMIC("[PLANE:%d:%s] bad rotation bitmask: 0x%llx\n",
901 plane->base.id, plane->name, val);
798 return -EINVAL; 902 return -EINVAL;
903 }
799 state->rotation = val; 904 state->rotation = val;
800 } else if (property == plane->zpos_property) { 905 } else if (property == plane->zpos_property) {
801 state->zpos = val; 906 state->zpos = val;
@@ -807,6 +912,9 @@ static int drm_atomic_plane_set_property(struct drm_plane *plane,
807 return plane->funcs->atomic_set_property(plane, state, 912 return plane->funcs->atomic_set_property(plane, state,
808 property, val); 913 property, val);
809 } else { 914 } else {
915 DRM_DEBUG_ATOMIC("[PLANE:%d:%s] unknown property [PROP:%d:%s]]\n",
916 plane->base.id, plane->name,
917 property->base.id, property->name);
810 return -EINVAL; 918 return -EINVAL;
811 } 919 }
812 920
@@ -914,10 +1022,12 @@ static int drm_atomic_plane_check(struct drm_plane *plane,
914 1022
915 /* either *both* CRTC and FB must be set, or neither */ 1023 /* either *both* CRTC and FB must be set, or neither */
916 if (state->crtc && !state->fb) { 1024 if (state->crtc && !state->fb) {
917 DRM_DEBUG_ATOMIC("CRTC set but no FB\n"); 1025 DRM_DEBUG_ATOMIC("[PLANE:%d:%s] CRTC set but no FB\n",
1026 plane->base.id, plane->name);
918 return -EINVAL; 1027 return -EINVAL;
919 } else if (state->fb && !state->crtc) { 1028 } else if (state->fb && !state->crtc) {
920 DRM_DEBUG_ATOMIC("FB set but no CRTC\n"); 1029 DRM_DEBUG_ATOMIC("[PLANE:%d:%s] FB set but no CRTC\n",
1030 plane->base.id, plane->name);
921 return -EINVAL; 1031 return -EINVAL;
922 } 1032 }
923 1033
@@ -927,7 +1037,9 @@ static int drm_atomic_plane_check(struct drm_plane *plane,
927 1037
928 /* Check whether this plane is usable on this CRTC */ 1038 /* Check whether this plane is usable on this CRTC */
929 if (!(plane->possible_crtcs & drm_crtc_mask(state->crtc))) { 1039 if (!(plane->possible_crtcs & drm_crtc_mask(state->crtc))) {
930 DRM_DEBUG_ATOMIC("Invalid crtc for plane\n"); 1040 DRM_DEBUG_ATOMIC("Invalid [CRTC:%d:%s] for [PLANE:%d:%s]\n",
1041 state->crtc->base.id, state->crtc->name,
1042 plane->base.id, plane->name);
931 return -EINVAL; 1043 return -EINVAL;
932 } 1044 }
933 1045
@@ -936,7 +1048,8 @@ static int drm_atomic_plane_check(struct drm_plane *plane,
936 state->fb->modifier); 1048 state->fb->modifier);
937 if (ret) { 1049 if (ret) {
938 struct drm_format_name_buf format_name; 1050 struct drm_format_name_buf format_name;
939 DRM_DEBUG_ATOMIC("Invalid pixel format %s, modifier 0x%llx\n", 1051 DRM_DEBUG_ATOMIC("[PLANE:%d:%s] invalid pixel format %s, modifier 0x%llx\n",
1052 plane->base.id, plane->name,
940 drm_get_format_name(state->fb->format->format, 1053 drm_get_format_name(state->fb->format->format,
941 &format_name), 1054 &format_name),
942 state->fb->modifier); 1055 state->fb->modifier);
@@ -948,7 +1061,8 @@ static int drm_atomic_plane_check(struct drm_plane *plane,
948 state->crtc_x > INT_MAX - (int32_t) state->crtc_w || 1061 state->crtc_x > INT_MAX - (int32_t) state->crtc_w ||
949 state->crtc_h > INT_MAX || 1062 state->crtc_h > INT_MAX ||
950 state->crtc_y > INT_MAX - (int32_t) state->crtc_h) { 1063 state->crtc_y > INT_MAX - (int32_t) state->crtc_h) {
951 DRM_DEBUG_ATOMIC("Invalid CRTC coordinates %ux%u+%d+%d\n", 1064 DRM_DEBUG_ATOMIC("[PLANE:%d:%s] invalid CRTC coordinates %ux%u+%d+%d\n",
1065 plane->base.id, plane->name,
952 state->crtc_w, state->crtc_h, 1066 state->crtc_w, state->crtc_h,
953 state->crtc_x, state->crtc_y); 1067 state->crtc_x, state->crtc_y);
954 return -ERANGE; 1068 return -ERANGE;
@@ -962,8 +1076,9 @@ static int drm_atomic_plane_check(struct drm_plane *plane,
962 state->src_x > fb_width - state->src_w || 1076 state->src_x > fb_width - state->src_w ||
963 state->src_h > fb_height || 1077 state->src_h > fb_height ||
964 state->src_y > fb_height - state->src_h) { 1078 state->src_y > fb_height - state->src_h) {
965 DRM_DEBUG_ATOMIC("Invalid source coordinates " 1079 DRM_DEBUG_ATOMIC("[PLANE:%d:%s] invalid source coordinates "
966 "%u.%06ux%u.%06u+%u.%06u+%u.%06u (fb %ux%u)\n", 1080 "%u.%06ux%u.%06u+%u.%06u+%u.%06u (fb %ux%u)\n",
1081 plane->base.id, plane->name,
967 state->src_w >> 16, ((state->src_w & 0xffff) * 15625) >> 10, 1082 state->src_w >> 16, ((state->src_w & 0xffff) * 15625) >> 10,
968 state->src_h >> 16, ((state->src_h & 0xffff) * 15625) >> 10, 1083 state->src_h >> 16, ((state->src_h & 0xffff) * 15625) >> 10,
969 state->src_x >> 16, ((state->src_x & 0xffff) * 15625) >> 10, 1084 state->src_x >> 16, ((state->src_x & 0xffff) * 15625) >> 10,
@@ -1120,6 +1235,7 @@ drm_atomic_get_private_obj_state(struct drm_atomic_state *state,
1120 state->private_objs[index].old_state = obj->state; 1235 state->private_objs[index].old_state = obj->state;
1121 state->private_objs[index].new_state = obj_state; 1236 state->private_objs[index].new_state = obj_state;
1122 state->private_objs[index].ptr = obj; 1237 state->private_objs[index].ptr = obj;
1238 obj_state->state = state;
1123 1239
1124 state->num_private_objs = num_objs; 1240 state->num_private_objs = num_objs;
1125 1241
@@ -1278,6 +1394,8 @@ static int drm_atomic_connector_set_property(struct drm_connector *connector,
1278 state->link_status = val; 1394 state->link_status = val;
1279 } else if (property == config->aspect_ratio_property) { 1395 } else if (property == config->aspect_ratio_property) {
1280 state->picture_aspect_ratio = val; 1396 state->picture_aspect_ratio = val;
1397 } else if (property == config->content_type_property) {
1398 state->content_type = val;
1281 } else if (property == connector->scaling_mode_property) { 1399 } else if (property == connector->scaling_mode_property) {
1282 state->scaling_mode = val; 1400 state->scaling_mode = val;
1283 } else if (property == connector->content_protection_property) { 1401 } else if (property == connector->content_protection_property) {
@@ -1286,10 +1404,24 @@ static int drm_atomic_connector_set_property(struct drm_connector *connector,
1286 return -EINVAL; 1404 return -EINVAL;
1287 } 1405 }
1288 state->content_protection = val; 1406 state->content_protection = val;
1407 } else if (property == config->writeback_fb_id_property) {
1408 struct drm_framebuffer *fb = drm_framebuffer_lookup(dev, NULL, val);
1409 int ret = drm_atomic_set_writeback_fb_for_connector(state, fb);
1410 if (fb)
1411 drm_framebuffer_put(fb);
1412 return ret;
1413 } else if (property == config->writeback_out_fence_ptr_property) {
1414 s32 __user *fence_ptr = u64_to_user_ptr(val);
1415
1416 return set_out_fence_for_connector(state->state, connector,
1417 fence_ptr);
1289 } else if (connector->funcs->atomic_set_property) { 1418 } else if (connector->funcs->atomic_set_property) {
1290 return connector->funcs->atomic_set_property(connector, 1419 return connector->funcs->atomic_set_property(connector,
1291 state, property, val); 1420 state, property, val);
1292 } else { 1421 } else {
1422 DRM_DEBUG_ATOMIC("[CONNECTOR:%d:%s] unknown property [PROP:%d:%s]]\n",
1423 connector->base.id, connector->name,
1424 property->base.id, property->name);
1293 return -EINVAL; 1425 return -EINVAL;
1294 } 1426 }
1295 1427
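For reference, a minimal userspace sketch (not part of this patch) of exercising the property paths above through the atomic ioctl with libdrm. It looks up a connector property by name and sets the new "content type" enum to "Game" (value 4 in drm_content_type_enum_list); the connector id and the lack of error handling are illustrative assumptions.

#include <stdint.h>
#include <string.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

/* Look up a connector property id by name; returns 0 if not exposed. */
static uint32_t connector_prop_id(int fd, uint32_t conn_id, const char *name)
{
	drmModeObjectPropertiesPtr props;
	uint32_t i, id = 0;

	props = drmModeObjectGetProperties(fd, conn_id, DRM_MODE_OBJECT_CONNECTOR);
	if (!props)
		return 0;

	for (i = 0; i < props->count_props; i++) {
		drmModePropertyPtr prop = drmModeGetProperty(fd, props->props[i]);

		if (prop && !strcmp(prop->name, name))
			id = prop->prop_id;
		drmModeFreeProperty(prop);
	}
	drmModeFreeObjectProperties(props);
	return id;
}

/* Request "Game" content type on an HDMI connector, if the driver exposes it. */
static int set_content_type_game(int fd, uint32_t conn_id)
{
	uint32_t prop = connector_prop_id(fd, conn_id, "content type");
	drmModeAtomicReqPtr req;
	int ret;

	if (!prop)
		return -1;	/* property not created/attached by the driver */

	req = drmModeAtomicAlloc();
	drmModeAtomicAddProperty(req, conn_id, prop, 4 /* Game */);
	ret = drmModeAtomicCommit(fd, req, 0, NULL);
	drmModeAtomicFree(req);
	return ret;
}

The same lookup-by-name pattern applies to the new WRITEBACK_FB_ID and WRITEBACK_OUT_FENCE_PTR properties handled a few lines above.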
@@ -1363,10 +1495,17 @@ drm_atomic_connector_get_property(struct drm_connector *connector,
1363 *val = state->link_status; 1495 *val = state->link_status;
1364 } else if (property == config->aspect_ratio_property) { 1496 } else if (property == config->aspect_ratio_property) {
1365 *val = state->picture_aspect_ratio; 1497 *val = state->picture_aspect_ratio;
1498 } else if (property == config->content_type_property) {
1499 *val = state->content_type;
1366 } else if (property == connector->scaling_mode_property) { 1500 } else if (property == connector->scaling_mode_property) {
1367 *val = state->scaling_mode; 1501 *val = state->scaling_mode;
1368 } else if (property == connector->content_protection_property) { 1502 } else if (property == connector->content_protection_property) {
1369 *val = state->content_protection; 1503 *val = state->content_protection;
1504 } else if (property == config->writeback_fb_id_property) {
1505 /* Writeback framebuffer is one-shot, write and forget */
1506 *val = 0;
1507 } else if (property == config->writeback_out_fence_ptr_property) {
1508 *val = 0;
1370 } else if (connector->funcs->atomic_get_property) { 1509 } else if (connector->funcs->atomic_get_property) {
1371 return connector->funcs->atomic_get_property(connector, 1510 return connector->funcs->atomic_get_property(connector,
1372 state, property, val); 1511 state, property, val);
@@ -1456,11 +1595,12 @@ drm_atomic_set_crtc_for_plane(struct drm_plane_state *plane_state,
1456 } 1595 }
1457 1596
1458 if (crtc) 1597 if (crtc)
1459 DRM_DEBUG_ATOMIC("Link plane state %p to [CRTC:%d:%s]\n", 1598 DRM_DEBUG_ATOMIC("Link [PLANE:%d:%s] state %p to [CRTC:%d:%s]\n",
1460 plane_state, crtc->base.id, crtc->name); 1599 plane->base.id, plane->name, plane_state,
1600 crtc->base.id, crtc->name);
1461 else 1601 else
1462 DRM_DEBUG_ATOMIC("Link plane state %p to [NOCRTC]\n", 1602 DRM_DEBUG_ATOMIC("Link [PLANE:%d:%s] state %p to [NOCRTC]\n",
1463 plane_state); 1603 plane->base.id, plane->name, plane_state);
1464 1604
1465 return 0; 1605 return 0;
1466} 1606}
@@ -1480,12 +1620,15 @@ void
1480drm_atomic_set_fb_for_plane(struct drm_plane_state *plane_state, 1620drm_atomic_set_fb_for_plane(struct drm_plane_state *plane_state,
1481 struct drm_framebuffer *fb) 1621 struct drm_framebuffer *fb)
1482{ 1622{
1623 struct drm_plane *plane = plane_state->plane;
1624
1483 if (fb) 1625 if (fb)
1484 DRM_DEBUG_ATOMIC("Set [FB:%d] for plane state %p\n", 1626 DRM_DEBUG_ATOMIC("Set [FB:%d] for [PLANE:%d:%s] state %p\n",
1485 fb->base.id, plane_state); 1627 fb->base.id, plane->base.id, plane->name,
1486 else
1487 DRM_DEBUG_ATOMIC("Set [NOFB] for plane state %p\n",
1488 plane_state); 1628 plane_state);
1629 else
1630 DRM_DEBUG_ATOMIC("Set [NOFB] for [PLANE:%d:%s] state %p\n",
1631 plane->base.id, plane->name, plane_state);
1489 1632
1490 drm_framebuffer_assign(&plane_state->fb, fb); 1633 drm_framebuffer_assign(&plane_state->fb, fb);
1491} 1634}
@@ -1546,6 +1689,7 @@ int
1546drm_atomic_set_crtc_for_connector(struct drm_connector_state *conn_state, 1689drm_atomic_set_crtc_for_connector(struct drm_connector_state *conn_state,
1547 struct drm_crtc *crtc) 1690 struct drm_crtc *crtc)
1548{ 1691{
1692 struct drm_connector *connector = conn_state->connector;
1549 struct drm_crtc_state *crtc_state; 1693 struct drm_crtc_state *crtc_state;
1550 1694
1551 if (conn_state->crtc == crtc) 1695 if (conn_state->crtc == crtc)
@@ -1573,10 +1717,12 @@ drm_atomic_set_crtc_for_connector(struct drm_connector_state *conn_state,
1573 drm_connector_get(conn_state->connector); 1717 drm_connector_get(conn_state->connector);
1574 conn_state->crtc = crtc; 1718 conn_state->crtc = crtc;
1575 1719
1576 DRM_DEBUG_ATOMIC("Link connector state %p to [CRTC:%d:%s]\n", 1720 DRM_DEBUG_ATOMIC("Link [CONNECTOR:%d:%s] state %p to [CRTC:%d:%s]\n",
1721 connector->base.id, connector->name,
1577 conn_state, crtc->base.id, crtc->name); 1722 conn_state, crtc->base.id, crtc->name);
1578 } else { 1723 } else {
1579 DRM_DEBUG_ATOMIC("Link connector state %p to [NOCRTC]\n", 1724 DRM_DEBUG_ATOMIC("Link [CONNECTOR:%d:%s] state %p to [NOCRTC]\n",
1725 connector->base.id, connector->name,
1580 conn_state); 1726 conn_state);
1581 } 1727 }
1582 1728
@@ -1584,6 +1730,70 @@ drm_atomic_set_crtc_for_connector(struct drm_connector_state *conn_state,
1584} 1730}
1585EXPORT_SYMBOL(drm_atomic_set_crtc_for_connector); 1731EXPORT_SYMBOL(drm_atomic_set_crtc_for_connector);
1586 1732
1733/*
1734 * drm_atomic_get_writeback_job - return or allocate a writeback job
1735 * @conn_state: Connector state to get the job for
1736 *
1737 * Writeback jobs have a different lifetime to the atomic state they are
1738 * associated with. This convenience function takes care of allocating a job
1739 * if there isn't yet one associated with the connector state, otherwise
1740 * it just returns the existing job.
1741 *
1742 * Returns: The writeback job for the given connector state
1743 */
1744static struct drm_writeback_job *
1745drm_atomic_get_writeback_job(struct drm_connector_state *conn_state)
1746{
1747 WARN_ON(conn_state->connector->connector_type != DRM_MODE_CONNECTOR_WRITEBACK);
1748
1749 if (!conn_state->writeback_job)
1750 conn_state->writeback_job =
1751 kzalloc(sizeof(*conn_state->writeback_job), GFP_KERNEL);
1752
1753 return conn_state->writeback_job;
1754}
1755
1756/**
1757 * drm_atomic_set_writeback_fb_for_connector - set writeback framebuffer
1758 * @conn_state: atomic state object for the connector
1759 * @fb: fb to use for the connector
1760 *
1761 * This is used to set the framebuffer for a writeback connector, which outputs
1762 * to a buffer instead of an actual physical connector.
1763 * Changing the assigned framebuffer requires us to grab a reference to the new
1764 * fb and drop the reference to the old fb, if there is one. This function
1765 * takes care of all these details besides updating the pointer in the
1766 * state object itself.
1767 *
1768 * Note: The only way conn_state can already have an fb set is if the commit
1769 * sets the property more than once.
1770 *
1771 * See also: drm_writeback_connector_init()
1772 *
1773 * Returns: 0 on success
1774 */
1775int drm_atomic_set_writeback_fb_for_connector(
1776 struct drm_connector_state *conn_state,
1777 struct drm_framebuffer *fb)
1778{
1779 struct drm_writeback_job *job =
1780 drm_atomic_get_writeback_job(conn_state);
1781 if (!job)
1782 return -ENOMEM;
1783
1784 drm_framebuffer_assign(&job->fb, fb);
1785
1786 if (fb)
1787 DRM_DEBUG_ATOMIC("Set [FB:%d] for connector state %p\n",
1788 fb->base.id, conn_state);
1789 else
1790 DRM_DEBUG_ATOMIC("Set [NOFB] for connector state %p\n",
1791 conn_state);
1792
1793 return 0;
1794}
1795EXPORT_SYMBOL(drm_atomic_set_writeback_fb_for_connector);
1796
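As a rough illustration of how a driver consumes the one-shot framebuffer stored by this helper, the sketch below shows a connector atomic_check that validates the writeback job against the attached CRTC. It is a hedged sketch, not part of this patch: the size constraint and the function name are illustrative, and a real driver would add its own format and pitch checks.

#include <drm/drm_atomic.h>
#include <drm/drm_connector.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_print.h>
#include <drm/drm_writeback.h>

static int sketch_wb_connector_atomic_check(struct drm_connector *connector,
					    struct drm_connector_state *conn_state)
{
	struct drm_crtc_state *crtc_state;
	const struct drm_display_mode *mode;
	struct drm_framebuffer *fb;

	/* Nothing queued for this commit: the writeback fb is one-shot. */
	if (!conn_state->writeback_job || !conn_state->writeback_job->fb)
		return 0;

	if (!conn_state->crtc)
		return 0;

	fb = conn_state->writeback_job->fb;
	crtc_state = drm_atomic_get_new_crtc_state(conn_state->state,
						   conn_state->crtc);
	mode = &crtc_state->mode;

	/* Illustrative constraint: write back exactly one full frame. */
	if (fb->width != mode->hdisplay || fb->height != mode->vdisplay) {
		DRM_DEBUG_KMS("writeback fb %ux%u does not match mode %ux%u\n",
			      fb->width, fb->height,
			      mode->hdisplay, mode->vdisplay);
		return -EINVAL;
	}

	return 0;
}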
1587/** 1797/**
1588 * drm_atomic_add_affected_connectors - add connectors for crtc 1798 * drm_atomic_add_affected_connectors - add connectors for crtc
1589 * @state: atomic state 1799 * @state: atomic state
@@ -1672,6 +1882,9 @@ drm_atomic_add_affected_planes(struct drm_atomic_state *state,
1672 1882
1673 WARN_ON(!drm_atomic_get_new_crtc_state(state, crtc)); 1883 WARN_ON(!drm_atomic_get_new_crtc_state(state, crtc));
1674 1884
1885 DRM_DEBUG_ATOMIC("Adding all current planes for [CRTC:%d:%s] to %p\n",
1886 crtc->base.id, crtc->name, state);
1887
1675 drm_for_each_plane_mask(plane, state->dev, crtc->state->plane_mask) { 1888 drm_for_each_plane_mask(plane, state->dev, crtc->state->plane_mask) {
1676 struct drm_plane_state *plane_state = 1889 struct drm_plane_state *plane_state =
1677 drm_atomic_get_plane_state(state, plane); 1890 drm_atomic_get_plane_state(state, plane);
@@ -1702,6 +1915,8 @@ int drm_atomic_check_only(struct drm_atomic_state *state)
1702 struct drm_plane_state *plane_state; 1915 struct drm_plane_state *plane_state;
1703 struct drm_crtc *crtc; 1916 struct drm_crtc *crtc;
1704 struct drm_crtc_state *crtc_state; 1917 struct drm_crtc_state *crtc_state;
1918 struct drm_connector *conn;
1919 struct drm_connector_state *conn_state;
1705 int i, ret = 0; 1920 int i, ret = 0;
1706 1921
1707 DRM_DEBUG_ATOMIC("checking %p\n", state); 1922 DRM_DEBUG_ATOMIC("checking %p\n", state);
@@ -1724,6 +1939,15 @@ int drm_atomic_check_only(struct drm_atomic_state *state)
1724 } 1939 }
1725 } 1940 }
1726 1941
1942 for_each_new_connector_in_state(state, conn, conn_state, i) {
1943 ret = drm_atomic_connector_check(conn, conn_state);
1944 if (ret) {
1945 DRM_DEBUG_ATOMIC("[CONNECTOR:%d:%s] atomic core check failed\n",
1946 conn->base.id, conn->name);
1947 return ret;
1948 }
1949 }
1950
1727 if (config->funcs->atomic_check) { 1951 if (config->funcs->atomic_check) {
1728 ret = config->funcs->atomic_check(state->dev, state); 1952 ret = config->funcs->atomic_check(state->dev, state);
1729 1953
@@ -2048,45 +2272,6 @@ int drm_atomic_set_property(struct drm_atomic_state *state,
2048} 2272}
2049 2273
2050/** 2274/**
2051 * drm_atomic_clean_old_fb -- Unset old_fb pointers and set plane->fb pointers.
2052 *
2053 * @dev: drm device to check.
2054 * @plane_mask: plane mask for planes that were updated.
2055 * @ret: return value, can be -EDEADLK for a retry.
2056 *
2057 * Before doing an update &drm_plane.old_fb is set to &drm_plane.fb, but before
2058 * dropping the locks old_fb needs to be set to NULL and plane->fb updated. This
2059 * is a common operation for each atomic update, so this call is split off as a
2060 * helper.
2061 */
2062void drm_atomic_clean_old_fb(struct drm_device *dev,
2063 unsigned plane_mask,
2064 int ret)
2065{
2066 struct drm_plane *plane;
2067
2068 /* if succeeded, fixup legacy plane crtc/fb ptrs before dropping
2069 * locks (ie. while it is still safe to deref plane->state). We
2070 * need to do this here because the driver entry points cannot
2071 * distinguish between legacy and atomic ioctls.
2072 */
2073 drm_for_each_plane_mask(plane, dev, plane_mask) {
2074 if (ret == 0) {
2075 struct drm_framebuffer *new_fb = plane->state->fb;
2076 if (new_fb)
2077 drm_framebuffer_get(new_fb);
2078 plane->fb = new_fb;
2079 plane->crtc = plane->state->crtc;
2080
2081 if (plane->old_fb)
2082 drm_framebuffer_put(plane->old_fb);
2083 }
2084 plane->old_fb = NULL;
2085 }
2086}
2087EXPORT_SYMBOL(drm_atomic_clean_old_fb);
2088
2089/**
2090 * DOC: explicit fencing properties 2275 * DOC: explicit fencing properties
2091 * 2276 *
2092 * Explicit fencing allows userspace to control the buffer synchronization 2277 * Explicit fencing allows userspace to control the buffer synchronization
@@ -2161,7 +2346,7 @@ static int setup_out_fence(struct drm_out_fence_state *fence_state,
2161 return 0; 2346 return 0;
2162} 2347}
2163 2348
2164static int prepare_crtc_signaling(struct drm_device *dev, 2349static int prepare_signaling(struct drm_device *dev,
2165 struct drm_atomic_state *state, 2350 struct drm_atomic_state *state,
2166 struct drm_mode_atomic *arg, 2351 struct drm_mode_atomic *arg,
2167 struct drm_file *file_priv, 2352 struct drm_file *file_priv,
@@ -2170,6 +2355,8 @@ static int prepare_crtc_signaling(struct drm_device *dev,
2170{ 2355{
2171 struct drm_crtc *crtc; 2356 struct drm_crtc *crtc;
2172 struct drm_crtc_state *crtc_state; 2357 struct drm_crtc_state *crtc_state;
2358 struct drm_connector *conn;
2359 struct drm_connector_state *conn_state;
2173 int i, c = 0, ret; 2360 int i, c = 0, ret;
2174 2361
2175 if (arg->flags & DRM_MODE_ATOMIC_TEST_ONLY) 2362 if (arg->flags & DRM_MODE_ATOMIC_TEST_ONLY)
@@ -2235,6 +2422,43 @@ static int prepare_crtc_signaling(struct drm_device *dev,
2235 c++; 2422 c++;
2236 } 2423 }
2237 2424
2425 for_each_new_connector_in_state(state, conn, conn_state, i) {
2426 struct drm_writeback_job *job;
2427 struct drm_out_fence_state *f;
2428 struct dma_fence *fence;
2429 s32 __user *fence_ptr;
2430
2431 fence_ptr = get_out_fence_for_connector(state, conn);
2432 if (!fence_ptr)
2433 continue;
2434
2435 job = drm_atomic_get_writeback_job(conn_state);
2436 if (!job)
2437 return -ENOMEM;
2438
2439 f = krealloc(*fence_state, sizeof(**fence_state) *
2440 (*num_fences + 1), GFP_KERNEL);
2441 if (!f)
2442 return -ENOMEM;
2443
2444 memset(&f[*num_fences], 0, sizeof(*f));
2445
2446 f[*num_fences].out_fence_ptr = fence_ptr;
2447 *fence_state = f;
2448
2449 fence = drm_writeback_get_out_fence((struct drm_writeback_connector *)conn);
2450 if (!fence)
2451 return -ENOMEM;
2452
2453 ret = setup_out_fence(&f[(*num_fences)++], fence);
2454 if (ret) {
2455 dma_fence_put(fence);
2456 return ret;
2457 }
2458
2459 job->out_fence = fence;
2460 }
2461
2238 /* 2462 /*
2239 * Having this flag means user mode pends on event which will never 2463 * Having this flag means user mode pends on event which will never
2240 * reach due to lack of at least one CRTC for signaling 2464 * reach due to lack of at least one CRTC for signaling
@@ -2245,11 +2469,11 @@ static int prepare_crtc_signaling(struct drm_device *dev,
2245 return 0; 2469 return 0;
2246} 2470}
2247 2471
2248static void complete_crtc_signaling(struct drm_device *dev, 2472static void complete_signaling(struct drm_device *dev,
2249 struct drm_atomic_state *state, 2473 struct drm_atomic_state *state,
2250 struct drm_out_fence_state *fence_state, 2474 struct drm_out_fence_state *fence_state,
2251 unsigned int num_fences, 2475 unsigned int num_fences,
2252 bool install_fds) 2476 bool install_fds)
2253{ 2477{
2254 struct drm_crtc *crtc; 2478 struct drm_crtc *crtc;
2255 struct drm_crtc_state *crtc_state; 2479 struct drm_crtc_state *crtc_state;
@@ -2306,9 +2530,7 @@ int drm_mode_atomic_ioctl(struct drm_device *dev,
2306 unsigned int copied_objs, copied_props; 2530 unsigned int copied_objs, copied_props;
2307 struct drm_atomic_state *state; 2531 struct drm_atomic_state *state;
2308 struct drm_modeset_acquire_ctx ctx; 2532 struct drm_modeset_acquire_ctx ctx;
2309 struct drm_plane *plane;
2310 struct drm_out_fence_state *fence_state; 2533 struct drm_out_fence_state *fence_state;
2311 unsigned plane_mask;
2312 int ret = 0; 2534 int ret = 0;
2313 unsigned int i, j, num_fences; 2535 unsigned int i, j, num_fences;
2314 2536
@@ -2348,7 +2570,6 @@ int drm_mode_atomic_ioctl(struct drm_device *dev,
2348 state->allow_modeset = !!(arg->flags & DRM_MODE_ATOMIC_ALLOW_MODESET); 2570 state->allow_modeset = !!(arg->flags & DRM_MODE_ATOMIC_ALLOW_MODESET);
2349 2571
2350retry: 2572retry:
2351 plane_mask = 0;
2352 copied_objs = 0; 2573 copied_objs = 0;
2353 copied_props = 0; 2574 copied_props = 0;
2354 fence_state = NULL; 2575 fence_state = NULL;
@@ -2419,17 +2640,11 @@ retry:
2419 copied_props++; 2640 copied_props++;
2420 } 2641 }
2421 2642
2422 if (obj->type == DRM_MODE_OBJECT_PLANE && count_props &&
2423 !(arg->flags & DRM_MODE_ATOMIC_TEST_ONLY)) {
2424 plane = obj_to_plane(obj);
2425 plane_mask |= (1 << drm_plane_index(plane));
2426 plane->old_fb = plane->fb;
2427 }
2428 drm_mode_object_put(obj); 2643 drm_mode_object_put(obj);
2429 } 2644 }
2430 2645
2431 ret = prepare_crtc_signaling(dev, state, arg, file_priv, &fence_state, 2646 ret = prepare_signaling(dev, state, arg, file_priv, &fence_state,
2432 &num_fences); 2647 &num_fences);
2433 if (ret) 2648 if (ret)
2434 goto out; 2649 goto out;
2435 2650
@@ -2445,9 +2660,7 @@ retry:
2445 } 2660 }
2446 2661
2447out: 2662out:
2448 drm_atomic_clean_old_fb(dev, plane_mask, ret); 2663 complete_signaling(dev, state, fence_state, num_fences, !ret);
2449
2450 complete_crtc_signaling(dev, state, fence_state, num_fences, !ret);
2451 2664
2452 if (ret == -EDEADLK) { 2665 if (ret == -EDEADLK) {
2453 drm_atomic_state_clear(state); 2666 drm_atomic_state_clear(state);
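End to end, the writeback branch added to prepare_signaling() above means a writeback commit from userspace looks roughly like the sketch below: the kernel writes a sync_file fd through the user pointer passed via WRITEBACK_OUT_FENCE_PTR before the ioctl returns, and that fd signals once the write-out has landed. This is only a sketch; the property ids are assumed to have been looked up by name, and the property names come from the new drm_writeback.c in this series.

#include <poll.h>
#include <stdint.h>
#include <unistd.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

/* wb_conn_id/crtc_id/fb_id and the three property ids are assumed known. */
static int queue_writeback(int fd, uint32_t wb_conn_id, uint32_t crtc_id,
			   uint32_t fb_id, uint32_t prop_crtc_id,
			   uint32_t prop_wb_fb_id, uint32_t prop_wb_out_fence)
{
	drmModeAtomicReqPtr req = drmModeAtomicAlloc();
	int32_t out_fence = -1;
	struct pollfd pfd;
	int ret;

	drmModeAtomicAddProperty(req, wb_conn_id, prop_crtc_id, crtc_id);
	/* One-shot: the fb is consumed by this commit and not carried over. */
	drmModeAtomicAddProperty(req, wb_conn_id, prop_wb_fb_id, fb_id);
	/* The kernel fills *out_fence with a sync_file fd on success. */
	drmModeAtomicAddProperty(req, wb_conn_id, prop_wb_out_fence,
				 (uint64_t)(uintptr_t)&out_fence);

	ret = drmModeAtomicCommit(fd, req, DRM_MODE_ATOMIC_ALLOW_MODESET, NULL);
	drmModeAtomicFree(req);
	if (ret || out_fence < 0)
		return ret ? ret : -1;

	/* Wait for the write-out to finish before reading or reusing fb_id. */
	pfd.fd = out_fence;
	pfd.events = POLLIN;
	poll(&pfd, 1, -1);
	close(out_fence);
	return 0;
}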
diff --git a/drivers/gpu/drm/drm_atomic_helper.c b/drivers/gpu/drm/drm_atomic_helper.c
index 130da5195f3b..17baf5057132 100644
--- a/drivers/gpu/drm/drm_atomic_helper.c
+++ b/drivers/gpu/drm/drm_atomic_helper.c
@@ -30,6 +30,7 @@
30#include <drm/drm_plane_helper.h> 30#include <drm/drm_plane_helper.h>
31#include <drm/drm_crtc_helper.h> 31#include <drm/drm_crtc_helper.h>
32#include <drm/drm_atomic_helper.h> 32#include <drm/drm_atomic_helper.h>
33#include <drm/drm_writeback.h>
33#include <linux/dma-fence.h> 34#include <linux/dma-fence.h>
34 35
35#include "drm_crtc_helper_internal.h" 36#include "drm_crtc_helper_internal.h"
@@ -1172,6 +1173,25 @@ void drm_atomic_helper_commit_modeset_disables(struct drm_device *dev,
1172} 1173}
1173EXPORT_SYMBOL(drm_atomic_helper_commit_modeset_disables); 1174EXPORT_SYMBOL(drm_atomic_helper_commit_modeset_disables);
1174 1175
1176static void drm_atomic_helper_commit_writebacks(struct drm_device *dev,
1177 struct drm_atomic_state *old_state)
1178{
1179 struct drm_connector *connector;
1180 struct drm_connector_state *new_conn_state;
1181 int i;
1182
1183 for_each_new_connector_in_state(old_state, connector, new_conn_state, i) {
1184 const struct drm_connector_helper_funcs *funcs;
1185
1186 funcs = connector->helper_private;
1187
1188 if (new_conn_state->writeback_job && new_conn_state->writeback_job->fb) {
1189 WARN_ON(connector->connector_type != DRM_MODE_CONNECTOR_WRITEBACK);
1190 funcs->atomic_commit(connector, new_conn_state->writeback_job);
1191 }
1192 }
1193}
1194
1175/** 1195/**
1176 * drm_atomic_helper_commit_modeset_enables - modeset commit to enable outputs 1196 * drm_atomic_helper_commit_modeset_enables - modeset commit to enable outputs
1177 * @dev: DRM device 1197 * @dev: DRM device
@@ -1251,6 +1271,8 @@ void drm_atomic_helper_commit_modeset_enables(struct drm_device *dev,
1251 1271
1252 drm_bridge_enable(encoder->bridge); 1272 drm_bridge_enable(encoder->bridge);
1253 } 1273 }
1274
1275 drm_atomic_helper_commit_writebacks(dev, old_state);
1254} 1276}
1255EXPORT_SYMBOL(drm_atomic_helper_commit_modeset_enables); 1277EXPORT_SYMBOL(drm_atomic_helper_commit_modeset_enables);
1256 1278
@@ -2914,7 +2936,6 @@ static int __drm_atomic_helper_disable_all(struct drm_device *dev,
2914 struct drm_plane *plane; 2936 struct drm_plane *plane;
2915 struct drm_crtc_state *crtc_state; 2937 struct drm_crtc_state *crtc_state;
2916 struct drm_crtc *crtc; 2938 struct drm_crtc *crtc;
2917 unsigned plane_mask = 0;
2918 int ret, i; 2939 int ret, i;
2919 2940
2920 state = drm_atomic_state_alloc(dev); 2941 state = drm_atomic_state_alloc(dev);
@@ -2957,17 +2978,10 @@ static int __drm_atomic_helper_disable_all(struct drm_device *dev,
2957 goto free; 2978 goto free;
2958 2979
2959 drm_atomic_set_fb_for_plane(plane_state, NULL); 2980 drm_atomic_set_fb_for_plane(plane_state, NULL);
2960
2961 if (clean_old_fbs) {
2962 plane->old_fb = plane->fb;
2963 plane_mask |= BIT(drm_plane_index(plane));
2964 }
2965 } 2981 }
2966 2982
2967 ret = drm_atomic_commit(state); 2983 ret = drm_atomic_commit(state);
2968free: 2984free:
2969 if (plane_mask)
2970 drm_atomic_clean_old_fb(dev, plane_mask, ret);
2971 drm_atomic_state_put(state); 2985 drm_atomic_state_put(state);
2972 return ret; 2986 return ret;
2973} 2987}
@@ -3129,13 +3143,8 @@ int drm_atomic_helper_commit_duplicated_state(struct drm_atomic_state *state,
3129 3143
3130 state->acquire_ctx = ctx; 3144 state->acquire_ctx = ctx;
3131 3145
3132 for_each_new_plane_in_state(state, plane, new_plane_state, i) { 3146 for_each_new_plane_in_state(state, plane, new_plane_state, i)
3133 WARN_ON(plane->crtc != new_plane_state->crtc);
3134 WARN_ON(plane->fb != new_plane_state->fb);
3135 WARN_ON(plane->old_fb);
3136
3137 state->planes[i].old_state = plane->state; 3147 state->planes[i].old_state = plane->state;
3138 }
3139 3148
3140 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) 3149 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i)
3141 state->crtcs[i].old_state = crtc->state; 3150 state->crtcs[i].old_state = crtc->state;
@@ -3660,6 +3669,9 @@ __drm_atomic_helper_connector_duplicate_state(struct drm_connector *connector,
3660 if (state->crtc) 3669 if (state->crtc)
3661 drm_connector_get(connector); 3670 drm_connector_get(connector);
3662 state->commit = NULL; 3671 state->commit = NULL;
3672
3673 /* Don't copy over a writeback job, they are used only once */
3674 state->writeback_job = NULL;
3663} 3675}
3664EXPORT_SYMBOL(__drm_atomic_helper_connector_duplicate_state); 3676EXPORT_SYMBOL(__drm_atomic_helper_connector_duplicate_state);
3665 3677
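On the driver side, the new drm_atomic_helper_commit_writebacks() above hands the job to the connector helper's atomic_commit hook. A rough sketch of such a hook follows; drm_writeback_queue_job() and drm_writeback_signal_completion() are the queue/complete helpers added in drm_writeback.c by this series, but their exact signatures here, as well as my_hw_start_writeback(), are assumptions shown only for illustration.

#include <linux/kernel.h>
#include <drm/drm_connector.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_writeback.h>

/* Hypothetical hardware entry point that kicks a one-shot write-out. */
static void my_hw_start_writeback(struct drm_framebuffer *fb);

static void sketch_wb_atomic_commit(struct drm_connector *conn,
				    struct drm_writeback_job *job)
{
	struct drm_writeback_connector *wb_conn =
		container_of(conn, struct drm_writeback_connector, base);

	if (!job->fb)
		return;

	/* Hand the job (fb reference plus optional out-fence) to the core
	 * queue; it is completed from the interrupt handler below. */
	drm_writeback_queue_job(wb_conn, job);

	/* Program the hardware to write the current CRTC contents into
	 * job->fb's memory (assumed helper). */
	my_hw_start_writeback(job->fb);
}

/* Called from the (hypothetical) "write-out done" interrupt. */
static void sketch_wb_irq_done(struct drm_writeback_connector *wb_conn)
{
	drm_writeback_signal_completion(wb_conn, 0);
}

Note also the change to __drm_atomic_helper_connector_duplicate_state() above: because jobs are used only once, a duplicated connector state never carries the previous writeback_job forward.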
diff --git a/drivers/gpu/drm/drm_connector.c b/drivers/gpu/drm/drm_connector.c
index 9b9ba5d5ec0c..2f9ebddd178e 100644
--- a/drivers/gpu/drm/drm_connector.c
+++ b/drivers/gpu/drm/drm_connector.c
@@ -87,6 +87,7 @@ static struct drm_conn_prop_enum_list drm_connector_enum_list[] = {
87 { DRM_MODE_CONNECTOR_VIRTUAL, "Virtual" }, 87 { DRM_MODE_CONNECTOR_VIRTUAL, "Virtual" },
88 { DRM_MODE_CONNECTOR_DSI, "DSI" }, 88 { DRM_MODE_CONNECTOR_DSI, "DSI" },
89 { DRM_MODE_CONNECTOR_DPI, "DPI" }, 89 { DRM_MODE_CONNECTOR_DPI, "DPI" },
90 { DRM_MODE_CONNECTOR_WRITEBACK, "Writeback" },
90}; 91};
91 92
92void drm_connector_ida_init(void) 93void drm_connector_ida_init(void)
@@ -195,6 +196,10 @@ int drm_connector_init(struct drm_device *dev,
195 struct ida *connector_ida = 196 struct ida *connector_ida =
196 &drm_connector_enum_list[connector_type].ida; 197 &drm_connector_enum_list[connector_type].ida;
197 198
199 WARN_ON(drm_drv_uses_atomic_modeset(dev) &&
200 (!funcs->atomic_destroy_state ||
201 !funcs->atomic_duplicate_state));
202
198 ret = __drm_mode_object_add(dev, &connector->base, 203 ret = __drm_mode_object_add(dev, &connector->base,
199 DRM_MODE_OBJECT_CONNECTOR, 204 DRM_MODE_OBJECT_CONNECTOR,
200 false, drm_connector_free); 205 false, drm_connector_free);
@@ -249,7 +254,8 @@ int drm_connector_init(struct drm_device *dev,
249 config->num_connector++; 254 config->num_connector++;
250 spin_unlock_irq(&config->connector_list_lock); 255 spin_unlock_irq(&config->connector_list_lock);
251 256
252 if (connector_type != DRM_MODE_CONNECTOR_VIRTUAL) 257 if (connector_type != DRM_MODE_CONNECTOR_VIRTUAL &&
258 connector_type != DRM_MODE_CONNECTOR_WRITEBACK)
253 drm_object_attach_property(&connector->base, 259 drm_object_attach_property(&connector->base,
254 config->edid_property, 260 config->edid_property,
255 0); 261 0);
@@ -720,6 +726,14 @@ static const struct drm_prop_enum_list drm_aspect_ratio_enum_list[] = {
720 { DRM_MODE_PICTURE_ASPECT_16_9, "16:9" }, 726 { DRM_MODE_PICTURE_ASPECT_16_9, "16:9" },
721}; 727};
722 728
729static const struct drm_prop_enum_list drm_content_type_enum_list[] = {
730 { DRM_MODE_CONTENT_TYPE_NO_DATA, "No Data" },
731 { DRM_MODE_CONTENT_TYPE_GRAPHICS, "Graphics" },
732 { DRM_MODE_CONTENT_TYPE_PHOTO, "Photo" },
733 { DRM_MODE_CONTENT_TYPE_CINEMA, "Cinema" },
734 { DRM_MODE_CONTENT_TYPE_GAME, "Game" },
735};
736
723static const struct drm_prop_enum_list drm_panel_orientation_enum_list[] = { 737static const struct drm_prop_enum_list drm_panel_orientation_enum_list[] = {
724 { DRM_MODE_PANEL_ORIENTATION_NORMAL, "Normal" }, 738 { DRM_MODE_PANEL_ORIENTATION_NORMAL, "Normal" },
725 { DRM_MODE_PANEL_ORIENTATION_BOTTOM_UP, "Upside Down" }, 739 { DRM_MODE_PANEL_ORIENTATION_BOTTOM_UP, "Upside Down" },
@@ -997,6 +1011,84 @@ int drm_mode_create_dvi_i_properties(struct drm_device *dev)
997EXPORT_SYMBOL(drm_mode_create_dvi_i_properties); 1011EXPORT_SYMBOL(drm_mode_create_dvi_i_properties);
998 1012
999/** 1013/**
1014 * DOC: HDMI connector properties
1015 *
1016 * content type (HDMI specific):
1017 * Indicates content type setting to be used in HDMI infoframes to indicate
 1018 * content type for the external device, so that it adjusts its display
1019 * settings accordingly.
1020 *
1021 * The value of this property can be one of the following:
1022 *
1023 * No Data:
1024 * Content type is unknown
1025 * Graphics:
1026 * Content type is graphics
1027 * Photo:
1028 * Content type is photo
1029 * Cinema:
1030 * Content type is cinema
1031 * Game:
1032 * Content type is game
1033 *
1034 * Drivers can set up this property by calling
 1035 * drm_connector_attach_content_type_property(). Decoding to
 1036 * infoframe values is done through
 1037 * drm_hdmi_avi_infoframe_content_type(), which also derives the
 1038 * ITC bit from the selected content type.
1039 */
1040
1041/**
1042 * drm_connector_attach_content_type_property - attach content-type property
1043 * @connector: connector to attach content type property on.
1044 *
 1045 * Called by a driver the first time an HDMI connector is created.
1046 */
1047int drm_connector_attach_content_type_property(struct drm_connector *connector)
1048{
1049 if (!drm_mode_create_content_type_property(connector->dev))
1050 drm_object_attach_property(&connector->base,
1051 connector->dev->mode_config.content_type_property,
1052 DRM_MODE_CONTENT_TYPE_NO_DATA);
1053 return 0;
1054}
1055EXPORT_SYMBOL(drm_connector_attach_content_type_property);
1056
1057
1058/**
1059 * drm_hdmi_avi_infoframe_content_type() - fill the HDMI AVI infoframe
1060 * content type information, based
1061 * on correspondent DRM property.
1062 * @frame: HDMI AVI infoframe
1063 * @conn_state: DRM display connector state
1064 *
1065 */
1066void drm_hdmi_avi_infoframe_content_type(struct hdmi_avi_infoframe *frame,
1067 const struct drm_connector_state *conn_state)
1068{
1069 switch (conn_state->content_type) {
1070 case DRM_MODE_CONTENT_TYPE_GRAPHICS:
1071 frame->content_type = HDMI_CONTENT_TYPE_GRAPHICS;
1072 break;
1073 case DRM_MODE_CONTENT_TYPE_CINEMA:
1074 frame->content_type = HDMI_CONTENT_TYPE_CINEMA;
1075 break;
1076 case DRM_MODE_CONTENT_TYPE_GAME:
1077 frame->content_type = HDMI_CONTENT_TYPE_GAME;
1078 break;
1079 case DRM_MODE_CONTENT_TYPE_PHOTO:
1080 frame->content_type = HDMI_CONTENT_TYPE_PHOTO;
1081 break;
1082 default:
1083 /* Graphics is the default(0) */
1084 frame->content_type = HDMI_CONTENT_TYPE_GRAPHICS;
1085 }
1086
1087 frame->itc = conn_state->content_type != DRM_MODE_CONTENT_TYPE_NO_DATA;
1088}
1089EXPORT_SYMBOL(drm_hdmi_avi_infoframe_content_type);
1090
1091/**
1000 * drm_create_tv_properties - create TV specific connector properties 1092 * drm_create_tv_properties - create TV specific connector properties
1001 * @dev: DRM device 1093 * @dev: DRM device
1002 * @num_modes: number of different TV formats (modes) supported 1094 * @num_modes: number of different TV formats (modes) supported
@@ -1261,6 +1353,33 @@ int drm_mode_create_aspect_ratio_property(struct drm_device *dev)
1261EXPORT_SYMBOL(drm_mode_create_aspect_ratio_property); 1353EXPORT_SYMBOL(drm_mode_create_aspect_ratio_property);
1262 1354
1263/** 1355/**
1356 * drm_mode_create_content_type_property - create content type property
1357 * @dev: DRM device
1358 *
1359 * Called by a driver the first time it's needed, must be attached to desired
1360 * connectors.
1361 *
1362 * Returns:
1363 * Zero on success, negative errno on failure.
1364 */
1365int drm_mode_create_content_type_property(struct drm_device *dev)
1366{
1367 if (dev->mode_config.content_type_property)
1368 return 0;
1369
1370 dev->mode_config.content_type_property =
1371 drm_property_create_enum(dev, 0, "content type",
1372 drm_content_type_enum_list,
1373 ARRAY_SIZE(drm_content_type_enum_list));
1374
1375 if (dev->mode_config.content_type_property == NULL)
1376 return -ENOMEM;
1377
1378 return 0;
1379}
1380EXPORT_SYMBOL(drm_mode_create_content_type_property);
1381
1382/**
1264 * drm_mode_create_suggested_offset_properties - create suggests offset properties 1383 * drm_mode_create_suggested_offset_properties - create suggests offset properties
1265 * @dev: DRM device 1384 * @dev: DRM device
1266 * 1385 *
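Putting the two new exports together, a typical driver flow is: attach the property once when the HDMI connector is created, then fold the user's selection into the AVI infoframe on each modeset. A hedged sketch is below; the drm_hdmi_avi_infoframe_from_display_mode() call and the packing of the final bytes are outside this patch and shown only for context.

#include <linux/hdmi.h>
#include <drm/drm_connector.h>
#include <drm/drm_edid.h>
#include <drm/drm_modes.h>

/* At connector init time, for HDMI connectors only. */
static void sketch_attach_content_type(struct drm_connector *connector)
{
	drm_connector_attach_content_type_property(connector);
}

/* On modeset / infoframe update. */
static ssize_t sketch_fill_avi_infoframe(const struct drm_connector_state *conn_state,
					 const struct drm_display_mode *mode,
					 u8 *buf, size_t len)
{
	struct hdmi_avi_infoframe frame;
	int ret;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&frame, mode, false);
	if (ret)
		return ret;

	/* Translate the "content type" property into the CN0/CN1 and ITC bits. */
	drm_hdmi_avi_infoframe_content_type(&frame, conn_state);

	return hdmi_avi_infoframe_pack(&frame, buf, len);
}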
diff --git a/drivers/gpu/drm/drm_crtc.c b/drivers/gpu/drm/drm_crtc.c
index 98a36e6c69ad..f45e7a8d4acd 100644
--- a/drivers/gpu/drm/drm_crtc.c
+++ b/drivers/gpu/drm/drm_crtc.c
@@ -286,6 +286,10 @@ int drm_crtc_init_with_planes(struct drm_device *dev, struct drm_crtc *crtc,
286 if (WARN_ON(config->num_crtc >= 32)) 286 if (WARN_ON(config->num_crtc >= 32))
287 return -EINVAL; 287 return -EINVAL;
288 288
289 WARN_ON(drm_drv_uses_atomic_modeset(dev) &&
290 (!funcs->atomic_destroy_state ||
291 !funcs->atomic_duplicate_state));
292
289 crtc->dev = dev; 293 crtc->dev = dev;
290 crtc->funcs = funcs; 294 crtc->funcs = funcs;
291 295
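The new WARN_ON above (and the matching one added to drm_connector_init()) makes a long-standing requirement explicit: drivers using atomic modeset must wire up the state duplicate/destroy hooks. For a CRTC that uses the default state handling, that amounts to something like the sketch below; the .set_config and .page_flip choices are the usual atomic helpers and are shown only for completeness.

#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>

static const struct drm_crtc_funcs sketch_crtc_funcs = {
	.reset			= drm_atomic_helper_crtc_reset,
	.destroy		= drm_crtc_cleanup,
	.set_config		= drm_atomic_helper_set_config,
	.page_flip		= drm_atomic_helper_page_flip,
	/* Without these two, the new WARN_ON in drm_crtc_init_with_planes()
	 * fires for drivers that use atomic modeset. */
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
};

The equivalent connector-side helpers (drm_atomic_helper_connector_duplicate_state() and drm_atomic_helper_connector_destroy_state()) satisfy the check added in drm_connector_init().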
@@ -469,23 +473,32 @@ static int __drm_mode_set_config_internal(struct drm_mode_set *set,
469 * connectors from it), hence we need to refcount the fbs across all 473 * connectors from it), hence we need to refcount the fbs across all
470 * crtcs. Atomic modeset will have saner semantics ... 474 * crtcs. Atomic modeset will have saner semantics ...
471 */ 475 */
472 drm_for_each_crtc(tmp, crtc->dev) 476 drm_for_each_crtc(tmp, crtc->dev) {
473 tmp->primary->old_fb = tmp->primary->fb; 477 struct drm_plane *plane = tmp->primary;
478
479 plane->old_fb = plane->fb;
480 }
474 481
475 fb = set->fb; 482 fb = set->fb;
476 483
477 ret = crtc->funcs->set_config(set, ctx); 484 ret = crtc->funcs->set_config(set, ctx);
478 if (ret == 0) { 485 if (ret == 0) {
479 crtc->primary->crtc = fb ? crtc : NULL; 486 struct drm_plane *plane = crtc->primary;
480 crtc->primary->fb = fb; 487
488 if (!plane->state) {
489 plane->crtc = fb ? crtc : NULL;
490 plane->fb = fb;
491 }
481 } 492 }
482 493
483 drm_for_each_crtc(tmp, crtc->dev) { 494 drm_for_each_crtc(tmp, crtc->dev) {
484 if (tmp->primary->fb) 495 struct drm_plane *plane = tmp->primary;
485 drm_framebuffer_get(tmp->primary->fb); 496
486 if (tmp->primary->old_fb) 497 if (plane->fb)
487 drm_framebuffer_put(tmp->primary->old_fb); 498 drm_framebuffer_get(plane->fb);
488 tmp->primary->old_fb = NULL; 499 if (plane->old_fb)
500 drm_framebuffer_put(plane->old_fb);
501 plane->old_fb = NULL;
489 } 502 }
490 503
491 return ret; 504 return ret;
@@ -640,7 +653,9 @@ retry:
640 653
641 ret = drm_mode_convert_umode(dev, mode, &crtc_req->mode); 654 ret = drm_mode_convert_umode(dev, mode, &crtc_req->mode);
642 if (ret) { 655 if (ret) {
643 DRM_DEBUG_KMS("Invalid mode\n"); 656 DRM_DEBUG_KMS("Invalid mode (ret=%d, status=%s)\n",
657 ret, drm_get_mode_status_name(mode->status));
658 drm_mode_debug_printmodeline(mode);
644 goto out; 659 goto out;
645 } 660 }
646 661
diff --git a/drivers/gpu/drm/drm_crtc_internal.h b/drivers/gpu/drm/drm_crtc_internal.h
index 5d307b23a4e6..34499800932a 100644
--- a/drivers/gpu/drm/drm_crtc_internal.h
+++ b/drivers/gpu/drm/drm_crtc_internal.h
@@ -56,6 +56,9 @@ int drm_mode_setcrtc(struct drm_device *dev,
56int drm_modeset_register_all(struct drm_device *dev); 56int drm_modeset_register_all(struct drm_device *dev);
57void drm_modeset_unregister_all(struct drm_device *dev); 57void drm_modeset_unregister_all(struct drm_device *dev);
58 58
59/* drm_modes.c */
60const char *drm_get_mode_status_name(enum drm_mode_status status);
61
59/* IOCTLs */ 62/* IOCTLs */
60int drm_mode_getresources(struct drm_device *dev, 63int drm_mode_getresources(struct drm_device *dev,
61 void *data, struct drm_file *file_priv); 64 void *data, struct drm_file *file_priv);
diff --git a/drivers/gpu/drm/drm_edid.c b/drivers/gpu/drm/drm_edid.c
index a5808382bdf0..5dc742b27ca0 100644
--- a/drivers/gpu/drm/drm_edid.c
+++ b/drivers/gpu/drm/drm_edid.c
@@ -163,8 +163,9 @@ static const struct edid_quirk {
163 /* Rotel RSX-1058 forwards sink's EDID but only does HDMI 1.1*/ 163 /* Rotel RSX-1058 forwards sink's EDID but only does HDMI 1.1*/
164 { "ETR", 13896, EDID_QUIRK_FORCE_8BPC }, 164 { "ETR", 13896, EDID_QUIRK_FORCE_8BPC },
165 165
166 /* HTC Vive VR Headset */ 166 /* HTC Vive and Vive Pro VR Headsets */
167 { "HVR", 0xaa01, EDID_QUIRK_NON_DESKTOP }, 167 { "HVR", 0xaa01, EDID_QUIRK_NON_DESKTOP },
168 { "HVR", 0xaa02, EDID_QUIRK_NON_DESKTOP },
168 169
169 /* Oculus Rift DK1, DK2, and CV1 VR Headsets */ 170 /* Oculus Rift DK1, DK2, and CV1 VR Headsets */
170 { "OVR", 0x0001, EDID_QUIRK_NON_DESKTOP }, 171 { "OVR", 0x0001, EDID_QUIRK_NON_DESKTOP },
@@ -687,562 +688,562 @@ static const struct minimode extra_modes[] = {
687static const struct drm_display_mode edid_cea_modes[] = { 688static const struct drm_display_mode edid_cea_modes[] = {
688 /* 0 - dummy, VICs start at 1 */ 689 /* 0 - dummy, VICs start at 1 */
689 { }, 690 { },
690 /* 1 - 640x480@60Hz */ 691 /* 1 - 640x480@60Hz 4:3 */
691 { DRM_MODE("640x480", DRM_MODE_TYPE_DRIVER, 25175, 640, 656, 692 { DRM_MODE("640x480", DRM_MODE_TYPE_DRIVER, 25175, 640, 656,
692 752, 800, 0, 480, 490, 492, 525, 0, 693 752, 800, 0, 480, 490, 492, 525, 0,
693 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 694 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
694 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 695 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
695 /* 2 - 720x480@60Hz */ 696 /* 2 - 720x480@60Hz 4:3 */
696 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 27000, 720, 736, 697 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 27000, 720, 736,
697 798, 858, 0, 480, 489, 495, 525, 0, 698 798, 858, 0, 480, 489, 495, 525, 0,
698 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 699 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
699 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 700 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
700 /* 3 - 720x480@60Hz */ 701 /* 3 - 720x480@60Hz 16:9 */
701 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 27000, 720, 736, 702 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 27000, 720, 736,
702 798, 858, 0, 480, 489, 495, 525, 0, 703 798, 858, 0, 480, 489, 495, 525, 0,
703 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 704 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
704 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 705 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
705 /* 4 - 1280x720@60Hz */ 706 /* 4 - 1280x720@60Hz 16:9 */
706 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1390, 707 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1390,
707 1430, 1650, 0, 720, 725, 730, 750, 0, 708 1430, 1650, 0, 720, 725, 730, 750, 0,
708 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 709 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
709 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 710 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
710 /* 5 - 1920x1080i@60Hz */ 711 /* 5 - 1920x1080i@60Hz 16:9 */
711 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2008, 712 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2008,
712 2052, 2200, 0, 1080, 1084, 1094, 1125, 0, 713 2052, 2200, 0, 1080, 1084, 1094, 1125, 0,
713 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC | 714 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC |
714 DRM_MODE_FLAG_INTERLACE), 715 DRM_MODE_FLAG_INTERLACE),
715 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 716 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
716 /* 6 - 720(1440)x480i@60Hz */ 717 /* 6 - 720(1440)x480i@60Hz 4:3 */
717 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 13500, 720, 739, 718 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 13500, 720, 739,
718 801, 858, 0, 480, 488, 494, 525, 0, 719 801, 858, 0, 480, 488, 494, 525, 0,
719 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 720 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
720 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 721 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
721 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 722 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
722 /* 7 - 720(1440)x480i@60Hz */ 723 /* 7 - 720(1440)x480i@60Hz 16:9 */
723 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 13500, 720, 739, 724 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 13500, 720, 739,
724 801, 858, 0, 480, 488, 494, 525, 0, 725 801, 858, 0, 480, 488, 494, 525, 0,
725 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 726 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
726 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 727 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
727 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 728 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
728 /* 8 - 720(1440)x240@60Hz */ 729 /* 8 - 720(1440)x240@60Hz 4:3 */
729 { DRM_MODE("720x240", DRM_MODE_TYPE_DRIVER, 13500, 720, 739, 730 { DRM_MODE("720x240", DRM_MODE_TYPE_DRIVER, 13500, 720, 739,
730 801, 858, 0, 240, 244, 247, 262, 0, 731 801, 858, 0, 240, 244, 247, 262, 0,
731 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 732 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
732 DRM_MODE_FLAG_DBLCLK), 733 DRM_MODE_FLAG_DBLCLK),
733 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 734 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
734 /* 9 - 720(1440)x240@60Hz */ 735 /* 9 - 720(1440)x240@60Hz 16:9 */
735 { DRM_MODE("720x240", DRM_MODE_TYPE_DRIVER, 13500, 720, 739, 736 { DRM_MODE("720x240", DRM_MODE_TYPE_DRIVER, 13500, 720, 739,
736 801, 858, 0, 240, 244, 247, 262, 0, 737 801, 858, 0, 240, 244, 247, 262, 0,
737 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 738 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
738 DRM_MODE_FLAG_DBLCLK), 739 DRM_MODE_FLAG_DBLCLK),
739 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 740 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
740 /* 10 - 2880x480i@60Hz */ 741 /* 10 - 2880x480i@60Hz 4:3 */
741 { DRM_MODE("2880x480i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956, 742 { DRM_MODE("2880x480i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956,
742 3204, 3432, 0, 480, 488, 494, 525, 0, 743 3204, 3432, 0, 480, 488, 494, 525, 0,
743 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 744 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
744 DRM_MODE_FLAG_INTERLACE), 745 DRM_MODE_FLAG_INTERLACE),
745 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 746 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
746 /* 11 - 2880x480i@60Hz */ 747 /* 11 - 2880x480i@60Hz 16:9 */
747 { DRM_MODE("2880x480i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956, 748 { DRM_MODE("2880x480i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956,
748 3204, 3432, 0, 480, 488, 494, 525, 0, 749 3204, 3432, 0, 480, 488, 494, 525, 0,
749 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 750 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
750 DRM_MODE_FLAG_INTERLACE), 751 DRM_MODE_FLAG_INTERLACE),
751 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 752 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
752 /* 12 - 2880x240@60Hz */ 753 /* 12 - 2880x240@60Hz 4:3 */
753 { DRM_MODE("2880x240", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956, 754 { DRM_MODE("2880x240", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956,
754 3204, 3432, 0, 240, 244, 247, 262, 0, 755 3204, 3432, 0, 240, 244, 247, 262, 0,
755 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 756 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
756 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 757 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
757 /* 13 - 2880x240@60Hz */ 758 /* 13 - 2880x240@60Hz 16:9 */
758 { DRM_MODE("2880x240", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956, 759 { DRM_MODE("2880x240", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2956,
759 3204, 3432, 0, 240, 244, 247, 262, 0, 760 3204, 3432, 0, 240, 244, 247, 262, 0,
760 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 761 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
761 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 762 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
762 /* 14 - 1440x480@60Hz */ 763 /* 14 - 1440x480@60Hz 4:3 */
763 { DRM_MODE("1440x480", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1472, 764 { DRM_MODE("1440x480", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1472,
764 1596, 1716, 0, 480, 489, 495, 525, 0, 765 1596, 1716, 0, 480, 489, 495, 525, 0,
765 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 766 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
766 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 767 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
767 /* 15 - 1440x480@60Hz */ 768 /* 15 - 1440x480@60Hz 16:9 */
768 { DRM_MODE("1440x480", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1472, 769 { DRM_MODE("1440x480", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1472,
769 1596, 1716, 0, 480, 489, 495, 525, 0, 770 1596, 1716, 0, 480, 489, 495, 525, 0,
770 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 771 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
771 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 772 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
772 /* 16 - 1920x1080@60Hz */ 773 /* 16 - 1920x1080@60Hz 16:9 */
773 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2008, 774 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2008,
774 2052, 2200, 0, 1080, 1084, 1089, 1125, 0, 775 2052, 2200, 0, 1080, 1084, 1089, 1125, 0,
775 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 776 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
776 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 777 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
777 /* 17 - 720x576@50Hz */ 778 /* 17 - 720x576@50Hz 4:3 */
778 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 27000, 720, 732, 779 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 27000, 720, 732,
779 796, 864, 0, 576, 581, 586, 625, 0, 780 796, 864, 0, 576, 581, 586, 625, 0,
780 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 781 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
781 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 782 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
782 /* 18 - 720x576@50Hz */ 783 /* 18 - 720x576@50Hz 16:9 */
783 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 27000, 720, 732, 784 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 27000, 720, 732,
784 796, 864, 0, 576, 581, 586, 625, 0, 785 796, 864, 0, 576, 581, 586, 625, 0,
785 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 786 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
786 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 787 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
787 /* 19 - 1280x720@50Hz */ 788 /* 19 - 1280x720@50Hz 16:9 */
788 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1720, 789 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1720,
789 1760, 1980, 0, 720, 725, 730, 750, 0, 790 1760, 1980, 0, 720, 725, 730, 750, 0,
790 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 791 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
791 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 792 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
792 /* 20 - 1920x1080i@50Hz */ 793 /* 20 - 1920x1080i@50Hz 16:9 */
793 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2448, 794 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2448,
794 2492, 2640, 0, 1080, 1084, 1094, 1125, 0, 795 2492, 2640, 0, 1080, 1084, 1094, 1125, 0,
795 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC | 796 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC |
796 DRM_MODE_FLAG_INTERLACE), 797 DRM_MODE_FLAG_INTERLACE),
797 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 798 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
798 /* 21 - 720(1440)x576i@50Hz */ 799 /* 21 - 720(1440)x576i@50Hz 4:3 */
799 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 13500, 720, 732, 800 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 13500, 720, 732,
800 795, 864, 0, 576, 580, 586, 625, 0, 801 795, 864, 0, 576, 580, 586, 625, 0,
801 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 802 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
802 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 803 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
803 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 804 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
804 /* 22 - 720(1440)x576i@50Hz */ 805 /* 22 - 720(1440)x576i@50Hz 16:9 */
805 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 13500, 720, 732, 806 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 13500, 720, 732,
806 795, 864, 0, 576, 580, 586, 625, 0, 807 795, 864, 0, 576, 580, 586, 625, 0,
807 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 808 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
808 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 809 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
809 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 810 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
810 /* 23 - 720(1440)x288@50Hz */ 811 /* 23 - 720(1440)x288@50Hz 4:3 */
811 { DRM_MODE("720x288", DRM_MODE_TYPE_DRIVER, 13500, 720, 732, 812 { DRM_MODE("720x288", DRM_MODE_TYPE_DRIVER, 13500, 720, 732,
812 795, 864, 0, 288, 290, 293, 312, 0, 813 795, 864, 0, 288, 290, 293, 312, 0,
813 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 814 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
814 DRM_MODE_FLAG_DBLCLK), 815 DRM_MODE_FLAG_DBLCLK),
815 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 816 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
816 /* 24 - 720(1440)x288@50Hz */ 817 /* 24 - 720(1440)x288@50Hz 16:9 */
817 { DRM_MODE("720x288", DRM_MODE_TYPE_DRIVER, 13500, 720, 732, 818 { DRM_MODE("720x288", DRM_MODE_TYPE_DRIVER, 13500, 720, 732,
818 795, 864, 0, 288, 290, 293, 312, 0, 819 795, 864, 0, 288, 290, 293, 312, 0,
819 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 820 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
820 DRM_MODE_FLAG_DBLCLK), 821 DRM_MODE_FLAG_DBLCLK),
821 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 822 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
822 /* 25 - 2880x576i@50Hz */ 823 /* 25 - 2880x576i@50Hz 4:3 */
823 { DRM_MODE("2880x576i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928, 824 { DRM_MODE("2880x576i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928,
824 3180, 3456, 0, 576, 580, 586, 625, 0, 825 3180, 3456, 0, 576, 580, 586, 625, 0,
825 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 826 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
826 DRM_MODE_FLAG_INTERLACE), 827 DRM_MODE_FLAG_INTERLACE),
827 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 828 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
828 /* 26 - 2880x576i@50Hz */ 829 /* 26 - 2880x576i@50Hz 16:9 */
829 { DRM_MODE("2880x576i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928, 830 { DRM_MODE("2880x576i", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928,
830 3180, 3456, 0, 576, 580, 586, 625, 0, 831 3180, 3456, 0, 576, 580, 586, 625, 0,
831 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 832 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
832 DRM_MODE_FLAG_INTERLACE), 833 DRM_MODE_FLAG_INTERLACE),
833 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 834 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
834 /* 27 - 2880x288@50Hz */ 835 /* 27 - 2880x288@50Hz 4:3 */
835 { DRM_MODE("2880x288", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928, 836 { DRM_MODE("2880x288", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928,
836 3180, 3456, 0, 288, 290, 293, 312, 0, 837 3180, 3456, 0, 288, 290, 293, 312, 0,
837 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 838 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
838 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 839 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
839 /* 28 - 2880x288@50Hz */ 840 /* 28 - 2880x288@50Hz 16:9 */
840 { DRM_MODE("2880x288", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928, 841 { DRM_MODE("2880x288", DRM_MODE_TYPE_DRIVER, 54000, 2880, 2928,
841 3180, 3456, 0, 288, 290, 293, 312, 0, 842 3180, 3456, 0, 288, 290, 293, 312, 0,
842 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 843 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
843 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 844 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
844 /* 29 - 1440x576@50Hz */ 845 /* 29 - 1440x576@50Hz 4:3 */
845 { DRM_MODE("1440x576", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1464, 846 { DRM_MODE("1440x576", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1464,
846 1592, 1728, 0, 576, 581, 586, 625, 0, 847 1592, 1728, 0, 576, 581, 586, 625, 0,
847 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 848 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
848 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 849 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
849 /* 30 - 1440x576@50Hz */ 850 /* 30 - 1440x576@50Hz 16:9 */
850 { DRM_MODE("1440x576", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1464, 851 { DRM_MODE("1440x576", DRM_MODE_TYPE_DRIVER, 54000, 1440, 1464,
851 1592, 1728, 0, 576, 581, 586, 625, 0, 852 1592, 1728, 0, 576, 581, 586, 625, 0,
852 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 853 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
853 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 854 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
854 /* 31 - 1920x1080@50Hz */ 855 /* 31 - 1920x1080@50Hz 16:9 */
855 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2448, 856 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2448,
856 2492, 2640, 0, 1080, 1084, 1089, 1125, 0, 857 2492, 2640, 0, 1080, 1084, 1089, 1125, 0,
857 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 858 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
858 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 859 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
859 /* 32 - 1920x1080@24Hz */ 860 /* 32 - 1920x1080@24Hz 16:9 */
860 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2558, 861 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2558,
861 2602, 2750, 0, 1080, 1084, 1089, 1125, 0, 862 2602, 2750, 0, 1080, 1084, 1089, 1125, 0,
862 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 863 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
863 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 864 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
864 /* 33 - 1920x1080@25Hz */ 865 /* 33 - 1920x1080@25Hz 16:9 */
865 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2448, 866 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2448,
866 2492, 2640, 0, 1080, 1084, 1089, 1125, 0, 867 2492, 2640, 0, 1080, 1084, 1089, 1125, 0,
867 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 868 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
868 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 869 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
869 /* 34 - 1920x1080@30Hz */ 870 /* 34 - 1920x1080@30Hz 16:9 */
870 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2008, 871 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2008,
871 2052, 2200, 0, 1080, 1084, 1089, 1125, 0, 872 2052, 2200, 0, 1080, 1084, 1089, 1125, 0,
872 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 873 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
873 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 874 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
874 /* 35 - 2880x480@60Hz */ 875 /* 35 - 2880x480@60Hz 4:3 */
875 { DRM_MODE("2880x480", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2944, 876 { DRM_MODE("2880x480", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2944,
876 3192, 3432, 0, 480, 489, 495, 525, 0, 877 3192, 3432, 0, 480, 489, 495, 525, 0,
877 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 878 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
878 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 879 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
879 /* 36 - 2880x480@60Hz */ 880 /* 36 - 2880x480@60Hz 16:9 */
880 { DRM_MODE("2880x480", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2944, 881 { DRM_MODE("2880x480", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2944,
881 3192, 3432, 0, 480, 489, 495, 525, 0, 882 3192, 3432, 0, 480, 489, 495, 525, 0,
882 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 883 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
883 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 884 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
884 /* 37 - 2880x576@50Hz */ 885 /* 37 - 2880x576@50Hz 4:3 */
885 { DRM_MODE("2880x576", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2928, 886 { DRM_MODE("2880x576", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2928,
886 3184, 3456, 0, 576, 581, 586, 625, 0, 887 3184, 3456, 0, 576, 581, 586, 625, 0,
887 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 888 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
888 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 889 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
889 /* 38 - 2880x576@50Hz */ 890 /* 38 - 2880x576@50Hz 16:9 */
890 { DRM_MODE("2880x576", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2928, 891 { DRM_MODE("2880x576", DRM_MODE_TYPE_DRIVER, 108000, 2880, 2928,
891 3184, 3456, 0, 576, 581, 586, 625, 0, 892 3184, 3456, 0, 576, 581, 586, 625, 0,
892 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 893 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
893 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 894 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
894 /* 39 - 1920x1080i@50Hz */ 895 /* 39 - 1920x1080i@50Hz 16:9 */
895 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 72000, 1920, 1952, 896 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 72000, 1920, 1952,
896 2120, 2304, 0, 1080, 1126, 1136, 1250, 0, 897 2120, 2304, 0, 1080, 1126, 1136, 1250, 0,
897 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_NVSYNC | 898 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_NVSYNC |
898 DRM_MODE_FLAG_INTERLACE), 899 DRM_MODE_FLAG_INTERLACE),
899 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 900 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
900 /* 40 - 1920x1080i@100Hz */ 901 /* 40 - 1920x1080i@100Hz 16:9 */
901 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2448, 902 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2448,
902 2492, 2640, 0, 1080, 1084, 1094, 1125, 0, 903 2492, 2640, 0, 1080, 1084, 1094, 1125, 0,
903 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC | 904 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC |
904 DRM_MODE_FLAG_INTERLACE), 905 DRM_MODE_FLAG_INTERLACE),
905 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 906 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
906 /* 41 - 1280x720@100Hz */ 907 /* 41 - 1280x720@100Hz 16:9 */
907 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1720, 908 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1720,
908 1760, 1980, 0, 720, 725, 730, 750, 0, 909 1760, 1980, 0, 720, 725, 730, 750, 0,
909 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 910 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
910 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 911 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
911 /* 42 - 720x576@100Hz */ 912 /* 42 - 720x576@100Hz 4:3 */
912 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 54000, 720, 732, 913 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 54000, 720, 732,
913 796, 864, 0, 576, 581, 586, 625, 0, 914 796, 864, 0, 576, 581, 586, 625, 0,
914 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 915 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
915 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 916 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
916 /* 43 - 720x576@100Hz */ 917 /* 43 - 720x576@100Hz 16:9 */
917 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 54000, 720, 732, 918 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 54000, 720, 732,
918 796, 864, 0, 576, 581, 586, 625, 0, 919 796, 864, 0, 576, 581, 586, 625, 0,
919 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 920 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
920 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 921 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
921 /* 44 - 720(1440)x576i@100Hz */ 922 /* 44 - 720(1440)x576i@100Hz 4:3 */
922 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 27000, 720, 732, 923 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 27000, 720, 732,
923 795, 864, 0, 576, 580, 586, 625, 0, 924 795, 864, 0, 576, 580, 586, 625, 0,
924 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 925 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
925 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 926 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
926 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 927 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
927 /* 45 - 720(1440)x576i@100Hz */ 928 /* 45 - 720(1440)x576i@100Hz 16:9 */
928 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 27000, 720, 732, 929 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 27000, 720, 732,
929 795, 864, 0, 576, 580, 586, 625, 0, 930 795, 864, 0, 576, 580, 586, 625, 0,
930 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 931 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
931 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 932 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
932 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 933 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
933 /* 46 - 1920x1080i@120Hz */ 934 /* 46 - 1920x1080i@120Hz 16:9 */
934 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2008, 935 { DRM_MODE("1920x1080i", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2008,
935 2052, 2200, 0, 1080, 1084, 1094, 1125, 0, 936 2052, 2200, 0, 1080, 1084, 1094, 1125, 0,
936 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC | 937 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC |
937 DRM_MODE_FLAG_INTERLACE), 938 DRM_MODE_FLAG_INTERLACE),
938 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 939 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
939 /* 47 - 1280x720@120Hz */ 940 /* 47 - 1280x720@120Hz 16:9 */
940 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1390, 941 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1390,
941 1430, 1650, 0, 720, 725, 730, 750, 0, 942 1430, 1650, 0, 720, 725, 730, 750, 0,
942 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 943 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
943 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 944 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
944 /* 48 - 720x480@120Hz */ 945 /* 48 - 720x480@120Hz 4:3 */
945 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 54000, 720, 736, 946 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 54000, 720, 736,
946 798, 858, 0, 480, 489, 495, 525, 0, 947 798, 858, 0, 480, 489, 495, 525, 0,
947 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 948 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
948 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 949 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
949 /* 49 - 720x480@120Hz */ 950 /* 49 - 720x480@120Hz 16:9 */
950 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 54000, 720, 736, 951 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 54000, 720, 736,
951 798, 858, 0, 480, 489, 495, 525, 0, 952 798, 858, 0, 480, 489, 495, 525, 0,
952 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 953 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
953 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 954 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
954 /* 50 - 720(1440)x480i@120Hz */ 955 /* 50 - 720(1440)x480i@120Hz 4:3 */
955 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 27000, 720, 739, 956 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 27000, 720, 739,
956 801, 858, 0, 480, 488, 494, 525, 0, 957 801, 858, 0, 480, 488, 494, 525, 0,
957 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 958 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
958 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 959 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
959 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 960 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
960 /* 51 - 720(1440)x480i@120Hz */ 961 /* 51 - 720(1440)x480i@120Hz 16:9 */
961 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 27000, 720, 739, 962 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 27000, 720, 739,
962 801, 858, 0, 480, 488, 494, 525, 0, 963 801, 858, 0, 480, 488, 494, 525, 0,
963 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 964 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
964 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 965 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
965 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 966 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
966 /* 52 - 720x576@200Hz */ 967 /* 52 - 720x576@200Hz 4:3 */
967 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 108000, 720, 732, 968 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 108000, 720, 732,
968 796, 864, 0, 576, 581, 586, 625, 0, 969 796, 864, 0, 576, 581, 586, 625, 0,
969 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 970 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
970 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 971 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
971 /* 53 - 720x576@200Hz */ 972 /* 53 - 720x576@200Hz 16:9 */
972 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 108000, 720, 732, 973 { DRM_MODE("720x576", DRM_MODE_TYPE_DRIVER, 108000, 720, 732,
973 796, 864, 0, 576, 581, 586, 625, 0, 974 796, 864, 0, 576, 581, 586, 625, 0,
974 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 975 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
975 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 976 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
976 /* 54 - 720(1440)x576i@200Hz */ 977 /* 54 - 720(1440)x576i@200Hz 4:3 */
977 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 54000, 720, 732, 978 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 54000, 720, 732,
978 795, 864, 0, 576, 580, 586, 625, 0, 979 795, 864, 0, 576, 580, 586, 625, 0,
979 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 980 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
980 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 981 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
981 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 982 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
982 /* 55 - 720(1440)x576i@200Hz */ 983 /* 55 - 720(1440)x576i@200Hz 16:9 */
983 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 54000, 720, 732, 984 { DRM_MODE("720x576i", DRM_MODE_TYPE_DRIVER, 54000, 720, 732,
984 795, 864, 0, 576, 580, 586, 625, 0, 985 795, 864, 0, 576, 580, 586, 625, 0,
985 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 986 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
986 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 987 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
987 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 988 .vrefresh = 200, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
988 /* 56 - 720x480@240Hz */ 989 /* 56 - 720x480@240Hz 4:3 */
989 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 108000, 720, 736, 990 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 108000, 720, 736,
990 798, 858, 0, 480, 489, 495, 525, 0, 991 798, 858, 0, 480, 489, 495, 525, 0,
991 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 992 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
992 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 993 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
993 /* 57 - 720x480@240Hz */ 994 /* 57 - 720x480@240Hz 16:9 */
994 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 108000, 720, 736, 995 { DRM_MODE("720x480", DRM_MODE_TYPE_DRIVER, 108000, 720, 736,
995 798, 858, 0, 480, 489, 495, 525, 0, 996 798, 858, 0, 480, 489, 495, 525, 0,
996 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC), 997 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
997 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 998 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
998 /* 58 - 720(1440)x480i@240Hz */ 999 /* 58 - 720(1440)x480i@240Hz 4:3 */
999 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 54000, 720, 739, 1000 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 54000, 720, 739,
1000 801, 858, 0, 480, 488, 494, 525, 0, 1001 801, 858, 0, 480, 488, 494, 525, 0,
1001 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 1002 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
1002 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 1003 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
1003 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, }, 1004 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_4_3, },
1004 /* 59 - 720(1440)x480i@240Hz */ 1005 /* 59 - 720(1440)x480i@240Hz 16:9 */
1005 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 54000, 720, 739, 1006 { DRM_MODE("720x480i", DRM_MODE_TYPE_DRIVER, 54000, 720, 739,
1006 801, 858, 0, 480, 488, 494, 525, 0, 1007 801, 858, 0, 480, 488, 494, 525, 0,
1007 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC | 1008 DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC |
1008 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK), 1009 DRM_MODE_FLAG_INTERLACE | DRM_MODE_FLAG_DBLCLK),
1009 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1010 .vrefresh = 240, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1010 /* 60 - 1280x720@24Hz */ 1011 /* 60 - 1280x720@24Hz 16:9 */
1011 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 59400, 1280, 3040, 1012 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 59400, 1280, 3040,
1012 3080, 3300, 0, 720, 725, 730, 750, 0, 1013 3080, 3300, 0, 720, 725, 730, 750, 0,
1013 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1014 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1014 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1015 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1015 /* 61 - 1280x720@25Hz */ 1016 /* 61 - 1280x720@25Hz 16:9 */
1016 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3700, 1017 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3700,
1017 3740, 3960, 0, 720, 725, 730, 750, 0, 1018 3740, 3960, 0, 720, 725, 730, 750, 0,
1018 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1019 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1019 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1020 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1020 /* 62 - 1280x720@30Hz */ 1021 /* 62 - 1280x720@30Hz 16:9 */
1021 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3040, 1022 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3040,
1022 3080, 3300, 0, 720, 725, 730, 750, 0, 1023 3080, 3300, 0, 720, 725, 730, 750, 0,
1023 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1024 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1024 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1025 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1025 /* 63 - 1920x1080@120Hz */ 1026 /* 63 - 1920x1080@120Hz 16:9 */
1026 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2008, 1027 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2008,
1027 2052, 2200, 0, 1080, 1084, 1089, 1125, 0, 1028 2052, 2200, 0, 1080, 1084, 1089, 1125, 0,
1028 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1029 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1029 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1030 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1030 /* 64 - 1920x1080@100Hz */ 1031 /* 64 - 1920x1080@100Hz 16:9 */
1031 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2448, 1032 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2448,
1032 2492, 2640, 0, 1080, 1084, 1089, 1125, 0, 1033 2492, 2640, 0, 1080, 1084, 1089, 1125, 0,
1033 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1034 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1034 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1035 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1035 /* 65 - 1280x720@24Hz */ 1036 /* 65 - 1280x720@24Hz 64:27 */
1036 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 59400, 1280, 3040, 1037 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 59400, 1280, 3040,
1037 3080, 3300, 0, 720, 725, 730, 750, 0, 1038 3080, 3300, 0, 720, 725, 730, 750, 0,
1038 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1039 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1039 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1040 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1040 /* 66 - 1280x720@25Hz */ 1041 /* 66 - 1280x720@25Hz 64:27 */
1041 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3700, 1042 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3700,
1042 3740, 3960, 0, 720, 725, 730, 750, 0, 1043 3740, 3960, 0, 720, 725, 730, 750, 0,
1043 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1044 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1044 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1045 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1045 /* 67 - 1280x720@30Hz */ 1046 /* 67 - 1280x720@30Hz 64:27 */
1046 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3040, 1047 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 3040,
1047 3080, 3300, 0, 720, 725, 730, 750, 0, 1048 3080, 3300, 0, 720, 725, 730, 750, 0,
1048 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1049 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1049 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1050 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1050 /* 68 - 1280x720@50Hz */ 1051 /* 68 - 1280x720@50Hz 64:27 */
1051 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1720, 1052 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1720,
1052 1760, 1980, 0, 720, 725, 730, 750, 0, 1053 1760, 1980, 0, 720, 725, 730, 750, 0,
1053 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1054 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1054 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1055 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1055 /* 69 - 1280x720@60Hz */ 1056 /* 69 - 1280x720@60Hz 64:27 */
1056 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1390, 1057 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 74250, 1280, 1390,
1057 1430, 1650, 0, 720, 725, 730, 750, 0, 1058 1430, 1650, 0, 720, 725, 730, 750, 0,
1058 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1059 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1059 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1060 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1060 /* 70 - 1280x720@100Hz */ 1061 /* 70 - 1280x720@100Hz 64:27 */
1061 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1720, 1062 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1720,
1062 1760, 1980, 0, 720, 725, 730, 750, 0, 1063 1760, 1980, 0, 720, 725, 730, 750, 0,
1063 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1064 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1064 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1065 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1065 /* 71 - 1280x720@120Hz */ 1066 /* 71 - 1280x720@120Hz 64:27 */
1066 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1390, 1067 { DRM_MODE("1280x720", DRM_MODE_TYPE_DRIVER, 148500, 1280, 1390,
1067 1430, 1650, 0, 720, 725, 730, 750, 0, 1068 1430, 1650, 0, 720, 725, 730, 750, 0,
1068 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1069 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1069 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1070 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1070 /* 72 - 1920x1080@24Hz */ 1071 /* 72 - 1920x1080@24Hz 64:27 */
1071 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2558, 1072 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2558,
1072 2602, 2750, 0, 1080, 1084, 1089, 1125, 0, 1073 2602, 2750, 0, 1080, 1084, 1089, 1125, 0,
1073 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1074 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1074 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1075 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1075 /* 73 - 1920x1080@25Hz */ 1076 /* 73 - 1920x1080@25Hz 64:27 */
1076 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2448, 1077 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2448,
1077 2492, 2640, 0, 1080, 1084, 1089, 1125, 0, 1078 2492, 2640, 0, 1080, 1084, 1089, 1125, 0,
1078 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1079 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1079 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1080 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1080 /* 74 - 1920x1080@30Hz */ 1081 /* 74 - 1920x1080@30Hz 64:27 */
1081 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2008, 1082 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 74250, 1920, 2008,
1082 2052, 2200, 0, 1080, 1084, 1089, 1125, 0, 1083 2052, 2200, 0, 1080, 1084, 1089, 1125, 0,
1083 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1084 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1084 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1085 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1085 /* 75 - 1920x1080@50Hz */ 1086 /* 75 - 1920x1080@50Hz 64:27 */
1086 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2448, 1087 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2448,
1087 2492, 2640, 0, 1080, 1084, 1089, 1125, 0, 1088 2492, 2640, 0, 1080, 1084, 1089, 1125, 0,
1088 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1089 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1089 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1090 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1090 /* 76 - 1920x1080@60Hz */ 1091 /* 76 - 1920x1080@60Hz 64:27 */
1091 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2008, 1092 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 148500, 1920, 2008,
1092 2052, 2200, 0, 1080, 1084, 1089, 1125, 0, 1093 2052, 2200, 0, 1080, 1084, 1089, 1125, 0,
1093 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1094 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1094 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1095 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1095 /* 77 - 1920x1080@100Hz */ 1096 /* 77 - 1920x1080@100Hz 64:27 */
1096 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2448, 1097 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2448,
1097 2492, 2640, 0, 1080, 1084, 1089, 1125, 0, 1098 2492, 2640, 0, 1080, 1084, 1089, 1125, 0,
1098 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1099 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1099 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1100 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1100 /* 78 - 1920x1080@120Hz */ 1101 /* 78 - 1920x1080@120Hz 64:27 */
1101 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2008, 1102 { DRM_MODE("1920x1080", DRM_MODE_TYPE_DRIVER, 297000, 1920, 2008,
1102 2052, 2200, 0, 1080, 1084, 1089, 1125, 0, 1103 2052, 2200, 0, 1080, 1084, 1089, 1125, 0,
1103 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1104 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1104 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1105 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1105 /* 79 - 1680x720@24Hz */ 1106 /* 79 - 1680x720@24Hz 64:27 */
1106 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 59400, 1680, 3040, 1107 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 59400, 1680, 3040,
1107 3080, 3300, 0, 720, 725, 730, 750, 0, 1108 3080, 3300, 0, 720, 725, 730, 750, 0,
1108 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1109 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1109 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1110 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1110 /* 80 - 1680x720@25Hz */ 1111 /* 80 - 1680x720@25Hz 64:27 */
1111 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 59400, 1680, 2908, 1112 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 59400, 1680, 2908,
1112 2948, 3168, 0, 720, 725, 730, 750, 0, 1113 2948, 3168, 0, 720, 725, 730, 750, 0,
1113 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1114 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1114 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1115 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1115 /* 81 - 1680x720@30Hz */ 1116 /* 81 - 1680x720@30Hz 64:27 */
1116 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 59400, 1680, 2380, 1117 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 59400, 1680, 2380,
1117 2420, 2640, 0, 720, 725, 730, 750, 0, 1118 2420, 2640, 0, 720, 725, 730, 750, 0,
1118 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1119 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1119 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1120 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1120 /* 82 - 1680x720@50Hz */ 1121 /* 82 - 1680x720@50Hz 64:27 */
1121 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 82500, 1680, 1940, 1122 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 82500, 1680, 1940,
1122 1980, 2200, 0, 720, 725, 730, 750, 0, 1123 1980, 2200, 0, 720, 725, 730, 750, 0,
1123 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1124 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1124 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1125 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1125 /* 83 - 1680x720@60Hz */ 1126 /* 83 - 1680x720@60Hz 64:27 */
1126 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 99000, 1680, 1940, 1127 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 99000, 1680, 1940,
1127 1980, 2200, 0, 720, 725, 730, 750, 0, 1128 1980, 2200, 0, 720, 725, 730, 750, 0,
1128 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1129 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1129 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1130 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1130 /* 84 - 1680x720@100Hz */ 1131 /* 84 - 1680x720@100Hz 64:27 */
1131 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 165000, 1680, 1740, 1132 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 165000, 1680, 1740,
1132 1780, 2000, 0, 720, 725, 730, 825, 0, 1133 1780, 2000, 0, 720, 725, 730, 825, 0,
1133 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1134 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1134 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1135 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1135 /* 85 - 1680x720@120Hz */ 1136 /* 85 - 1680x720@120Hz 64:27 */
1136 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 198000, 1680, 1740, 1137 { DRM_MODE("1680x720", DRM_MODE_TYPE_DRIVER, 198000, 1680, 1740,
1137 1780, 2000, 0, 720, 725, 730, 825, 0, 1138 1780, 2000, 0, 720, 725, 730, 825, 0,
1138 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1139 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1139 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1140 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1140 /* 86 - 2560x1080@24Hz */ 1141 /* 86 - 2560x1080@24Hz 64:27 */
1141 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 99000, 2560, 3558, 1142 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 99000, 2560, 3558,
1142 3602, 3750, 0, 1080, 1084, 1089, 1100, 0, 1143 3602, 3750, 0, 1080, 1084, 1089, 1100, 0,
1143 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1144 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1144 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1145 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1145 /* 87 - 2560x1080@25Hz */ 1146 /* 87 - 2560x1080@25Hz 64:27 */
1146 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 90000, 2560, 3008, 1147 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 90000, 2560, 3008,
1147 3052, 3200, 0, 1080, 1084, 1089, 1125, 0, 1148 3052, 3200, 0, 1080, 1084, 1089, 1125, 0,
1148 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1149 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1149 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1150 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1150 /* 88 - 2560x1080@30Hz */ 1151 /* 88 - 2560x1080@30Hz 64:27 */
1151 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 118800, 2560, 3328, 1152 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 118800, 2560, 3328,
1152 3372, 3520, 0, 1080, 1084, 1089, 1125, 0, 1153 3372, 3520, 0, 1080, 1084, 1089, 1125, 0,
1153 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1154 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1154 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1155 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1155 /* 89 - 2560x1080@50Hz */ 1156 /* 89 - 2560x1080@50Hz 64:27 */
1156 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 185625, 2560, 3108, 1157 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 185625, 2560, 3108,
1157 3152, 3300, 0, 1080, 1084, 1089, 1125, 0, 1158 3152, 3300, 0, 1080, 1084, 1089, 1125, 0,
1158 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1159 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1159 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1160 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1160 /* 90 - 2560x1080@60Hz */ 1161 /* 90 - 2560x1080@60Hz 64:27 */
1161 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 198000, 2560, 2808, 1162 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 198000, 2560, 2808,
1162 2852, 3000, 0, 1080, 1084, 1089, 1100, 0, 1163 2852, 3000, 0, 1080, 1084, 1089, 1100, 0,
1163 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1164 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1164 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1165 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1165 /* 91 - 2560x1080@100Hz */ 1166 /* 91 - 2560x1080@100Hz 64:27 */
1166 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 371250, 2560, 2778, 1167 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 371250, 2560, 2778,
1167 2822, 2970, 0, 1080, 1084, 1089, 1250, 0, 1168 2822, 2970, 0, 1080, 1084, 1089, 1250, 0,
1168 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1169 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1169 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1170 .vrefresh = 100, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1170 /* 92 - 2560x1080@120Hz */ 1171 /* 92 - 2560x1080@120Hz 64:27 */
1171 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 495000, 2560, 3108, 1172 { DRM_MODE("2560x1080", DRM_MODE_TYPE_DRIVER, 495000, 2560, 3108,
1172 3152, 3300, 0, 1080, 1084, 1089, 1250, 0, 1173 3152, 3300, 0, 1080, 1084, 1089, 1250, 0,
1173 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1174 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1174 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1175 .vrefresh = 120, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1175 /* 93 - 3840x2160p@24Hz 16:9 */ 1176 /* 93 - 3840x2160@24Hz 16:9 */
1176 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 5116, 1177 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 5116,
1177 5204, 5500, 0, 2160, 2168, 2178, 2250, 0, 1178 5204, 5500, 0, 2160, 2168, 2178, 2250, 0,
1178 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1179 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1179 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1180 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1180 /* 94 - 3840x2160p@25Hz 16:9 */ 1181 /* 94 - 3840x2160@25Hz 16:9 */
1181 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4896, 1182 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4896,
1182 4984, 5280, 0, 2160, 2168, 2178, 2250, 0, 1183 4984, 5280, 0, 2160, 2168, 2178, 2250, 0,
1183 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1184 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1184 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1185 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1185 /* 95 - 3840x2160p@30Hz 16:9 */ 1186 /* 95 - 3840x2160@30Hz 16:9 */
1186 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4016, 1187 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4016,
1187 4104, 4400, 0, 2160, 2168, 2178, 2250, 0, 1188 4104, 4400, 0, 2160, 2168, 2178, 2250, 0,
1188 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1189 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1189 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1190 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1190 /* 96 - 3840x2160p@50Hz 16:9 */ 1191 /* 96 - 3840x2160@50Hz 16:9 */
1191 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4896, 1192 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4896,
1192 4984, 5280, 0, 2160, 2168, 2178, 2250, 0, 1193 4984, 5280, 0, 2160, 2168, 2178, 2250, 0,
1193 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1194 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1194 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1195 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1195 /* 97 - 3840x2160p@60Hz 16:9 */ 1196 /* 97 - 3840x2160@60Hz 16:9 */
1196 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4016, 1197 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4016,
1197 4104, 4400, 0, 2160, 2168, 2178, 2250, 0, 1198 4104, 4400, 0, 2160, 2168, 2178, 2250, 0,
1198 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1199 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1199 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, }, 1200 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_16_9, },
1200 /* 98 - 4096x2160p@24Hz 256:135 */ 1201 /* 98 - 4096x2160@24Hz 256:135 */
1201 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 297000, 4096, 5116, 1202 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 297000, 4096, 5116,
1202 5204, 5500, 0, 2160, 2168, 2178, 2250, 0, 1203 5204, 5500, 0, 2160, 2168, 2178, 2250, 0,
1203 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1204 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1204 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, }, 1205 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, },
1205 /* 99 - 4096x2160p@25Hz 256:135 */ 1206 /* 99 - 4096x2160@25Hz 256:135 */
1206 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 297000, 4096, 5064, 1207 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 297000, 4096, 5064,
1207 5152, 5280, 0, 2160, 2168, 2178, 2250, 0, 1208 5152, 5280, 0, 2160, 2168, 2178, 2250, 0,
1208 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1209 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1209 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, }, 1210 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, },
1210 /* 100 - 4096x2160p@30Hz 256:135 */ 1211 /* 100 - 4096x2160@30Hz 256:135 */
1211 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 297000, 4096, 4184, 1212 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 297000, 4096, 4184,
1212 4272, 4400, 0, 2160, 2168, 2178, 2250, 0, 1213 4272, 4400, 0, 2160, 2168, 2178, 2250, 0,
1213 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1214 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1214 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, }, 1215 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, },
1215 /* 101 - 4096x2160p@50Hz 256:135 */ 1216 /* 101 - 4096x2160@50Hz 256:135 */
1216 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 594000, 4096, 5064, 1217 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 594000, 4096, 5064,
1217 5152, 5280, 0, 2160, 2168, 2178, 2250, 0, 1218 5152, 5280, 0, 2160, 2168, 2178, 2250, 0,
1218 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1219 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1219 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, }, 1220 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, },
1220 /* 102 - 4096x2160p@60Hz 256:135 */ 1221 /* 102 - 4096x2160@60Hz 256:135 */
1221 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 594000, 4096, 4184, 1222 { DRM_MODE("4096x2160", DRM_MODE_TYPE_DRIVER, 594000, 4096, 4184,
1222 4272, 4400, 0, 2160, 2168, 2178, 2250, 0, 1223 4272, 4400, 0, 2160, 2168, 2178, 2250, 0,
1223 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1224 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1224 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, }, 1225 .vrefresh = 60, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_256_135, },
1225 /* 103 - 3840x2160p@24Hz 64:27 */ 1226 /* 103 - 3840x2160@24Hz 64:27 */
1226 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 5116, 1227 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 5116,
1227 5204, 5500, 0, 2160, 2168, 2178, 2250, 0, 1228 5204, 5500, 0, 2160, 2168, 2178, 2250, 0,
1228 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1229 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1229 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1230 .vrefresh = 24, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1230 /* 104 - 3840x2160p@25Hz 64:27 */ 1231 /* 104 - 3840x2160@25Hz 64:27 */
1231 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4896, 1232 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4896,
1232 4984, 5280, 0, 2160, 2168, 2178, 2250, 0, 1233 4984, 5280, 0, 2160, 2168, 2178, 2250, 0,
1233 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1234 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1234 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1235 .vrefresh = 25, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1235 /* 105 - 3840x2160p@30Hz 64:27 */ 1236 /* 105 - 3840x2160@30Hz 64:27 */
1236 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4016, 1237 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 297000, 3840, 4016,
1237 4104, 4400, 0, 2160, 2168, 2178, 2250, 0, 1238 4104, 4400, 0, 2160, 2168, 2178, 2250, 0,
1238 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1239 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1239 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1240 .vrefresh = 30, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1240 /* 106 - 3840x2160p@50Hz 64:27 */ 1241 /* 106 - 3840x2160@50Hz 64:27 */
1241 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4896, 1242 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4896,
1242 4984, 5280, 0, 2160, 2168, 2178, 2250, 0, 1243 4984, 5280, 0, 2160, 2168, 2178, 2250, 0,
1243 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1244 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
1244 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, }, 1245 .vrefresh = 50, .picture_aspect_ratio = HDMI_PICTURE_ASPECT_64_27, },
1245 /* 107 - 3840x2160p@60Hz 64:27 */ 1246 /* 107 - 3840x2160@60Hz 64:27 */
1246 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4016, 1247 { DRM_MODE("3840x2160", DRM_MODE_TYPE_DRIVER, 594000, 3840, 4016,
1247 4104, 4400, 0, 2160, 2168, 2178, 2250, 0, 1248 4104, 4400, 0, 2160, 2168, 2178, 2250, 0,
1248 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC), 1249 DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC),
@@ -4874,6 +4875,14 @@ drm_hdmi_avi_infoframe_from_display_mode(struct hdmi_avi_infoframe *frame,
4874 frame->picture_aspect = HDMI_PICTURE_ASPECT_NONE; 4875 frame->picture_aspect = HDMI_PICTURE_ASPECT_NONE;
4875 4876
4876 /* 4877 /*
4878 * As some drivers don't support atomic, we can't use connector state.
 4879              * So just initialize the frame with default values, the same way
4880 * as it's done with other properties here.
4881 */
4882 frame->content_type = HDMI_CONTENT_TYPE_GRAPHICS;
4883 frame->itc = 0;
4884
4885 /*
4877 * Populate picture aspect ratio from either 4886 * Populate picture aspect ratio from either
4878 * user input (if specified) or from the CEA mode list. 4887 * user input (if specified) or from the CEA mode list.
4879 */ 4888 */
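The hunk above gives drm_hdmi_avi_infoframe_from_display_mode() safe defaults (ITC off, content type "graphics") so non-atomic drivers keep emitting a valid AVI infoframe. Atomic drivers are expected to override those fields from connector state instead; below is a minimal, hedged sketch of that, with foo_* names purely illustrative and assuming the drm_hdmi_avi_infoframe_content_type() helper introduced elsewhere in this series:

    static void foo_fill_avi_infoframe(const struct drm_connector_state *conn_state,
                                       const struct drm_display_mode *mode)
    {
            struct hdmi_avi_infoframe frame;

            /* Third argument is is_hdmi2_sink in this tree. */
            if (drm_hdmi_avi_infoframe_from_display_mode(&frame, mode, false) < 0)
                    return;

            /* Override the graphics/ITC defaults from the "content type" property. */
            drm_hdmi_avi_infoframe_content_type(&frame, conn_state);

            /* ... pack and write the infoframe to the encoder hardware ... */
    }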
diff --git a/drivers/gpu/drm/drm_fb_helper.c b/drivers/gpu/drm/drm_fb_helper.c
index 2ee1eaa66188..cab14f253384 100644
--- a/drivers/gpu/drm/drm_fb_helper.c
+++ b/drivers/gpu/drm/drm_fb_helper.c
@@ -368,7 +368,6 @@ static int restore_fbdev_mode_atomic(struct drm_fb_helper *fb_helper, bool activ
368 struct drm_plane *plane; 368 struct drm_plane *plane;
369 struct drm_atomic_state *state; 369 struct drm_atomic_state *state;
370 int i, ret; 370 int i, ret;
371 unsigned int plane_mask;
372 struct drm_modeset_acquire_ctx ctx; 371 struct drm_modeset_acquire_ctx ctx;
373 372
374 drm_modeset_acquire_init(&ctx, 0); 373 drm_modeset_acquire_init(&ctx, 0);
@@ -381,7 +380,6 @@ static int restore_fbdev_mode_atomic(struct drm_fb_helper *fb_helper, bool activ
381 380
382 state->acquire_ctx = &ctx; 381 state->acquire_ctx = &ctx;
383retry: 382retry:
384 plane_mask = 0;
385 drm_for_each_plane(plane, dev) { 383 drm_for_each_plane(plane, dev) {
386 plane_state = drm_atomic_get_plane_state(state, plane); 384 plane_state = drm_atomic_get_plane_state(state, plane);
387 if (IS_ERR(plane_state)) { 385 if (IS_ERR(plane_state)) {
@@ -391,9 +389,6 @@ retry:
391 389
392 plane_state->rotation = DRM_MODE_ROTATE_0; 390 plane_state->rotation = DRM_MODE_ROTATE_0;
393 391
394 plane->old_fb = plane->fb;
395 plane_mask |= 1 << drm_plane_index(plane);
396
397 /* disable non-primary: */ 392 /* disable non-primary: */
398 if (plane->type == DRM_PLANE_TYPE_PRIMARY) 393 if (plane->type == DRM_PLANE_TYPE_PRIMARY)
399 continue; 394 continue;
@@ -430,8 +425,6 @@ retry:
430 ret = drm_atomic_commit(state); 425 ret = drm_atomic_commit(state);
431 426
432out_state: 427out_state:
433 drm_atomic_clean_old_fb(dev, plane_mask, ret);
434
435 if (ret == -EDEADLK) 428 if (ret == -EDEADLK)
436 goto backoff; 429 goto backoff;
437 430
@@ -1164,7 +1157,7 @@ EXPORT_SYMBOL(drm_fb_helper_sys_imageblit);
1164 * @info: fbdev registered by the helper 1157 * @info: fbdev registered by the helper
1165 * @rect: info about rectangle to fill 1158 * @rect: info about rectangle to fill
1166 * 1159 *
1167 * A wrapper around cfb_imageblit implemented by fbdev core 1160 * A wrapper around cfb_fillrect implemented by fbdev core
1168 */ 1161 */
1169void drm_fb_helper_cfb_fillrect(struct fb_info *info, 1162void drm_fb_helper_cfb_fillrect(struct fb_info *info,
1170 const struct fb_fillrect *rect) 1163 const struct fb_fillrect *rect)
diff --git a/drivers/gpu/drm/drm_framebuffer.c b/drivers/gpu/drm/drm_framebuffer.c
index bfedceff87bb..46b11e46edbd 100644
--- a/drivers/gpu/drm/drm_framebuffer.c
+++ b/drivers/gpu/drm/drm_framebuffer.c
@@ -836,8 +836,6 @@ retry:
836 goto unlock; 836 goto unlock;
837 837
838 plane_mask |= BIT(drm_plane_index(plane)); 838 plane_mask |= BIT(drm_plane_index(plane));
839
840 plane->old_fb = plane->fb;
841 } 839 }
842 840
843 /* This list is only filled when disable_crtcs is set. */ 841 /* This list is only filled when disable_crtcs is set. */
@@ -852,9 +850,6 @@ retry:
852 ret = drm_atomic_commit(state); 850 ret = drm_atomic_commit(state);
853 851
854unlock: 852unlock:
855 if (plane_mask)
856 drm_atomic_clean_old_fb(dev, plane_mask, ret);
857
858 if (ret == -EDEADLK) { 853 if (ret == -EDEADLK) {
859 drm_atomic_state_clear(state); 854 drm_atomic_state_clear(state);
860 drm_modeset_backoff(&ctx); 855 drm_modeset_backoff(&ctx);
diff --git a/drivers/gpu/drm/drm_gem_framebuffer_helper.c b/drivers/gpu/drm/drm_gem_framebuffer_helper.c
index acfbc0641a06..2810d4131411 100644
--- a/drivers/gpu/drm/drm_gem_framebuffer_helper.c
+++ b/drivers/gpu/drm/drm_gem_framebuffer_helper.c
@@ -253,7 +253,7 @@ int drm_gem_fb_prepare_fb(struct drm_plane *plane,
253 struct dma_buf *dma_buf; 253 struct dma_buf *dma_buf;
254 struct dma_fence *fence; 254 struct dma_fence *fence;
255 255
256 if (plane->state->fb == state->fb || !state->fb) 256 if (!state->fb)
257 return 0; 257 return 0;
258 258
259 dma_buf = drm_gem_fb_get_obj(state->fb, 0)->dma_buf; 259 dma_buf = drm_gem_fb_get_obj(state->fb, 0)->dma_buf;
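With the "plane->state->fb == state->fb" shortcut gone, drm_gem_fb_prepare_fb() fetches the exclusive fence on every commit, which is what makes implicit sync unconditional for GEM-backed drivers. Nothing changes for drivers that already use the helper as their prepare_fb hook; a hedged sketch of that wiring, with foo_* names illustrative:

    static const struct drm_plane_helper_funcs foo_plane_helper_funcs = {
            .prepare_fb    = drm_gem_fb_prepare_fb,  /* implicit fencing via dma-buf */
            .atomic_check  = foo_plane_atomic_check,
            .atomic_update = foo_plane_atomic_update,
    };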
diff --git a/drivers/gpu/drm/drm_ioctl.c b/drivers/gpu/drm/drm_ioctl.c
index 0d4cfb232576..fe49fb0356b5 100644
--- a/drivers/gpu/drm/drm_ioctl.c
+++ b/drivers/gpu/drm/drm_ioctl.c
@@ -334,6 +334,13 @@ drm_setclientcap(struct drm_device *dev, void *data, struct drm_file *file_priv)
334 return -EINVAL; 334 return -EINVAL;
335 file_priv->aspect_ratio_allowed = req->value; 335 file_priv->aspect_ratio_allowed = req->value;
336 break; 336 break;
337 case DRM_CLIENT_CAP_WRITEBACK_CONNECTORS:
338 if (!file_priv->atomic)
339 return -EINVAL;
340 if (req->value > 1)
341 return -EINVAL;
342 file_priv->writeback_connectors = req->value;
343 break;
337 default: 344 default:
338 return -EINVAL; 345 return -EINVAL;
339 } 346 }
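DRM_CLIENT_CAP_WRITEBACK_CONNECTORS is deliberately opt-in and only accepted from atomic clients, so legacy userspace never sees writeback connectors it does not understand. A hedged userspace sketch using libdrm's drmSetClientCap(); it assumes UAPI headers new enough to carry the define and a kernel with this series applied:

    #include <xf86drm.h>

    static int enable_writeback(int fd)
    {
            /* The writeback cap is rejected unless the client is atomic. */
            if (drmSetClientCap(fd, DRM_CLIENT_CAP_ATOMIC, 1))
                    return -1;

            /* Older kernels fail this with EINVAL; treat that as "no writeback". */
            return drmSetClientCap(fd, DRM_CLIENT_CAP_WRITEBACK_CONNECTORS, 1);
    }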
diff --git a/drivers/gpu/drm/drm_mm.c b/drivers/gpu/drm/drm_mm.c
index 3166026a1874..3cc5fbd78ee2 100644
--- a/drivers/gpu/drm/drm_mm.c
+++ b/drivers/gpu/drm/drm_mm.c
@@ -239,6 +239,32 @@ static void drm_mm_interval_tree_add_node(struct drm_mm_node *hole_node,
239#define HOLE_SIZE(NODE) ((NODE)->hole_size) 239#define HOLE_SIZE(NODE) ((NODE)->hole_size)
240#define HOLE_ADDR(NODE) (__drm_mm_hole_node_start(NODE)) 240#define HOLE_ADDR(NODE) (__drm_mm_hole_node_start(NODE))
241 241
242static u64 rb_to_hole_size(struct rb_node *rb)
243{
244 return rb_entry(rb, struct drm_mm_node, rb_hole_size)->hole_size;
245}
246
247static void insert_hole_size(struct rb_root_cached *root,
248 struct drm_mm_node *node)
249{
250 struct rb_node **link = &root->rb_root.rb_node, *rb = NULL;
251 u64 x = node->hole_size;
252 bool first = true;
253
254 while (*link) {
255 rb = *link;
256 if (x > rb_to_hole_size(rb)) {
257 link = &rb->rb_left;
258 } else {
259 link = &rb->rb_right;
260 first = false;
261 }
262 }
263
264 rb_link_node(&node->rb_hole_size, rb, link);
265 rb_insert_color_cached(&node->rb_hole_size, root, first);
266}
267
242static void add_hole(struct drm_mm_node *node) 268static void add_hole(struct drm_mm_node *node)
243{ 269{
244 struct drm_mm *mm = node->mm; 270 struct drm_mm *mm = node->mm;
@@ -247,7 +273,7 @@ static void add_hole(struct drm_mm_node *node)
247 __drm_mm_hole_node_end(node) - __drm_mm_hole_node_start(node); 273 __drm_mm_hole_node_end(node) - __drm_mm_hole_node_start(node);
248 DRM_MM_BUG_ON(!drm_mm_hole_follows(node)); 274 DRM_MM_BUG_ON(!drm_mm_hole_follows(node));
249 275
250 RB_INSERT(mm->holes_size, rb_hole_size, HOLE_SIZE); 276 insert_hole_size(&mm->holes_size, node);
251 RB_INSERT(mm->holes_addr, rb_hole_addr, HOLE_ADDR); 277 RB_INSERT(mm->holes_addr, rb_hole_addr, HOLE_ADDR);
252 278
253 list_add(&node->hole_stack, &mm->hole_stack); 279 list_add(&node->hole_stack, &mm->hole_stack);
@@ -258,7 +284,7 @@ static void rm_hole(struct drm_mm_node *node)
258 DRM_MM_BUG_ON(!drm_mm_hole_follows(node)); 284 DRM_MM_BUG_ON(!drm_mm_hole_follows(node));
259 285
260 list_del(&node->hole_stack); 286 list_del(&node->hole_stack);
261 rb_erase(&node->rb_hole_size, &node->mm->holes_size); 287 rb_erase_cached(&node->rb_hole_size, &node->mm->holes_size);
262 rb_erase(&node->rb_hole_addr, &node->mm->holes_addr); 288 rb_erase(&node->rb_hole_addr, &node->mm->holes_addr);
263 node->hole_size = 0; 289 node->hole_size = 0;
264 290
@@ -282,38 +308,39 @@ static inline u64 rb_hole_size(struct rb_node *rb)
282 308
283static struct drm_mm_node *best_hole(struct drm_mm *mm, u64 size) 309static struct drm_mm_node *best_hole(struct drm_mm *mm, u64 size)
284{ 310{
285 struct rb_node *best = NULL; 311 struct rb_node *rb = mm->holes_size.rb_root.rb_node;
286 struct rb_node **link = &mm->holes_size.rb_node; 312 struct drm_mm_node *best = NULL;
287 313
288 while (*link) { 314 do {
289 struct rb_node *rb = *link; 315 struct drm_mm_node *node =
316 rb_entry(rb, struct drm_mm_node, rb_hole_size);
290 317
291 if (size <= rb_hole_size(rb)) { 318 if (size <= node->hole_size) {
292 link = &rb->rb_left; 319 best = node;
293 best = rb; 320 rb = rb->rb_right;
294 } else { 321 } else {
295 link = &rb->rb_right; 322 rb = rb->rb_left;
296 } 323 }
297 } 324 } while (rb);
298 325
299 return rb_hole_size_to_node(best); 326 return best;
300} 327}
301 328
302static struct drm_mm_node *find_hole(struct drm_mm *mm, u64 addr) 329static struct drm_mm_node *find_hole(struct drm_mm *mm, u64 addr)
303{ 330{
331 struct rb_node *rb = mm->holes_addr.rb_node;
304 struct drm_mm_node *node = NULL; 332 struct drm_mm_node *node = NULL;
305 struct rb_node **link = &mm->holes_addr.rb_node;
306 333
307 while (*link) { 334 while (rb) {
308 u64 hole_start; 335 u64 hole_start;
309 336
310 node = rb_hole_addr_to_node(*link); 337 node = rb_hole_addr_to_node(rb);
311 hole_start = __drm_mm_hole_node_start(node); 338 hole_start = __drm_mm_hole_node_start(node);
312 339
313 if (addr < hole_start) 340 if (addr < hole_start)
314 link = &node->rb_hole_addr.rb_left; 341 rb = node->rb_hole_addr.rb_left;
315 else if (addr > hole_start + node->hole_size) 342 else if (addr > hole_start + node->hole_size)
316 link = &node->rb_hole_addr.rb_right; 343 rb = node->rb_hole_addr.rb_right;
317 else 344 else
318 break; 345 break;
319 } 346 }
@@ -326,9 +353,6 @@ first_hole(struct drm_mm *mm,
326 u64 start, u64 end, u64 size, 353 u64 start, u64 end, u64 size,
327 enum drm_mm_insert_mode mode) 354 enum drm_mm_insert_mode mode)
328{ 355{
329 if (RB_EMPTY_ROOT(&mm->holes_size))
330 return NULL;
331
332 switch (mode) { 356 switch (mode) {
333 default: 357 default:
334 case DRM_MM_INSERT_BEST: 358 case DRM_MM_INSERT_BEST:
@@ -355,7 +379,7 @@ next_hole(struct drm_mm *mm,
355 switch (mode) { 379 switch (mode) {
356 default: 380 default:
357 case DRM_MM_INSERT_BEST: 381 case DRM_MM_INSERT_BEST:
358 return rb_hole_size_to_node(rb_next(&node->rb_hole_size)); 382 return rb_hole_size_to_node(rb_prev(&node->rb_hole_size));
359 383
360 case DRM_MM_INSERT_LOW: 384 case DRM_MM_INSERT_LOW:
361 return rb_hole_addr_to_node(rb_next(&node->rb_hole_addr)); 385 return rb_hole_addr_to_node(rb_next(&node->rb_hole_addr));
@@ -426,6 +450,11 @@ int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node)
426} 450}
427EXPORT_SYMBOL(drm_mm_reserve_node); 451EXPORT_SYMBOL(drm_mm_reserve_node);
428 452
453static u64 rb_to_hole_size_or_zero(struct rb_node *rb)
454{
455 return rb ? rb_to_hole_size(rb) : 0;
456}
457
429/** 458/**
430 * drm_mm_insert_node_in_range - ranged search for space and insert @node 459 * drm_mm_insert_node_in_range - ranged search for space and insert @node
431 * @mm: drm_mm to allocate from 460 * @mm: drm_mm to allocate from
@@ -451,18 +480,26 @@ int drm_mm_insert_node_in_range(struct drm_mm * const mm,
451{ 480{
452 struct drm_mm_node *hole; 481 struct drm_mm_node *hole;
453 u64 remainder_mask; 482 u64 remainder_mask;
483 bool once;
454 484
455 DRM_MM_BUG_ON(range_start >= range_end); 485 DRM_MM_BUG_ON(range_start >= range_end);
456 486
457 if (unlikely(size == 0 || range_end - range_start < size)) 487 if (unlikely(size == 0 || range_end - range_start < size))
458 return -ENOSPC; 488 return -ENOSPC;
459 489
490 if (rb_to_hole_size_or_zero(rb_first_cached(&mm->holes_size)) < size)
491 return -ENOSPC;
492
460 if (alignment <= 1) 493 if (alignment <= 1)
461 alignment = 0; 494 alignment = 0;
462 495
496 once = mode & DRM_MM_INSERT_ONCE;
497 mode &= ~DRM_MM_INSERT_ONCE;
498
463 remainder_mask = is_power_of_2(alignment) ? alignment - 1 : 0; 499 remainder_mask = is_power_of_2(alignment) ? alignment - 1 : 0;
464 for (hole = first_hole(mm, range_start, range_end, size, mode); hole; 500 for (hole = first_hole(mm, range_start, range_end, size, mode);
465 hole = next_hole(mm, hole, mode)) { 501 hole;
502 hole = once ? NULL : next_hole(mm, hole, mode)) {
466 u64 hole_start = __drm_mm_hole_node_start(hole); 503 u64 hole_start = __drm_mm_hole_node_start(hole);
467 u64 hole_end = hole_start + hole->hole_size; 504 u64 hole_end = hole_start + hole->hole_size;
468 u64 adj_start, adj_end; 505 u64 adj_start, adj_end;
@@ -587,9 +624,9 @@ void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new)
587 624
588 if (drm_mm_hole_follows(old)) { 625 if (drm_mm_hole_follows(old)) {
589 list_replace(&old->hole_stack, &new->hole_stack); 626 list_replace(&old->hole_stack, &new->hole_stack);
590 rb_replace_node(&old->rb_hole_size, 627 rb_replace_node_cached(&old->rb_hole_size,
591 &new->rb_hole_size, 628 &new->rb_hole_size,
592 &mm->holes_size); 629 &mm->holes_size);
593 rb_replace_node(&old->rb_hole_addr, 630 rb_replace_node(&old->rb_hole_addr,
594 &new->rb_hole_addr, 631 &new->rb_hole_addr,
595 &mm->holes_addr); 632 &mm->holes_addr);
@@ -885,7 +922,7 @@ void drm_mm_init(struct drm_mm *mm, u64 start, u64 size)
885 922
886 INIT_LIST_HEAD(&mm->hole_stack); 923 INIT_LIST_HEAD(&mm->hole_stack);
887 mm->interval_tree = RB_ROOT_CACHED; 924 mm->interval_tree = RB_ROOT_CACHED;
888 mm->holes_size = RB_ROOT; 925 mm->holes_size = RB_ROOT_CACHED;
889 mm->holes_addr = RB_ROOT; 926 mm->holes_addr = RB_ROOT;
890 927
891 /* Clever trick to avoid a special case in the free hole tracking. */ 928 /* Clever trick to avoid a special case in the free hole tracking. */
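Two things fall out of switching holes_size to an rb_root_cached sorted largest-hole-first: the cached leftmost node is always the biggest free hole, so drm_mm_insert_node_in_range() can reject an oversized request in O(1) before walking anything, and the new DRM_MM_INSERT_ONCE flag lets a caller probe only the first candidate instead of scanning every hole. An illustrative caller, with dev_priv->vram_mm standing in for a driver-private address space:

    struct drm_mm_node node = {};
    int err;

    err = drm_mm_insert_node_in_range(&dev_priv->vram_mm, &node,
                                      size, PAGE_SIZE, 0,    /* size, align, color */
                                      0, U64_MAX,             /* full range */
                                      DRM_MM_INSERT_BEST | DRM_MM_INSERT_ONCE);
    if (err == -ENOSPC) {
            /* No suitable hole; a real driver would evict and retry. */
    }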
diff --git a/drivers/gpu/drm/drm_mode_config.c b/drivers/gpu/drm/drm_mode_config.c
index e5c653357024..21e353bd3948 100644
--- a/drivers/gpu/drm/drm_mode_config.c
+++ b/drivers/gpu/drm/drm_mode_config.c
@@ -145,6 +145,11 @@ int drm_mode_getresources(struct drm_device *dev, void *data,
145 count = 0; 145 count = 0;
146 connector_id = u64_to_user_ptr(card_res->connector_id_ptr); 146 connector_id = u64_to_user_ptr(card_res->connector_id_ptr);
147 drm_for_each_connector_iter(connector, &conn_iter) { 147 drm_for_each_connector_iter(connector, &conn_iter) {
148 /* only expose writeback connectors if userspace understands them */
149 if (!file_priv->writeback_connectors &&
150 (connector->connector_type == DRM_MODE_CONNECTOR_WRITEBACK))
151 continue;
152
148 if (drm_lease_held(file_priv, connector->base.id)) { 153 if (drm_lease_held(file_priv, connector->base.id)) {
149 if (count < card_res->count_connectors && 154 if (count < card_res->count_connectors &&
150 put_user(connector->base.id, connector_id + count)) { 155 put_user(connector->base.id, connector_id + count)) {
diff --git a/drivers/gpu/drm/drm_modes.c b/drivers/gpu/drm/drm_modes.c
index c78ca0e84ffd..7f552d5fa88e 100644
--- a/drivers/gpu/drm/drm_modes.c
+++ b/drivers/gpu/drm/drm_modes.c
@@ -1257,7 +1257,7 @@ static const char * const drm_mode_status_names[] = {
1257 1257
1258#undef MODE_STATUS 1258#undef MODE_STATUS
1259 1259
1260static const char *drm_get_mode_status_name(enum drm_mode_status status) 1260const char *drm_get_mode_status_name(enum drm_mode_status status)
1261{ 1261{
1262 int index = status + 3; 1262 int index = status + 3;
1263 1263
diff --git a/drivers/gpu/drm/drm_panel.c b/drivers/gpu/drm/drm_panel.c
index 308d442a531b..965530a6f4cd 100644
--- a/drivers/gpu/drm/drm_panel.c
+++ b/drivers/gpu/drm/drm_panel.c
@@ -24,6 +24,7 @@
24#include <linux/err.h> 24#include <linux/err.h>
25#include <linux/module.h> 25#include <linux/module.h>
26 26
27#include <drm/drm_device.h>
27#include <drm/drm_crtc.h> 28#include <drm/drm_crtc.h>
28#include <drm/drm_panel.h> 29#include <drm/drm_panel.h>
29 30
@@ -94,6 +95,9 @@ EXPORT_SYMBOL(drm_panel_remove);
94 * 95 *
95 * An error is returned if the panel is already attached to another connector. 96 * An error is returned if the panel is already attached to another connector.
96 * 97 *
98 * When unloading, the driver should detach from the panel by calling
99 * drm_panel_detach().
100 *
97 * Return: 0 on success or a negative error code on failure. 101 * Return: 0 on success or a negative error code on failure.
98 */ 102 */
99int drm_panel_attach(struct drm_panel *panel, struct drm_connector *connector) 103int drm_panel_attach(struct drm_panel *panel, struct drm_connector *connector)
@@ -101,6 +105,13 @@ int drm_panel_attach(struct drm_panel *panel, struct drm_connector *connector)
101 if (panel->connector) 105 if (panel->connector)
102 return -EBUSY; 106 return -EBUSY;
103 107
108 panel->link = device_link_add(connector->dev->dev, panel->dev, 0);
109 if (!panel->link) {
110 dev_err(panel->dev, "failed to link panel to %s\n",
111 dev_name(connector->dev->dev));
112 return -EINVAL;
113 }
114
104 panel->connector = connector; 115 panel->connector = connector;
105 panel->drm = connector->dev; 116 panel->drm = connector->dev;
106 117
@@ -115,10 +126,15 @@ EXPORT_SYMBOL(drm_panel_attach);
115 * Detaches a panel from the connector it is attached to. If a panel is not 126 * Detaches a panel from the connector it is attached to. If a panel is not
116 * attached to any connector this is effectively a no-op. 127 * attached to any connector this is effectively a no-op.
117 * 128 *
129 * This function should not be called by the panel device itself. It
130 * is only for the drm device that called drm_panel_attach().
131 *
118 * Return: 0 on success or a negative error code on failure. 132 * Return: 0 on success or a negative error code on failure.
119 */ 133 */
120int drm_panel_detach(struct drm_panel *panel) 134int drm_panel_detach(struct drm_panel *panel)
121{ 135{
136 device_link_del(panel->link);
137
122 panel->connector = NULL; 138 panel->connector = NULL;
123 panel->drm = NULL; 139 panel->drm = NULL;
124 140
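drm_panel_attach() now records a device link with the DRM device as consumer and the panel as supplier, so the driver core keeps their unbind and suspend ordering consistent, and the updated kerneldoc spells out that the same DRM device must call drm_panel_detach() on teardown. A hedged consumer-side sketch of that pairing, with foo_* names illustrative:

    static int foo_output_bind(struct foo_output *out)
    {
            /* Fails with -EBUSY if the panel is already attached elsewhere. */
            return drm_panel_attach(out->panel, &out->connector);
    }

    static void foo_output_unbind(struct foo_output *out)
    {
            /* Must be done by the DRM device that attached the panel. */
            drm_panel_detach(out->panel);
    }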
diff --git a/drivers/gpu/drm/drm_plane.c b/drivers/gpu/drm/drm_plane.c
index 035054455301..df0b4ebbedbf 100644
--- a/drivers/gpu/drm/drm_plane.c
+++ b/drivers/gpu/drm/drm_plane.c
@@ -177,6 +177,10 @@ int drm_universal_plane_init(struct drm_device *dev, struct drm_plane *plane,
177 if (WARN_ON(config->num_total_plane >= 32)) 177 if (WARN_ON(config->num_total_plane >= 32))
178 return -EINVAL; 178 return -EINVAL;
179 179
180 WARN_ON(drm_drv_uses_atomic_modeset(dev) &&
181 (!funcs->atomic_destroy_state ||
182 !funcs->atomic_duplicate_state));
183
180 ret = drm_mode_object_add(dev, &plane->base, DRM_MODE_OBJECT_PLANE); 184 ret = drm_mode_object_add(dev, &plane->base, DRM_MODE_OBJECT_PLANE);
181 if (ret) 185 if (ret)
182 return ret; 186 return ret;
@@ -561,19 +565,20 @@ int drm_plane_check_pixel_format(struct drm_plane *plane,
561 if (i == plane->format_count) 565 if (i == plane->format_count)
562 return -EINVAL; 566 return -EINVAL;
563 567
564 if (!plane->modifier_count) 568 if (plane->funcs->format_mod_supported) {
565 return 0; 569 if (!plane->funcs->format_mod_supported(plane, format, modifier))
570 return -EINVAL;
571 } else {
572 if (!plane->modifier_count)
573 return 0;
566 574
567 for (i = 0; i < plane->modifier_count; i++) { 575 for (i = 0; i < plane->modifier_count; i++) {
568 if (modifier == plane->modifiers[i]) 576 if (modifier == plane->modifiers[i])
569 break; 577 break;
578 }
579 if (i == plane->modifier_count)
580 return -EINVAL;
570 } 581 }
571 if (i == plane->modifier_count)
572 return -EINVAL;
573
574 if (plane->funcs->format_mod_supported &&
575 !plane->funcs->format_mod_supported(plane, format, modifier))
576 return -EINVAL;
577 582
578 return 0; 583 return 0;
579} 584}
@@ -650,9 +655,11 @@ static int __setplane_internal(struct drm_plane *plane,
650 crtc_x, crtc_y, crtc_w, crtc_h, 655 crtc_x, crtc_y, crtc_w, crtc_h,
651 src_x, src_y, src_w, src_h, ctx); 656 src_x, src_y, src_w, src_h, ctx);
652 if (!ret) { 657 if (!ret) {
653 plane->crtc = crtc; 658 if (!plane->state) {
654 plane->fb = fb; 659 plane->crtc = crtc;
655 drm_framebuffer_get(plane->fb); 660 plane->fb = fb;
661 drm_framebuffer_get(plane->fb);
662 }
656 } else { 663 } else {
657 plane->old_fb = NULL; 664 plane->old_fb = NULL;
658 } 665 }
@@ -1092,8 +1099,10 @@ retry:
1092 /* Keep the old fb, don't unref it. */ 1099 /* Keep the old fb, don't unref it. */
1093 plane->old_fb = NULL; 1100 plane->old_fb = NULL;
1094 } else { 1101 } else {
1095 plane->fb = fb; 1102 if (!plane->state) {
1096 drm_framebuffer_get(fb); 1103 plane->fb = fb;
1104 drm_framebuffer_get(fb);
1105 }
1097 } 1106 }
1098 1107
1099out: 1108out:
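The reworked drm_plane_check_pixel_format() makes a driver's format_mod_supported() hook, when implemented, the sole authority on format/modifier pairs, rather than filtering against the static modifiers array first. A hedged sketch of the driver side, with the hardware behaviour invented for illustration:

    static bool foo_plane_format_mod_supported(struct drm_plane *plane,
                                               uint32_t format, uint64_t modifier)
    {
            /* This hypothetical scanout engine only handles linear buffers. */
            return modifier == DRM_FORMAT_MOD_LINEAR;
    }

    static const struct drm_plane_funcs foo_plane_funcs = {
            /* ... the usual update/disable/destroy hooks elided ... */
            .format_mod_supported = foo_plane_format_mod_supported,
    };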
diff --git a/drivers/gpu/drm/drm_plane_helper.c b/drivers/gpu/drm/drm_plane_helper.c
index f88f68161519..2010794943bc 100644
--- a/drivers/gpu/drm/drm_plane_helper.c
+++ b/drivers/gpu/drm/drm_plane_helper.c
@@ -502,6 +502,7 @@ EXPORT_SYMBOL(drm_plane_helper_update);
502int drm_plane_helper_disable(struct drm_plane *plane) 502int drm_plane_helper_disable(struct drm_plane *plane)
503{ 503{
504 struct drm_plane_state *plane_state; 504 struct drm_plane_state *plane_state;
505 struct drm_framebuffer *old_fb;
505 506
506 /* crtc helpers love to call disable functions for already disabled hw 507 /* crtc helpers love to call disable functions for already disabled hw
507 * functions. So cope with that. */ 508 * functions. So cope with that. */
@@ -521,8 +522,9 @@ int drm_plane_helper_disable(struct drm_plane *plane)
521 plane_state->plane = plane; 522 plane_state->plane = plane;
522 523
523 plane_state->crtc = NULL; 524 plane_state->crtc = NULL;
525 old_fb = plane_state->fb;
524 drm_atomic_set_fb_for_plane(plane_state, NULL); 526 drm_atomic_set_fb_for_plane(plane_state, NULL);
525 527
526 return drm_plane_helper_commit(plane, plane_state, plane->fb); 528 return drm_plane_helper_commit(plane, plane_state, old_fb);
527} 529}
528EXPORT_SYMBOL(drm_plane_helper_disable); 530EXPORT_SYMBOL(drm_plane_helper_disable);
diff --git a/drivers/gpu/drm/drm_prime.c b/drivers/gpu/drm/drm_prime.c
index 397b46b33739..186db2e4c57a 100644
--- a/drivers/gpu/drm/drm_prime.c
+++ b/drivers/gpu/drm/drm_prime.c
@@ -186,7 +186,6 @@ static int drm_prime_lookup_buf_handle(struct drm_prime_file_private *prime_fpri
186/** 186/**
187 * drm_gem_map_attach - dma_buf attach implementation for GEM 187 * drm_gem_map_attach - dma_buf attach implementation for GEM
188 * @dma_buf: buffer to attach device to 188 * @dma_buf: buffer to attach device to
189 * @target_dev: not used
190 * @attach: buffer attachment data 189 * @attach: buffer attachment data
191 * 190 *
192 * Allocates &drm_prime_attachment and calls &drm_driver.gem_prime_pin for 191 * Allocates &drm_prime_attachment and calls &drm_driver.gem_prime_pin for
@@ -195,7 +194,7 @@ static int drm_prime_lookup_buf_handle(struct drm_prime_file_private *prime_fpri
195 * 194 *
196 * Returns 0 on success, negative error code on failure. 195 * Returns 0 on success, negative error code on failure.
197 */ 196 */
198int drm_gem_map_attach(struct dma_buf *dma_buf, struct device *target_dev, 197int drm_gem_map_attach(struct dma_buf *dma_buf,
199 struct dma_buf_attachment *attach) 198 struct dma_buf_attachment *attach)
200{ 199{
201 struct drm_prime_attachment *prime_attach; 200 struct drm_prime_attachment *prime_attach;
@@ -435,35 +434,6 @@ void drm_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr)
435EXPORT_SYMBOL(drm_gem_dmabuf_vunmap); 434EXPORT_SYMBOL(drm_gem_dmabuf_vunmap);
436 435
437/** 436/**
438 * drm_gem_dmabuf_kmap_atomic - map_atomic implementation for GEM
439 * @dma_buf: buffer to be mapped
440 * @page_num: page number within the buffer
441 *
442 * Not implemented. This can be used as the &dma_buf_ops.map_atomic callback.
443 */
444void *drm_gem_dmabuf_kmap_atomic(struct dma_buf *dma_buf,
445 unsigned long page_num)
446{
447 return NULL;
448}
449EXPORT_SYMBOL(drm_gem_dmabuf_kmap_atomic);
450
451/**
452 * drm_gem_dmabuf_kunmap_atomic - unmap_atomic implementation for GEM
453 * @dma_buf: buffer to be unmapped
454 * @page_num: page number within the buffer
455 * @addr: virtual address of the buffer
456 *
457 * Not implemented. This can be used as the &dma_buf_ops.unmap_atomic callback.
458 */
459void drm_gem_dmabuf_kunmap_atomic(struct dma_buf *dma_buf,
460 unsigned long page_num, void *addr)
461{
462
463}
464EXPORT_SYMBOL(drm_gem_dmabuf_kunmap_atomic);
465
466/**
467 * drm_gem_dmabuf_kmap - map implementation for GEM 437 * drm_gem_dmabuf_kmap - map implementation for GEM
468 * @dma_buf: buffer to be mapped 438 * @dma_buf: buffer to be mapped
469 * @page_num: page number within the buffer 439 * @page_num: page number within the buffer
@@ -520,9 +490,7 @@ static const struct dma_buf_ops drm_gem_prime_dmabuf_ops = {
520 .unmap_dma_buf = drm_gem_unmap_dma_buf, 490 .unmap_dma_buf = drm_gem_unmap_dma_buf,
521 .release = drm_gem_dmabuf_release, 491 .release = drm_gem_dmabuf_release,
522 .map = drm_gem_dmabuf_kmap, 492 .map = drm_gem_dmabuf_kmap,
523 .map_atomic = drm_gem_dmabuf_kmap_atomic,
524 .unmap = drm_gem_dmabuf_kunmap, 493 .unmap = drm_gem_dmabuf_kunmap,
525 .unmap_atomic = drm_gem_dmabuf_kunmap_atomic,
526 .mmap = drm_gem_dmabuf_mmap, 494 .mmap = drm_gem_dmabuf_mmap,
527 .vmap = drm_gem_dmabuf_vmap, 495 .vmap = drm_gem_dmabuf_vmap,
528 .vunmap = drm_gem_dmabuf_vunmap, 496 .vunmap = drm_gem_dmabuf_vunmap,
diff --git a/drivers/gpu/drm/drm_vm.c b/drivers/gpu/drm/drm_vm.c
index 2660543ad86a..c3301046dfaa 100644
--- a/drivers/gpu/drm/drm_vm.c
+++ b/drivers/gpu/drm/drm_vm.c
@@ -100,7 +100,7 @@ static pgprot_t drm_dma_prot(uint32_t map_type, struct vm_area_struct *vma)
100 * map, get the page, increment the use count and return it. 100 * map, get the page, increment the use count and return it.
101 */ 101 */
102#if IS_ENABLED(CONFIG_AGP) 102#if IS_ENABLED(CONFIG_AGP)
103static int drm_vm_fault(struct vm_fault *vmf) 103static vm_fault_t drm_vm_fault(struct vm_fault *vmf)
104{ 104{
105 struct vm_area_struct *vma = vmf->vma; 105 struct vm_area_struct *vma = vmf->vma;
106 struct drm_file *priv = vma->vm_file->private_data; 106 struct drm_file *priv = vma->vm_file->private_data;
@@ -173,7 +173,7 @@ vm_fault_error:
173 return VM_FAULT_SIGBUS; /* Disallow mremap */ 173 return VM_FAULT_SIGBUS; /* Disallow mremap */
174} 174}
175#else 175#else
176static int drm_vm_fault(struct vm_fault *vmf) 176static vm_fault_t drm_vm_fault(struct vm_fault *vmf)
177{ 177{
178 return VM_FAULT_SIGBUS; 178 return VM_FAULT_SIGBUS;
179} 179}
@@ -189,7 +189,7 @@ static int drm_vm_fault(struct vm_fault *vmf)
189 * Get the mapping, find the real physical page to map, get the page, and 189 * Get the mapping, find the real physical page to map, get the page, and
190 * return it. 190 * return it.
191 */ 191 */
192static int drm_vm_shm_fault(struct vm_fault *vmf) 192static vm_fault_t drm_vm_shm_fault(struct vm_fault *vmf)
193{ 193{
194 struct vm_area_struct *vma = vmf->vma; 194 struct vm_area_struct *vma = vmf->vma;
195 struct drm_local_map *map = vma->vm_private_data; 195 struct drm_local_map *map = vma->vm_private_data;
@@ -291,7 +291,7 @@ static void drm_vm_shm_close(struct vm_area_struct *vma)
291 * 291 *
292 * Determine the page number from the page offset and get it from drm_device_dma::pagelist. 292 * Determine the page number from the page offset and get it from drm_device_dma::pagelist.
293 */ 293 */
294static int drm_vm_dma_fault(struct vm_fault *vmf) 294static vm_fault_t drm_vm_dma_fault(struct vm_fault *vmf)
295{ 295{
296 struct vm_area_struct *vma = vmf->vma; 296 struct vm_area_struct *vma = vmf->vma;
297 struct drm_file *priv = vma->vm_file->private_data; 297 struct drm_file *priv = vma->vm_file->private_data;
@@ -326,7 +326,7 @@ static int drm_vm_dma_fault(struct vm_fault *vmf)
326 * 326 *
327 * Determine the map offset from the page offset and get it from drm_sg_mem::pagelist. 327 * Determine the map offset from the page offset and get it from drm_sg_mem::pagelist.
328 */ 328 */
329static int drm_vm_sg_fault(struct vm_fault *vmf) 329static vm_fault_t drm_vm_sg_fault(struct vm_fault *vmf)
330{ 330{
331 struct vm_area_struct *vma = vmf->vma; 331 struct vm_area_struct *vma = vmf->vma;
332 struct drm_local_map *map = vma->vm_private_data; 332 struct drm_local_map *map = vma->vm_private_data;
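
The drm_vm.c hunks are a mechanical conversion of the legacy fault handlers from int to the dedicated vm_fault_t return type, which keeps VM_FAULT_* codes from being confused with negative errnos. For illustration, a hedged sketch of the resulting handler shape; foo_lookup_page() is a made-up helper:

/* Sketch only: the shape of a fault handler after the vm_fault_t change. */
static vm_fault_t foo_vm_fault(struct vm_fault *vmf)
{
        struct page *page = foo_lookup_page(vmf->vma, vmf->pgoff);

        if (!page)
                return VM_FAULT_SIGBUS;  /* a vm_fault_t code, not -EFAULT */

        get_page(page);
        vmf->page = page;
        return 0;
}
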
diff --git a/drivers/gpu/drm/drm_writeback.c b/drivers/gpu/drm/drm_writeback.c
new file mode 100644
index 000000000000..827395071f0b
--- /dev/null
+++ b/drivers/gpu/drm/drm_writeback.c
@@ -0,0 +1,350 @@
1// SPDX-License-Identifier: GPL-2.0
2/*
3 * (C) COPYRIGHT 2016 ARM Limited. All rights reserved.
4 * Author: Brian Starkey <brian.starkey@arm.com>
5 *
6 * This program is free software and is provided to you under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation, and any use by you of this program is subject to the terms
9 * of such GNU licence.
10 */
11
12#include <drm/drm_crtc.h>
13#include <drm/drm_modeset_helper_vtables.h>
14#include <drm/drm_property.h>
15#include <drm/drm_writeback.h>
16#include <drm/drmP.h>
17#include <linux/dma-fence.h>
18
19/**
20 * DOC: overview
21 *
22 * Writeback connectors are used to expose hardware which can write the output
23 * from a CRTC to a memory buffer. They are used and act similarly to other
24 * types of connectors, with some important differences:
25 * - Writeback connectors don't provide a way to output visually to the user.
26 * - Writeback connectors should always report as "disconnected" (so that
27 * clients which don't understand them will ignore them).
28 * - Writeback connectors don't have EDID.
29 *
30 * A framebuffer may only be attached to a writeback connector when the
31 * connector is attached to a CRTC. The WRITEBACK_FB_ID property which sets the
32 * framebuffer applies only to a single commit (see below). A framebuffer may
33 * not be attached while the CRTC is off.
34 *
35 * Unlike with planes, when a writeback framebuffer is removed by userspace DRM
36 * makes no attempt to remove it from active use by the connector. This is
37 * because no method is provided to abort a writeback operation, and in any
38 * case making a new commit whilst a writeback is ongoing is undefined (see
39 * WRITEBACK_OUT_FENCE_PTR below). As soon as the current writeback is finished,
40 * the framebuffer will automatically no longer be in active use. As it will
41 * also have already been removed from the framebuffer list, there will be no
42 * way for any userspace application to retrieve a reference to it in the
43 * intervening period.
44 *
45 * Writeback connectors have some additional properties, which userspace
46 * can use to query and control them:
47 *
48 * "WRITEBACK_FB_ID":
49 * Write-only object property storing a DRM_MODE_OBJECT_FB: it stores the
50 * framebuffer to be written by the writeback connector. This property is
51 * similar to the FB_ID property on planes, but will always read as zero
52 * and is not preserved across commits.
53 * Userspace must set this property to an output buffer every time it
54 * wishes the buffer to get filled.
55 *
56 * "WRITEBACK_PIXEL_FORMATS":
57 * Immutable blob property to store the supported pixel formats table. The
58 * data is an array of u32 DRM_FORMAT_* fourcc values.
59 * Userspace can use this blob to find out what pixel formats are supported
60 * by the connector's writeback engine.
61 *
62 * "WRITEBACK_OUT_FENCE_PTR":
63 * Userspace can use this property to provide a pointer for the kernel to
64 * fill with a sync_file file descriptor, which will signal once the
65 * writeback is finished. The value should be the address of a 32-bit
66 * signed integer, cast to a u64.
67 * Userspace should wait for this fence to signal before making another
68 * commit affecting any of the same CRTCs, Planes or Connectors.
69 * **Failure to do so will result in undefined behaviour.**
70 * For this reason it is strongly recommended that all userspace
71 * applications making use of writeback connectors *always* retrieve an
72 * out-fence for the commit and use it appropriately.
73 * From userspace, this property will always read as zero.
74 */
75
76#define fence_to_wb_connector(x) container_of(x->lock, \
77 struct drm_writeback_connector, \
78 fence_lock)
79
80static const char *drm_writeback_fence_get_driver_name(struct dma_fence *fence)
81{
82 struct drm_writeback_connector *wb_connector =
83 fence_to_wb_connector(fence);
84
85 return wb_connector->base.dev->driver->name;
86}
87
88static const char *
89drm_writeback_fence_get_timeline_name(struct dma_fence *fence)
90{
91 struct drm_writeback_connector *wb_connector =
92 fence_to_wb_connector(fence);
93
94 return wb_connector->timeline_name;
95}
96
97static bool drm_writeback_fence_enable_signaling(struct dma_fence *fence)
98{
99 return true;
100}
101
102static const struct dma_fence_ops drm_writeback_fence_ops = {
103 .get_driver_name = drm_writeback_fence_get_driver_name,
104 .get_timeline_name = drm_writeback_fence_get_timeline_name,
105 .enable_signaling = drm_writeback_fence_enable_signaling,
106 .wait = dma_fence_default_wait,
107};
108
109static int create_writeback_properties(struct drm_device *dev)
110{
111 struct drm_property *prop;
112
113 if (!dev->mode_config.writeback_fb_id_property) {
114 prop = drm_property_create_object(dev, DRM_MODE_PROP_ATOMIC,
115 "WRITEBACK_FB_ID",
116 DRM_MODE_OBJECT_FB);
117 if (!prop)
118 return -ENOMEM;
119 dev->mode_config.writeback_fb_id_property = prop;
120 }
121
122 if (!dev->mode_config.writeback_pixel_formats_property) {
123 prop = drm_property_create(dev, DRM_MODE_PROP_BLOB |
124 DRM_MODE_PROP_ATOMIC |
125 DRM_MODE_PROP_IMMUTABLE,
126 "WRITEBACK_PIXEL_FORMATS", 0);
127 if (!prop)
128 return -ENOMEM;
129 dev->mode_config.writeback_pixel_formats_property = prop;
130 }
131
132 if (!dev->mode_config.writeback_out_fence_ptr_property) {
133 prop = drm_property_create_range(dev, DRM_MODE_PROP_ATOMIC,
134 "WRITEBACK_OUT_FENCE_PTR", 0,
135 U64_MAX);
136 if (!prop)
137 return -ENOMEM;
138 dev->mode_config.writeback_out_fence_ptr_property = prop;
139 }
140
141 return 0;
142}
143
144static const struct drm_encoder_funcs drm_writeback_encoder_funcs = {
145 .destroy = drm_encoder_cleanup,
146};
147
148/**
149 * drm_writeback_connector_init - Initialize a writeback connector and its properties
150 * @dev: DRM device
151 * @wb_connector: Writeback connector to initialize
152 * @con_funcs: Connector funcs vtable
153 * @enc_helper_funcs: Encoder helper funcs vtable to be used by the internal encoder
154 * @formats: Array of supported pixel formats for the writeback engine
155 * @n_formats: Length of the formats array
156 *
157 * This function creates the writeback-connector-specific properties if they
158 * have not been already created, initializes the connector as
159 * type DRM_MODE_CONNECTOR_WRITEBACK, and correctly initializes the property
160 * values. It will also create an internal encoder associated with the
161 * drm_writeback_connector and set it to use the @enc_helper_funcs vtable for
162 * the encoder helper.
163 *
164 * Drivers should always use this function instead of drm_connector_init() to
165 * set up writeback connectors.
166 *
167 * Returns: 0 on success, or a negative error code
168 */
169int drm_writeback_connector_init(struct drm_device *dev,
170 struct drm_writeback_connector *wb_connector,
171 const struct drm_connector_funcs *con_funcs,
172 const struct drm_encoder_helper_funcs *enc_helper_funcs,
173 const u32 *formats, int n_formats)
174{
175 struct drm_property_blob *blob;
176 struct drm_connector *connector = &wb_connector->base;
177 struct drm_mode_config *config = &dev->mode_config;
178 int ret = create_writeback_properties(dev);
179
180 if (ret != 0)
181 return ret;
182
183 blob = drm_property_create_blob(dev, n_formats * sizeof(*formats),
184 formats);
185 if (IS_ERR(blob))
186 return PTR_ERR(blob);
187
188 drm_encoder_helper_add(&wb_connector->encoder, enc_helper_funcs);
189 ret = drm_encoder_init(dev, &wb_connector->encoder,
190 &drm_writeback_encoder_funcs,
191 DRM_MODE_ENCODER_VIRTUAL, NULL);
192 if (ret)
193 goto fail;
194
195 connector->interlace_allowed = 0;
196
197 ret = drm_connector_init(dev, connector, con_funcs,
198 DRM_MODE_CONNECTOR_WRITEBACK);
199 if (ret)
200 goto connector_fail;
201
202 ret = drm_mode_connector_attach_encoder(connector,
203 &wb_connector->encoder);
204 if (ret)
205 goto attach_fail;
206
207 INIT_LIST_HEAD(&wb_connector->job_queue);
208 spin_lock_init(&wb_connector->job_lock);
209
210 wb_connector->fence_context = dma_fence_context_alloc(1);
211 spin_lock_init(&wb_connector->fence_lock);
212 snprintf(wb_connector->timeline_name,
213 sizeof(wb_connector->timeline_name),
214 "CONNECTOR:%d-%s", connector->base.id, connector->name);
215
216 drm_object_attach_property(&connector->base,
217 config->writeback_out_fence_ptr_property, 0);
218
219 drm_object_attach_property(&connector->base,
220 config->writeback_fb_id_property, 0);
221
222 drm_object_attach_property(&connector->base,
223 config->writeback_pixel_formats_property,
224 blob->base.id);
225 wb_connector->pixel_formats_blob_ptr = blob;
226
227 return 0;
228
229attach_fail:
230 drm_connector_cleanup(connector);
231connector_fail:
232 drm_encoder_cleanup(&wb_connector->encoder);
233fail:
234 drm_property_blob_put(blob);
235 return ret;
236}
237EXPORT_SYMBOL(drm_writeback_connector_init);
238
239/**
240 * drm_writeback_queue_job - Queue a writeback job for later signalling
241 * @wb_connector: The writeback connector to queue a job on
242 * @job: The job to queue
243 *
244 * This function adds a job to the job_queue for a writeback connector. It
245 * should be considered to take ownership of the writeback job, and so any other
246 * references to the job must be cleared after calling this function.
247 *
248 * Drivers must ensure that for a given writeback connector, jobs are queued in
249 * exactly the same order as they will be completed by the hardware (and
250 * signaled via drm_writeback_signal_completion).
251 *
252 * For every call to drm_writeback_queue_job() there must be exactly one call to
253 * drm_writeback_signal_completion()
254 *
255 * See also: drm_writeback_signal_completion()
256 */
257void drm_writeback_queue_job(struct drm_writeback_connector *wb_connector,
258 struct drm_writeback_job *job)
259{
260 unsigned long flags;
261
262 spin_lock_irqsave(&wb_connector->job_lock, flags);
263 list_add_tail(&job->list_entry, &wb_connector->job_queue);
264 spin_unlock_irqrestore(&wb_connector->job_lock, flags);
265}
266EXPORT_SYMBOL(drm_writeback_queue_job);
267
268/*
269 * @cleanup_work: deferred cleanup of a writeback job
270 *
271 * The job cannot be cleaned up directly in drm_writeback_signal_completion,
272 * because it may be called in interrupt context. Dropping the framebuffer
273 * reference can sleep, and so the cleanup is deferred to a workqueue.
274 */
275static void cleanup_work(struct work_struct *work)
276{
277 struct drm_writeback_job *job = container_of(work,
278 struct drm_writeback_job,
279 cleanup_work);
280 drm_framebuffer_put(job->fb);
281 kfree(job);
282}
283
284
285/**
286 * drm_writeback_signal_completion - Signal the completion of a writeback job
287 * @wb_connector: The writeback connector whose job is complete
288 * @status: Status code to set in the writeback out_fence (0 for success)
289 *
290 * Drivers should call this to signal the completion of a previously queued
291 * writeback job. It should be called as soon as possible after the hardware
292 * has finished writing, and may be called from interrupt context.
293 * It is the driver's responsibility to ensure that for a given connector, the
294 * hardware completes writeback jobs in the same order as they are queued.
295 *
296 * Unless the driver is holding its own reference to the framebuffer, it must
297 * not be accessed after calling this function.
298 *
299 * See also: drm_writeback_queue_job()
300 */
301void
302drm_writeback_signal_completion(struct drm_writeback_connector *wb_connector,
303 int status)
304{
305 unsigned long flags;
306 struct drm_writeback_job *job;
307
308 spin_lock_irqsave(&wb_connector->job_lock, flags);
309 job = list_first_entry_or_null(&wb_connector->job_queue,
310 struct drm_writeback_job,
311 list_entry);
312 if (job) {
313 list_del(&job->list_entry);
314 if (job->out_fence) {
315 if (status)
316 dma_fence_set_error(job->out_fence, status);
317 dma_fence_signal(job->out_fence);
318 dma_fence_put(job->out_fence);
319 }
320 }
321 spin_unlock_irqrestore(&wb_connector->job_lock, flags);
322
323 if (WARN_ON(!job))
324 return;
325
326 INIT_WORK(&job->cleanup_work, cleanup_work);
327 queue_work(system_long_wq, &job->cleanup_work);
328}
329EXPORT_SYMBOL(drm_writeback_signal_completion);
330
331struct dma_fence *
332drm_writeback_get_out_fence(struct drm_writeback_connector *wb_connector)
333{
334 struct dma_fence *fence;
335
336 if (WARN_ON(wb_connector->base.connector_type !=
337 DRM_MODE_CONNECTOR_WRITEBACK))
338 return NULL;
339
340 fence = kzalloc(sizeof(*fence), GFP_KERNEL);
341 if (!fence)
342 return NULL;
343
344 dma_fence_init(fence, &drm_writeback_fence_ops,
345 &wb_connector->fence_lock, wb_connector->fence_context,
346 ++wb_connector->fence_seqno);
347
348 return fence;
349}
350EXPORT_SYMBOL(drm_writeback_get_out_fence);
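
A minimal, hedged sketch of the driver-side contract documented above: exactly one drm_writeback_signal_completion() per queued job, in hardware completion order. The foo_* names, the irq wiring and how the framebuffer reaches the commit path are illustrative assumptions; only the drm_writeback_* calls and the drm_writeback_job fields come from this file.

/*
 * Illustrative driver usage of the new writeback helpers (not part of the
 * patch). foo_hw_start_writeback() and the irq plumbing are hypothetical.
 */
static int foo_enable_writeback(struct drm_writeback_connector *wb_conn,
                                struct drm_framebuffer *fb)
{
        struct drm_writeback_job *job;

        job = kzalloc(sizeof(*job), GFP_KERNEL);
        if (!job)
                return -ENOMEM;

        drm_framebuffer_get(fb);        /* dropped later by cleanup_work() */
        job->fb = fb;
        /*
         * job->out_fence, if userspace asked for one via
         * WRITEBACK_OUT_FENCE_PTR, is attached elsewhere in this series;
         * drm_writeback_signal_completion() signals it when present.
         */

        drm_writeback_queue_job(wb_conn, job);  /* takes ownership of job */
        foo_hw_start_writeback(fb);             /* hypothetical hardware kick */
        return 0;
}

static irqreturn_t foo_writeback_done_irq(int irq, void *data)
{
        struct drm_writeback_connector *wb_conn = data;

        /* Status 0 means success; a non-zero status is stored in the fence. */
        drm_writeback_signal_completion(wb_conn, 0);
        return IRQ_HANDLED;
}
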
diff --git a/drivers/gpu/drm/exynos/exynos_drm_plane.c b/drivers/gpu/drm/exynos/exynos_drm_plane.c
index 38a2a7f1204b..eb9915da7dec 100644
--- a/drivers/gpu/drm/exynos/exynos_drm_plane.c
+++ b/drivers/gpu/drm/exynos/exynos_drm_plane.c
@@ -263,8 +263,6 @@ static void exynos_plane_atomic_update(struct drm_plane *plane,
263 if (!state->crtc) 263 if (!state->crtc)
264 return; 264 return;
265 265
266 plane->crtc = state->crtc;
267
268 if (exynos_crtc->ops->update_plane) 266 if (exynos_crtc->ops->update_plane)
269 exynos_crtc->ops->update_plane(exynos_crtc, exynos_plane); 267 exynos_crtc->ops->update_plane(exynos_crtc, exynos_plane);
270} 268}
diff --git a/drivers/gpu/drm/gma500/accel_2d.c b/drivers/gpu/drm/gma500/accel_2d.c
index c51d9259c7a7..204c8e452eb7 100644
--- a/drivers/gpu/drm/gma500/accel_2d.c
+++ b/drivers/gpu/drm/gma500/accel_2d.c
@@ -251,7 +251,7 @@ static void psbfb_copyarea_accel(struct fb_info *info,
251 if (!fb) 251 if (!fb)
252 return; 252 return;
253 253
254 offset = psbfb->gtt->offset; 254 offset = to_gtt_range(fb->obj[0])->offset;
255 stride = fb->pitches[0]; 255 stride = fb->pitches[0];
256 256
257 switch (fb->format->depth) { 257 switch (fb->format->depth) {
diff --git a/drivers/gpu/drm/gma500/framebuffer.c b/drivers/gpu/drm/gma500/framebuffer.c
index cb0a2ae916e0..8fa4ef192c1e 100644
--- a/drivers/gpu/drm/gma500/framebuffer.c
+++ b/drivers/gpu/drm/gma500/framebuffer.c
@@ -33,6 +33,7 @@
33#include <drm/drm.h> 33#include <drm/drm.h>
34#include <drm/drm_crtc.h> 34#include <drm/drm_crtc.h>
35#include <drm/drm_fb_helper.h> 35#include <drm/drm_fb_helper.h>
36#include <drm/drm_gem_framebuffer_helper.h>
36 37
37#include "psb_drv.h" 38#include "psb_drv.h"
38#include "psb_intel_reg.h" 39#include "psb_intel_reg.h"
@@ -40,14 +41,9 @@
40#include "framebuffer.h" 41#include "framebuffer.h"
41#include "gtt.h" 42#include "gtt.h"
42 43
43static void psb_user_framebuffer_destroy(struct drm_framebuffer *fb);
44static int psb_user_framebuffer_create_handle(struct drm_framebuffer *fb,
45 struct drm_file *file_priv,
46 unsigned int *handle);
47
48static const struct drm_framebuffer_funcs psb_fb_funcs = { 44static const struct drm_framebuffer_funcs psb_fb_funcs = {
49 .destroy = psb_user_framebuffer_destroy, 45 .destroy = drm_gem_fb_destroy,
50 .create_handle = psb_user_framebuffer_create_handle, 46 .create_handle = drm_gem_fb_create_handle,
51}; 47};
52 48
53#define CMAP_TOHW(_val, _width) ((((_val) << (_width)) + 0x7FFF - (_val)) >> 16) 49#define CMAP_TOHW(_val, _width) ((((_val) << (_width)) + 0x7FFF - (_val)) >> 16)
@@ -96,17 +92,18 @@ static int psbfb_pan(struct fb_var_screeninfo *var, struct fb_info *info)
96 struct psb_fbdev *fbdev = info->par; 92 struct psb_fbdev *fbdev = info->par;
97 struct psb_framebuffer *psbfb = &fbdev->pfb; 93 struct psb_framebuffer *psbfb = &fbdev->pfb;
98 struct drm_device *dev = psbfb->base.dev; 94 struct drm_device *dev = psbfb->base.dev;
95 struct gtt_range *gtt = to_gtt_range(psbfb->base.obj[0]);
99 96
100 /* 97 /*
101 * We have to poke our nose in here. The core fb code assumes 98 * We have to poke our nose in here. The core fb code assumes
102 * panning is part of the hardware that can be invoked before 99 * panning is part of the hardware that can be invoked before
103 * the actual fb is mapped. In our case that isn't quite true. 100 * the actual fb is mapped. In our case that isn't quite true.
104 */ 101 */
105 if (psbfb->gtt->npage) { 102 if (gtt->npage) {
106 /* GTT roll shifts in 4K pages, we need to shift the right 103 /* GTT roll shifts in 4K pages, we need to shift the right
107 number of pages */ 104 number of pages */
108 int pages = info->fix.line_length >> 12; 105 int pages = info->fix.line_length >> 12;
109 psb_gtt_roll(dev, psbfb->gtt, var->yoffset * pages); 106 psb_gtt_roll(dev, gtt, var->yoffset * pages);
110 } 107 }
111 return 0; 108 return 0;
112} 109}
@@ -117,13 +114,14 @@ static int psbfb_vm_fault(struct vm_fault *vmf)
117 struct psb_framebuffer *psbfb = vma->vm_private_data; 114 struct psb_framebuffer *psbfb = vma->vm_private_data;
118 struct drm_device *dev = psbfb->base.dev; 115 struct drm_device *dev = psbfb->base.dev;
119 struct drm_psb_private *dev_priv = dev->dev_private; 116 struct drm_psb_private *dev_priv = dev->dev_private;
117 struct gtt_range *gtt = to_gtt_range(psbfb->base.obj[0]);
120 int page_num; 118 int page_num;
121 int i; 119 int i;
122 unsigned long address; 120 unsigned long address;
123 int ret; 121 int ret;
124 unsigned long pfn; 122 unsigned long pfn;
125 unsigned long phys_addr = (unsigned long)dev_priv->stolen_base + 123 unsigned long phys_addr = (unsigned long)dev_priv->stolen_base +
126 psbfb->gtt->offset; 124 gtt->offset;
127 125
128 page_num = vma_pages(vma); 126 page_num = vma_pages(vma);
129 address = vmf->address - (vmf->pgoff << PAGE_SHIFT); 127 address = vmf->address - (vmf->pgoff << PAGE_SHIFT);
@@ -246,7 +244,7 @@ static int psb_framebuffer_init(struct drm_device *dev,
246 return -EINVAL; 244 return -EINVAL;
247 245
248 drm_helper_mode_fill_fb_struct(dev, &fb->base, mode_cmd); 246 drm_helper_mode_fill_fb_struct(dev, &fb->base, mode_cmd);
249 fb->gtt = gt; 247 fb->base.obj[0] = &gt->gem;
250 ret = drm_framebuffer_init(dev, &fb->base, &psb_fb_funcs); 248 ret = drm_framebuffer_init(dev, &fb->base, &psb_fb_funcs);
251 if (ret) { 249 if (ret) {
252 dev_err(dev->dev, "framebuffer init failed: %d\n", ret); 250 dev_err(dev->dev, "framebuffer init failed: %d\n", ret);
@@ -518,8 +516,8 @@ static int psb_fbdev_destroy(struct drm_device *dev, struct psb_fbdev *fbdev)
518 drm_framebuffer_unregister_private(&psbfb->base); 516 drm_framebuffer_unregister_private(&psbfb->base);
519 drm_framebuffer_cleanup(&psbfb->base); 517 drm_framebuffer_cleanup(&psbfb->base);
520 518
521 if (psbfb->gtt) 519 if (psbfb->base.obj[0])
522 drm_gem_object_unreference_unlocked(&psbfb->gtt->gem); 520 drm_gem_object_unreference_unlocked(psbfb->base.obj[0]);
523 return 0; 521 return 0;
524} 522}
525 523
@@ -576,44 +574,6 @@ static void psb_fbdev_fini(struct drm_device *dev)
576 dev_priv->fbdev = NULL; 574 dev_priv->fbdev = NULL;
577} 575}
578 576
579/**
 580 * psb_user_framebuffer_create_handle - add handle to a framebuffer
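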
581 * @fb: framebuffer
582 * @file_priv: our DRM file
583 * @handle: returned handle
584 *
585 * Our framebuffer object is a GTT range which also contains a GEM
586 * object. We need to turn it into a handle for userspace. GEM will do
587 * the work for us
588 */
589static int psb_user_framebuffer_create_handle(struct drm_framebuffer *fb,
590 struct drm_file *file_priv,
591 unsigned int *handle)
592{
593 struct psb_framebuffer *psbfb = to_psb_fb(fb);
594 struct gtt_range *r = psbfb->gtt;
595 return drm_gem_handle_create(file_priv, &r->gem, handle);
596}
597
598/**
599 * psb_user_framebuffer_destroy - destruct user created fb
600 * @fb: framebuffer
601 *
602 * User framebuffers are backed by GEM objects so all we have to do is
603 * clean up a bit and drop the reference, GEM will handle the fallout
604 */
605static void psb_user_framebuffer_destroy(struct drm_framebuffer *fb)
606{
607 struct psb_framebuffer *psbfb = to_psb_fb(fb);
608 struct gtt_range *r = psbfb->gtt;
609
610 /* Let DRM do its clean up */
611 drm_framebuffer_cleanup(fb);
612 /* We are no longer using the resource in GEM */
613 drm_gem_object_unreference_unlocked(&r->gem);
614 kfree(fb);
615}
616
617static const struct drm_mode_config_funcs psb_mode_funcs = { 577static const struct drm_mode_config_funcs psb_mode_funcs = {
618 .fb_create = psb_user_framebuffer_create, 578 .fb_create = psb_user_framebuffer_create,
619 .output_poll_changed = drm_fb_helper_output_poll_changed, 579 .output_poll_changed = drm_fb_helper_output_poll_changed,
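
The gma500 changes move the driver's GEM object into the generic drm_framebuffer.obj[0] slot, which is what allows the bespoke destroy/create_handle callbacks to be replaced by drm_gem_fb_destroy() and drm_gem_fb_create_handle(). The same pattern, sketched with hypothetical foo_ names for a single-plane framebuffer:

/* Sketch of the "GEM BO in drm_framebuffer" pattern; foo_* is hypothetical. */
static const struct drm_framebuffer_funcs foo_fb_funcs = {
        .destroy        = drm_gem_fb_destroy,
        .create_handle  = drm_gem_fb_create_handle,
};

static int foo_framebuffer_init(struct drm_device *dev,
                                struct drm_framebuffer *fb,
                                const struct drm_mode_fb_cmd2 *mode_cmd,
                                struct drm_gem_object *obj)
{
        drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
        fb->obj[0] = obj;       /* the generic helpers look the BO up here */
        return drm_framebuffer_init(dev, fb, &foo_fb_funcs);
}
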
diff --git a/drivers/gpu/drm/gma500/framebuffer.h b/drivers/gpu/drm/gma500/framebuffer.h
index 395f20b07aab..23dc3c5f8f0d 100644
--- a/drivers/gpu/drm/gma500/framebuffer.h
+++ b/drivers/gpu/drm/gma500/framebuffer.h
@@ -31,7 +31,6 @@ struct psb_framebuffer {
31 struct drm_framebuffer base; 31 struct drm_framebuffer base;
32 struct address_space *addr_space; 32 struct address_space *addr_space;
33 struct fb_info *fbdev; 33 struct fb_info *fbdev;
34 struct gtt_range *gtt;
35}; 34};
36 35
37struct psb_fbdev { 36struct psb_fbdev {
diff --git a/drivers/gpu/drm/gma500/gma_display.c b/drivers/gpu/drm/gma500/gma_display.c
index f3c48a2be71b..c8f071c47daf 100644
--- a/drivers/gpu/drm/gma500/gma_display.c
+++ b/drivers/gpu/drm/gma500/gma_display.c
@@ -60,7 +60,7 @@ int gma_pipe_set_base(struct drm_crtc *crtc, int x, int y,
60 struct drm_psb_private *dev_priv = dev->dev_private; 60 struct drm_psb_private *dev_priv = dev->dev_private;
61 struct gma_crtc *gma_crtc = to_gma_crtc(crtc); 61 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
62 struct drm_framebuffer *fb = crtc->primary->fb; 62 struct drm_framebuffer *fb = crtc->primary->fb;
63 struct psb_framebuffer *psbfb = to_psb_fb(fb); 63 struct gtt_range *gtt = to_gtt_range(fb->obj[0]);
64 int pipe = gma_crtc->pipe; 64 int pipe = gma_crtc->pipe;
65 const struct psb_offset *map = &dev_priv->regmap[pipe]; 65 const struct psb_offset *map = &dev_priv->regmap[pipe];
66 unsigned long start, offset; 66 unsigned long start, offset;
@@ -78,10 +78,10 @@ int gma_pipe_set_base(struct drm_crtc *crtc, int x, int y,
78 78
79 /* We are displaying this buffer, make sure it is actually loaded 79 /* We are displaying this buffer, make sure it is actually loaded
80 into the GTT */ 80 into the GTT */
81 ret = psb_gtt_pin(psbfb->gtt); 81 ret = psb_gtt_pin(gtt);
82 if (ret < 0) 82 if (ret < 0)
83 goto gma_pipe_set_base_exit; 83 goto gma_pipe_set_base_exit;
84 start = psbfb->gtt->offset; 84 start = gtt->offset;
85 offset = y * fb->pitches[0] + x * fb->format->cpp[0]; 85 offset = y * fb->pitches[0] + x * fb->format->cpp[0];
86 86
87 REG_WRITE(map->stride, fb->pitches[0]); 87 REG_WRITE(map->stride, fb->pitches[0]);
@@ -129,7 +129,7 @@ int gma_pipe_set_base(struct drm_crtc *crtc, int x, int y,
129gma_pipe_cleaner: 129gma_pipe_cleaner:
130 /* If there was a previous display we can now unpin it */ 130 /* If there was a previous display we can now unpin it */
131 if (old_fb) 131 if (old_fb)
132 psb_gtt_unpin(to_psb_fb(old_fb)->gtt); 132 psb_gtt_unpin(to_gtt_range(old_fb->obj[0]));
133 133
134gma_pipe_set_base_exit: 134gma_pipe_set_base_exit:
135 gma_power_end(dev); 135 gma_power_end(dev);
@@ -491,7 +491,7 @@ void gma_crtc_disable(struct drm_crtc *crtc)
491 crtc_funcs->dpms(crtc, DRM_MODE_DPMS_OFF); 491 crtc_funcs->dpms(crtc, DRM_MODE_DPMS_OFF);
492 492
493 if (crtc->primary->fb) { 493 if (crtc->primary->fb) {
494 gt = to_psb_fb(crtc->primary->fb)->gtt; 494 gt = to_gtt_range(crtc->primary->fb->obj[0]);
495 psb_gtt_unpin(gt); 495 psb_gtt_unpin(gt);
496 } 496 }
497} 497}
diff --git a/drivers/gpu/drm/gma500/gtt.h b/drivers/gpu/drm/gma500/gtt.h
index cdbb350c9d5d..cb0c3a2a1fd4 100644
--- a/drivers/gpu/drm/gma500/gtt.h
+++ b/drivers/gpu/drm/gma500/gtt.h
@@ -53,6 +53,8 @@ struct gtt_range {
53 int roll; /* Roll applied to the GTT entries */ 53 int roll; /* Roll applied to the GTT entries */
54}; 54};
55 55
56#define to_gtt_range(x) container_of(x, struct gtt_range, gem)
57
56extern struct gtt_range *psb_gtt_alloc_range(struct drm_device *dev, int len, 58extern struct gtt_range *psb_gtt_alloc_range(struct drm_device *dev, int len,
57 const char *name, int backed, 59 const char *name, int backed,
58 u32 align); 60 u32 align);
diff --git a/drivers/gpu/drm/gma500/mdfld_intel_display.c b/drivers/gpu/drm/gma500/mdfld_intel_display.c
index 5c066448be5b..881d613cc2e5 100644
--- a/drivers/gpu/drm/gma500/mdfld_intel_display.c
+++ b/drivers/gpu/drm/gma500/mdfld_intel_display.c
@@ -196,7 +196,7 @@ static int mdfld__intel_pipe_set_base(struct drm_crtc *crtc, int x, int y,
196 if (!gma_power_begin(dev, true)) 196 if (!gma_power_begin(dev, true))
197 return 0; 197 return 0;
198 198
199 start = psbfb->gtt->offset; 199 start = to_gtt_range(fb->obj[0])->offset;
200 offset = y * fb->pitches[0] + x * fb->format->cpp[0]; 200 offset = y * fb->pitches[0] + x * fb->format->cpp[0];
201 201
202 REG_WRITE(map->stride, fb->pitches[0]); 202 REG_WRITE(map->stride, fb->pitches[0]);
diff --git a/drivers/gpu/drm/gma500/oaktrail_crtc.c b/drivers/gpu/drm/gma500/oaktrail_crtc.c
index 0fff269d3fe6..1b7fd6a9d8a5 100644
--- a/drivers/gpu/drm/gma500/oaktrail_crtc.c
+++ b/drivers/gpu/drm/gma500/oaktrail_crtc.c
@@ -600,7 +600,6 @@ static int oaktrail_pipe_set_base(struct drm_crtc *crtc,
600 struct drm_psb_private *dev_priv = dev->dev_private; 600 struct drm_psb_private *dev_priv = dev->dev_private;
601 struct gma_crtc *gma_crtc = to_gma_crtc(crtc); 601 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
602 struct drm_framebuffer *fb = crtc->primary->fb; 602 struct drm_framebuffer *fb = crtc->primary->fb;
603 struct psb_framebuffer *psbfb = to_psb_fb(fb);
604 int pipe = gma_crtc->pipe; 603 int pipe = gma_crtc->pipe;
605 const struct psb_offset *map = &dev_priv->regmap[pipe]; 604 const struct psb_offset *map = &dev_priv->regmap[pipe];
606 unsigned long start, offset; 605 unsigned long start, offset;
@@ -617,7 +616,7 @@ static int oaktrail_pipe_set_base(struct drm_crtc *crtc,
617 if (!gma_power_begin(dev, true)) 616 if (!gma_power_begin(dev, true))
618 return 0; 617 return 0;
619 618
620 start = psbfb->gtt->offset; 619 start = to_gtt_range(fb->obj[0])->offset;
621 offset = y * fb->pitches[0] + x * fb->format->cpp[0]; 620 offset = y * fb->pitches[0] + x * fb->format->cpp[0];
622 621
623 REG_WRITE(map->stride, fb->pitches[0]); 622 REG_WRITE(map->stride, fb->pitches[0]);
diff --git a/drivers/gpu/drm/gma500/psb_intel_sdvo.c b/drivers/gpu/drm/gma500/psb_intel_sdvo.c
index f2ee6aa10afa..1d40746ab625 100644
--- a/drivers/gpu/drm/gma500/psb_intel_sdvo.c
+++ b/drivers/gpu/drm/gma500/psb_intel_sdvo.c
@@ -429,13 +429,20 @@ static const char *cmd_status_names[] = {
429 "Scaling not supported" 429 "Scaling not supported"
430}; 430};
431 431
432#define MAX_ARG_LEN 32
433
432static bool psb_intel_sdvo_write_cmd(struct psb_intel_sdvo *psb_intel_sdvo, u8 cmd, 434static bool psb_intel_sdvo_write_cmd(struct psb_intel_sdvo *psb_intel_sdvo, u8 cmd,
433 const void *args, int args_len) 435 const void *args, int args_len)
434{ 436{
435 u8 buf[args_len*2 + 2], status; 437 u8 buf[MAX_ARG_LEN*2 + 2], status;
436 struct i2c_msg msgs[args_len + 3]; 438 struct i2c_msg msgs[MAX_ARG_LEN + 3];
437 int i, ret; 439 int i, ret;
438 440
441 if (args_len > MAX_ARG_LEN) {
442 DRM_ERROR("Need to increase arg length\n");
443 return false;
444 }
445
439 psb_intel_sdvo_debug_write(psb_intel_sdvo, cmd, args, args_len); 446 psb_intel_sdvo_debug_write(psb_intel_sdvo, cmd, args, args_len);
440 447
441 for (i = 0; i < args_len; i++) { 448 for (i = 0; i < args_len; i++) {
diff --git a/drivers/gpu/drm/i2c/tda998x_drv.c b/drivers/gpu/drm/i2c/tda998x_drv.c
index 6ebd8842dbcc..0038c976536a 100644
--- a/drivers/gpu/drm/i2c/tda998x_drv.c
+++ b/drivers/gpu/drm/i2c/tda998x_drv.c
@@ -589,13 +589,22 @@ out:
589 return ret; 589 return ret;
590} 590}
591 591
592#define MAX_WRITE_RANGE_BUF 32
593
592static void 594static void
593reg_write_range(struct tda998x_priv *priv, u16 reg, u8 *p, int cnt) 595reg_write_range(struct tda998x_priv *priv, u16 reg, u8 *p, int cnt)
594{ 596{
595 struct i2c_client *client = priv->hdmi; 597 struct i2c_client *client = priv->hdmi;
596 u8 buf[cnt+1]; 598 /* This is the maximum size of the buffer passed in */
599 u8 buf[MAX_WRITE_RANGE_BUF + 1];
597 int ret; 600 int ret;
598 601
602 if (cnt > MAX_WRITE_RANGE_BUF) {
603 dev_err(&client->dev, "Fixed write buffer too small (%d)\n",
604 MAX_WRITE_RANGE_BUF);
605 return;
606 }
607
599 buf[0] = REG2ADDR(reg); 608 buf[0] = REG2ADDR(reg);
600 memcpy(&buf[1], p, cnt); 609 memcpy(&buf[1], p, cnt);
601 610
@@ -805,7 +814,7 @@ static void
805tda998x_write_if(struct tda998x_priv *priv, u8 bit, u16 addr, 814tda998x_write_if(struct tda998x_priv *priv, u8 bit, u16 addr,
806 union hdmi_infoframe *frame) 815 union hdmi_infoframe *frame)
807{ 816{
808 u8 buf[32]; 817 u8 buf[MAX_WRITE_RANGE_BUF];
809 ssize_t len; 818 ssize_t len;
810 819
811 len = hdmi_infoframe_pack(frame, buf, sizeof(buf)); 820 len = hdmi_infoframe_pack(frame, buf, sizeof(buf));
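
Both the psb_intel_sdvo.c and tda998x_drv.c hunks follow the kernel-wide VLA removal: the on-stack buffer gets a compile-time worst-case size, and an explicit runtime bound check replaces the variable-length array. The shape of the fix, sketched with hypothetical names:

/* Illustrative VLA-removal pattern; FOO_MAX_LEN and foo_send() are made up. */
#define FOO_MAX_LEN 32

static int foo_send(struct i2c_client *client, u8 addr, const u8 *data, int len)
{
        u8 buf[FOO_MAX_LEN + 1];        /* was: u8 buf[len + 1], a VLA */

        if (len > FOO_MAX_LEN)
                return -EINVAL;

        buf[0] = addr;
        memcpy(&buf[1], data, len);
        return i2c_master_send(client, buf, len + 1);
}
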
diff --git a/drivers/gpu/drm/i915/i915_gem_dmabuf.c b/drivers/gpu/drm/i915/i915_gem_dmabuf.c
index 69a7aec49e84..82e2ca17a441 100644
--- a/drivers/gpu/drm/i915/i915_gem_dmabuf.c
+++ b/drivers/gpu/drm/i915/i915_gem_dmabuf.c
@@ -111,15 +111,6 @@ static void i915_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr)
111 i915_gem_object_unpin_map(obj); 111 i915_gem_object_unpin_map(obj);
112} 112}
113 113
114static void *i915_gem_dmabuf_kmap_atomic(struct dma_buf *dma_buf, unsigned long page_num)
115{
116 return NULL;
117}
118
119static void i915_gem_dmabuf_kunmap_atomic(struct dma_buf *dma_buf, unsigned long page_num, void *addr)
120{
121
122}
123static void *i915_gem_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num) 114static void *i915_gem_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num)
124{ 115{
125 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf); 116 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
@@ -225,9 +216,7 @@ static const struct dma_buf_ops i915_dmabuf_ops = {
225 .unmap_dma_buf = i915_gem_unmap_dma_buf, 216 .unmap_dma_buf = i915_gem_unmap_dma_buf,
226 .release = drm_gem_dmabuf_release, 217 .release = drm_gem_dmabuf_release,
227 .map = i915_gem_dmabuf_kmap, 218 .map = i915_gem_dmabuf_kmap,
228 .map_atomic = i915_gem_dmabuf_kmap_atomic,
229 .unmap = i915_gem_dmabuf_kunmap, 219 .unmap = i915_gem_dmabuf_kunmap,
230 .unmap_atomic = i915_gem_dmabuf_kunmap_atomic,
231 .mmap = i915_gem_dmabuf_mmap, 220 .mmap = i915_gem_dmabuf_mmap,
232 .vmap = i915_gem_dmabuf_vmap, 221 .vmap = i915_gem_dmabuf_vmap,
233 .vunmap = i915_gem_dmabuf_vunmap, 222 .vunmap = i915_gem_dmabuf_vunmap,
diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c
index 2b4a40a32b76..284ae9574f03 100644
--- a/drivers/gpu/drm/i915/i915_gem_gtt.c
+++ b/drivers/gpu/drm/i915/i915_gem_gtt.c
@@ -3945,7 +3945,7 @@ int i915_gem_gtt_insert(struct i915_address_space *vm,
3945 3945
3946 mode = DRM_MM_INSERT_BEST; 3946 mode = DRM_MM_INSERT_BEST;
3947 if (flags & PIN_HIGH) 3947 if (flags & PIN_HIGH)
3948 mode = DRM_MM_INSERT_HIGH; 3948 mode = DRM_MM_INSERT_HIGHEST;
3949 if (flags & PIN_MAPPABLE) 3949 if (flags & PIN_MAPPABLE)
3950 mode = DRM_MM_INSERT_LOW; 3950 mode = DRM_MM_INSERT_LOW;
3951 3951
@@ -3965,6 +3965,15 @@ int i915_gem_gtt_insert(struct i915_address_space *vm,
3965 if (err != -ENOSPC) 3965 if (err != -ENOSPC)
3966 return err; 3966 return err;
3967 3967
3968 if (mode & DRM_MM_INSERT_ONCE) {
3969 err = drm_mm_insert_node_in_range(&vm->mm, node,
3970 size, alignment, color,
3971 start, end,
3972 DRM_MM_INSERT_BEST);
3973 if (err != -ENOSPC)
3974 return err;
3975 }
3976
3968 if (flags & PIN_NOEVICT) 3977 if (flags & PIN_NOEVICT)
3969 return -ENOSPC; 3978 return -ENOSPC;
3970 3979
diff --git a/drivers/gpu/drm/i915/intel_atomic.c b/drivers/gpu/drm/i915/intel_atomic.c
index 40285d1b91b7..61ddb5871d8a 100644
--- a/drivers/gpu/drm/i915/intel_atomic.c
+++ b/drivers/gpu/drm/i915/intel_atomic.c
@@ -124,6 +124,7 @@ int intel_digital_connector_atomic_check(struct drm_connector *conn,
124 if (new_conn_state->force_audio != old_conn_state->force_audio || 124 if (new_conn_state->force_audio != old_conn_state->force_audio ||
125 new_conn_state->broadcast_rgb != old_conn_state->broadcast_rgb || 125 new_conn_state->broadcast_rgb != old_conn_state->broadcast_rgb ||
126 new_conn_state->base.picture_aspect_ratio != old_conn_state->base.picture_aspect_ratio || 126 new_conn_state->base.picture_aspect_ratio != old_conn_state->base.picture_aspect_ratio ||
127 new_conn_state->base.content_type != old_conn_state->base.content_type ||
127 new_conn_state->base.scaling_mode != old_conn_state->base.scaling_mode) 128 new_conn_state->base.scaling_mode != old_conn_state->base.scaling_mode)
128 crtc_state->mode_changed = true; 129 crtc_state->mode_changed = true;
129 130
diff --git a/drivers/gpu/drm/i915/intel_atomic_plane.c b/drivers/gpu/drm/i915/intel_atomic_plane.c
index 6d068786eb41..e8bf4cc499e1 100644
--- a/drivers/gpu/drm/i915/intel_atomic_plane.c
+++ b/drivers/gpu/drm/i915/intel_atomic_plane.c
@@ -120,12 +120,6 @@ int intel_plane_atomic_check_with_state(const struct intel_crtc_state *old_crtc_
120 &crtc_state->base.adjusted_mode; 120 &crtc_state->base.adjusted_mode;
121 int ret; 121 int ret;
122 122
123 /*
124 * Both crtc and plane->crtc could be NULL if we're updating a
125 * property while the plane is disabled. We don't actually have
126 * anything driver-specific we need to test in that case, so
127 * just return success.
128 */
129 if (!intel_state->base.crtc && !old_plane_state->base.crtc) 123 if (!intel_state->base.crtc && !old_plane_state->base.crtc)
130 return 0; 124 return 0;
131 125
@@ -209,12 +203,6 @@ static int intel_plane_atomic_check(struct drm_plane *plane,
209 const struct drm_crtc_state *old_crtc_state; 203 const struct drm_crtc_state *old_crtc_state;
210 struct drm_crtc_state *new_crtc_state; 204 struct drm_crtc_state *new_crtc_state;
211 205
212 /*
213 * Both crtc and plane->crtc could be NULL if we're updating a
214 * property while the plane is disabled. We don't actually have
215 * anything driver-specific we need to test in that case, so
216 * just return success.
217 */
218 if (!crtc) 206 if (!crtc)
219 return 0; 207 return 0;
220 208
diff --git a/drivers/gpu/drm/i915/intel_display.c b/drivers/gpu/drm/i915/intel_display.c
index 17c590b42fd7..2c16c3a3cdea 100644
--- a/drivers/gpu/drm/i915/intel_display.c
+++ b/drivers/gpu/drm/i915/intel_display.c
@@ -1022,7 +1022,7 @@ bool intel_crtc_active(struct intel_crtc *crtc)
1022 * We can ditch the adjusted_mode.crtc_clock check as soon 1022 * We can ditch the adjusted_mode.crtc_clock check as soon
1023 * as Haswell has gained clock readout/fastboot support. 1023 * as Haswell has gained clock readout/fastboot support.
1024 * 1024 *
1025 * We can ditch the crtc->primary->fb check as soon as we can 1025 * We can ditch the crtc->primary->state->fb check as soon as we can
1026 * properly reconstruct framebuffers. 1026 * properly reconstruct framebuffers.
1027 * 1027 *
1028 * FIXME: The intel_crtc->active here should be switched to 1028 * FIXME: The intel_crtc->active here should be switched to
@@ -2882,9 +2882,8 @@ valid_fb:
2882 if (i915_gem_object_is_tiled(obj)) 2882 if (i915_gem_object_is_tiled(obj))
2883 dev_priv->preserve_bios_swizzle = true; 2883 dev_priv->preserve_bios_swizzle = true;
2884 2884
2885 drm_framebuffer_get(fb); 2885 plane_state->fb = fb;
2886 primary->fb = primary->state->fb = fb; 2886 plane_state->crtc = &intel_crtc->base;
2887 primary->crtc = primary->state->crtc = &intel_crtc->base;
2888 2887
2889 intel_set_plane_visible(to_intel_crtc_state(crtc_state), 2888 intel_set_plane_visible(to_intel_crtc_state(crtc_state),
2890 to_intel_plane_state(plane_state), 2889 to_intel_plane_state(plane_state),
@@ -13241,8 +13240,17 @@ void intel_plane_destroy(struct drm_plane *plane)
13241 kfree(to_intel_plane(plane)); 13240 kfree(to_intel_plane(plane));
13242} 13241}
13243 13242
13244static bool i8xx_mod_supported(uint32_t format, uint64_t modifier) 13243static bool i8xx_plane_format_mod_supported(struct drm_plane *_plane,
13244 u32 format, u64 modifier)
13245{ 13245{
13246 switch (modifier) {
13247 case DRM_FORMAT_MOD_LINEAR:
13248 case I915_FORMAT_MOD_X_TILED:
13249 break;
13250 default:
13251 return false;
13252 }
13253
13246 switch (format) { 13254 switch (format) {
13247 case DRM_FORMAT_C8: 13255 case DRM_FORMAT_C8:
13248 case DRM_FORMAT_RGB565: 13256 case DRM_FORMAT_RGB565:
@@ -13255,8 +13263,17 @@ static bool i8xx_mod_supported(uint32_t format, uint64_t modifier)
13255 } 13263 }
13256} 13264}
13257 13265
13258static bool i965_mod_supported(uint32_t format, uint64_t modifier) 13266static bool i965_plane_format_mod_supported(struct drm_plane *_plane,
13267 u32 format, u64 modifier)
13259{ 13268{
13269 switch (modifier) {
13270 case DRM_FORMAT_MOD_LINEAR:
13271 case I915_FORMAT_MOD_X_TILED:
13272 break;
13273 default:
13274 return false;
13275 }
13276
13260 switch (format) { 13277 switch (format) {
13261 case DRM_FORMAT_C8: 13278 case DRM_FORMAT_C8:
13262 case DRM_FORMAT_RGB565: 13279 case DRM_FORMAT_RGB565:
@@ -13271,8 +13288,26 @@ static bool i965_mod_supported(uint32_t format, uint64_t modifier)
13271 } 13288 }
13272} 13289}
13273 13290
13274static bool skl_mod_supported(uint32_t format, uint64_t modifier) 13291static bool skl_plane_format_mod_supported(struct drm_plane *_plane,
13292 u32 format, u64 modifier)
13275{ 13293{
13294 struct intel_plane *plane = to_intel_plane(_plane);
13295
13296 switch (modifier) {
13297 case DRM_FORMAT_MOD_LINEAR:
13298 case I915_FORMAT_MOD_X_TILED:
13299 case I915_FORMAT_MOD_Y_TILED:
13300 case I915_FORMAT_MOD_Yf_TILED:
13301 break;
13302 case I915_FORMAT_MOD_Y_TILED_CCS:
13303 case I915_FORMAT_MOD_Yf_TILED_CCS:
13304 if (!plane->has_ccs)
13305 return false;
13306 break;
13307 default:
13308 return false;
13309 }
13310
13276 switch (format) { 13311 switch (format) {
13277 case DRM_FORMAT_XRGB8888: 13312 case DRM_FORMAT_XRGB8888:
13278 case DRM_FORMAT_XBGR8888: 13313 case DRM_FORMAT_XBGR8888:
@@ -13304,38 +13339,36 @@ static bool skl_mod_supported(uint32_t format, uint64_t modifier)
13304 } 13339 }
13305} 13340}
13306 13341
13307static bool intel_primary_plane_format_mod_supported(struct drm_plane *plane, 13342static bool intel_cursor_format_mod_supported(struct drm_plane *_plane,
13308 uint32_t format, 13343 u32 format, u64 modifier)
13309 uint64_t modifier)
13310{ 13344{
13311 struct drm_i915_private *dev_priv = to_i915(plane->dev); 13345 return modifier == DRM_FORMAT_MOD_LINEAR &&
13312 13346 format == DRM_FORMAT_ARGB8888;
13313 if (WARN_ON(modifier == DRM_FORMAT_MOD_INVALID))
13314 return false;
13315
13316 if ((modifier >> 56) != DRM_FORMAT_MOD_VENDOR_INTEL &&
13317 modifier != DRM_FORMAT_MOD_LINEAR)
13318 return false;
13319
13320 if (INTEL_GEN(dev_priv) >= 9)
13321 return skl_mod_supported(format, modifier);
13322 else if (INTEL_GEN(dev_priv) >= 4)
13323 return i965_mod_supported(format, modifier);
13324 else
13325 return i8xx_mod_supported(format, modifier);
13326} 13347}
13327 13348
13328static bool intel_cursor_plane_format_mod_supported(struct drm_plane *plane, 13349static struct drm_plane_funcs skl_plane_funcs = {
13329 uint32_t format, 13350 .update_plane = drm_atomic_helper_update_plane,
13330 uint64_t modifier) 13351 .disable_plane = drm_atomic_helper_disable_plane,
13331{ 13352 .destroy = intel_plane_destroy,
13332 if (WARN_ON(modifier == DRM_FORMAT_MOD_INVALID)) 13353 .atomic_get_property = intel_plane_atomic_get_property,
13333 return false; 13354 .atomic_set_property = intel_plane_atomic_set_property,
13355 .atomic_duplicate_state = intel_plane_duplicate_state,
13356 .atomic_destroy_state = intel_plane_destroy_state,
13357 .format_mod_supported = skl_plane_format_mod_supported,
13358};
13334 13359
13335 return modifier == DRM_FORMAT_MOD_LINEAR && format == DRM_FORMAT_ARGB8888; 13360static struct drm_plane_funcs i965_plane_funcs = {
13336} 13361 .update_plane = drm_atomic_helper_update_plane,
13362 .disable_plane = drm_atomic_helper_disable_plane,
13363 .destroy = intel_plane_destroy,
13364 .atomic_get_property = intel_plane_atomic_get_property,
13365 .atomic_set_property = intel_plane_atomic_set_property,
13366 .atomic_duplicate_state = intel_plane_duplicate_state,
13367 .atomic_destroy_state = intel_plane_destroy_state,
13368 .format_mod_supported = i965_plane_format_mod_supported,
13369};
13337 13370
13338static struct drm_plane_funcs intel_plane_funcs = { 13371static struct drm_plane_funcs i8xx_plane_funcs = {
13339 .update_plane = drm_atomic_helper_update_plane, 13372 .update_plane = drm_atomic_helper_update_plane,
13340 .disable_plane = drm_atomic_helper_disable_plane, 13373 .disable_plane = drm_atomic_helper_disable_plane,
13341 .destroy = intel_plane_destroy, 13374 .destroy = intel_plane_destroy,
@@ -13343,7 +13376,7 @@ static struct drm_plane_funcs intel_plane_funcs = {
13343 .atomic_set_property = intel_plane_atomic_set_property, 13376 .atomic_set_property = intel_plane_atomic_set_property,
13344 .atomic_duplicate_state = intel_plane_duplicate_state, 13377 .atomic_duplicate_state = intel_plane_duplicate_state,
13345 .atomic_destroy_state = intel_plane_destroy_state, 13378 .atomic_destroy_state = intel_plane_destroy_state,
13346 .format_mod_supported = intel_primary_plane_format_mod_supported, 13379 .format_mod_supported = i8xx_plane_format_mod_supported,
13347}; 13380};
13348 13381
13349static int 13382static int
@@ -13468,7 +13501,7 @@ static const struct drm_plane_funcs intel_cursor_plane_funcs = {
13468 .atomic_set_property = intel_plane_atomic_set_property, 13501 .atomic_set_property = intel_plane_atomic_set_property,
13469 .atomic_duplicate_state = intel_plane_duplicate_state, 13502 .atomic_duplicate_state = intel_plane_duplicate_state,
13470 .atomic_destroy_state = intel_plane_destroy_state, 13503 .atomic_destroy_state = intel_plane_destroy_state,
13471 .format_mod_supported = intel_cursor_plane_format_mod_supported, 13504 .format_mod_supported = intel_cursor_format_mod_supported,
13472}; 13505};
13473 13506
13474static bool i9xx_plane_has_fbc(struct drm_i915_private *dev_priv, 13507static bool i9xx_plane_has_fbc(struct drm_i915_private *dev_priv,
@@ -13526,6 +13559,7 @@ intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
13526{ 13559{
13527 struct intel_plane *primary = NULL; 13560 struct intel_plane *primary = NULL;
13528 struct intel_plane_state *state = NULL; 13561 struct intel_plane_state *state = NULL;
13562 const struct drm_plane_funcs *plane_funcs;
13529 const uint32_t *intel_primary_formats; 13563 const uint32_t *intel_primary_formats;
13530 unsigned int supported_rotations; 13564 unsigned int supported_rotations;
13531 unsigned int num_formats; 13565 unsigned int num_formats;
@@ -13581,6 +13615,9 @@ intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
13581 primary->check_plane = intel_check_primary_plane; 13615 primary->check_plane = intel_check_primary_plane;
13582 13616
13583 if (INTEL_GEN(dev_priv) >= 9) { 13617 if (INTEL_GEN(dev_priv) >= 9) {
13618 primary->has_ccs = skl_plane_has_ccs(dev_priv, pipe,
13619 PLANE_PRIMARY);
13620
13584 if (skl_plane_has_planar(dev_priv, pipe, PLANE_PRIMARY)) { 13621 if (skl_plane_has_planar(dev_priv, pipe, PLANE_PRIMARY)) {
13585 intel_primary_formats = skl_pri_planar_formats; 13622 intel_primary_formats = skl_pri_planar_formats;
13586 num_formats = ARRAY_SIZE(skl_pri_planar_formats); 13623 num_formats = ARRAY_SIZE(skl_pri_planar_formats);
@@ -13589,7 +13626,7 @@ intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
13589 num_formats = ARRAY_SIZE(skl_primary_formats); 13626 num_formats = ARRAY_SIZE(skl_primary_formats);
13590 } 13627 }
13591 13628
13592 if (skl_plane_has_ccs(dev_priv, pipe, PLANE_PRIMARY)) 13629 if (primary->has_ccs)
13593 modifiers = skl_format_modifiers_ccs; 13630 modifiers = skl_format_modifiers_ccs;
13594 else 13631 else
13595 modifiers = skl_format_modifiers_noccs; 13632 modifiers = skl_format_modifiers_noccs;
@@ -13597,6 +13634,8 @@ intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
13597 primary->update_plane = skl_update_plane; 13634 primary->update_plane = skl_update_plane;
13598 primary->disable_plane = skl_disable_plane; 13635 primary->disable_plane = skl_disable_plane;
13599 primary->get_hw_state = skl_plane_get_hw_state; 13636 primary->get_hw_state = skl_plane_get_hw_state;
13637
13638 plane_funcs = &skl_plane_funcs;
13600 } else if (INTEL_GEN(dev_priv) >= 4) { 13639 } else if (INTEL_GEN(dev_priv) >= 4) {
13601 intel_primary_formats = i965_primary_formats; 13640 intel_primary_formats = i965_primary_formats;
13602 num_formats = ARRAY_SIZE(i965_primary_formats); 13641 num_formats = ARRAY_SIZE(i965_primary_formats);
@@ -13605,6 +13644,8 @@ intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
13605 primary->update_plane = i9xx_update_plane; 13644 primary->update_plane = i9xx_update_plane;
13606 primary->disable_plane = i9xx_disable_plane; 13645 primary->disable_plane = i9xx_disable_plane;
13607 primary->get_hw_state = i9xx_plane_get_hw_state; 13646 primary->get_hw_state = i9xx_plane_get_hw_state;
13647
13648 plane_funcs = &i965_plane_funcs;
13608 } else { 13649 } else {
13609 intel_primary_formats = i8xx_primary_formats; 13650 intel_primary_formats = i8xx_primary_formats;
13610 num_formats = ARRAY_SIZE(i8xx_primary_formats); 13651 num_formats = ARRAY_SIZE(i8xx_primary_formats);
@@ -13613,25 +13654,27 @@ intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
13613 primary->update_plane = i9xx_update_plane; 13654 primary->update_plane = i9xx_update_plane;
13614 primary->disable_plane = i9xx_disable_plane; 13655 primary->disable_plane = i9xx_disable_plane;
13615 primary->get_hw_state = i9xx_plane_get_hw_state; 13656 primary->get_hw_state = i9xx_plane_get_hw_state;
13657
13658 plane_funcs = &i8xx_plane_funcs;
13616 } 13659 }
13617 13660
13618 if (INTEL_GEN(dev_priv) >= 9) 13661 if (INTEL_GEN(dev_priv) >= 9)
13619 ret = drm_universal_plane_init(&dev_priv->drm, &primary->base, 13662 ret = drm_universal_plane_init(&dev_priv->drm, &primary->base,
13620 0, &intel_plane_funcs, 13663 0, plane_funcs,
13621 intel_primary_formats, num_formats, 13664 intel_primary_formats, num_formats,
13622 modifiers, 13665 modifiers,
13623 DRM_PLANE_TYPE_PRIMARY, 13666 DRM_PLANE_TYPE_PRIMARY,
13624 "plane 1%c", pipe_name(pipe)); 13667 "plane 1%c", pipe_name(pipe));
13625 else if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv)) 13668 else if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
13626 ret = drm_universal_plane_init(&dev_priv->drm, &primary->base, 13669 ret = drm_universal_plane_init(&dev_priv->drm, &primary->base,
13627 0, &intel_plane_funcs, 13670 0, plane_funcs,
13628 intel_primary_formats, num_formats, 13671 intel_primary_formats, num_formats,
13629 modifiers, 13672 modifiers,
13630 DRM_PLANE_TYPE_PRIMARY, 13673 DRM_PLANE_TYPE_PRIMARY,
13631 "primary %c", pipe_name(pipe)); 13674 "primary %c", pipe_name(pipe));
13632 else 13675 else
13633 ret = drm_universal_plane_init(&dev_priv->drm, &primary->base, 13676 ret = drm_universal_plane_init(&dev_priv->drm, &primary->base,
13634 0, &intel_plane_funcs, 13677 0, plane_funcs,
13635 intel_primary_formats, num_formats, 13678 intel_primary_formats, num_formats,
13636 modifiers, 13679 modifiers,
13637 DRM_PLANE_TYPE_PRIMARY, 13680 DRM_PLANE_TYPE_PRIMARY,
diff --git a/drivers/gpu/drm/i915/intel_drv.h b/drivers/gpu/drm/i915/intel_drv.h
index 8641583842be..8840108749a5 100644
--- a/drivers/gpu/drm/i915/intel_drv.h
+++ b/drivers/gpu/drm/i915/intel_drv.h
@@ -952,6 +952,7 @@ struct intel_plane {
952 enum pipe pipe; 952 enum pipe pipe;
953 bool can_scale; 953 bool can_scale;
954 bool has_fbc; 954 bool has_fbc;
955 bool has_ccs;
955 int max_downscale; 956 int max_downscale;
956 uint32_t frontbuffer_bit; 957 uint32_t frontbuffer_bit;
957 958
diff --git a/drivers/gpu/drm/i915/intel_hdmi.c b/drivers/gpu/drm/i915/intel_hdmi.c
index ba5ea61fb7b9..0ca4cc877520 100644
--- a/drivers/gpu/drm/i915/intel_hdmi.c
+++ b/drivers/gpu/drm/i915/intel_hdmi.c
@@ -461,7 +461,8 @@ static void intel_write_infoframe(struct drm_encoder *encoder,
461} 461}
462 462
463static void intel_hdmi_set_avi_infoframe(struct drm_encoder *encoder, 463static void intel_hdmi_set_avi_infoframe(struct drm_encoder *encoder,
464 const struct intel_crtc_state *crtc_state) 464 const struct intel_crtc_state *crtc_state,
465 const struct drm_connector_state *conn_state)
465{ 466{
466 struct intel_hdmi *intel_hdmi = enc_to_intel_hdmi(encoder); 467 struct intel_hdmi *intel_hdmi = enc_to_intel_hdmi(encoder);
467 const struct drm_display_mode *adjusted_mode = 468 const struct drm_display_mode *adjusted_mode =
@@ -491,6 +492,9 @@ static void intel_hdmi_set_avi_infoframe(struct drm_encoder *encoder,
491 intel_hdmi->rgb_quant_range_selectable, 492 intel_hdmi->rgb_quant_range_selectable,
492 is_hdmi2_sink); 493 is_hdmi2_sink);
493 494
495 drm_hdmi_avi_infoframe_content_type(&frame.avi,
496 conn_state);
497
494 /* TODO: handle pixel repetition for YCBCR420 outputs */ 498 /* TODO: handle pixel repetition for YCBCR420 outputs */
495 intel_write_infoframe(encoder, crtc_state, &frame); 499 intel_write_infoframe(encoder, crtc_state, &frame);
496} 500}
@@ -586,7 +590,7 @@ static void g4x_set_infoframes(struct drm_encoder *encoder,
586 I915_WRITE(reg, val); 590 I915_WRITE(reg, val);
587 POSTING_READ(reg); 591 POSTING_READ(reg);
588 592
589 intel_hdmi_set_avi_infoframe(encoder, crtc_state); 593 intel_hdmi_set_avi_infoframe(encoder, crtc_state, conn_state);
590 intel_hdmi_set_spd_infoframe(encoder, crtc_state); 594 intel_hdmi_set_spd_infoframe(encoder, crtc_state);
591 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state); 595 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state);
592} 596}
@@ -727,7 +731,7 @@ static void ibx_set_infoframes(struct drm_encoder *encoder,
727 I915_WRITE(reg, val); 731 I915_WRITE(reg, val);
728 POSTING_READ(reg); 732 POSTING_READ(reg);
729 733
730 intel_hdmi_set_avi_infoframe(encoder, crtc_state); 734 intel_hdmi_set_avi_infoframe(encoder, crtc_state, conn_state);
731 intel_hdmi_set_spd_infoframe(encoder, crtc_state); 735 intel_hdmi_set_spd_infoframe(encoder, crtc_state);
732 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state); 736 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state);
733} 737}
@@ -770,7 +774,7 @@ static void cpt_set_infoframes(struct drm_encoder *encoder,
770 I915_WRITE(reg, val); 774 I915_WRITE(reg, val);
771 POSTING_READ(reg); 775 POSTING_READ(reg);
772 776
773 intel_hdmi_set_avi_infoframe(encoder, crtc_state); 777 intel_hdmi_set_avi_infoframe(encoder, crtc_state, conn_state);
774 intel_hdmi_set_spd_infoframe(encoder, crtc_state); 778 intel_hdmi_set_spd_infoframe(encoder, crtc_state);
775 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state); 779 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state);
776} 780}
@@ -823,7 +827,7 @@ static void vlv_set_infoframes(struct drm_encoder *encoder,
823 I915_WRITE(reg, val); 827 I915_WRITE(reg, val);
824 POSTING_READ(reg); 828 POSTING_READ(reg);
825 829
826 intel_hdmi_set_avi_infoframe(encoder, crtc_state); 830 intel_hdmi_set_avi_infoframe(encoder, crtc_state, conn_state);
827 intel_hdmi_set_spd_infoframe(encoder, crtc_state); 831 intel_hdmi_set_spd_infoframe(encoder, crtc_state);
828 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state); 832 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state);
829} 833}
@@ -856,7 +860,7 @@ static void hsw_set_infoframes(struct drm_encoder *encoder,
856 I915_WRITE(reg, val); 860 I915_WRITE(reg, val);
857 POSTING_READ(reg); 861 POSTING_READ(reg);
858 862
859 intel_hdmi_set_avi_infoframe(encoder, crtc_state); 863 intel_hdmi_set_avi_infoframe(encoder, crtc_state, conn_state);
860 intel_hdmi_set_spd_infoframe(encoder, crtc_state); 864 intel_hdmi_set_spd_infoframe(encoder, crtc_state);
861 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state); 865 intel_hdmi_set_hdmi_infoframe(encoder, crtc_state, conn_state);
862} 866}
@@ -2048,6 +2052,7 @@ intel_hdmi_add_properties(struct intel_hdmi *intel_hdmi, struct drm_connector *c
2048 intel_attach_force_audio_property(connector); 2052 intel_attach_force_audio_property(connector);
2049 intel_attach_broadcast_rgb_property(connector); 2053 intel_attach_broadcast_rgb_property(connector);
2050 intel_attach_aspect_ratio_property(connector); 2054 intel_attach_aspect_ratio_property(connector);
2055 drm_connector_attach_content_type_property(connector);
2051 connector->state->picture_aspect_ratio = HDMI_PICTURE_ASPECT_NONE; 2056 connector->state->picture_aspect_ratio = HDMI_PICTURE_ASPECT_NONE;
2052} 2057}
2053 2058
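The intel_hdmi.c hunks above wire the new connector "content type" property (added earlier in this series) into i915: the property is attached at connector init via drm_connector_attach_content_type_property(), and drm_hdmi_avi_infoframe_content_type() copies the value from the connector state into the AVI infoframe. A rough sketch of the same hookup in a generic atomic HDMI encoder, with foo_* names as placeholders and signatures as they stand in this series:

#include <linux/hdmi.h>
#include <drm/drm_connector.h>
#include <drm/drm_edid.h>
#include <drm/drm_modes.h>

/* Connector init: expose the "content type" enum property
 * (No Data / Graphics / Photo / Cinema / Game) on this connector. */
static void foo_hdmi_connector_init(struct drm_connector *connector)
{
	drm_connector_attach_content_type_property(connector);
}

/* Modeset path: fold the property value from the connector state
 * into the AVI infoframe before handing it to the hardware. */
static void foo_hdmi_write_avi_infoframe(const struct drm_display_mode *mode,
					 const struct drm_connector_state *conn_state)
{
	union hdmi_infoframe frame;

	drm_hdmi_avi_infoframe_from_display_mode(&frame.avi, mode, false);

	/* Sets the CN bits and the ITC flag from conn_state->content_type. */
	drm_hdmi_avi_infoframe_content_type(&frame.avi, conn_state);

	/* hardware-specific infoframe packing and write would follow */
}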
diff --git a/drivers/gpu/drm/i915/intel_ringbuffer.c b/drivers/gpu/drm/i915/intel_ringbuffer.c
index 6496c1d00dbb..65811e2fa7da 100644
--- a/drivers/gpu/drm/i915/intel_ringbuffer.c
+++ b/drivers/gpu/drm/i915/intel_ringbuffer.c
@@ -1049,6 +1049,8 @@ int intel_ring_pin(struct intel_ring *ring,
1049 flags |= PIN_OFFSET_BIAS | offset_bias; 1049 flags |= PIN_OFFSET_BIAS | offset_bias;
1050 if (vma->obj->stolen) 1050 if (vma->obj->stolen)
1051 flags |= PIN_MAPPABLE; 1051 flags |= PIN_MAPPABLE;
1052 else
1053 flags |= PIN_HIGH;
1052 1054
1053 if (!(vma->flags & I915_VMA_GLOBAL_BIND)) { 1055 if (!(vma->flags & I915_VMA_GLOBAL_BIND)) {
1054 if (flags & PIN_MAPPABLE || map == I915_MAP_WC) 1056 if (flags & PIN_MAPPABLE || map == I915_MAP_WC)
diff --git a/drivers/gpu/drm/i915/intel_sprite.c b/drivers/gpu/drm/i915/intel_sprite.c
index d474065ed2dd..1160bc3a1e8e 100644
--- a/drivers/gpu/drm/i915/intel_sprite.c
+++ b/drivers/gpu/drm/i915/intel_sprite.c
@@ -1241,8 +1241,17 @@ static const uint64_t skl_plane_format_modifiers_ccs[] = {
1241 DRM_FORMAT_MOD_INVALID 1241 DRM_FORMAT_MOD_INVALID
1242}; 1242};
1243 1243
1244static bool g4x_mod_supported(uint32_t format, uint64_t modifier) 1244static bool g4x_sprite_format_mod_supported(struct drm_plane *_plane,
1245 u32 format, u64 modifier)
1245{ 1246{
1247 switch (modifier) {
1248 case DRM_FORMAT_MOD_LINEAR:
1249 case I915_FORMAT_MOD_X_TILED:
1250 break;
1251 default:
1252 return false;
1253 }
1254
1246 switch (format) { 1255 switch (format) {
1247 case DRM_FORMAT_XRGB8888: 1256 case DRM_FORMAT_XRGB8888:
1248 case DRM_FORMAT_YUYV: 1257 case DRM_FORMAT_YUYV:
@@ -1258,8 +1267,17 @@ static bool g4x_mod_supported(uint32_t format, uint64_t modifier)
1258 } 1267 }
1259} 1268}
1260 1269
1261static bool snb_mod_supported(uint32_t format, uint64_t modifier) 1270static bool snb_sprite_format_mod_supported(struct drm_plane *_plane,
1271 u32 format, u64 modifier)
1262{ 1272{
1273 switch (modifier) {
1274 case DRM_FORMAT_MOD_LINEAR:
1275 case I915_FORMAT_MOD_X_TILED:
1276 break;
1277 default:
1278 return false;
1279 }
1280
1263 switch (format) { 1281 switch (format) {
1264 case DRM_FORMAT_XRGB8888: 1282 case DRM_FORMAT_XRGB8888:
1265 case DRM_FORMAT_XBGR8888: 1283 case DRM_FORMAT_XBGR8888:
@@ -1276,8 +1294,17 @@ static bool snb_mod_supported(uint32_t format, uint64_t modifier)
1276 } 1294 }
1277} 1295}
1278 1296
1279static bool vlv_mod_supported(uint32_t format, uint64_t modifier) 1297static bool vlv_sprite_format_mod_supported(struct drm_plane *_plane,
1298 u32 format, u64 modifier)
1280{ 1299{
1300 switch (modifier) {
1301 case DRM_FORMAT_MOD_LINEAR:
1302 case I915_FORMAT_MOD_X_TILED:
1303 break;
1304 default:
1305 return false;
1306 }
1307
1281 switch (format) { 1308 switch (format) {
1282 case DRM_FORMAT_RGB565: 1309 case DRM_FORMAT_RGB565:
1283 case DRM_FORMAT_ABGR8888: 1310 case DRM_FORMAT_ABGR8888:
@@ -1299,8 +1326,26 @@ static bool vlv_mod_supported(uint32_t format, uint64_t modifier)
1299 } 1326 }
1300} 1327}
1301 1328
1302static bool skl_mod_supported(uint32_t format, uint64_t modifier) 1329static bool skl_plane_format_mod_supported(struct drm_plane *_plane,
1330 u32 format, u64 modifier)
1303{ 1331{
1332 struct intel_plane *plane = to_intel_plane(_plane);
1333
1334 switch (modifier) {
1335 case DRM_FORMAT_MOD_LINEAR:
1336 case I915_FORMAT_MOD_X_TILED:
1337 case I915_FORMAT_MOD_Y_TILED:
1338 case I915_FORMAT_MOD_Yf_TILED:
1339 break;
1340 case I915_FORMAT_MOD_Y_TILED_CCS:
1341 case I915_FORMAT_MOD_Yf_TILED_CCS:
1342 if (!plane->has_ccs)
1343 return false;
1344 break;
1345 default:
1346 return false;
1347 }
1348
1304 switch (format) { 1349 switch (format) {
1305 case DRM_FORMAT_XRGB8888: 1350 case DRM_FORMAT_XRGB8888:
1306 case DRM_FORMAT_XBGR8888: 1351 case DRM_FORMAT_XBGR8888:
@@ -1332,30 +1377,40 @@ static bool skl_mod_supported(uint32_t format, uint64_t modifier)
1332 } 1377 }
1333} 1378}
1334 1379
1335static bool intel_sprite_plane_format_mod_supported(struct drm_plane *plane, 1380static const struct drm_plane_funcs g4x_sprite_funcs = {
1336 uint32_t format, 1381 .update_plane = drm_atomic_helper_update_plane,
1337 uint64_t modifier) 1382 .disable_plane = drm_atomic_helper_disable_plane,
1338{ 1383 .destroy = intel_plane_destroy,
1339 struct drm_i915_private *dev_priv = to_i915(plane->dev); 1384 .atomic_get_property = intel_plane_atomic_get_property,
1340 1385 .atomic_set_property = intel_plane_atomic_set_property,
1341 if (WARN_ON(modifier == DRM_FORMAT_MOD_INVALID)) 1386 .atomic_duplicate_state = intel_plane_duplicate_state,
1342 return false; 1387 .atomic_destroy_state = intel_plane_destroy_state,
1388 .format_mod_supported = g4x_sprite_format_mod_supported,
1389};
1343 1390
1344 if ((modifier >> 56) != DRM_FORMAT_MOD_VENDOR_INTEL && 1391static const struct drm_plane_funcs snb_sprite_funcs = {
1345 modifier != DRM_FORMAT_MOD_LINEAR) 1392 .update_plane = drm_atomic_helper_update_plane,
1346 return false; 1393 .disable_plane = drm_atomic_helper_disable_plane,
1394 .destroy = intel_plane_destroy,
1395 .atomic_get_property = intel_plane_atomic_get_property,
1396 .atomic_set_property = intel_plane_atomic_set_property,
1397 .atomic_duplicate_state = intel_plane_duplicate_state,
1398 .atomic_destroy_state = intel_plane_destroy_state,
1399 .format_mod_supported = snb_sprite_format_mod_supported,
1400};
1347 1401
1348 if (INTEL_GEN(dev_priv) >= 9) 1402static const struct drm_plane_funcs vlv_sprite_funcs = {
1349 return skl_mod_supported(format, modifier); 1403 .update_plane = drm_atomic_helper_update_plane,
1350 else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) 1404 .disable_plane = drm_atomic_helper_disable_plane,
1351 return vlv_mod_supported(format, modifier); 1405 .destroy = intel_plane_destroy,
1352 else if (INTEL_GEN(dev_priv) >= 6) 1406 .atomic_get_property = intel_plane_atomic_get_property,
1353 return snb_mod_supported(format, modifier); 1407 .atomic_set_property = intel_plane_atomic_set_property,
1354 else 1408 .atomic_duplicate_state = intel_plane_duplicate_state,
1355 return g4x_mod_supported(format, modifier); 1409 .atomic_destroy_state = intel_plane_destroy_state,
1356} 1410 .format_mod_supported = vlv_sprite_format_mod_supported,
1411};
1357 1412
1358static const struct drm_plane_funcs intel_sprite_plane_funcs = { 1413static const struct drm_plane_funcs skl_plane_funcs = {
1359 .update_plane = drm_atomic_helper_update_plane, 1414 .update_plane = drm_atomic_helper_update_plane,
1360 .disable_plane = drm_atomic_helper_disable_plane, 1415 .disable_plane = drm_atomic_helper_disable_plane,
1361 .destroy = intel_plane_destroy, 1416 .destroy = intel_plane_destroy,
@@ -1363,7 +1418,7 @@ static const struct drm_plane_funcs intel_sprite_plane_funcs = {
1363 .atomic_set_property = intel_plane_atomic_set_property, 1418 .atomic_set_property = intel_plane_atomic_set_property,
1364 .atomic_duplicate_state = intel_plane_duplicate_state, 1419 .atomic_duplicate_state = intel_plane_duplicate_state,
1365 .atomic_destroy_state = intel_plane_destroy_state, 1420 .atomic_destroy_state = intel_plane_destroy_state,
1366 .format_mod_supported = intel_sprite_plane_format_mod_supported, 1421 .format_mod_supported = skl_plane_format_mod_supported,
1367}; 1422};
1368 1423
1369bool skl_plane_has_ccs(struct drm_i915_private *dev_priv, 1424bool skl_plane_has_ccs(struct drm_i915_private *dev_priv,
@@ -1389,6 +1444,7 @@ intel_sprite_plane_create(struct drm_i915_private *dev_priv,
1389{ 1444{
1390 struct intel_plane *intel_plane = NULL; 1445 struct intel_plane *intel_plane = NULL;
1391 struct intel_plane_state *state = NULL; 1446 struct intel_plane_state *state = NULL;
1447 const struct drm_plane_funcs *plane_funcs;
1392 unsigned long possible_crtcs; 1448 unsigned long possible_crtcs;
1393 const uint32_t *plane_formats; 1449 const uint32_t *plane_formats;
1394 const uint64_t *modifiers; 1450 const uint64_t *modifiers;
@@ -1413,6 +1469,9 @@ intel_sprite_plane_create(struct drm_i915_private *dev_priv,
1413 intel_plane->can_scale = true; 1469 intel_plane->can_scale = true;
1414 state->scaler_id = -1; 1470 state->scaler_id = -1;
1415 1471
1472 intel_plane->has_ccs = skl_plane_has_ccs(dev_priv, pipe,
1473 PLANE_SPRITE0 + plane);
1474
1416 intel_plane->update_plane = skl_update_plane; 1475 intel_plane->update_plane = skl_update_plane;
1417 intel_plane->disable_plane = skl_disable_plane; 1476 intel_plane->disable_plane = skl_disable_plane;
1418 intel_plane->get_hw_state = skl_plane_get_hw_state; 1477 intel_plane->get_hw_state = skl_plane_get_hw_state;
@@ -1426,10 +1485,12 @@ intel_sprite_plane_create(struct drm_i915_private *dev_priv,
1426 num_plane_formats = ARRAY_SIZE(skl_plane_formats); 1485 num_plane_formats = ARRAY_SIZE(skl_plane_formats);
1427 } 1486 }
1428 1487
1429 if (skl_plane_has_ccs(dev_priv, pipe, PLANE_SPRITE0 + plane)) 1488 if (intel_plane->has_ccs)
1430 modifiers = skl_plane_format_modifiers_ccs; 1489 modifiers = skl_plane_format_modifiers_ccs;
1431 else 1490 else
1432 modifiers = skl_plane_format_modifiers_noccs; 1491 modifiers = skl_plane_format_modifiers_noccs;
1492
1493 plane_funcs = &skl_plane_funcs;
1433 } else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) { 1494 } else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
1434 intel_plane->can_scale = false; 1495 intel_plane->can_scale = false;
1435 intel_plane->max_downscale = 1; 1496 intel_plane->max_downscale = 1;
@@ -1441,6 +1502,8 @@ intel_sprite_plane_create(struct drm_i915_private *dev_priv,
1441 plane_formats = vlv_plane_formats; 1502 plane_formats = vlv_plane_formats;
1442 num_plane_formats = ARRAY_SIZE(vlv_plane_formats); 1503 num_plane_formats = ARRAY_SIZE(vlv_plane_formats);
1443 modifiers = i9xx_plane_format_modifiers; 1504 modifiers = i9xx_plane_format_modifiers;
1505
1506 plane_funcs = &vlv_sprite_funcs;
1444 } else if (INTEL_GEN(dev_priv) >= 7) { 1507 } else if (INTEL_GEN(dev_priv) >= 7) {
1445 if (IS_IVYBRIDGE(dev_priv)) { 1508 if (IS_IVYBRIDGE(dev_priv)) {
1446 intel_plane->can_scale = true; 1509 intel_plane->can_scale = true;
@@ -1457,6 +1520,8 @@ intel_sprite_plane_create(struct drm_i915_private *dev_priv,
1457 plane_formats = snb_plane_formats; 1520 plane_formats = snb_plane_formats;
1458 num_plane_formats = ARRAY_SIZE(snb_plane_formats); 1521 num_plane_formats = ARRAY_SIZE(snb_plane_formats);
1459 modifiers = i9xx_plane_format_modifiers; 1522 modifiers = i9xx_plane_format_modifiers;
1523
1524 plane_funcs = &snb_sprite_funcs;
1460 } else { 1525 } else {
1461 intel_plane->can_scale = true; 1526 intel_plane->can_scale = true;
1462 intel_plane->max_downscale = 16; 1527 intel_plane->max_downscale = 16;
@@ -1469,9 +1534,13 @@ intel_sprite_plane_create(struct drm_i915_private *dev_priv,
1469 if (IS_GEN6(dev_priv)) { 1534 if (IS_GEN6(dev_priv)) {
1470 plane_formats = snb_plane_formats; 1535 plane_formats = snb_plane_formats;
1471 num_plane_formats = ARRAY_SIZE(snb_plane_formats); 1536 num_plane_formats = ARRAY_SIZE(snb_plane_formats);
1537
1538 plane_funcs = &snb_sprite_funcs;
1472 } else { 1539 } else {
1473 plane_formats = g4x_plane_formats; 1540 plane_formats = g4x_plane_formats;
1474 num_plane_formats = ARRAY_SIZE(g4x_plane_formats); 1541 num_plane_formats = ARRAY_SIZE(g4x_plane_formats);
1542
1543 plane_funcs = &g4x_sprite_funcs;
1475 } 1544 }
1476 } 1545 }
1477 1546
@@ -1498,14 +1567,14 @@ intel_sprite_plane_create(struct drm_i915_private *dev_priv,
1498 1567
1499 if (INTEL_GEN(dev_priv) >= 9) 1568 if (INTEL_GEN(dev_priv) >= 9)
1500 ret = drm_universal_plane_init(&dev_priv->drm, &intel_plane->base, 1569 ret = drm_universal_plane_init(&dev_priv->drm, &intel_plane->base,
1501 possible_crtcs, &intel_sprite_plane_funcs, 1570 possible_crtcs, plane_funcs,
1502 plane_formats, num_plane_formats, 1571 plane_formats, num_plane_formats,
1503 modifiers, 1572 modifiers,
1504 DRM_PLANE_TYPE_OVERLAY, 1573 DRM_PLANE_TYPE_OVERLAY,
1505 "plane %d%c", plane + 2, pipe_name(pipe)); 1574 "plane %d%c", plane + 2, pipe_name(pipe));
1506 else 1575 else
1507 ret = drm_universal_plane_init(&dev_priv->drm, &intel_plane->base, 1576 ret = drm_universal_plane_init(&dev_priv->drm, &intel_plane->base,
1508 possible_crtcs, &intel_sprite_plane_funcs, 1577 possible_crtcs, plane_funcs,
1509 plane_formats, num_plane_formats, 1578 plane_formats, num_plane_formats,
1510 modifiers, 1579 modifiers,
1511 DRM_PLANE_TYPE_OVERLAY, 1580 DRM_PLANE_TYPE_OVERLAY,
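Above, intel_sprite.c (like the intel_display.c hunk at the top of this diff) drops the single shared intel_sprite_plane_funcs in favour of per-platform drm_plane_funcs, so each .format_mod_supported() hook only has to know its own generation's modifiers instead of re-deriving the platform from dev_priv. Outside i915 the same pattern looks roughly like this; the foo_* names are placeholders, the drm_plane_funcs fields are the real ones used above:

#include <drm/drm_plane.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_fourcc.h>

/* Called by the core when userspace queries IN_FORMATS or creates a
 * framebuffer: accept only format/modifier pairs this plane can scan out. */
static bool foo_plane_format_mod_supported(struct drm_plane *plane,
					   u32 format, u64 modifier)
{
	if (modifier != DRM_FORMAT_MOD_LINEAR)
		return false;

	switch (format) {
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
		return true;
	default:
		return false;
	}
}

static const struct drm_plane_funcs foo_plane_funcs = {
	.update_plane		= drm_atomic_helper_update_plane,
	.disable_plane		= drm_atomic_helper_disable_plane,
	.destroy		= drm_plane_cleanup,
	.reset			= drm_atomic_helper_plane_reset,
	.atomic_duplicate_state	= drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_plane_destroy_state,
	.format_mod_supported	= foo_plane_format_mod_supported,
};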
diff --git a/drivers/gpu/drm/i915/selftests/mock_dmabuf.c b/drivers/gpu/drm/i915/selftests/mock_dmabuf.c
index 302f7d103635..ca682caf1062 100644
--- a/drivers/gpu/drm/i915/selftests/mock_dmabuf.c
+++ b/drivers/gpu/drm/i915/selftests/mock_dmabuf.c
@@ -94,18 +94,6 @@ static void mock_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr)
94 vm_unmap_ram(vaddr, mock->npages); 94 vm_unmap_ram(vaddr, mock->npages);
95} 95}
96 96
97static void *mock_dmabuf_kmap_atomic(struct dma_buf *dma_buf, unsigned long page_num)
98{
99 struct mock_dmabuf *mock = to_mock(dma_buf);
100
101 return kmap_atomic(mock->pages[page_num]);
102}
103
104static void mock_dmabuf_kunmap_atomic(struct dma_buf *dma_buf, unsigned long page_num, void *addr)
105{
106 kunmap_atomic(addr);
107}
108
109static void *mock_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num) 97static void *mock_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num)
110{ 98{
111 struct mock_dmabuf *mock = to_mock(dma_buf); 99 struct mock_dmabuf *mock = to_mock(dma_buf);
@@ -130,9 +118,7 @@ static const struct dma_buf_ops mock_dmabuf_ops = {
130 .unmap_dma_buf = mock_unmap_dma_buf, 118 .unmap_dma_buf = mock_unmap_dma_buf,
131 .release = mock_dmabuf_release, 119 .release = mock_dmabuf_release,
132 .map = mock_dmabuf_kmap, 120 .map = mock_dmabuf_kmap,
133 .map_atomic = mock_dmabuf_kmap_atomic,
134 .unmap = mock_dmabuf_kunmap, 121 .unmap = mock_dmabuf_kunmap,
135 .unmap_atomic = mock_dmabuf_kunmap_atomic,
136 .mmap = mock_dmabuf_mmap, 122 .mmap = mock_dmabuf_mmap,
137 .vmap = mock_dmabuf_vmap, 123 .vmap = mock_dmabuf_vmap,
138 .vunmap = mock_dmabuf_vunmap, 124 .vunmap = mock_dmabuf_vunmap,
diff --git a/drivers/gpu/drm/mediatek/mtk_drm_fb.c b/drivers/gpu/drm/mediatek/mtk_drm_fb.c
index 0d8d506695f9..be5f6f1daf55 100644
--- a/drivers/gpu/drm/mediatek/mtk_drm_fb.c
+++ b/drivers/gpu/drm/mediatek/mtk_drm_fb.c
@@ -15,6 +15,7 @@
15#include <drm/drm_crtc_helper.h> 15#include <drm/drm_crtc_helper.h>
16#include <drm/drm_fb_helper.h> 16#include <drm/drm_fb_helper.h>
17#include <drm/drm_gem.h> 17#include <drm/drm_gem.h>
18#include <drm/drm_gem_framebuffer_helper.h>
18#include <linux/dma-buf.h> 19#include <linux/dma-buf.h>
19#include <linux/reservation.h> 20#include <linux/reservation.h>
20 21
@@ -22,78 +23,37 @@
22#include "mtk_drm_fb.h" 23#include "mtk_drm_fb.h"
23#include "mtk_drm_gem.h" 24#include "mtk_drm_gem.h"
24 25
25/*
26 * mtk specific framebuffer structure.
27 *
28 * @fb: drm framebuffer object.
29 * @gem_obj: array of gem objects.
30 */
31struct mtk_drm_fb {
32 struct drm_framebuffer base;
33 /* For now we only support a single plane */
34 struct drm_gem_object *gem_obj;
35};
36
37#define to_mtk_fb(x) container_of(x, struct mtk_drm_fb, base)
38
39struct drm_gem_object *mtk_fb_get_gem_obj(struct drm_framebuffer *fb)
40{
41 struct mtk_drm_fb *mtk_fb = to_mtk_fb(fb);
42
43 return mtk_fb->gem_obj;
44}
45
46static int mtk_drm_fb_create_handle(struct drm_framebuffer *fb,
47 struct drm_file *file_priv,
48 unsigned int *handle)
49{
50 struct mtk_drm_fb *mtk_fb = to_mtk_fb(fb);
51
52 return drm_gem_handle_create(file_priv, mtk_fb->gem_obj, handle);
53}
54
55static void mtk_drm_fb_destroy(struct drm_framebuffer *fb)
56{
57 struct mtk_drm_fb *mtk_fb = to_mtk_fb(fb);
58
59 drm_framebuffer_cleanup(fb);
60
61 drm_gem_object_put_unlocked(mtk_fb->gem_obj);
62
63 kfree(mtk_fb);
64}
65
66static const struct drm_framebuffer_funcs mtk_drm_fb_funcs = { 26static const struct drm_framebuffer_funcs mtk_drm_fb_funcs = {
67 .create_handle = mtk_drm_fb_create_handle, 27 .create_handle = drm_gem_fb_create_handle,
68 .destroy = mtk_drm_fb_destroy, 28 .destroy = drm_gem_fb_destroy,
69}; 29};
70 30
71static struct mtk_drm_fb *mtk_drm_framebuffer_init(struct drm_device *dev, 31static struct drm_framebuffer *mtk_drm_framebuffer_init(struct drm_device *dev,
72 const struct drm_mode_fb_cmd2 *mode, 32 const struct drm_mode_fb_cmd2 *mode,
73 struct drm_gem_object *obj) 33 struct drm_gem_object *obj)
74{ 34{
75 struct mtk_drm_fb *mtk_fb; 35 struct drm_framebuffer *fb;
76 int ret; 36 int ret;
77 37
78 if (drm_format_num_planes(mode->pixel_format) != 1) 38 if (drm_format_num_planes(mode->pixel_format) != 1)
79 return ERR_PTR(-EINVAL); 39 return ERR_PTR(-EINVAL);
80 40
81 mtk_fb = kzalloc(sizeof(*mtk_fb), GFP_KERNEL); 41 fb = kzalloc(sizeof(*fb), GFP_KERNEL);
82 if (!mtk_fb) 42 if (!fb)
83 return ERR_PTR(-ENOMEM); 43 return ERR_PTR(-ENOMEM);
84 44
85 drm_helper_mode_fill_fb_struct(dev, &mtk_fb->base, mode); 45 drm_helper_mode_fill_fb_struct(dev, fb, mode);
86 46
87 mtk_fb->gem_obj = obj; 47 fb->obj[0] = obj;
88 48
89 ret = drm_framebuffer_init(dev, &mtk_fb->base, &mtk_drm_fb_funcs); 49 ret = drm_framebuffer_init(dev, fb, &mtk_drm_fb_funcs);
90 if (ret) { 50 if (ret) {
91 DRM_ERROR("failed to initialize framebuffer\n"); 51 DRM_ERROR("failed to initialize framebuffer\n");
92 kfree(mtk_fb); 52 kfree(fb);
93 return ERR_PTR(ret); 53 return ERR_PTR(ret);
94 } 54 }
95 55
96 return mtk_fb; 56 return fb;
97} 57}
98 58
99/* 59/*
@@ -110,7 +70,7 @@ int mtk_fb_wait(struct drm_framebuffer *fb)
110 if (!fb) 70 if (!fb)
111 return 0; 71 return 0;
112 72
113 gem = mtk_fb_get_gem_obj(fb); 73 gem = fb->obj[0];
114 if (!gem || !gem->dma_buf || !gem->dma_buf->resv) 74 if (!gem || !gem->dma_buf || !gem->dma_buf->resv)
115 return 0; 75 return 0;
116 76
@@ -128,7 +88,7 @@ struct drm_framebuffer *mtk_drm_mode_fb_create(struct drm_device *dev,
128 struct drm_file *file, 88 struct drm_file *file,
129 const struct drm_mode_fb_cmd2 *cmd) 89 const struct drm_mode_fb_cmd2 *cmd)
130{ 90{
131 struct mtk_drm_fb *mtk_fb; 91 struct drm_framebuffer *fb;
132 struct drm_gem_object *gem; 92 struct drm_gem_object *gem;
133 unsigned int width = cmd->width; 93 unsigned int width = cmd->width;
134 unsigned int height = cmd->height; 94 unsigned int height = cmd->height;
@@ -151,13 +111,13 @@ struct drm_framebuffer *mtk_drm_mode_fb_create(struct drm_device *dev,
151 goto unreference; 111 goto unreference;
152 } 112 }
153 113
154 mtk_fb = mtk_drm_framebuffer_init(dev, cmd, gem); 114 fb = mtk_drm_framebuffer_init(dev, cmd, gem);
155 if (IS_ERR(mtk_fb)) { 115 if (IS_ERR(fb)) {
156 ret = PTR_ERR(mtk_fb); 116 ret = PTR_ERR(fb);
157 goto unreference; 117 goto unreference;
158 } 118 }
159 119
160 return &mtk_fb->base; 120 return fb;
161 121
162unreference: 122unreference:
163 drm_gem_object_put_unlocked(gem); 123 drm_gem_object_put_unlocked(gem);
diff --git a/drivers/gpu/drm/mediatek/mtk_drm_fb.h b/drivers/gpu/drm/mediatek/mtk_drm_fb.h
index 9b2ae345a4e9..7f976b196a15 100644
--- a/drivers/gpu/drm/mediatek/mtk_drm_fb.h
+++ b/drivers/gpu/drm/mediatek/mtk_drm_fb.h
@@ -14,7 +14,6 @@
14#ifndef MTK_DRM_FB_H 14#ifndef MTK_DRM_FB_H
15#define MTK_DRM_FB_H 15#define MTK_DRM_FB_H
16 16
17struct drm_gem_object *mtk_fb_get_gem_obj(struct drm_framebuffer *fb);
18int mtk_fb_wait(struct drm_framebuffer *fb); 17int mtk_fb_wait(struct drm_framebuffer *fb);
19struct drm_framebuffer *mtk_drm_mode_fb_create(struct drm_device *dev, 18struct drm_framebuffer *mtk_drm_mode_fb_create(struct drm_device *dev,
20 struct drm_file *file, 19 struct drm_file *file,
diff --git a/drivers/gpu/drm/mediatek/mtk_drm_plane.c b/drivers/gpu/drm/mediatek/mtk_drm_plane.c
index 2f4b0ffee598..f7e6aa1b5b7d 100644
--- a/drivers/gpu/drm/mediatek/mtk_drm_plane.c
+++ b/drivers/gpu/drm/mediatek/mtk_drm_plane.c
@@ -95,11 +95,6 @@ static int mtk_plane_atomic_check(struct drm_plane *plane,
95 if (!fb) 95 if (!fb)
96 return 0; 96 return 0;
97 97
98 if (!mtk_fb_get_gem_obj(fb)) {
99 DRM_DEBUG_KMS("buffer is null\n");
100 return -EFAULT;
101 }
102
103 if (!state->crtc) 98 if (!state->crtc)
104 return 0; 99 return 0;
105 100
@@ -127,7 +122,7 @@ static void mtk_plane_atomic_update(struct drm_plane *plane,
127 if (!crtc || WARN_ON(!fb)) 122 if (!crtc || WARN_ON(!fb))
128 return; 123 return;
129 124
130 gem = mtk_fb_get_gem_obj(fb); 125 gem = fb->obj[0];
131 mtk_gem = to_mtk_gem_obj(gem); 126 mtk_gem = to_mtk_gem_obj(gem);
132 addr = mtk_gem->dma_addr; 127 addr = mtk_gem->dma_addr;
133 pitch = fb->pitches[0]; 128 pitch = fb->pitches[0];
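The mediatek changes above (and the msm and omapdrm ones that follow) are part of the "move GEM BO to drm_framebuffer" work: the driver-private GEM pointer arrays go away in favour of the generic fb->obj[] slots, which in turn lets create_handle/destroy be replaced by the drm_gem_framebuffer_helper implementations. For a driver that stores nothing besides the GEM objects, the converted framebuffer init reduces to roughly this sketch (foo_* names are placeholders):

#include <linux/err.h>
#include <linux/slab.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_modeset_helper.h>

static const struct drm_framebuffer_funcs foo_fb_funcs = {
	/* The generic helpers work because the BOs now live in fb->obj[]. */
	.create_handle	= drm_gem_fb_create_handle,
	.destroy	= drm_gem_fb_destroy,
};

static struct drm_framebuffer *
foo_fb_init(struct drm_device *dev, const struct drm_mode_fb_cmd2 *mode_cmd,
	    struct drm_gem_object *obj)
{
	struct drm_framebuffer *fb;
	int ret;

	fb = kzalloc(sizeof(*fb), GFP_KERNEL);
	if (!fb)
		return ERR_PTR(-ENOMEM);

	drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
	fb->obj[0] = obj;		/* single-plane format assumed */

	ret = drm_framebuffer_init(dev, fb, &foo_fb_funcs);
	if (ret) {
		kfree(fb);
		return ERR_PTR(ret);
	}

	return fb;
}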
diff --git a/drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c b/drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c
index b001699297c4..457c29dba4a1 100644
--- a/drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c
+++ b/drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c
@@ -201,7 +201,7 @@ static void blend_setup(struct drm_crtc *crtc)
201 int idx = idxs[pipe_id]; 201 int idx = idxs[pipe_id];
202 if (idx > 0) { 202 if (idx > 0) {
203 const struct mdp_format *format = 203 const struct mdp_format *format =
204 to_mdp_format(msm_framebuffer_format(plane->fb)); 204 to_mdp_format(msm_framebuffer_format(plane->state->fb));
205 alpha[idx-1] = format->alpha_enable; 205 alpha[idx-1] = format->alpha_enable;
206 } 206 }
207 } 207 }
@@ -665,7 +665,6 @@ struct drm_crtc *mdp4_crtc_init(struct drm_device *dev,
665 drm_crtc_init_with_planes(dev, crtc, plane, NULL, &mdp4_crtc_funcs, 665 drm_crtc_init_with_planes(dev, crtc, plane, NULL, &mdp4_crtc_funcs,
666 NULL); 666 NULL);
667 drm_crtc_helper_add(crtc, &mdp4_crtc_helper_funcs); 667 drm_crtc_helper_add(crtc, &mdp4_crtc_helper_funcs);
668 plane->crtc = crtc;
669 668
670 return crtc; 669 return crtc;
671} 670}
diff --git a/drivers/gpu/drm/msm/disp/mdp4/mdp4_plane.c b/drivers/gpu/drm/msm/disp/mdp4/mdp4_plane.c
index 20e956e14c21..7b641fa6dc4d 100644
--- a/drivers/gpu/drm/msm/disp/mdp4/mdp4_plane.c
+++ b/drivers/gpu/drm/msm/disp/mdp4/mdp4_plane.c
@@ -167,8 +167,6 @@ static void mdp4_plane_set_scanout(struct drm_plane *plane,
167 msm_framebuffer_iova(fb, kms->aspace, 2)); 167 msm_framebuffer_iova(fb, kms->aspace, 2));
168 mdp4_write(mdp4_kms, REG_MDP4_PIPE_SRCP3_BASE(pipe), 168 mdp4_write(mdp4_kms, REG_MDP4_PIPE_SRCP3_BASE(pipe),
169 msm_framebuffer_iova(fb, kms->aspace, 3)); 169 msm_framebuffer_iova(fb, kms->aspace, 3));
170
171 plane->fb = fb;
172} 170}
173 171
174static void mdp4_write_csc_config(struct mdp4_kms *mdp4_kms, 172static void mdp4_write_csc_config(struct mdp4_kms *mdp4_kms,
diff --git a/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c b/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c
index 10271359789e..24e00274844b 100644
--- a/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c
+++ b/drivers/gpu/drm/msm/disp/mdp5/mdp5_crtc.c
@@ -1207,7 +1207,6 @@ struct drm_crtc *mdp5_crtc_init(struct drm_device *dev,
1207 "unref cursor", unref_cursor_worker); 1207 "unref cursor", unref_cursor_worker);
1208 1208
1209 drm_crtc_helper_add(crtc, &mdp5_crtc_helper_funcs); 1209 drm_crtc_helper_add(crtc, &mdp5_crtc_helper_funcs);
1210 plane->crtc = crtc;
1211 1210
1212 return crtc; 1211 return crtc;
1213} 1212}
diff --git a/drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c b/drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c
index e09bc53a0e65..c4f115fe96ff 100644
--- a/drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c
+++ b/drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c
@@ -512,7 +512,7 @@ static void mdp5_plane_atomic_async_update(struct drm_plane *plane,
512 if (plane_enabled(new_state)) { 512 if (plane_enabled(new_state)) {
513 struct mdp5_ctl *ctl; 513 struct mdp5_ctl *ctl;
514 struct mdp5_pipeline *pipeline = 514 struct mdp5_pipeline *pipeline =
515 mdp5_crtc_get_pipeline(plane->crtc); 515 mdp5_crtc_get_pipeline(new_state->crtc);
516 int ret; 516 int ret;
517 517
518 ret = mdp5_plane_mode_set(plane, new_state->crtc, new_state->fb, 518 ret = mdp5_plane_mode_set(plane, new_state->crtc, new_state->fb,
@@ -1029,8 +1029,6 @@ static int mdp5_plane_mode_set(struct drm_plane *plane,
1029 src_img_w, src_img_h, 1029 src_img_w, src_img_h,
1030 src_x + src_w, src_y, src_w, src_h); 1030 src_x + src_w, src_y, src_w, src_h);
1031 1031
1032 plane->fb = fb;
1033
1034 return ret; 1032 return ret;
1035} 1033}
1036 1034
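The mdp4/mdp5 hunks above stop consulting the legacy plane->fb and plane->crtc pointers; under atomic those are only maintained for legacy-ioctl compatibility, and the authoritative values live in the plane state. The idiom the drivers are converted to is essentially:

#include <drm/drm_plane.h>
#include <drm/drm_crtc.h>
#include <drm/drm_framebuffer.h>

/* By the time ->atomic_update() runs, plane->state is the new state;
 * take fb and crtc from there rather than from plane->fb / plane->crtc. */
static void foo_plane_atomic_update(struct drm_plane *plane,
				    struct drm_plane_state *old_state)
{
	struct drm_plane_state *state = plane->state;
	struct drm_framebuffer *fb = state->fb;
	struct drm_crtc *crtc = state->crtc;

	if (!crtc || !fb)
		return;

	/* program the scanout from state->src_*, state->crtc_* and fb here */
}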
diff --git a/drivers/gpu/drm/msm/msm_fb.c b/drivers/gpu/drm/msm/msm_fb.c
index 7a16242bf8bf..2a7348aeb38d 100644
--- a/drivers/gpu/drm/msm/msm_fb.c
+++ b/drivers/gpu/drm/msm/msm_fb.c
@@ -17,6 +17,7 @@
17 17
18#include <drm/drm_crtc.h> 18#include <drm/drm_crtc.h>
19#include <drm/drm_crtc_helper.h> 19#include <drm/drm_crtc_helper.h>
20#include <drm/drm_gem_framebuffer_helper.h>
20 21
21#include "msm_drv.h" 22#include "msm_drv.h"
22#include "msm_kms.h" 23#include "msm_kms.h"
@@ -25,49 +26,20 @@
25struct msm_framebuffer { 26struct msm_framebuffer {
26 struct drm_framebuffer base; 27 struct drm_framebuffer base;
27 const struct msm_format *format; 28 const struct msm_format *format;
28 struct drm_gem_object *planes[MAX_PLANE];
29}; 29};
30#define to_msm_framebuffer(x) container_of(x, struct msm_framebuffer, base) 30#define to_msm_framebuffer(x) container_of(x, struct msm_framebuffer, base)
31 31
32static struct drm_framebuffer *msm_framebuffer_init(struct drm_device *dev, 32static struct drm_framebuffer *msm_framebuffer_init(struct drm_device *dev,
33 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos); 33 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
34 34
35static int msm_framebuffer_create_handle(struct drm_framebuffer *fb,
36 struct drm_file *file_priv,
37 unsigned int *handle)
38{
39 struct msm_framebuffer *msm_fb = to_msm_framebuffer(fb);
40 return drm_gem_handle_create(file_priv,
41 msm_fb->planes[0], handle);
42}
43
44static void msm_framebuffer_destroy(struct drm_framebuffer *fb)
45{
46 struct msm_framebuffer *msm_fb = to_msm_framebuffer(fb);
47 int i, n = fb->format->num_planes;
48
49 DBG("destroy: FB ID: %d (%p)", fb->base.id, fb);
50
51 drm_framebuffer_cleanup(fb);
52
53 for (i = 0; i < n; i++) {
54 struct drm_gem_object *bo = msm_fb->planes[i];
55
56 drm_gem_object_put_unlocked(bo);
57 }
58
59 kfree(msm_fb);
60}
61
62static const struct drm_framebuffer_funcs msm_framebuffer_funcs = { 35static const struct drm_framebuffer_funcs msm_framebuffer_funcs = {
63 .create_handle = msm_framebuffer_create_handle, 36 .create_handle = drm_gem_fb_create_handle,
64 .destroy = msm_framebuffer_destroy, 37 .destroy = drm_gem_fb_destroy,
65}; 38};
66 39
67#ifdef CONFIG_DEBUG_FS 40#ifdef CONFIG_DEBUG_FS
68void msm_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m) 41void msm_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m)
69{ 42{
70 struct msm_framebuffer *msm_fb = to_msm_framebuffer(fb);
71 int i, n = fb->format->num_planes; 43 int i, n = fb->format->num_planes;
72 44
73 seq_printf(m, "fb: %dx%d@%4.4s (%2d, ID:%d)\n", 45 seq_printf(m, "fb: %dx%d@%4.4s (%2d, ID:%d)\n",
@@ -77,7 +49,7 @@ void msm_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m)
77 for (i = 0; i < n; i++) { 49 for (i = 0; i < n; i++) {
78 seq_printf(m, " %d: offset=%d pitch=%d, obj: ", 50 seq_printf(m, " %d: offset=%d pitch=%d, obj: ",
79 i, fb->offsets[i], fb->pitches[i]); 51 i, fb->offsets[i], fb->pitches[i]);
80 msm_gem_describe(msm_fb->planes[i], m); 52 msm_gem_describe(fb->obj[i], m);
81 } 53 }
82} 54}
83#endif 55#endif
@@ -90,12 +62,11 @@ void msm_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m)
90int msm_framebuffer_prepare(struct drm_framebuffer *fb, 62int msm_framebuffer_prepare(struct drm_framebuffer *fb,
91 struct msm_gem_address_space *aspace) 63 struct msm_gem_address_space *aspace)
92{ 64{
93 struct msm_framebuffer *msm_fb = to_msm_framebuffer(fb);
94 int ret, i, n = fb->format->num_planes; 65 int ret, i, n = fb->format->num_planes;
95 uint64_t iova; 66 uint64_t iova;
96 67
97 for (i = 0; i < n; i++) { 68 for (i = 0; i < n; i++) {
98 ret = msm_gem_get_iova(msm_fb->planes[i], aspace, &iova); 69 ret = msm_gem_get_iova(fb->obj[i], aspace, &iova);
99 DBG("FB[%u]: iova[%d]: %08llx (%d)", fb->base.id, i, iova, ret); 70 DBG("FB[%u]: iova[%d]: %08llx (%d)", fb->base.id, i, iova, ret);
100 if (ret) 71 if (ret)
101 return ret; 72 return ret;
@@ -107,26 +78,23 @@ int msm_framebuffer_prepare(struct drm_framebuffer *fb,
107void msm_framebuffer_cleanup(struct drm_framebuffer *fb, 78void msm_framebuffer_cleanup(struct drm_framebuffer *fb,
108 struct msm_gem_address_space *aspace) 79 struct msm_gem_address_space *aspace)
109{ 80{
110 struct msm_framebuffer *msm_fb = to_msm_framebuffer(fb);
111 int i, n = fb->format->num_planes; 81 int i, n = fb->format->num_planes;
112 82
113 for (i = 0; i < n; i++) 83 for (i = 0; i < n; i++)
114 msm_gem_put_iova(msm_fb->planes[i], aspace); 84 msm_gem_put_iova(fb->obj[i], aspace);
115} 85}
116 86
117uint32_t msm_framebuffer_iova(struct drm_framebuffer *fb, 87uint32_t msm_framebuffer_iova(struct drm_framebuffer *fb,
118 struct msm_gem_address_space *aspace, int plane) 88 struct msm_gem_address_space *aspace, int plane)
119{ 89{
120 struct msm_framebuffer *msm_fb = to_msm_framebuffer(fb); 90 if (!fb->obj[plane])
121 if (!msm_fb->planes[plane])
122 return 0; 91 return 0;
123 return msm_gem_iova(msm_fb->planes[plane], aspace) + fb->offsets[plane]; 92 return msm_gem_iova(fb->obj[plane], aspace) + fb->offsets[plane];
124} 93}
125 94
126struct drm_gem_object *msm_framebuffer_bo(struct drm_framebuffer *fb, int plane) 95struct drm_gem_object *msm_framebuffer_bo(struct drm_framebuffer *fb, int plane)
127{ 96{
128 struct msm_framebuffer *msm_fb = to_msm_framebuffer(fb); 97 return drm_gem_fb_get_obj(fb, plane);
129 return msm_fb->planes[plane];
130} 98}
131 99
132const struct msm_format *msm_framebuffer_format(struct drm_framebuffer *fb) 100const struct msm_format *msm_framebuffer_format(struct drm_framebuffer *fb)
@@ -202,7 +170,7 @@ static struct drm_framebuffer *msm_framebuffer_init(struct drm_device *dev,
202 170
203 msm_fb->format = format; 171 msm_fb->format = format;
204 172
205 if (n > ARRAY_SIZE(msm_fb->planes)) { 173 if (n > ARRAY_SIZE(fb->obj)) {
206 ret = -EINVAL; 174 ret = -EINVAL;
207 goto fail; 175 goto fail;
208 } 176 }
@@ -221,7 +189,7 @@ static struct drm_framebuffer *msm_framebuffer_init(struct drm_device *dev,
221 goto fail; 189 goto fail;
222 } 190 }
223 191
224 msm_fb->planes[i] = bos[i]; 192 msm_fb->base.obj[i] = bos[i];
225 } 193 }
226 194
227 drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd); 195 drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
diff --git a/drivers/gpu/drm/omapdrm/omap_fb.c b/drivers/gpu/drm/omapdrm/omap_fb.c
index 5fd22ca73913..9f1e3d8f8488 100644
--- a/drivers/gpu/drm/omapdrm/omap_fb.c
+++ b/drivers/gpu/drm/omapdrm/omap_fb.c
@@ -19,6 +19,7 @@
19 19
20#include <drm/drm_crtc.h> 20#include <drm/drm_crtc.h>
21#include <drm/drm_crtc_helper.h> 21#include <drm/drm_crtc_helper.h>
22#include <drm/drm_gem_framebuffer_helper.h>
22 23
23#include "omap_dmm_tiler.h" 24#include "omap_dmm_tiler.h"
24#include "omap_drv.h" 25#include "omap_drv.h"
@@ -51,9 +52,6 @@ static const u32 formats[] = {
51 52
52/* per-plane info for the fb: */ 53/* per-plane info for the fb: */
53struct plane { 54struct plane {
54 struct drm_gem_object *bo;
55 u32 pitch;
56 u32 offset;
57 dma_addr_t dma_addr; 55 dma_addr_t dma_addr;
58}; 56};
59 57
@@ -68,56 +66,28 @@ struct omap_framebuffer {
68 struct mutex lock; 66 struct mutex lock;
69}; 67};
70 68
71static int omap_framebuffer_create_handle(struct drm_framebuffer *fb,
72 struct drm_file *file_priv,
73 unsigned int *handle)
74{
75 struct omap_framebuffer *omap_fb = to_omap_framebuffer(fb);
76 return drm_gem_handle_create(file_priv,
77 omap_fb->planes[0].bo, handle);
78}
79
80static void omap_framebuffer_destroy(struct drm_framebuffer *fb)
81{
82 struct omap_framebuffer *omap_fb = to_omap_framebuffer(fb);
83 int i, n = fb->format->num_planes;
84
85 DBG("destroy: FB ID: %d (%p)", fb->base.id, fb);
86
87 drm_framebuffer_cleanup(fb);
88
89 for (i = 0; i < n; i++) {
90 struct plane *plane = &omap_fb->planes[i];
91
92 drm_gem_object_unreference_unlocked(plane->bo);
93 }
94
95 kfree(omap_fb);
96}
97
98static const struct drm_framebuffer_funcs omap_framebuffer_funcs = { 69static const struct drm_framebuffer_funcs omap_framebuffer_funcs = {
99 .create_handle = omap_framebuffer_create_handle, 70 .create_handle = drm_gem_fb_create_handle,
100 .destroy = omap_framebuffer_destroy, 71 .destroy = drm_gem_fb_destroy,
101}; 72};
102 73
103static u32 get_linear_addr(struct plane *plane, 74static u32 get_linear_addr(struct drm_framebuffer *fb,
104 const struct drm_format_info *format, int n, int x, int y) 75 const struct drm_format_info *format, int n, int x, int y)
105{ 76{
77 struct omap_framebuffer *omap_fb = to_omap_framebuffer(fb);
78 struct plane *plane = &omap_fb->planes[n];
106 u32 offset; 79 u32 offset;
107 80
108 offset = plane->offset 81 offset = fb->offsets[n]
109 + (x * format->cpp[n] / (n == 0 ? 1 : format->hsub)) 82 + (x * format->cpp[n] / (n == 0 ? 1 : format->hsub))
110 + (y * plane->pitch / (n == 0 ? 1 : format->vsub)); 83 + (y * fb->pitches[n] / (n == 0 ? 1 : format->vsub));
111 84
112 return plane->dma_addr + offset; 85 return plane->dma_addr + offset;
113} 86}
114 87
115bool omap_framebuffer_supports_rotation(struct drm_framebuffer *fb) 88bool omap_framebuffer_supports_rotation(struct drm_framebuffer *fb)
116{ 89{
117 struct omap_framebuffer *omap_fb = to_omap_framebuffer(fb); 90 return omap_gem_flags(fb->obj[0]) & OMAP_BO_TILED;
118 struct plane *plane = &omap_fb->planes[0];
119
120 return omap_gem_flags(plane->bo) & OMAP_BO_TILED;
121} 91}
122 92
123/* Note: DRM rotates counter-clockwise, TILER & DSS rotates clockwise */ 93/* Note: DRM rotates counter-clockwise, TILER & DSS rotates clockwise */
@@ -176,7 +146,7 @@ void omap_framebuffer_update_scanout(struct drm_framebuffer *fb,
176 x = state->src_x >> 16; 146 x = state->src_x >> 16;
177 y = state->src_y >> 16; 147 y = state->src_y >> 16;
178 148
179 if (omap_gem_flags(plane->bo) & OMAP_BO_TILED) { 149 if (omap_gem_flags(fb->obj[0]) & OMAP_BO_TILED) {
180 u32 w = state->src_w >> 16; 150 u32 w = state->src_w >> 16;
181 u32 h = state->src_h >> 16; 151 u32 h = state->src_h >> 16;
182 152
@@ -201,12 +171,12 @@ void omap_framebuffer_update_scanout(struct drm_framebuffer *fb,
201 x += w - 1; 171 x += w - 1;
202 172
203 /* Note: x and y are in TILER units, not pixels */ 173 /* Note: x and y are in TILER units, not pixels */
204 omap_gem_rotated_dma_addr(plane->bo, orient, x, y, 174 omap_gem_rotated_dma_addr(fb->obj[0], orient, x, y,
205 &info->paddr); 175 &info->paddr);
206 info->rotation_type = OMAP_DSS_ROT_TILER; 176 info->rotation_type = OMAP_DSS_ROT_TILER;
207 info->rotation = state->rotation ?: DRM_MODE_ROTATE_0; 177 info->rotation = state->rotation ?: DRM_MODE_ROTATE_0;
208 /* Note: stride in TILER units, not pixels */ 178 /* Note: stride in TILER units, not pixels */
209 info->screen_width = omap_gem_tiled_stride(plane->bo, orient); 179 info->screen_width = omap_gem_tiled_stride(fb->obj[0], orient);
210 } else { 180 } else {
211 switch (state->rotation & DRM_MODE_ROTATE_MASK) { 181 switch (state->rotation & DRM_MODE_ROTATE_MASK) {
212 case 0: 182 case 0:
@@ -221,10 +191,10 @@ void omap_framebuffer_update_scanout(struct drm_framebuffer *fb,
221 break; 191 break;
222 } 192 }
223 193
224 info->paddr = get_linear_addr(plane, format, 0, x, y); 194 info->paddr = get_linear_addr(fb, format, 0, x, y);
225 info->rotation_type = OMAP_DSS_ROT_NONE; 195 info->rotation_type = OMAP_DSS_ROT_NONE;
226 info->rotation = DRM_MODE_ROTATE_0; 196 info->rotation = DRM_MODE_ROTATE_0;
227 info->screen_width = plane->pitch; 197 info->screen_width = fb->pitches[0];
228 } 198 }
229 199
230 /* convert to pixels: */ 200 /* convert to pixels: */
@@ -234,11 +204,11 @@ void omap_framebuffer_update_scanout(struct drm_framebuffer *fb,
234 plane = &omap_fb->planes[1]; 204 plane = &omap_fb->planes[1];
235 205
236 if (info->rotation_type == OMAP_DSS_ROT_TILER) { 206 if (info->rotation_type == OMAP_DSS_ROT_TILER) {
237 WARN_ON(!(omap_gem_flags(plane->bo) & OMAP_BO_TILED)); 207 WARN_ON(!(omap_gem_flags(fb->obj[1]) & OMAP_BO_TILED));
238 omap_gem_rotated_dma_addr(plane->bo, orient, x/2, y/2, 208 omap_gem_rotated_dma_addr(fb->obj[1], orient, x/2, y/2,
239 &info->p_uv_addr); 209 &info->p_uv_addr);
240 } else { 210 } else {
241 info->p_uv_addr = get_linear_addr(plane, format, 1, x, y); 211 info->p_uv_addr = get_linear_addr(fb, format, 1, x, y);
242 } 212 }
243 } else { 213 } else {
244 info->p_uv_addr = 0; 214 info->p_uv_addr = 0;
@@ -261,10 +231,10 @@ int omap_framebuffer_pin(struct drm_framebuffer *fb)
261 231
262 for (i = 0; i < n; i++) { 232 for (i = 0; i < n; i++) {
263 struct plane *plane = &omap_fb->planes[i]; 233 struct plane *plane = &omap_fb->planes[i];
264 ret = omap_gem_pin(plane->bo, &plane->dma_addr); 234 ret = omap_gem_pin(fb->obj[i], &plane->dma_addr);
265 if (ret) 235 if (ret)
266 goto fail; 236 goto fail;
267 omap_gem_dma_sync_buffer(plane->bo, DMA_TO_DEVICE); 237 omap_gem_dma_sync_buffer(fb->obj[i], DMA_TO_DEVICE);
268 } 238 }
269 239
270 omap_fb->pin_count++; 240 omap_fb->pin_count++;
@@ -276,7 +246,7 @@ int omap_framebuffer_pin(struct drm_framebuffer *fb)
276fail: 246fail:
277 for (i--; i >= 0; i--) { 247 for (i--; i >= 0; i--) {
278 struct plane *plane = &omap_fb->planes[i]; 248 struct plane *plane = &omap_fb->planes[i];
279 omap_gem_unpin(plane->bo); 249 omap_gem_unpin(fb->obj[i]);
280 plane->dma_addr = 0; 250 plane->dma_addr = 0;
281 } 251 }
282 252
@@ -302,54 +272,25 @@ void omap_framebuffer_unpin(struct drm_framebuffer *fb)
302 272
303 for (i = 0; i < n; i++) { 273 for (i = 0; i < n; i++) {
304 struct plane *plane = &omap_fb->planes[i]; 274 struct plane *plane = &omap_fb->planes[i];
305 omap_gem_unpin(plane->bo); 275 omap_gem_unpin(fb->obj[i]);
306 plane->dma_addr = 0; 276 plane->dma_addr = 0;
307 } 277 }
308 278
309 mutex_unlock(&omap_fb->lock); 279 mutex_unlock(&omap_fb->lock);
310} 280}
311 281
312/* iterate thru all the connectors, returning ones that are attached
313 * to the same fb..
314 */
315struct drm_connector *omap_framebuffer_get_next_connector(
316 struct drm_framebuffer *fb, struct drm_connector *from)
317{
318 struct drm_device *dev = fb->dev;
319 struct list_head *connector_list = &dev->mode_config.connector_list;
320 struct drm_connector *connector = from;
321
322 if (!from)
323 return list_first_entry_or_null(connector_list, typeof(*from),
324 head);
325
326 list_for_each_entry_from(connector, connector_list, head) {
327 if (connector != from) {
328 struct drm_encoder *encoder = connector->encoder;
329 struct drm_crtc *crtc = encoder ? encoder->crtc : NULL;
330 if (crtc && crtc->primary->fb == fb)
331 return connector;
332
333 }
334 }
335
336 return NULL;
337}
338
339#ifdef CONFIG_DEBUG_FS 282#ifdef CONFIG_DEBUG_FS
340void omap_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m) 283void omap_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m)
341{ 284{
342 struct omap_framebuffer *omap_fb = to_omap_framebuffer(fb);
343 int i, n = fb->format->num_planes; 285 int i, n = fb->format->num_planes;
344 286
345 seq_printf(m, "fb: %dx%d@%4.4s\n", fb->width, fb->height, 287 seq_printf(m, "fb: %dx%d@%4.4s\n", fb->width, fb->height,
346 (char *)&fb->format->format); 288 (char *)&fb->format->format);
347 289
348 for (i = 0; i < n; i++) { 290 for (i = 0; i < n; i++) {
349 struct plane *plane = &omap_fb->planes[i];
350 seq_printf(m, " %d: offset=%d pitch=%d, obj: ", 291 seq_printf(m, " %d: offset=%d pitch=%d, obj: ",
351 i, plane->offset, plane->pitch); 292 i, fb->offsets[n], fb->pitches[i]);
352 omap_gem_describe(plane->bo, m); 293 omap_gem_describe(fb->obj[i], m);
353 } 294 }
354} 295}
355#endif 296#endif
@@ -454,9 +395,7 @@ struct drm_framebuffer *omap_framebuffer_init(struct drm_device *dev,
454 goto fail; 395 goto fail;
455 } 396 }
456 397
457 plane->bo = bos[i]; 398 fb->obj[i] = bos[i];
458 plane->offset = mode_cmd->offsets[i];
459 plane->pitch = pitch;
460 plane->dma_addr = 0; 399 plane->dma_addr = 0;
461 } 400 }
462 401
diff --git a/drivers/gpu/drm/omapdrm/omap_fb.h b/drivers/gpu/drm/omapdrm/omap_fb.h
index 94ad5f9e4404..c20cb4bc714d 100644
--- a/drivers/gpu/drm/omapdrm/omap_fb.h
+++ b/drivers/gpu/drm/omapdrm/omap_fb.h
@@ -38,8 +38,6 @@ int omap_framebuffer_pin(struct drm_framebuffer *fb);
38void omap_framebuffer_unpin(struct drm_framebuffer *fb); 38void omap_framebuffer_unpin(struct drm_framebuffer *fb);
39void omap_framebuffer_update_scanout(struct drm_framebuffer *fb, 39void omap_framebuffer_update_scanout(struct drm_framebuffer *fb,
40 struct drm_plane_state *state, struct omap_overlay_info *info); 40 struct drm_plane_state *state, struct omap_overlay_info *info);
41struct drm_connector *omap_framebuffer_get_next_connector(
42 struct drm_framebuffer *fb, struct drm_connector *from);
43bool omap_framebuffer_supports_rotation(struct drm_framebuffer *fb); 41bool omap_framebuffer_supports_rotation(struct drm_framebuffer *fb);
44void omap_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m); 42void omap_framebuffer_describe(struct drm_framebuffer *fb, struct seq_file *m);
45 43
diff --git a/drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c b/drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c
index 8e41d649e248..1a073f9b2834 100644
--- a/drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c
+++ b/drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c
@@ -148,8 +148,6 @@ static const struct dma_buf_ops omap_dmabuf_ops = {
148 .release = drm_gem_dmabuf_release, 148 .release = drm_gem_dmabuf_release,
149 .begin_cpu_access = omap_gem_dmabuf_begin_cpu_access, 149 .begin_cpu_access = omap_gem_dmabuf_begin_cpu_access,
150 .end_cpu_access = omap_gem_dmabuf_end_cpu_access, 150 .end_cpu_access = omap_gem_dmabuf_end_cpu_access,
151 .map_atomic = omap_gem_dmabuf_kmap_atomic,
152 .unmap_atomic = omap_gem_dmabuf_kunmap_atomic,
153 .map = omap_gem_dmabuf_kmap, 151 .map = omap_gem_dmabuf_kmap,
154 .unmap = omap_gem_dmabuf_kunmap, 152 .unmap = omap_gem_dmabuf_kunmap,
155 .mmap = omap_gem_dmabuf_mmap, 153 .mmap = omap_gem_dmabuf_mmap,
diff --git a/drivers/gpu/drm/panel/panel-innolux-p079zca.c b/drivers/gpu/drm/panel/panel-innolux-p079zca.c
index 57df39b5c589..bb53e0850764 100644
--- a/drivers/gpu/drm/panel/panel-innolux-p079zca.c
+++ b/drivers/gpu/drm/panel/panel-innolux-p079zca.c
@@ -292,7 +292,6 @@ static int innolux_panel_remove(struct mipi_dsi_device *dsi)
292 DRM_DEV_ERROR(&dsi->dev, "failed to detach from DSI host: %d\n", 292 DRM_DEV_ERROR(&dsi->dev, "failed to detach from DSI host: %d\n",
293 err); 293 err);
294 294
295 drm_panel_detach(&innolux->base);
296 innolux_panel_del(innolux); 295 innolux_panel_del(innolux);
297 296
298 return 0; 297 return 0;
diff --git a/drivers/gpu/drm/panel/panel-jdi-lt070me05000.c b/drivers/gpu/drm/panel/panel-jdi-lt070me05000.c
index 0a94ab79a6c0..99caa7835e7b 100644
--- a/drivers/gpu/drm/panel/panel-jdi-lt070me05000.c
+++ b/drivers/gpu/drm/panel/panel-jdi-lt070me05000.c
@@ -500,7 +500,6 @@ static int jdi_panel_remove(struct mipi_dsi_device *dsi)
500 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n", 500 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n",
501 ret); 501 ret);
502 502
503 drm_panel_detach(&jdi->base);
504 jdi_panel_del(jdi); 503 jdi_panel_del(jdi);
505 504
506 return 0; 505 return 0;
diff --git a/drivers/gpu/drm/panel/panel-lvds.c b/drivers/gpu/drm/panel/panel-lvds.c
index 5185819c5b79..8a1687887ae9 100644
--- a/drivers/gpu/drm/panel/panel-lvds.c
+++ b/drivers/gpu/drm/panel/panel-lvds.c
@@ -282,7 +282,6 @@ static int panel_lvds_remove(struct platform_device *pdev)
282{ 282{
283 struct panel_lvds *lvds = dev_get_drvdata(&pdev->dev); 283 struct panel_lvds *lvds = dev_get_drvdata(&pdev->dev);
284 284
285 drm_panel_detach(&lvds->panel);
286 drm_panel_remove(&lvds->panel); 285 drm_panel_remove(&lvds->panel);
287 286
288 panel_lvds_disable(&lvds->panel); 287 panel_lvds_disable(&lvds->panel);
diff --git a/drivers/gpu/drm/panel/panel-orisetech-otm8009a.c b/drivers/gpu/drm/panel/panel-orisetech-otm8009a.c
index 90f1ae4af93c..87fa316e1d7b 100644
--- a/drivers/gpu/drm/panel/panel-orisetech-otm8009a.c
+++ b/drivers/gpu/drm/panel/panel-orisetech-otm8009a.c
@@ -14,8 +14,6 @@
14#include <linux/regulator/consumer.h> 14#include <linux/regulator/consumer.h>
15#include <video/mipi_display.h> 15#include <video/mipi_display.h>
16 16
17#define DRV_NAME "orisetech_otm8009a"
18
19#define OTM8009A_BACKLIGHT_DEFAULT 240 17#define OTM8009A_BACKLIGHT_DEFAULT 240
20#define OTM8009A_BACKLIGHT_MAX 255 18#define OTM8009A_BACKLIGHT_MAX 255
21 19
@@ -98,6 +96,20 @@ static void otm8009a_dcs_write_buf(struct otm8009a *ctx, const void *data,
98 DRM_WARN("mipi dsi dcs write buffer failed\n"); 96 DRM_WARN("mipi dsi dcs write buffer failed\n");
99} 97}
100 98
99static void otm8009a_dcs_write_buf_hs(struct otm8009a *ctx, const void *data,
100 size_t len)
101{
102 struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
103
104 /* data will be sent in dsi hs mode (ie. no lpm) */
105 dsi->mode_flags &= ~MIPI_DSI_MODE_LPM;
106
107 otm8009a_dcs_write_buf(ctx, data, len);
108
109 /* restore back the dsi lpm mode */
110 dsi->mode_flags |= MIPI_DSI_MODE_LPM;
111}
112
101#define dcs_write_seq(ctx, seq...) \ 113#define dcs_write_seq(ctx, seq...) \
102({ \ 114({ \
103 static const u8 d[] = { seq }; \ 115 static const u8 d[] = { seq }; \
@@ -248,11 +260,7 @@ static int otm8009a_disable(struct drm_panel *panel)
248 if (!ctx->enabled) 260 if (!ctx->enabled)
249 return 0; /* This is not an issue so we return 0 here */ 261 return 0; /* This is not an issue so we return 0 here */
250 262
251 /* Power off the backlight. Note: end-user still controls brightness */ 263 backlight_disable(ctx->bl_dev);
252 ctx->bl_dev->props.power = FB_BLANK_POWERDOWN;
253 ret = backlight_update_status(ctx->bl_dev);
254 if (ret)
255 return ret;
256 264
257 ret = mipi_dsi_dcs_set_display_off(dsi); 265 ret = mipi_dsi_dcs_set_display_off(dsi);
258 if (ret) 266 if (ret)
@@ -316,13 +324,6 @@ static int otm8009a_prepare(struct drm_panel *panel)
316 324
317 ctx->prepared = true; 325 ctx->prepared = true;
318 326
319 /*
320 * Power on the backlight. Note: end-user still controls brightness
321 * Note: ctx->prepared must be true before updating the backlight.
322 */
323 ctx->bl_dev->props.power = FB_BLANK_UNBLANK;
324 backlight_update_status(ctx->bl_dev);
325
326 return 0; 327 return 0;
327} 328}
328 329
@@ -330,6 +331,11 @@ static int otm8009a_enable(struct drm_panel *panel)
330{ 331{
331 struct otm8009a *ctx = panel_to_otm8009a(panel); 332 struct otm8009a *ctx = panel_to_otm8009a(panel);
332 333
334 if (ctx->enabled)
335 return 0;
336
337 backlight_enable(ctx->bl_dev);
338
333 ctx->enabled = true; 339 ctx->enabled = true;
334 340
335 return 0; 341 return 0;
@@ -387,7 +393,7 @@ static int otm8009a_backlight_update_status(struct backlight_device *bd)
387 */ 393 */
388 data[0] = MIPI_DCS_SET_DISPLAY_BRIGHTNESS; 394 data[0] = MIPI_DCS_SET_DISPLAY_BRIGHTNESS;
389 data[1] = bd->props.brightness; 395 data[1] = bd->props.brightness;
390 otm8009a_dcs_write_buf(ctx, data, ARRAY_SIZE(data)); 396 otm8009a_dcs_write_buf_hs(ctx, data, ARRAY_SIZE(data));
391 397
392 /* set Brightness Control & Backlight on */ 398 /* set Brightness Control & Backlight on */
393 data[1] = 0x24; 399 data[1] = 0x24;
@@ -399,7 +405,7 @@ static int otm8009a_backlight_update_status(struct backlight_device *bd)
399 405
400 /* Update Brightness Control & Backlight */ 406 /* Update Brightness Control & Backlight */
401 data[0] = MIPI_DCS_WRITE_CONTROL_DISPLAY; 407 data[0] = MIPI_DCS_WRITE_CONTROL_DISPLAY;
402 otm8009a_dcs_write_buf(ctx, data, ARRAY_SIZE(data)); 408 otm8009a_dcs_write_buf_hs(ctx, data, ARRAY_SIZE(data));
403 409
404 return 0; 410 return 0;
405} 411}
@@ -444,11 +450,14 @@ static int otm8009a_probe(struct mipi_dsi_device *dsi)
444 ctx->panel.dev = dev; 450 ctx->panel.dev = dev;
445 ctx->panel.funcs = &otm8009a_drm_funcs; 451 ctx->panel.funcs = &otm8009a_drm_funcs;
446 452
447 ctx->bl_dev = backlight_device_register(DRV_NAME "_backlight", dev, ctx, 453 ctx->bl_dev = devm_backlight_device_register(dev, dev_name(dev),
448 &otm8009a_backlight_ops, NULL); 454 dsi->host->dev, ctx,
455 &otm8009a_backlight_ops,
456 NULL);
449 if (IS_ERR(ctx->bl_dev)) { 457 if (IS_ERR(ctx->bl_dev)) {
450 dev_err(dev, "failed to register backlight device\n"); 458 ret = PTR_ERR(ctx->bl_dev);
451 return PTR_ERR(ctx->bl_dev); 459 dev_err(dev, "failed to register backlight: %d\n", ret);
460 return ret;
452 } 461 }
453 462
454 ctx->bl_dev->props.max_brightness = OTM8009A_BACKLIGHT_MAX; 463 ctx->bl_dev->props.max_brightness = OTM8009A_BACKLIGHT_MAX;
@@ -466,11 +475,6 @@ static int otm8009a_probe(struct mipi_dsi_device *dsi)
466 return ret; 475 return ret;
467 } 476 }
468 477
469 DRM_INFO(DRV_NAME "_panel %ux%u@%u %ubpp dsi %udl - ready\n",
470 default_mode.hdisplay, default_mode.vdisplay,
471 default_mode.vrefresh,
472 mipi_dsi_pixel_format_to_bpp(dsi->format), dsi->lanes);
473
474 return 0; 478 return 0;
475} 479}
476 480
@@ -481,8 +485,6 @@ static int otm8009a_remove(struct mipi_dsi_device *dsi)
481 mipi_dsi_detach(dsi); 485 mipi_dsi_detach(dsi);
482 drm_panel_remove(&ctx->panel); 486 drm_panel_remove(&ctx->panel);
483 487
484 backlight_device_unregister(ctx->bl_dev);
485
486 return 0; 488 return 0;
487} 489}
488 490
@@ -496,7 +498,7 @@ static struct mipi_dsi_driver orisetech_otm8009a_driver = {
496 .probe = otm8009a_probe, 498 .probe = otm8009a_probe,
497 .remove = otm8009a_remove, 499 .remove = otm8009a_remove,
498 .driver = { 500 .driver = {
499 .name = DRV_NAME "_panel", 501 .name = "panel-orisetech-otm8009a",
500 .of_match_table = orisetech_otm8009a_of_match, 502 .of_match_table = orisetech_otm8009a_of_match,
501 }, 503 },
502}; 504};
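The otm8009a changes above replace the open-coded props.power / backlight_update_status() sequences with backlight_enable()/backlight_disable() and move to the device-managed registration helper, which is why remove() no longer unregisters the backlight by hand. Stripped of the panel specifics, the pattern is roughly as follows (foo_* names are placeholders; error handling trimmed):

#include <linux/backlight.h>
#include <linux/device.h>
#include <linux/err.h>

struct foo_panel {
	struct device *dev;
	struct backlight_device *bl_dev;
	bool enabled;
};

static int foo_panel_register_backlight(struct foo_panel *ctx,
					const struct backlight_ops *ops)
{
	/* devm_ registration ties the backlight lifetime to the panel
	 * device, so ->remove() needs no backlight_device_unregister(). */
	ctx->bl_dev = devm_backlight_device_register(ctx->dev,
						     dev_name(ctx->dev),
						     ctx->dev, ctx, ops, NULL);
	if (IS_ERR(ctx->bl_dev))
		return PTR_ERR(ctx->bl_dev);

	ctx->bl_dev->props.max_brightness = 255;
	return 0;
}

static void foo_panel_set_backlight(struct foo_panel *ctx, bool on)
{
	/* backlight_enable()/backlight_disable() update props.power and
	 * props.state and call backlight_update_status() in one step,
	 * replacing the open-coded FB_BLANK_* handling removed above. */
	if (on)
		backlight_enable(ctx->bl_dev);
	else
		backlight_disable(ctx->bl_dev);
}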
diff --git a/drivers/gpu/drm/panel/panel-panasonic-vvx10f034n00.c b/drivers/gpu/drm/panel/panel-panasonic-vvx10f034n00.c
index 74a806121f80..cb4dfb98be0f 100644
--- a/drivers/gpu/drm/panel/panel-panasonic-vvx10f034n00.c
+++ b/drivers/gpu/drm/panel/panel-panasonic-vvx10f034n00.c
@@ -299,7 +299,6 @@ static int wuxga_nt_panel_remove(struct mipi_dsi_device *dsi)
299 if (ret < 0) 299 if (ret < 0)
300 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n", ret); 300 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n", ret);
301 301
302 drm_panel_detach(&wuxga_nt->base);
303 wuxga_nt_panel_del(wuxga_nt); 302 wuxga_nt_panel_del(wuxga_nt);
304 303
305 return 0; 304 return 0;
diff --git a/drivers/gpu/drm/panel/panel-seiko-43wvf1g.c b/drivers/gpu/drm/panel/panel-seiko-43wvf1g.c
index 71c09ed436ae..75f925390551 100644
--- a/drivers/gpu/drm/panel/panel-seiko-43wvf1g.c
+++ b/drivers/gpu/drm/panel/panel-seiko-43wvf1g.c
@@ -292,7 +292,6 @@ static int seiko_panel_remove(struct platform_device *pdev)
292{ 292{
293 struct seiko_panel *panel = dev_get_drvdata(&pdev->dev); 293 struct seiko_panel *panel = dev_get_drvdata(&pdev->dev);
294 294
295 drm_panel_detach(&panel->base);
296 drm_panel_remove(&panel->base); 295 drm_panel_remove(&panel->base);
297 296
298 seiko_panel_disable(&panel->base); 297 seiko_panel_disable(&panel->base);
diff --git a/drivers/gpu/drm/panel/panel-sharp-lq101r1sx01.c b/drivers/gpu/drm/panel/panel-sharp-lq101r1sx01.c
index 6bf8730f1a21..02fc0f5423d4 100644
--- a/drivers/gpu/drm/panel/panel-sharp-lq101r1sx01.c
+++ b/drivers/gpu/drm/panel/panel-sharp-lq101r1sx01.c
@@ -418,7 +418,6 @@ static int sharp_panel_remove(struct mipi_dsi_device *dsi)
418 if (err < 0) 418 if (err < 0)
419 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n", err); 419 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n", err);
420 420
421 drm_panel_detach(&sharp->base);
422 sharp_panel_del(sharp); 421 sharp_panel_del(sharp);
423 422
424 return 0; 423 return 0;
diff --git a/drivers/gpu/drm/panel/panel-sharp-ls043t1le01.c b/drivers/gpu/drm/panel/panel-sharp-ls043t1le01.c
index 494aa9b1628a..e5cae0050f52 100644
--- a/drivers/gpu/drm/panel/panel-sharp-ls043t1le01.c
+++ b/drivers/gpu/drm/panel/panel-sharp-ls043t1le01.c
@@ -327,7 +327,6 @@ static int sharp_nt_panel_remove(struct mipi_dsi_device *dsi)
327 if (ret < 0) 327 if (ret < 0)
328 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n", ret); 328 dev_err(&dsi->dev, "failed to detach from DSI host: %d\n", ret);
329 329
330 drm_panel_detach(&sharp_nt->base);
331 sharp_nt_panel_del(sharp_nt); 330 sharp_nt_panel_del(sharp_nt);
332 331
333 return 0; 332 return 0;
diff --git a/drivers/gpu/drm/panel/panel-simple.c b/drivers/gpu/drm/panel/panel-simple.c
index cbf1ab404ee7..ac6aaa174c0b 100644
--- a/drivers/gpu/drm/panel/panel-simple.c
+++ b/drivers/gpu/drm/panel/panel-simple.c
@@ -364,7 +364,6 @@ static int panel_simple_remove(struct device *dev)
364{ 364{
365 struct panel_simple *panel = dev_get_drvdata(dev); 365 struct panel_simple *panel = dev_get_drvdata(dev);
366 366
367 drm_panel_detach(&panel->base);
368 drm_panel_remove(&panel->base); 367 drm_panel_remove(&panel->base);
369 368
370 panel_simple_disable(&panel->base); 369 panel_simple_disable(&panel->base);
@@ -581,6 +580,34 @@ static const struct panel_desc auo_b133htn01 = {
581 }, 580 },
582}; 581};
583 582
583static const struct display_timing auo_g070vvn01_timings = {
584 .pixelclock = { 33300000, 34209000, 45000000 },
585 .hactive = { 800, 800, 800 },
586 .hfront_porch = { 20, 40, 200 },
587 .hback_porch = { 87, 40, 1 },
588 .hsync_len = { 1, 48, 87 },
589 .vactive = { 480, 480, 480 },
590 .vfront_porch = { 5, 13, 200 },
591 .vback_porch = { 31, 31, 29 },
592 .vsync_len = { 1, 1, 3 },
593};
594
595static const struct panel_desc auo_g070vvn01 = {
596 .timings = &auo_g070vvn01_timings,
597 .num_timings = 1,
598 .bpc = 8,
599 .size = {
600 .width = 152,
601 .height = 91,
602 },
603 .delay = {
604 .prepare = 200,
605 .enable = 50,
606 .disable = 50,
607 .unprepare = 1000,
608 },
609};
610
584static const struct drm_display_mode auo_g104sn02_mode = { 611static const struct drm_display_mode auo_g104sn02_mode = {
585 .clock = 40000, 612 .clock = 40000,
586 .hdisplay = 800, 613 .hdisplay = 800,
@@ -687,7 +714,7 @@ static const struct panel_desc auo_p320hvn03 = {
687 .enable = 450, 714 .enable = 450,
688 .unprepare = 500, 715 .unprepare = 500,
689 }, 716 },
690 .bus_format = MEDIA_BUS_FMT_RGB888_1X7X4_JEIDA, 717 .bus_format = MEDIA_BUS_FMT_RGB888_1X7X4_SPWG,
691}; 718};
692 719
693static const struct drm_display_mode auo_t215hvn01_mode = { 720static const struct drm_display_mode auo_t215hvn01_mode = {
@@ -1217,6 +1244,30 @@ static const struct panel_desc innolux_n156bge_l21 = {
1217 }, 1244 },
1218}; 1245};
1219 1246
1247static const struct drm_display_mode innolux_tv123wam_mode = {
1248 .clock = 206016,
1249 .hdisplay = 2160,
1250 .hsync_start = 2160 + 48,
1251 .hsync_end = 2160 + 48 + 32,
1252 .htotal = 2160 + 48 + 32 + 80,
1253 .vdisplay = 1440,
1254 .vsync_start = 1440 + 3,
1255 .vsync_end = 1440 + 3 + 10,
1256 .vtotal = 1440 + 3 + 10 + 27,
1257 .vrefresh = 60,
1258 .flags = DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_PVSYNC,
1259};
1260
1261static const struct panel_desc innolux_tv123wam = {
1262 .modes = &innolux_tv123wam_mode,
1263 .num_modes = 1,
1264 .bpc = 8,
1265 .size = {
1266 .width = 259,
1267 .height = 173,
1268 },
1269};
1270
1220static const struct drm_display_mode innolux_zj070na_01p_mode = { 1271static const struct drm_display_mode innolux_zj070na_01p_mode = {
1221 .clock = 51501, 1272 .clock = 51501,
1222 .hdisplay = 1024, 1273 .hdisplay = 1024,
@@ -1247,8 +1298,8 @@ static const struct display_timing koe_tx31d200vm0baa_timing = {
1247 .hback_porch = { 16, 36, 56 }, 1298 .hback_porch = { 16, 36, 56 },
1248 .hsync_len = { 8, 8, 8 }, 1299 .hsync_len = { 8, 8, 8 },
1249 .vactive = { 480, 480, 480 }, 1300 .vactive = { 480, 480, 480 },
1250 .vfront_porch = { 6, 21, 33.5 }, 1301 .vfront_porch = { 6, 21, 33 },
1251 .vback_porch = { 6, 21, 33.5 }, 1302 .vback_porch = { 6, 21, 33 },
1252 .vsync_len = { 8, 8, 8 }, 1303 .vsync_len = { 8, 8, 8 },
1253 .flags = DISPLAY_FLAGS_DE_HIGH, 1304 .flags = DISPLAY_FLAGS_DE_HIGH,
1254}; 1305};
@@ -2095,6 +2146,9 @@ static const struct of_device_id platform_of_match[] = {
2095 .compatible = "auo,b133xtn01", 2146 .compatible = "auo,b133xtn01",
2096 .data = &auo_b133xtn01, 2147 .data = &auo_b133xtn01,
2097 }, { 2148 }, {
2149 .compatible = "auo,g070vvn01",
2150 .data = &auo_g070vvn01,
2151 }, {
2098 .compatible = "auo,g104sn02", 2152 .compatible = "auo,g104sn02",
2099 .data = &auo_g104sn02, 2153 .data = &auo_g104sn02,
2100 }, { 2154 }, {
@@ -2170,6 +2224,9 @@ static const struct of_device_id platform_of_match[] = {
2170 .compatible = "innolux,n156bge-l21", 2224 .compatible = "innolux,n156bge-l21",
2171 .data = &innolux_n156bge_l21, 2225 .data = &innolux_n156bge_l21,
2172 }, { 2226 }, {
2227 .compatible = "innolux,tv123wam",
2228 .data = &innolux_tv123wam,
2229 }, {
2173 .compatible = "innolux,zj070na-01p", 2230 .compatible = "innolux,zj070na-01p",
2174 .data = &innolux_zj070na_01p, 2231 .data = &innolux_zj070na_01p,
2175 }, { 2232 }, {
diff --git a/drivers/gpu/drm/panel/panel-sitronix-st7789v.c b/drivers/gpu/drm/panel/panel-sitronix-st7789v.c
index 358c64ef1922..74284e5afc5d 100644
--- a/drivers/gpu/drm/panel/panel-sitronix-st7789v.c
+++ b/drivers/gpu/drm/panel/panel-sitronix-st7789v.c
@@ -419,7 +419,6 @@ static int st7789v_remove(struct spi_device *spi)
419{ 419{
420 struct st7789v *ctx = spi_get_drvdata(spi); 420 struct st7789v *ctx = spi_get_drvdata(spi);
421 421
422 drm_panel_detach(&ctx->panel);
423 drm_panel_remove(&ctx->panel); 422 drm_panel_remove(&ctx->panel);
424 423
425 if (ctx->backlight) 424 if (ctx->backlight)
diff --git a/drivers/gpu/drm/rockchip/cdn-dp-reg.c b/drivers/gpu/drm/rockchip/cdn-dp-reg.c
index eb3042c6d1b2..3105965fc260 100644
--- a/drivers/gpu/drm/rockchip/cdn-dp-reg.c
+++ b/drivers/gpu/drm/rockchip/cdn-dp-reg.c
@@ -792,7 +792,6 @@ err_config_video:
792 792
793int cdn_dp_audio_stop(struct cdn_dp_device *dp, struct audio_info *audio) 793int cdn_dp_audio_stop(struct cdn_dp_device *dp, struct audio_info *audio)
794{ 794{
795 u32 val;
796 int ret; 795 int ret;
797 796
798 ret = cdn_dp_reg_write(dp, AUDIO_PACK_CONTROL, 0); 797 ret = cdn_dp_reg_write(dp, AUDIO_PACK_CONTROL, 0);
@@ -801,11 +800,7 @@ int cdn_dp_audio_stop(struct cdn_dp_device *dp, struct audio_info *audio)
801 return ret; 800 return ret;
802 } 801 }
803 802
804 val = SPDIF_AVG_SEL | SPDIF_JITTER_BYPASS; 803 writel(0, dp->regs + SPDIF_CTRL_ADDR);
805 val |= SPDIF_FIFO_MID_RANGE(0xe0);
806 val |= SPDIF_JITTER_THRSH(0xe0);
807 val |= SPDIF_JITTER_AVG_WIN(7);
808 writel(val, dp->regs + SPDIF_CTRL_ADDR);
809 804
810	/* clear the audio config and reset */ 805	/* clear the audio config and reset */
811 writel(0, dp->regs + AUDIO_SRC_CNTL); 806 writel(0, dp->regs + AUDIO_SRC_CNTL);
@@ -929,12 +924,6 @@ static void cdn_dp_audio_config_spdif(struct cdn_dp_device *dp)
929{ 924{
930 u32 val; 925 u32 val;
931 926
932 val = SPDIF_AVG_SEL | SPDIF_JITTER_BYPASS;
933 val |= SPDIF_FIFO_MID_RANGE(0xe0);
934 val |= SPDIF_JITTER_THRSH(0xe0);
935 val |= SPDIF_JITTER_AVG_WIN(7);
936 writel(val, dp->regs + SPDIF_CTRL_ADDR);
937
938 writel(SYNC_WR_TO_CH_ZERO, dp->regs + FIFO_CNTL); 927 writel(SYNC_WR_TO_CH_ZERO, dp->regs + FIFO_CNTL);
939 928
940 val = MAX_NUM_CH(2) | AUDIO_TYPE_LPCM | CFG_SUB_PCKT_NUM(4); 929 val = MAX_NUM_CH(2) | AUDIO_TYPE_LPCM | CFG_SUB_PCKT_NUM(4);
@@ -942,9 +931,6 @@ static void cdn_dp_audio_config_spdif(struct cdn_dp_device *dp)
942 writel(SMPL2PKT_EN, dp->regs + SMPL2PKT_CNTL); 931 writel(SMPL2PKT_EN, dp->regs + SMPL2PKT_CNTL);
943 932
944 val = SPDIF_ENABLE | SPDIF_AVG_SEL | SPDIF_JITTER_BYPASS; 933 val = SPDIF_ENABLE | SPDIF_AVG_SEL | SPDIF_JITTER_BYPASS;
945 val |= SPDIF_FIFO_MID_RANGE(0xe0);
946 val |= SPDIF_JITTER_THRSH(0xe0);
947 val |= SPDIF_JITTER_AVG_WIN(7);
948 writel(val, dp->regs + SPDIF_CTRL_ADDR); 934 writel(val, dp->regs + SPDIF_CTRL_ADDR);
949 935
950 clk_prepare_enable(dp->spdif_clk); 936 clk_prepare_enable(dp->spdif_clk);
diff --git a/drivers/gpu/drm/rockchip/rockchip_drm_fb.c b/drivers/gpu/drm/rockchip/rockchip_drm_fb.c
index d4f4118b482d..ea18cb2a76c0 100644
--- a/drivers/gpu/drm/rockchip/rockchip_drm_fb.c
+++ b/drivers/gpu/drm/rockchip/rockchip_drm_fb.c
@@ -18,52 +18,13 @@
18#include <drm/drm_atomic.h> 18#include <drm/drm_atomic.h>
19#include <drm/drm_fb_helper.h> 19#include <drm/drm_fb_helper.h>
20#include <drm/drm_crtc_helper.h> 20#include <drm/drm_crtc_helper.h>
21#include <drm/drm_gem_framebuffer_helper.h>
21 22
22#include "rockchip_drm_drv.h" 23#include "rockchip_drm_drv.h"
23#include "rockchip_drm_fb.h" 24#include "rockchip_drm_fb.h"
24#include "rockchip_drm_gem.h" 25#include "rockchip_drm_gem.h"
25#include "rockchip_drm_psr.h" 26#include "rockchip_drm_psr.h"
26 27
27#define to_rockchip_fb(x) container_of(x, struct rockchip_drm_fb, fb)
28
29struct rockchip_drm_fb {
30 struct drm_framebuffer fb;
31 struct drm_gem_object *obj[ROCKCHIP_MAX_FB_BUFFER];
32};
33
34struct drm_gem_object *rockchip_fb_get_gem_obj(struct drm_framebuffer *fb,
35 unsigned int plane)
36{
37 struct rockchip_drm_fb *rk_fb = to_rockchip_fb(fb);
38
39 if (plane >= ROCKCHIP_MAX_FB_BUFFER)
40 return NULL;
41
42 return rk_fb->obj[plane];
43}
44
45static void rockchip_drm_fb_destroy(struct drm_framebuffer *fb)
46{
47 struct rockchip_drm_fb *rockchip_fb = to_rockchip_fb(fb);
48 int i;
49
50 for (i = 0; i < ROCKCHIP_MAX_FB_BUFFER; i++)
51 drm_gem_object_put_unlocked(rockchip_fb->obj[i]);
52
53 drm_framebuffer_cleanup(fb);
54 kfree(rockchip_fb);
55}
56
57static int rockchip_drm_fb_create_handle(struct drm_framebuffer *fb,
58 struct drm_file *file_priv,
59 unsigned int *handle)
60{
61 struct rockchip_drm_fb *rockchip_fb = to_rockchip_fb(fb);
62
63 return drm_gem_handle_create(file_priv,
64 rockchip_fb->obj[0], handle);
65}
66
67static int rockchip_drm_fb_dirty(struct drm_framebuffer *fb, 28static int rockchip_drm_fb_dirty(struct drm_framebuffer *fb,
68 struct drm_file *file, 29 struct drm_file *file,
69 unsigned int flags, unsigned int color, 30 unsigned int flags, unsigned int color,
@@ -75,46 +36,45 @@ static int rockchip_drm_fb_dirty(struct drm_framebuffer *fb,
75} 36}
76 37
77static const struct drm_framebuffer_funcs rockchip_drm_fb_funcs = { 38static const struct drm_framebuffer_funcs rockchip_drm_fb_funcs = {
78 .destroy = rockchip_drm_fb_destroy, 39 .destroy = drm_gem_fb_destroy,
79 .create_handle = rockchip_drm_fb_create_handle, 40 .create_handle = drm_gem_fb_create_handle,
80 .dirty = rockchip_drm_fb_dirty, 41 .dirty = rockchip_drm_fb_dirty,
81}; 42};
82 43
83static struct rockchip_drm_fb * 44static struct drm_framebuffer *
84rockchip_fb_alloc(struct drm_device *dev, const struct drm_mode_fb_cmd2 *mode_cmd, 45rockchip_fb_alloc(struct drm_device *dev, const struct drm_mode_fb_cmd2 *mode_cmd,
85 struct drm_gem_object **obj, unsigned int num_planes) 46 struct drm_gem_object **obj, unsigned int num_planes)
86{ 47{
87 struct rockchip_drm_fb *rockchip_fb; 48 struct drm_framebuffer *fb;
88 int ret; 49 int ret;
89 int i; 50 int i;
90 51
91 rockchip_fb = kzalloc(sizeof(*rockchip_fb), GFP_KERNEL); 52 fb = kzalloc(sizeof(*fb), GFP_KERNEL);
92 if (!rockchip_fb) 53 if (!fb)
93 return ERR_PTR(-ENOMEM); 54 return ERR_PTR(-ENOMEM);
94 55
95 drm_helper_mode_fill_fb_struct(dev, &rockchip_fb->fb, mode_cmd); 56 drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
96 57
97 for (i = 0; i < num_planes; i++) 58 for (i = 0; i < num_planes; i++)
98 rockchip_fb->obj[i] = obj[i]; 59 fb->obj[i] = obj[i];
99 60
100 ret = drm_framebuffer_init(dev, &rockchip_fb->fb, 61 ret = drm_framebuffer_init(dev, fb, &rockchip_drm_fb_funcs);
101 &rockchip_drm_fb_funcs);
102 if (ret) { 62 if (ret) {
103 DRM_DEV_ERROR(dev->dev, 63 DRM_DEV_ERROR(dev->dev,
104 "Failed to initialize framebuffer: %d\n", 64 "Failed to initialize framebuffer: %d\n",
105 ret); 65 ret);
106 kfree(rockchip_fb); 66 kfree(fb);
107 return ERR_PTR(ret); 67 return ERR_PTR(ret);
108 } 68 }
109 69
110 return rockchip_fb; 70 return fb;
111} 71}
112 72
113static struct drm_framebuffer * 73static struct drm_framebuffer *
114rockchip_user_fb_create(struct drm_device *dev, struct drm_file *file_priv, 74rockchip_user_fb_create(struct drm_device *dev, struct drm_file *file_priv,
115 const struct drm_mode_fb_cmd2 *mode_cmd) 75 const struct drm_mode_fb_cmd2 *mode_cmd)
116{ 76{
117 struct rockchip_drm_fb *rockchip_fb; 77 struct drm_framebuffer *fb;
118 struct drm_gem_object *objs[ROCKCHIP_MAX_FB_BUFFER]; 78 struct drm_gem_object *objs[ROCKCHIP_MAX_FB_BUFFER];
119 struct drm_gem_object *obj; 79 struct drm_gem_object *obj;
120 unsigned int hsub; 80 unsigned int hsub;
@@ -153,13 +113,13 @@ rockchip_user_fb_create(struct drm_device *dev, struct drm_file *file_priv,
153 objs[i] = obj; 113 objs[i] = obj;
154 } 114 }
155 115
156 rockchip_fb = rockchip_fb_alloc(dev, mode_cmd, objs, i); 116 fb = rockchip_fb_alloc(dev, mode_cmd, objs, i);
157 if (IS_ERR(rockchip_fb)) { 117 if (IS_ERR(fb)) {
158 ret = PTR_ERR(rockchip_fb); 118 ret = PTR_ERR(fb);
159 goto err_gem_object_unreference; 119 goto err_gem_object_unreference;
160 } 120 }
161 121
162 return &rockchip_fb->fb; 122 return fb;
163 123
164err_gem_object_unreference: 124err_gem_object_unreference:
165 for (i--; i >= 0; i--) 125 for (i--; i >= 0; i--)
@@ -242,13 +202,13 @@ rockchip_drm_framebuffer_init(struct drm_device *dev,
242 const struct drm_mode_fb_cmd2 *mode_cmd, 202 const struct drm_mode_fb_cmd2 *mode_cmd,
243 struct drm_gem_object *obj) 203 struct drm_gem_object *obj)
244{ 204{
245 struct rockchip_drm_fb *rockchip_fb; 205 struct drm_framebuffer *fb;
246 206
247 rockchip_fb = rockchip_fb_alloc(dev, mode_cmd, &obj, 1); 207 fb = rockchip_fb_alloc(dev, mode_cmd, &obj, 1);
248 if (IS_ERR(rockchip_fb)) 208 if (IS_ERR(fb))
249 return ERR_CAST(rockchip_fb); 209 return ERR_CAST(fb);
250 210
251 return &rockchip_fb->fb; 211 return fb;
252} 212}
253 213
254void rockchip_drm_mode_config_init(struct drm_device *dev) 214void rockchip_drm_mode_config_init(struct drm_device *dev)
diff --git a/drivers/gpu/drm/rockchip/rockchip_drm_fb.h b/drivers/gpu/drm/rockchip/rockchip_drm_fb.h
index 2fe47f1ee98f..f1265cb1aee8 100644
--- a/drivers/gpu/drm/rockchip/rockchip_drm_fb.h
+++ b/drivers/gpu/drm/rockchip/rockchip_drm_fb.h
@@ -22,7 +22,4 @@ rockchip_drm_framebuffer_init(struct drm_device *dev,
22void rockchip_drm_framebuffer_fini(struct drm_framebuffer *fb); 22void rockchip_drm_framebuffer_fini(struct drm_framebuffer *fb);
23 23
24void rockchip_drm_mode_config_init(struct drm_device *dev); 24void rockchip_drm_mode_config_init(struct drm_device *dev);
25
26struct drm_gem_object *rockchip_fb_get_gem_obj(struct drm_framebuffer *fb,
27 unsigned int plane);
28#endif /* _ROCKCHIP_DRM_FB_H */ 25#endif /* _ROCKCHIP_DRM_FB_H */
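
The rockchip change above drops the driver-private framebuffer wrapper in favour of the GEM objects stored directly in drm_framebuffer::obj[] plus the generic drm_gem_framebuffer_helper callbacks. A minimal sketch of what a driver's funcs table looks like after such a conversion (the names here are illustrative, not taken from the patch):

#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_framebuffer_helper.h>

/*
 * Sketch only: once the GEM objects live in fb->obj[], the generic
 * helpers can create handles and tear the framebuffer down, so the
 * driver only supplies its own .dirty callback if it needs one.
 */
static const struct drm_framebuffer_funcs example_fb_funcs = {
	.destroy       = drm_gem_fb_destroy,
	.create_handle = drm_gem_fb_create_handle,
	/* .dirty = example_fb_dirty,  -- optional, driver specific */
};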
diff --git a/drivers/gpu/drm/rockchip/rockchip_drm_vop.c b/drivers/gpu/drm/rockchip/rockchip_drm_vop.c
index 2121345a61af..c9222119767d 100644
--- a/drivers/gpu/drm/rockchip/rockchip_drm_vop.c
+++ b/drivers/gpu/drm/rockchip/rockchip_drm_vop.c
@@ -486,6 +486,31 @@ static void vop_line_flag_irq_disable(struct vop *vop)
486 spin_unlock_irqrestore(&vop->irq_lock, flags); 486 spin_unlock_irqrestore(&vop->irq_lock, flags);
487} 487}
488 488
489static int vop_core_clks_enable(struct vop *vop)
490{
491 int ret;
492
493 ret = clk_enable(vop->hclk);
494 if (ret < 0)
495 return ret;
496
497 ret = clk_enable(vop->aclk);
498 if (ret < 0)
499 goto err_disable_hclk;
500
501 return 0;
502
503err_disable_hclk:
504 clk_disable(vop->hclk);
505 return ret;
506}
507
508static void vop_core_clks_disable(struct vop *vop)
509{
510 clk_disable(vop->aclk);
511 clk_disable(vop->hclk);
512}
513
489static int vop_enable(struct drm_crtc *crtc) 514static int vop_enable(struct drm_crtc *crtc)
490{ 515{
491 struct vop *vop = to_vop(crtc); 516 struct vop *vop = to_vop(crtc);
@@ -497,17 +522,13 @@ static int vop_enable(struct drm_crtc *crtc)
497 return ret; 522 return ret;
498 } 523 }
499 524
500 ret = clk_enable(vop->hclk); 525 ret = vop_core_clks_enable(vop);
501 if (WARN_ON(ret < 0)) 526 if (WARN_ON(ret < 0))
502 goto err_put_pm_runtime; 527 goto err_put_pm_runtime;
503 528
504 ret = clk_enable(vop->dclk); 529 ret = clk_enable(vop->dclk);
505 if (WARN_ON(ret < 0)) 530 if (WARN_ON(ret < 0))
506 goto err_disable_hclk; 531 goto err_disable_core;
507
508 ret = clk_enable(vop->aclk);
509 if (WARN_ON(ret < 0))
510 goto err_disable_dclk;
511 532
512 /* 533 /*
513 * Slave iommu shares power, irq and clock with vop. It was associated 534 * Slave iommu shares power, irq and clock with vop. It was associated
@@ -519,7 +540,7 @@ static int vop_enable(struct drm_crtc *crtc)
519 if (ret) { 540 if (ret) {
520 DRM_DEV_ERROR(vop->dev, 541 DRM_DEV_ERROR(vop->dev,
521 "failed to attach dma mapping, %d\n", ret); 542 "failed to attach dma mapping, %d\n", ret);
522 goto err_disable_aclk; 543 goto err_disable_dclk;
523 } 544 }
524 545
525 spin_lock(&vop->reg_lock); 546 spin_lock(&vop->reg_lock);
@@ -552,18 +573,14 @@ static int vop_enable(struct drm_crtc *crtc)
552 573
553 spin_unlock(&vop->reg_lock); 574 spin_unlock(&vop->reg_lock);
554 575
555 enable_irq(vop->irq);
556
557 drm_crtc_vblank_on(crtc); 576 drm_crtc_vblank_on(crtc);
558 577
559 return 0; 578 return 0;
560 579
561err_disable_aclk:
562 clk_disable(vop->aclk);
563err_disable_dclk: 580err_disable_dclk:
564 clk_disable(vop->dclk); 581 clk_disable(vop->dclk);
565err_disable_hclk: 582err_disable_core:
566 clk_disable(vop->hclk); 583 vop_core_clks_disable(vop);
567err_put_pm_runtime: 584err_put_pm_runtime:
568 pm_runtime_put_sync(vop->dev); 585 pm_runtime_put_sync(vop->dev);
569 return ret; 586 return ret;
@@ -599,8 +616,6 @@ static void vop_crtc_atomic_disable(struct drm_crtc *crtc,
599 616
600 vop_dsp_hold_valid_irq_disable(vop); 617 vop_dsp_hold_valid_irq_disable(vop);
601 618
602 disable_irq(vop->irq);
603
604 vop->is_enabled = false; 619 vop->is_enabled = false;
605 620
606 /* 621 /*
@@ -609,8 +624,7 @@ static void vop_crtc_atomic_disable(struct drm_crtc *crtc,
609 rockchip_drm_dma_detach_device(vop->drm_dev, vop->dev); 624 rockchip_drm_dma_detach_device(vop->drm_dev, vop->dev);
610 625
611 clk_disable(vop->dclk); 626 clk_disable(vop->dclk);
612 clk_disable(vop->aclk); 627 vop_core_clks_disable(vop);
613 clk_disable(vop->hclk);
614 pm_runtime_put(vop->dev); 628 pm_runtime_put(vop->dev);
615 mutex_unlock(&vop->vop_lock); 629 mutex_unlock(&vop->vop_lock);
616 630
@@ -728,7 +742,7 @@ static void vop_plane_atomic_update(struct drm_plane *plane,
728 return; 742 return;
729 } 743 }
730 744
731 obj = rockchip_fb_get_gem_obj(fb, 0); 745 obj = fb->obj[0];
732 rk_obj = to_rockchip_obj(obj); 746 rk_obj = to_rockchip_obj(obj);
733 747
734 actual_w = drm_rect_width(src) >> 16; 748 actual_w = drm_rect_width(src) >> 16;
@@ -758,7 +772,7 @@ static void vop_plane_atomic_update(struct drm_plane *plane,
758 int vsub = drm_format_vert_chroma_subsampling(fb->format->format); 772 int vsub = drm_format_vert_chroma_subsampling(fb->format->format);
759 int bpp = fb->format->cpp[1]; 773 int bpp = fb->format->cpp[1];
760 774
761 uv_obj = rockchip_fb_get_gem_obj(fb, 1); 775 uv_obj = fb->obj[1];
762 rk_uv_obj = to_rockchip_obj(uv_obj); 776 rk_uv_obj = to_rockchip_obj(uv_obj);
763 777
764 offset = (src->x1 >> 16) * bpp / hsub; 778 offset = (src->x1 >> 16) * bpp / hsub;
@@ -1178,6 +1192,18 @@ static irqreturn_t vop_isr(int irq, void *data)
1178 int ret = IRQ_NONE; 1192 int ret = IRQ_NONE;
1179 1193
1180 /* 1194 /*
1195 * The irq is shared with the iommu. If the runtime-pm state of the
1196 * vop-device is disabled the irq has to be targeted at the iommu.
1197 */
1198 if (!pm_runtime_get_if_in_use(vop->dev))
1199 return IRQ_NONE;
1200
1201 if (vop_core_clks_enable(vop)) {
1202 DRM_DEV_ERROR_RATELIMITED(vop->dev, "couldn't enable clocks\n");
1203 goto out;
1204 }
1205
1206 /*
1181 * interrupt register has interrupt status, enable and clear bits, we 1207 * interrupt register has interrupt status, enable and clear bits, we
1182 * must hold irq_lock to avoid a race with enable/disable_vblank(). 1208 * must hold irq_lock to avoid a race with enable/disable_vblank().
1183 */ 1209 */
@@ -1192,7 +1218,7 @@ static irqreturn_t vop_isr(int irq, void *data)
1192 1218
1193 /* This is expected for vop iommu irqs, since the irq is shared */ 1219 /* This is expected for vop iommu irqs, since the irq is shared */
1194 if (!active_irqs) 1220 if (!active_irqs)
1195 return IRQ_NONE; 1221 goto out_disable;
1196 1222
1197 if (active_irqs & DSP_HOLD_VALID_INTR) { 1223 if (active_irqs & DSP_HOLD_VALID_INTR) {
1198 complete(&vop->dsp_hold_completion); 1224 complete(&vop->dsp_hold_completion);
@@ -1218,6 +1244,10 @@ static irqreturn_t vop_isr(int irq, void *data)
1218 DRM_DEV_ERROR(vop->dev, "Unknown VOP IRQs: %#02x\n", 1244 DRM_DEV_ERROR(vop->dev, "Unknown VOP IRQs: %#02x\n",
1219 active_irqs); 1245 active_irqs);
1220 1246
1247out_disable:
1248 vop_core_clks_disable(vop);
1249out:
1250 pm_runtime_put(vop->dev);
1221 return ret; 1251 return ret;
1222} 1252}
1223 1253
@@ -1596,9 +1626,6 @@ static int vop_bind(struct device *dev, struct device *master, void *data)
1596 if (ret) 1626 if (ret)
1597 goto err_disable_pm_runtime; 1627 goto err_disable_pm_runtime;
1598 1628
1599 /* IRQ is initially disabled; it gets enabled in power_on */
1600 disable_irq(vop->irq);
1601
1602 return 0; 1629 return 0;
1603 1630
1604err_disable_pm_runtime: 1631err_disable_pm_runtime:
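
The ISR rework above relies on pm_runtime_get_if_in_use() so that a shared interrupt line is left to the other user (the iommu) whenever the VOP is runtime-suspended. A hedged sketch of that pattern, with hypothetical structure names:

#include <linux/interrupt.h>
#include <linux/pm_runtime.h>

struct example_dev {
	struct device *dev;	/* hypothetical driver data */
};

static irqreturn_t example_shared_isr(int irq, void *data)
{
	struct example_dev *ed = data;
	irqreturn_t ret = IRQ_NONE;

	/*
	 * pm_runtime_get_if_in_use() returns 0, without touching the
	 * usage count, when the device is not in use; a runtime-suspended
	 * device therefore never claims the shared interrupt.
	 */
	if (!pm_runtime_get_if_in_use(ed->dev))
		return IRQ_NONE;

	/* ... read and clear the interrupt status, set ret ... */

	pm_runtime_put(ed->dev);
	return ret;
}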
diff --git a/drivers/gpu/drm/rockchip/rockchip_lvds.c b/drivers/gpu/drm/rockchip/rockchip_lvds.c
index e67f4ea28c0e..b3f6f524b402 100644
--- a/drivers/gpu/drm/rockchip/rockchip_lvds.c
+++ b/drivers/gpu/drm/rockchip/rockchip_lvds.c
@@ -363,8 +363,10 @@ static int rockchip_lvds_bind(struct device *dev, struct device *master,
363 of_property_read_u32(endpoint, "reg", &endpoint_id); 363 of_property_read_u32(endpoint, "reg", &endpoint_id);
364 ret = drm_of_find_panel_or_bridge(dev->of_node, 1, endpoint_id, 364 ret = drm_of_find_panel_or_bridge(dev->of_node, 1, endpoint_id,
365 &lvds->panel, &lvds->bridge); 365 &lvds->panel, &lvds->bridge);
366 if (!ret) 366 if (!ret) {
367 of_node_put(endpoint);
367 break; 368 break;
369 }
368 } 370 }
369 if (!child_count) { 371 if (!child_count) {
370 DRM_DEV_ERROR(dev, "lvds port does not have any children\n"); 372 DRM_DEV_ERROR(dev, "lvds port does not have any children\n");
@@ -446,14 +448,12 @@ static int rockchip_lvds_bind(struct device *dev, struct device *master,
446 goto err_free_connector; 448 goto err_free_connector;
447 } 449 }
448 } else { 450 } else {
449 lvds->bridge->encoder = encoder;
450 ret = drm_bridge_attach(encoder, lvds->bridge, NULL); 451 ret = drm_bridge_attach(encoder, lvds->bridge, NULL);
451 if (ret) { 452 if (ret) {
452 DRM_DEV_ERROR(drm_dev->dev, 453 DRM_DEV_ERROR(drm_dev->dev,
453 "failed to attach bridge: %d\n", ret); 454 "failed to attach bridge: %d\n", ret);
454 goto err_free_encoder; 455 goto err_free_encoder;
455 } 456 }
456 encoder->bridge = lvds->bridge;
457 } 457 }
458 458
459 pm_runtime_enable(dev); 459 pm_runtime_enable(dev);
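
The lvds fix above addresses the usual for_each_child_of_node() pitfall: the iterator drops the reference on the previous child at each step, but breaking out early leaves the current child's reference held, so it has to be put by hand. A small sketch of the pattern (the property name is made up for illustration):

#include <linux/of.h>

static bool example_has_wanted_child(struct device_node *parent)
{
	struct device_node *child;
	bool found = false;

	for_each_child_of_node(parent, child) {
		if (of_property_read_bool(child, "example,wanted")) {
			found = true;
			/* break skips the iterator's of_node_put() */
			of_node_put(child);
			break;
		}
	}

	return found;
}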
diff --git a/drivers/gpu/drm/selftests/drm_mm_selftests.h b/drivers/gpu/drm/selftests/drm_mm_selftests.h
index 54acc117550c..6b943ea1c57d 100644
--- a/drivers/gpu/drm/selftests/drm_mm_selftests.h
+++ b/drivers/gpu/drm/selftests/drm_mm_selftests.h
@@ -19,7 +19,9 @@ selftest(align64, igt_align64)
19selftest(evict, igt_evict) 19selftest(evict, igt_evict)
20selftest(evict_range, igt_evict_range) 20selftest(evict_range, igt_evict_range)
21selftest(bottomup, igt_bottomup) 21selftest(bottomup, igt_bottomup)
22selftest(lowest, igt_lowest)
22selftest(topdown, igt_topdown) 23selftest(topdown, igt_topdown)
24selftest(highest, igt_highest)
23selftest(color, igt_color) 25selftest(color, igt_color)
24selftest(color_evict, igt_color_evict) 26selftest(color_evict, igt_color_evict)
25selftest(color_evict_range, igt_color_evict_range) 27selftest(color_evict_range, igt_color_evict_range)
diff --git a/drivers/gpu/drm/selftests/test-drm_mm.c b/drivers/gpu/drm/selftests/test-drm_mm.c
index 933af1c25387..fbed2c90fd51 100644
--- a/drivers/gpu/drm/selftests/test-drm_mm.c
+++ b/drivers/gpu/drm/selftests/test-drm_mm.c
@@ -1825,6 +1825,77 @@ err:
1825 return ret; 1825 return ret;
1826} 1826}
1827 1827
1828static int __igt_once(unsigned int mode)
1829{
1830 struct drm_mm mm;
1831 struct drm_mm_node rsvd_lo, rsvd_hi, node;
1832 int err;
1833
1834 drm_mm_init(&mm, 0, 7);
1835
1836 memset(&rsvd_lo, 0, sizeof(rsvd_lo));
1837 rsvd_lo.start = 1;
1838 rsvd_lo.size = 1;
1839 err = drm_mm_reserve_node(&mm, &rsvd_lo);
1840 if (err) {
1841 pr_err("Could not reserve low node\n");
1842 goto err;
1843 }
1844
1845 memset(&rsvd_hi, 0, sizeof(rsvd_hi));
1846 rsvd_hi.start = 5;
1847 rsvd_hi.size = 1;
1848 err = drm_mm_reserve_node(&mm, &rsvd_hi);
1849 if (err) {
1850	pr_err("Could not reserve high node\n");
1851 goto err_lo;
1852 }
1853
1854 if (!drm_mm_hole_follows(&rsvd_lo) || !drm_mm_hole_follows(&rsvd_hi)) {
1855 pr_err("Expected a hole after lo and high nodes!\n");
1856 err = -EINVAL;
1857 goto err_hi;
1858 }
1859
1860 memset(&node, 0, sizeof(node));
1861 err = drm_mm_insert_node_generic(&mm, &node,
1862 2, 0, 0,
1863 mode | DRM_MM_INSERT_ONCE);
1864 if (!err) {
1865 pr_err("Unexpectedly inserted the node into the wrong hole: node.start=%llx\n",
1866 node.start);
1867 err = -EINVAL;
1868 goto err_node;
1869 }
1870
1871 err = drm_mm_insert_node_generic(&mm, &node, 2, 0, 0, mode);
1872 if (err) {
1873 pr_err("Could not insert the node into the available hole!\n");
1874 err = -EINVAL;
1875 goto err_hi;
1876 }
1877
1878err_node:
1879 drm_mm_remove_node(&node);
1880err_hi:
1881 drm_mm_remove_node(&rsvd_hi);
1882err_lo:
1883 drm_mm_remove_node(&rsvd_lo);
1884err:
1885 drm_mm_takedown(&mm);
1886 return err;
1887}
1888
1889static int igt_lowest(void *ignored)
1890{
1891 return __igt_once(DRM_MM_INSERT_LOW);
1892}
1893
1894static int igt_highest(void *ignored)
1895{
1896 return __igt_once(DRM_MM_INSERT_HIGH);
1897}
1898
1828static void separate_adjacent_colors(const struct drm_mm_node *node, 1899static void separate_adjacent_colors(const struct drm_mm_node *node,
1829 unsigned long color, 1900 unsigned long color,
1830 u64 *start, 1901 u64 *start,
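
The new lowest/highest selftests exercise DRM_MM_INSERT_ONCE, which makes drm_mm_insert_node_generic() try only the single hole that the chosen search order (LOW or HIGH) would visit first, instead of continuing through the rest of the address space. A hedged usage sketch:

#include <drm/drm_mm.h>
#include <linux/string.h>

/*
 * Sketch: place a node in the lowest hole only; if that one hole
 * cannot fit the request, fail instead of falling back to a full
 * search.
 */
static int example_insert_lowest_only(struct drm_mm *mm,
				      struct drm_mm_node *node, u64 size)
{
	memset(node, 0, sizeof(*node));
	return drm_mm_insert_node_generic(mm, node, size, 0, 0,
					  DRM_MM_INSERT_LOW |
					  DRM_MM_INSERT_ONCE);
}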
diff --git a/drivers/gpu/drm/sti/sti_gdp.c b/drivers/gpu/drm/sti/sti_gdp.c
index 9b2c47051b51..49813d34bdf0 100644
--- a/drivers/gpu/drm/sti/sti_gdp.c
+++ b/drivers/gpu/drm/sti/sti_gdp.c
@@ -211,7 +211,11 @@ static int gdp_dbg_show(struct seq_file *s, void *data)
211 struct drm_info_node *node = s->private; 211 struct drm_info_node *node = s->private;
212 struct sti_gdp *gdp = (struct sti_gdp *)node->info_ent->data; 212 struct sti_gdp *gdp = (struct sti_gdp *)node->info_ent->data;
213 struct drm_plane *drm_plane = &gdp->plane.drm_plane; 213 struct drm_plane *drm_plane = &gdp->plane.drm_plane;
214 struct drm_crtc *crtc = drm_plane->crtc; 214 struct drm_crtc *crtc;
215
216 drm_modeset_lock(&drm_plane->mutex, NULL);
217 crtc = drm_plane->state->crtc;
218 drm_modeset_unlock(&drm_plane->mutex);
215 219
216 seq_printf(s, "%s: (vaddr = 0x%p)", 220 seq_printf(s, "%s: (vaddr = 0x%p)",
217 sti_plane_to_str(&gdp->plane), gdp->regs); 221 sti_plane_to_str(&gdp->plane), gdp->regs);
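
The sti debugfs fix above follows the general rule for atomic drivers: plane->crtc is legacy state, and the authoritative pointer lives in plane->state, which should only be dereferenced under the plane's modeset lock. A minimal sketch of that read pattern:

#include <drm/drm_plane.h>
#include <drm/drm_modeset_lock.h>

/* Sketch: snapshot the CRTC currently bound to a plane for debug output. */
static struct drm_crtc *example_plane_crtc(struct drm_plane *plane)
{
	struct drm_crtc *crtc;

	drm_modeset_lock(&plane->mutex, NULL);
	crtc = plane->state->crtc;
	drm_modeset_unlock(&plane->mutex);

	return crtc;
}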
diff --git a/drivers/gpu/drm/sun4i/sun6i_mipi_dsi.c b/drivers/gpu/drm/sun4i/sun6i_mipi_dsi.c
index bfbf761f0c1d..d4e7d16a2514 100644
--- a/drivers/gpu/drm/sun4i/sun6i_mipi_dsi.c
+++ b/drivers/gpu/drm/sun4i/sun6i_mipi_dsi.c
@@ -1040,7 +1040,7 @@ static int sun6i_dsi_remove(struct platform_device *pdev)
1040 return 0; 1040 return 0;
1041} 1041}
1042 1042
1043static int sun6i_dsi_runtime_resume(struct device *dev) 1043static int __maybe_unused sun6i_dsi_runtime_resume(struct device *dev)
1044{ 1044{
1045 struct sun6i_dsi *dsi = dev_get_drvdata(dev); 1045 struct sun6i_dsi *dsi = dev_get_drvdata(dev);
1046 1046
@@ -1069,7 +1069,7 @@ static int sun6i_dsi_runtime_resume(struct device *dev)
1069 return 0; 1069 return 0;
1070} 1070}
1071 1071
1072static int sun6i_dsi_runtime_suspend(struct device *dev) 1072static int __maybe_unused sun6i_dsi_runtime_suspend(struct device *dev)
1073{ 1073{
1074 struct sun6i_dsi *dsi = dev_get_drvdata(dev); 1074 struct sun6i_dsi *dsi = dev_get_drvdata(dev);
1075 1075
diff --git a/drivers/gpu/drm/tegra/gem.c b/drivers/gpu/drm/tegra/gem.c
index 00a5c9f32254..4f80100ff5f3 100644
--- a/drivers/gpu/drm/tegra/gem.c
+++ b/drivers/gpu/drm/tegra/gem.c
@@ -582,18 +582,6 @@ static int tegra_gem_prime_end_cpu_access(struct dma_buf *buf,
582 return 0; 582 return 0;
583} 583}
584 584
585static void *tegra_gem_prime_kmap_atomic(struct dma_buf *buf,
586 unsigned long page)
587{
588 return NULL;
589}
590
591static void tegra_gem_prime_kunmap_atomic(struct dma_buf *buf,
592 unsigned long page,
593 void *addr)
594{
595}
596
597static void *tegra_gem_prime_kmap(struct dma_buf *buf, unsigned long page) 585static void *tegra_gem_prime_kmap(struct dma_buf *buf, unsigned long page)
598{ 586{
599 return NULL; 587 return NULL;
@@ -634,8 +622,6 @@ static const struct dma_buf_ops tegra_gem_prime_dmabuf_ops = {
634 .release = tegra_gem_prime_release, 622 .release = tegra_gem_prime_release,
635 .begin_cpu_access = tegra_gem_prime_begin_cpu_access, 623 .begin_cpu_access = tegra_gem_prime_begin_cpu_access,
636 .end_cpu_access = tegra_gem_prime_end_cpu_access, 624 .end_cpu_access = tegra_gem_prime_end_cpu_access,
637 .map_atomic = tegra_gem_prime_kmap_atomic,
638 .unmap_atomic = tegra_gem_prime_kunmap_atomic,
639 .map = tegra_gem_prime_kmap, 625 .map = tegra_gem_prime_kmap,
640 .unmap = tegra_gem_prime_kunmap, 626 .unmap = tegra_gem_prime_kunmap,
641 .mmap = tegra_gem_prime_mmap, 627 .mmap = tegra_gem_prime_mmap,
diff --git a/drivers/gpu/drm/udl/udl_dmabuf.c b/drivers/gpu/drm/udl/udl_dmabuf.c
index 0a20695eb120..556f62662aa9 100644
--- a/drivers/gpu/drm/udl/udl_dmabuf.c
+++ b/drivers/gpu/drm/udl/udl_dmabuf.c
@@ -29,7 +29,6 @@ struct udl_drm_dmabuf_attachment {
29}; 29};
30 30
31static int udl_attach_dma_buf(struct dma_buf *dmabuf, 31static int udl_attach_dma_buf(struct dma_buf *dmabuf,
32 struct device *dev,
33 struct dma_buf_attachment *attach) 32 struct dma_buf_attachment *attach)
34{ 33{
35 struct udl_drm_dmabuf_attachment *udl_attach; 34 struct udl_drm_dmabuf_attachment *udl_attach;
@@ -158,27 +157,12 @@ static void *udl_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num)
158 return NULL; 157 return NULL;
159} 158}
160 159
161static void *udl_dmabuf_kmap_atomic(struct dma_buf *dma_buf,
162 unsigned long page_num)
163{
164 /* TODO */
165
166 return NULL;
167}
168
169static void udl_dmabuf_kunmap(struct dma_buf *dma_buf, 160static void udl_dmabuf_kunmap(struct dma_buf *dma_buf,
170 unsigned long page_num, void *addr) 161 unsigned long page_num, void *addr)
171{ 162{
172 /* TODO */ 163 /* TODO */
173} 164}
174 165
175static void udl_dmabuf_kunmap_atomic(struct dma_buf *dma_buf,
176 unsigned long page_num,
177 void *addr)
178{
179 /* TODO */
180}
181
182static int udl_dmabuf_mmap(struct dma_buf *dma_buf, 166static int udl_dmabuf_mmap(struct dma_buf *dma_buf,
183 struct vm_area_struct *vma) 167 struct vm_area_struct *vma)
184{ 168{
@@ -193,9 +177,7 @@ static const struct dma_buf_ops udl_dmabuf_ops = {
193 .map_dma_buf = udl_map_dma_buf, 177 .map_dma_buf = udl_map_dma_buf,
194 .unmap_dma_buf = udl_unmap_dma_buf, 178 .unmap_dma_buf = udl_unmap_dma_buf,
195 .map = udl_dmabuf_kmap, 179 .map = udl_dmabuf_kmap,
196 .map_atomic = udl_dmabuf_kmap_atomic,
197 .unmap = udl_dmabuf_kunmap, 180 .unmap = udl_dmabuf_kunmap,
198 .unmap_atomic = udl_dmabuf_kunmap_atomic,
199 .mmap = udl_dmabuf_mmap, 181 .mmap = udl_dmabuf_mmap,
200 .release = drm_gem_dmabuf_release, 182 .release = drm_gem_dmabuf_release,
201}; 183};
diff --git a/drivers/gpu/drm/udl/udl_drv.h b/drivers/gpu/drm/udl/udl_drv.h
index 55c0cc309198..072582570a4f 100644
--- a/drivers/gpu/drm/udl/udl_drv.h
+++ b/drivers/gpu/drm/udl/udl_drv.h
@@ -16,6 +16,7 @@
16 16
17#include <linux/usb.h> 17#include <linux/usb.h>
18#include <drm/drm_gem.h> 18#include <drm/drm_gem.h>
19#include <linux/mm_types.h>
19 20
20#define DRIVER_NAME "udl" 21#define DRIVER_NAME "udl"
21#define DRIVER_DESC "DisplayLink" 22#define DRIVER_DESC "DisplayLink"
@@ -136,7 +137,7 @@ void udl_gem_put_pages(struct udl_gem_object *obj);
136int udl_gem_vmap(struct udl_gem_object *obj); 137int udl_gem_vmap(struct udl_gem_object *obj);
137void udl_gem_vunmap(struct udl_gem_object *obj); 138void udl_gem_vunmap(struct udl_gem_object *obj);
138int udl_drm_gem_mmap(struct file *filp, struct vm_area_struct *vma); 139int udl_drm_gem_mmap(struct file *filp, struct vm_area_struct *vma);
139int udl_gem_fault(struct vm_fault *vmf); 140vm_fault_t udl_gem_fault(struct vm_fault *vmf);
140 141
141int udl_handle_damage(struct udl_framebuffer *fb, int x, int y, 142int udl_handle_damage(struct udl_framebuffer *fb, int x, int y,
142 int width, int height); 143 int width, int height);
diff --git a/drivers/gpu/drm/udl/udl_gem.c b/drivers/gpu/drm/udl/udl_gem.c
index 9a15cce22cce..d5a23295dd80 100644
--- a/drivers/gpu/drm/udl/udl_gem.c
+++ b/drivers/gpu/drm/udl/udl_gem.c
@@ -100,13 +100,12 @@ int udl_drm_gem_mmap(struct file *filp, struct vm_area_struct *vma)
100 return ret; 100 return ret;
101} 101}
102 102
103int udl_gem_fault(struct vm_fault *vmf) 103vm_fault_t udl_gem_fault(struct vm_fault *vmf)
104{ 104{
105 struct vm_area_struct *vma = vmf->vma; 105 struct vm_area_struct *vma = vmf->vma;
106 struct udl_gem_object *obj = to_udl_bo(vma->vm_private_data); 106 struct udl_gem_object *obj = to_udl_bo(vma->vm_private_data);
107 struct page *page; 107 struct page *page;
108 unsigned int page_offset; 108 unsigned int page_offset;
109 int ret = 0;
110 109
111 page_offset = (vmf->address - vma->vm_start) >> PAGE_SHIFT; 110 page_offset = (vmf->address - vma->vm_start) >> PAGE_SHIFT;
112 111
@@ -114,17 +113,7 @@ int udl_gem_fault(struct vm_fault *vmf)
114 return VM_FAULT_SIGBUS; 113 return VM_FAULT_SIGBUS;
115 114
116 page = obj->pages[page_offset]; 115 page = obj->pages[page_offset];
117 ret = vm_insert_page(vma, vmf->address, page); 116 return vmf_insert_page(vma, vmf->address, page);
118 switch (ret) {
119 case -EAGAIN:
120 case 0:
121 case -ERESTARTSYS:
122 return VM_FAULT_NOPAGE;
123 case -ENOMEM:
124 return VM_FAULT_OOM;
125 default:
126 return VM_FAULT_SIGBUS;
127 }
128} 117}
129 118
130int udl_gem_get_pages(struct udl_gem_object *obj) 119int udl_gem_get_pages(struct udl_gem_object *obj)
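
The udl conversion above is part of the tree-wide move to vm_fault_t: vmf_insert_page() already returns a VM_FAULT_* code, so the old errno-to-fault-code switch disappears. A hedged sketch of a GEM fault handler in the new style (structure names are illustrative):

#include <linux/mm.h>
#include <linux/mm_types.h>

struct example_bo {
	struct page **pages;	/* hypothetical backing pages */
};

static vm_fault_t example_gem_fault(struct vm_fault *vmf)
{
	struct vm_area_struct *vma = vmf->vma;
	struct example_bo *bo = vma->vm_private_data;
	unsigned int page_offset;

	page_offset = (vmf->address - vma->vm_start) >> PAGE_SHIFT;

	/* vmf_insert_page() maps the page and returns a vm_fault_t. */
	return vmf_insert_page(vma, vmf->address, bo->pages[page_offset]);
}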
diff --git a/drivers/gpu/drm/v3d/v3d_sched.c b/drivers/gpu/drm/v3d/v3d_sched.c
index b07bece9417d..808bc901f567 100644
--- a/drivers/gpu/drm/v3d/v3d_sched.c
+++ b/drivers/gpu/drm/v3d/v3d_sched.c
@@ -114,8 +114,8 @@ static struct dma_fence *v3d_job_run(struct drm_sched_job *sched_job)
114 v3d_invalidate_caches(v3d); 114 v3d_invalidate_caches(v3d);
115 115
116 fence = v3d_fence_create(v3d, q); 116 fence = v3d_fence_create(v3d, q);
117 if (!fence) 117 if (IS_ERR(fence))
118 return fence; 118 return NULL;
119 119
120 if (job->done_fence) 120 if (job->done_fence)
121 dma_fence_put(job->done_fence); 121 dma_fence_put(job->done_fence);
diff --git a/drivers/gpu/drm/vc4/vc4_crtc.c b/drivers/gpu/drm/vc4/vc4_crtc.c
index c8650bbcbcb3..dcadf793ee80 100644
--- a/drivers/gpu/drm/vc4/vc4_crtc.c
+++ b/drivers/gpu/drm/vc4/vc4_crtc.c
@@ -862,7 +862,6 @@ static int vc4_async_page_flip(struct drm_crtc *crtc,
862 * is released. 862 * is released.
863 */ 863 */
864 drm_atomic_set_fb_for_plane(plane->state, fb); 864 drm_atomic_set_fb_for_plane(plane->state, fb);
865 plane->fb = fb;
866 865
867 vc4_queue_seqno_cb(dev, &flip_state->cb, bo->seqno, 866 vc4_queue_seqno_cb(dev, &flip_state->cb, bo->seqno,
868 vc4_async_page_flip_complete); 867 vc4_async_page_flip_complete);
@@ -1057,7 +1056,6 @@ static int vc4_crtc_bind(struct device *dev, struct device *master, void *data)
1057 drm_crtc_init_with_planes(drm, crtc, primary_plane, NULL, 1056 drm_crtc_init_with_planes(drm, crtc, primary_plane, NULL,
1058 &vc4_crtc_funcs, NULL); 1057 &vc4_crtc_funcs, NULL);
1059 drm_crtc_helper_add(crtc, &vc4_crtc_helper_funcs); 1058 drm_crtc_helper_add(crtc, &vc4_crtc_helper_funcs);
1060 primary_plane->crtc = crtc;
1061 vc4_crtc->channel = vc4_crtc->data->hvs_channel; 1059 vc4_crtc->channel = vc4_crtc->data->hvs_channel;
1062 drm_mode_crtc_set_gamma_size(crtc, ARRAY_SIZE(vc4_crtc->lut_r)); 1060 drm_mode_crtc_set_gamma_size(crtc, ARRAY_SIZE(vc4_crtc->lut_r));
1063 drm_crtc_enable_color_mgmt(crtc, 0, false, crtc->gamma_size); 1061 drm_crtc_enable_color_mgmt(crtc, 0, false, crtc->gamma_size);
@@ -1093,7 +1091,6 @@ static int vc4_crtc_bind(struct device *dev, struct device *master, void *data)
1093 cursor_plane = vc4_plane_init(drm, DRM_PLANE_TYPE_CURSOR); 1091 cursor_plane = vc4_plane_init(drm, DRM_PLANE_TYPE_CURSOR);
1094 if (!IS_ERR(cursor_plane)) { 1092 if (!IS_ERR(cursor_plane)) {
1095 cursor_plane->possible_crtcs = 1 << drm_crtc_index(crtc); 1093 cursor_plane->possible_crtcs = 1 << drm_crtc_index(crtc);
1096 cursor_plane->crtc = crtc;
1097 crtc->cursor = cursor_plane; 1094 crtc->cursor = cursor_plane;
1098 } 1095 }
1099 1096
diff --git a/drivers/gpu/drm/vc4/vc4_plane.c b/drivers/gpu/drm/vc4/vc4_plane.c
index 1d34619eb3fe..8604fd2e7c5a 100644
--- a/drivers/gpu/drm/vc4/vc4_plane.c
+++ b/drivers/gpu/drm/vc4/vc4_plane.c
@@ -467,12 +467,14 @@ static int vc4_plane_mode_set(struct drm_plane *plane,
467 struct drm_framebuffer *fb = state->fb; 467 struct drm_framebuffer *fb = state->fb;
468 u32 ctl0_offset = vc4_state->dlist_count; 468 u32 ctl0_offset = vc4_state->dlist_count;
469 const struct hvs_format *format = vc4_get_hvs_format(fb->format->format); 469 const struct hvs_format *format = vc4_get_hvs_format(fb->format->format);
470 u64 base_format_mod = fourcc_mod_broadcom_mod(fb->modifier);
470 int num_planes = drm_format_num_planes(format->drm); 471 int num_planes = drm_format_num_planes(format->drm);
471 bool mix_plane_alpha; 472 bool mix_plane_alpha;
472 bool covers_screen; 473 bool covers_screen;
473 u32 scl0, scl1, pitch0; 474 u32 scl0, scl1, pitch0;
474 u32 lbm_size, tiling; 475 u32 lbm_size, tiling;
475 unsigned long irqflags; 476 unsigned long irqflags;
477 u32 hvs_format = format->hvs;
476 int ret, i; 478 int ret, i;
477 479
478 ret = vc4_plane_setup_clipping_and_scaling(state); 480 ret = vc4_plane_setup_clipping_and_scaling(state);
@@ -512,7 +514,7 @@ static int vc4_plane_mode_set(struct drm_plane *plane,
512 scl1 = vc4_get_scl_field(state, 0); 514 scl1 = vc4_get_scl_field(state, 0);
513 } 515 }
514 516
515 switch (fb->modifier) { 517 switch (base_format_mod) {
516 case DRM_FORMAT_MOD_LINEAR: 518 case DRM_FORMAT_MOD_LINEAR:
517 tiling = SCALER_CTL0_TILING_LINEAR; 519 tiling = SCALER_CTL0_TILING_LINEAR;
518 pitch0 = VC4_SET_FIELD(fb->pitches[0], SCALER_SRC_PITCH); 520 pitch0 = VC4_SET_FIELD(fb->pitches[0], SCALER_SRC_PITCH);
@@ -535,6 +537,49 @@ static int vc4_plane_mode_set(struct drm_plane *plane,
535 break; 537 break;
536 } 538 }
537 539
540 case DRM_FORMAT_MOD_BROADCOM_SAND64:
541 case DRM_FORMAT_MOD_BROADCOM_SAND128:
542 case DRM_FORMAT_MOD_BROADCOM_SAND256: {
543 uint32_t param = fourcc_mod_broadcom_param(fb->modifier);
544
545 /* Column-based NV12 or RGBA.
546 */
547 if (fb->format->num_planes > 1) {
548 if (hvs_format != HVS_PIXEL_FORMAT_YCBCR_YUV420_2PLANE) {
549 DRM_DEBUG_KMS("SAND format only valid for NV12/21");
550 return -EINVAL;
551 }
552 hvs_format = HVS_PIXEL_FORMAT_H264;
553 } else {
554 if (base_format_mod == DRM_FORMAT_MOD_BROADCOM_SAND256) {
555 DRM_DEBUG_KMS("SAND256 format only valid for H.264");
556 return -EINVAL;
557 }
558 }
559
560 switch (base_format_mod) {
561 case DRM_FORMAT_MOD_BROADCOM_SAND64:
562 tiling = SCALER_CTL0_TILING_64B;
563 break;
564 case DRM_FORMAT_MOD_BROADCOM_SAND128:
565 tiling = SCALER_CTL0_TILING_128B;
566 break;
567 case DRM_FORMAT_MOD_BROADCOM_SAND256:
568 tiling = SCALER_CTL0_TILING_256B_OR_T;
569 break;
570 default:
571 break;
572 }
573
574 if (param > SCALER_TILE_HEIGHT_MASK) {
575 DRM_DEBUG_KMS("SAND height too large (%d)\n", param);
576 return -EINVAL;
577 }
578
579 pitch0 = VC4_SET_FIELD(param, SCALER_TILE_HEIGHT);
580 break;
581 }
582
538 default: 583 default:
539 DRM_DEBUG_KMS("Unsupported FB tiling flag 0x%16llx", 584 DRM_DEBUG_KMS("Unsupported FB tiling flag 0x%16llx",
540 (long long)fb->modifier); 585 (long long)fb->modifier);
@@ -544,8 +589,9 @@ static int vc4_plane_mode_set(struct drm_plane *plane,
544 /* Control word */ 589 /* Control word */
545 vc4_dlist_write(vc4_state, 590 vc4_dlist_write(vc4_state,
546 SCALER_CTL0_VALID | 591 SCALER_CTL0_VALID |
592 VC4_SET_FIELD(SCALER_CTL0_RGBA_EXPAND_ROUND, SCALER_CTL0_RGBA_EXPAND) |
547 (format->pixel_order << SCALER_CTL0_ORDER_SHIFT) | 593 (format->pixel_order << SCALER_CTL0_ORDER_SHIFT) |
548 (format->hvs << SCALER_CTL0_PIXEL_FORMAT_SHIFT) | 594 (hvs_format << SCALER_CTL0_PIXEL_FORMAT_SHIFT) |
549 VC4_SET_FIELD(tiling, SCALER_CTL0_TILING) | 595 VC4_SET_FIELD(tiling, SCALER_CTL0_TILING) |
550 (vc4_state->is_unity ? SCALER_CTL0_UNITY : 0) | 596 (vc4_state->is_unity ? SCALER_CTL0_UNITY : 0) |
551 VC4_SET_FIELD(scl0, SCALER_CTL0_SCL0) | 597 VC4_SET_FIELD(scl0, SCALER_CTL0_SCL0) |
@@ -607,8 +653,13 @@ static int vc4_plane_mode_set(struct drm_plane *plane,
607 653
608 /* Pitch word 1/2 */ 654 /* Pitch word 1/2 */
609 for (i = 1; i < num_planes; i++) { 655 for (i = 1; i < num_planes; i++) {
610 vc4_dlist_write(vc4_state, 656 if (hvs_format != HVS_PIXEL_FORMAT_H264) {
611 VC4_SET_FIELD(fb->pitches[i], SCALER_SRC_PITCH)); 657 vc4_dlist_write(vc4_state,
658 VC4_SET_FIELD(fb->pitches[i],
659 SCALER_SRC_PITCH));
660 } else {
661 vc4_dlist_write(vc4_state, pitch0);
662 }
612 } 663 }
613 664
614 /* Colorspace conversion words */ 665 /* Colorspace conversion words */
@@ -810,18 +861,21 @@ static int vc4_prepare_fb(struct drm_plane *plane,
810 struct dma_fence *fence; 861 struct dma_fence *fence;
811 int ret; 862 int ret;
812 863
813 if ((plane->state->fb == state->fb) || !state->fb) 864 if (!state->fb)
814 return 0; 865 return 0;
815 866
816 bo = to_vc4_bo(&drm_fb_cma_get_gem_obj(state->fb, 0)->base); 867 bo = to_vc4_bo(&drm_fb_cma_get_gem_obj(state->fb, 0)->base);
817 868
869 fence = reservation_object_get_excl_rcu(bo->resv);
870 drm_atomic_set_fence_for_plane(state, fence);
871
872 if (plane->state->fb == state->fb)
873 return 0;
874
818 ret = vc4_bo_inc_usecnt(bo); 875 ret = vc4_bo_inc_usecnt(bo);
819 if (ret) 876 if (ret)
820 return ret; 877 return ret;
821 878
822 fence = reservation_object_get_excl_rcu(bo->resv);
823 drm_atomic_set_fence_for_plane(state, fence);
824
825 return 0; 879 return 0;
826} 880}
827 881
@@ -866,13 +920,32 @@ static bool vc4_format_mod_supported(struct drm_plane *plane,
866 case DRM_FORMAT_BGR565: 920 case DRM_FORMAT_BGR565:
867 case DRM_FORMAT_ARGB1555: 921 case DRM_FORMAT_ARGB1555:
868 case DRM_FORMAT_XRGB1555: 922 case DRM_FORMAT_XRGB1555:
869 return true; 923 switch (fourcc_mod_broadcom_mod(modifier)) {
924 case DRM_FORMAT_MOD_LINEAR:
925 case DRM_FORMAT_MOD_BROADCOM_VC4_T_TILED:
926 case DRM_FORMAT_MOD_BROADCOM_SAND64:
927 case DRM_FORMAT_MOD_BROADCOM_SAND128:
928 return true;
929 default:
930 return false;
931 }
932 case DRM_FORMAT_NV12:
933 case DRM_FORMAT_NV21:
934 switch (fourcc_mod_broadcom_mod(modifier)) {
935 case DRM_FORMAT_MOD_LINEAR:
936 case DRM_FORMAT_MOD_BROADCOM_SAND64:
937 case DRM_FORMAT_MOD_BROADCOM_SAND128:
938 case DRM_FORMAT_MOD_BROADCOM_SAND256:
939 return true;
940 default:
941 return false;
942 }
870 case DRM_FORMAT_YUV422: 943 case DRM_FORMAT_YUV422:
871 case DRM_FORMAT_YVU422: 944 case DRM_FORMAT_YVU422:
872 case DRM_FORMAT_YUV420: 945 case DRM_FORMAT_YUV420:
873 case DRM_FORMAT_YVU420: 946 case DRM_FORMAT_YVU420:
874 case DRM_FORMAT_NV12:
875 case DRM_FORMAT_NV16: 947 case DRM_FORMAT_NV16:
948 case DRM_FORMAT_NV61:
876 default: 949 default:
877 return (modifier == DRM_FORMAT_MOD_LINEAR); 950 return (modifier == DRM_FORMAT_MOD_LINEAR);
878 } 951 }
@@ -900,6 +973,9 @@ struct drm_plane *vc4_plane_init(struct drm_device *dev,
900 unsigned i; 973 unsigned i;
901 static const uint64_t modifiers[] = { 974 static const uint64_t modifiers[] = {
902 DRM_FORMAT_MOD_BROADCOM_VC4_T_TILED, 975 DRM_FORMAT_MOD_BROADCOM_VC4_T_TILED,
976 DRM_FORMAT_MOD_BROADCOM_SAND128,
977 DRM_FORMAT_MOD_BROADCOM_SAND64,
978 DRM_FORMAT_MOD_BROADCOM_SAND256,
903 DRM_FORMAT_MOD_LINEAR, 979 DRM_FORMAT_MOD_LINEAR,
904 DRM_FORMAT_MOD_INVALID 980 DRM_FORMAT_MOD_INVALID
905 }; 981 };
diff --git a/drivers/gpu/drm/vc4/vc4_regs.h b/drivers/gpu/drm/vc4/vc4_regs.h
index d1fb6fec46eb..d6864fa4bd14 100644
--- a/drivers/gpu/drm/vc4/vc4_regs.h
+++ b/drivers/gpu/drm/vc4/vc4_regs.h
@@ -1031,6 +1031,12 @@ enum hvs_pixel_format {
1031#define SCALER_SRC_PITCH_MASK VC4_MASK(15, 0) 1031#define SCALER_SRC_PITCH_MASK VC4_MASK(15, 0)
1032#define SCALER_SRC_PITCH_SHIFT 0 1032#define SCALER_SRC_PITCH_SHIFT 0
1033 1033
1034/* PITCH0/1/2 fields for tiled (SAND). */
1035#define SCALER_TILE_SKIP_0_MASK VC4_MASK(18, 16)
1036#define SCALER_TILE_SKIP_0_SHIFT 16
1037#define SCALER_TILE_HEIGHT_MASK VC4_MASK(15, 0)
1038#define SCALER_TILE_HEIGHT_SHIFT 0
1039
1034/* PITCH0 fields for T-tiled. */ 1040/* PITCH0 fields for T-tiled. */
1035#define SCALER_PITCH0_TILE_WIDTH_L_MASK VC4_MASK(22, 16) 1041#define SCALER_PITCH0_TILE_WIDTH_L_MASK VC4_MASK(22, 16)
1036#define SCALER_PITCH0_TILE_WIDTH_L_SHIFT 16 1042#define SCALER_PITCH0_TILE_WIDTH_L_SHIFT 16
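
The vc4 plane code above splits a Broadcom format modifier into its base and its parameter: for the SAND layouts the parameter carries the column height that ends up in the SCALER_TILE_HEIGHT field of the pitch word. A hedged sketch of that decoding (the values printed are examples only):

#include <drm/drm_fourcc.h>
#include <linux/printk.h>
#include <linux/types.h>

/*
 * Sketch: split a SAND modifier into the tiling variant (base) and the
 * per-column height parameter that the scaler needs.
 */
static void example_decode_sand_modifier(u64 modifier)
{
	u64 base  = fourcc_mod_broadcom_mod(modifier);
	u32 param = fourcc_mod_broadcom_param(modifier);

	if (base == DRM_FORMAT_MOD_BROADCOM_SAND128)
		pr_info("SAND128, column height %u lines\n", param);
}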
diff --git a/drivers/gpu/drm/vgem/vgem_drv.c b/drivers/gpu/drm/vgem/vgem_drv.c
index 2524ff116f00..c64a85950c82 100644
--- a/drivers/gpu/drm/vgem/vgem_drv.c
+++ b/drivers/gpu/drm/vgem/vgem_drv.c
@@ -61,13 +61,13 @@ static void vgem_gem_free_object(struct drm_gem_object *obj)
61 kfree(vgem_obj); 61 kfree(vgem_obj);
62} 62}
63 63
64static int vgem_gem_fault(struct vm_fault *vmf) 64static vm_fault_t vgem_gem_fault(struct vm_fault *vmf)
65{ 65{
66 struct vm_area_struct *vma = vmf->vma; 66 struct vm_area_struct *vma = vmf->vma;
67 struct drm_vgem_gem_object *obj = vma->vm_private_data; 67 struct drm_vgem_gem_object *obj = vma->vm_private_data;
68 /* We don't use vmf->pgoff since that has the fake offset */ 68 /* We don't use vmf->pgoff since that has the fake offset */
69 unsigned long vaddr = vmf->address; 69 unsigned long vaddr = vmf->address;
70 int ret; 70 vm_fault_t ret = VM_FAULT_SIGBUS;
71 loff_t num_pages; 71 loff_t num_pages;
72 pgoff_t page_offset; 72 pgoff_t page_offset;
73 page_offset = (vaddr - vma->vm_start) >> PAGE_SHIFT; 73 page_offset = (vaddr - vma->vm_start) >> PAGE_SHIFT;
@@ -77,7 +77,6 @@ static int vgem_gem_fault(struct vm_fault *vmf)
77 if (page_offset > num_pages) 77 if (page_offset > num_pages)
78 return VM_FAULT_SIGBUS; 78 return VM_FAULT_SIGBUS;
79 79
80 ret = -ENOENT;
81 mutex_lock(&obj->pages_lock); 80 mutex_lock(&obj->pages_lock);
82 if (obj->pages) { 81 if (obj->pages) {
83 get_page(obj->pages[page_offset]); 82 get_page(obj->pages[page_offset]);
diff --git a/drivers/gpu/drm/virtio/virtgpu_display.c b/drivers/gpu/drm/virtio/virtgpu_display.c
index a5edd86603d9..ff9933e79416 100644
--- a/drivers/gpu/drm/virtio/virtgpu_display.c
+++ b/drivers/gpu/drm/virtio/virtgpu_display.c
@@ -28,6 +28,7 @@
28#include "virtgpu_drv.h" 28#include "virtgpu_drv.h"
29#include <drm/drm_crtc_helper.h> 29#include <drm/drm_crtc_helper.h>
30#include <drm/drm_atomic_helper.h> 30#include <drm/drm_atomic_helper.h>
31#include <drm/drm_gem_framebuffer_helper.h>
31 32
32#define XRES_MIN 32 33#define XRES_MIN 32
33#define YRES_MIN 32 34#define YRES_MIN 32
@@ -48,16 +49,6 @@ static const struct drm_crtc_funcs virtio_gpu_crtc_funcs = {
48 .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state, 49 .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
49}; 50};
50 51
51static void virtio_gpu_user_framebuffer_destroy(struct drm_framebuffer *fb)
52{
53 struct virtio_gpu_framebuffer *virtio_gpu_fb
54 = to_virtio_gpu_framebuffer(fb);
55
56 drm_gem_object_put_unlocked(virtio_gpu_fb->obj);
57 drm_framebuffer_cleanup(fb);
58 kfree(virtio_gpu_fb);
59}
60
61static int 52static int
62virtio_gpu_framebuffer_surface_dirty(struct drm_framebuffer *fb, 53virtio_gpu_framebuffer_surface_dirty(struct drm_framebuffer *fb,
63 struct drm_file *file_priv, 54 struct drm_file *file_priv,
@@ -71,20 +62,9 @@ virtio_gpu_framebuffer_surface_dirty(struct drm_framebuffer *fb,
71 return virtio_gpu_surface_dirty(virtio_gpu_fb, clips, num_clips); 62 return virtio_gpu_surface_dirty(virtio_gpu_fb, clips, num_clips);
72} 63}
73 64
74static int
75virtio_gpu_framebuffer_create_handle(struct drm_framebuffer *fb,
76 struct drm_file *file_priv,
77 unsigned int *handle)
78{
79 struct virtio_gpu_framebuffer *virtio_gpu_fb =
80 to_virtio_gpu_framebuffer(fb);
81
82 return drm_gem_handle_create(file_priv, virtio_gpu_fb->obj, handle);
83}
84
85static const struct drm_framebuffer_funcs virtio_gpu_fb_funcs = { 65static const struct drm_framebuffer_funcs virtio_gpu_fb_funcs = {
86 .create_handle = virtio_gpu_framebuffer_create_handle, 66 .create_handle = drm_gem_fb_create_handle,
87 .destroy = virtio_gpu_user_framebuffer_destroy, 67 .destroy = drm_gem_fb_destroy,
88 .dirty = virtio_gpu_framebuffer_surface_dirty, 68 .dirty = virtio_gpu_framebuffer_surface_dirty,
89}; 69};
90 70
@@ -97,7 +77,7 @@ virtio_gpu_framebuffer_init(struct drm_device *dev,
97 int ret; 77 int ret;
98 struct virtio_gpu_object *bo; 78 struct virtio_gpu_object *bo;
99 79
100 vgfb->obj = obj; 80 vgfb->base.obj[0] = obj;
101 81
102 bo = gem_to_virtio_gpu_obj(obj); 82 bo = gem_to_virtio_gpu_obj(obj);
103 83
@@ -105,7 +85,7 @@ virtio_gpu_framebuffer_init(struct drm_device *dev,
105 85
106 ret = drm_framebuffer_init(dev, &vgfb->base, &virtio_gpu_fb_funcs); 86 ret = drm_framebuffer_init(dev, &vgfb->base, &virtio_gpu_fb_funcs);
107 if (ret) { 87 if (ret) {
108 vgfb->obj = NULL; 88 vgfb->base.obj[0] = NULL;
109 return ret; 89 return ret;
110 } 90 }
111 91
@@ -302,8 +282,6 @@ static int vgdev_output_init(struct virtio_gpu_device *vgdev, int index)
302 drm_crtc_init_with_planes(dev, crtc, primary, cursor, 282 drm_crtc_init_with_planes(dev, crtc, primary, cursor,
303 &virtio_gpu_crtc_funcs, NULL); 283 &virtio_gpu_crtc_funcs, NULL);
304 drm_crtc_helper_add(crtc, &virtio_gpu_crtc_helper_funcs); 284 drm_crtc_helper_add(crtc, &virtio_gpu_crtc_helper_funcs);
305 primary->crtc = crtc;
306 cursor->crtc = crtc;
307 285
308 drm_connector_init(dev, connector, &virtio_gpu_connector_funcs, 286 drm_connector_init(dev, connector, &virtio_gpu_connector_funcs,
309 DRM_MODE_CONNECTOR_VIRTUAL); 287 DRM_MODE_CONNECTOR_VIRTUAL);
diff --git a/drivers/gpu/drm/virtio/virtgpu_drv.h b/drivers/gpu/drm/virtio/virtgpu_drv.h
index d25c8ca224aa..65605e207bbe 100644
--- a/drivers/gpu/drm/virtio/virtgpu_drv.h
+++ b/drivers/gpu/drm/virtio/virtgpu_drv.h
@@ -124,7 +124,6 @@ struct virtio_gpu_output {
124 124
125struct virtio_gpu_framebuffer { 125struct virtio_gpu_framebuffer {
126 struct drm_framebuffer base; 126 struct drm_framebuffer base;
127 struct drm_gem_object *obj;
128 int x1, y1, x2, y2; /* dirty rect */ 127 int x1, y1, x2, y2; /* dirty rect */
129 spinlock_t dirty_lock; 128 spinlock_t dirty_lock;
130 uint32_t hw_res_handle; 129 uint32_t hw_res_handle;
diff --git a/drivers/gpu/drm/virtio/virtgpu_fb.c b/drivers/gpu/drm/virtio/virtgpu_fb.c
index 8af69ab58b89..a121b1c79522 100644
--- a/drivers/gpu/drm/virtio/virtgpu_fb.c
+++ b/drivers/gpu/drm/virtio/virtgpu_fb.c
@@ -46,7 +46,7 @@ static int virtio_gpu_dirty_update(struct virtio_gpu_framebuffer *fb,
46 int bpp = fb->base.format->cpp[0]; 46 int bpp = fb->base.format->cpp[0];
47 int x2, y2; 47 int x2, y2;
48 unsigned long flags; 48 unsigned long flags;
49 struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(fb->obj); 49 struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(fb->base.obj[0]);
50 50
51 if ((width <= 0) || 51 if ((width <= 0) ||
52 (x + width > fb->base.width) || 52 (x + width > fb->base.width) ||
@@ -121,7 +121,7 @@ int virtio_gpu_surface_dirty(struct virtio_gpu_framebuffer *vgfb,
121 unsigned int num_clips) 121 unsigned int num_clips)
122{ 122{
123 struct virtio_gpu_device *vgdev = vgfb->base.dev->dev_private; 123 struct virtio_gpu_device *vgdev = vgfb->base.dev->dev_private;
124 struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(vgfb->obj); 124 struct virtio_gpu_object *obj = gem_to_virtio_gpu_obj(vgfb->base.obj[0]);
125 struct drm_clip_rect norect; 125 struct drm_clip_rect norect;
126 struct drm_clip_rect *clips_ptr; 126 struct drm_clip_rect *clips_ptr;
127 int left, right, top, bottom; 127 int left, right, top, bottom;
@@ -305,8 +305,8 @@ static int virtio_gpu_fbdev_destroy(struct drm_device *dev,
305 305
306 drm_fb_helper_unregister_fbi(&vgfbdev->helper); 306 drm_fb_helper_unregister_fbi(&vgfbdev->helper);
307 307
308 if (vgfb->obj) 308 if (vgfb->base.obj[0])
309 vgfb->obj = NULL; 309 vgfb->base.obj[0] = NULL;
310 drm_fb_helper_fini(&vgfbdev->helper); 310 drm_fb_helper_fini(&vgfbdev->helper);
311 drm_framebuffer_cleanup(&vgfb->base); 311 drm_framebuffer_cleanup(&vgfb->base);
312 312
diff --git a/drivers/gpu/drm/virtio/virtgpu_plane.c b/drivers/gpu/drm/virtio/virtgpu_plane.c
index 71ba455af915..dc5b5b2b7aab 100644
--- a/drivers/gpu/drm/virtio/virtgpu_plane.c
+++ b/drivers/gpu/drm/virtio/virtgpu_plane.c
@@ -154,7 +154,7 @@ static void virtio_gpu_primary_plane_update(struct drm_plane *plane,
154 154
155 if (plane->state->fb) { 155 if (plane->state->fb) {
156 vgfb = to_virtio_gpu_framebuffer(plane->state->fb); 156 vgfb = to_virtio_gpu_framebuffer(plane->state->fb);
157 bo = gem_to_virtio_gpu_obj(vgfb->obj); 157 bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]);
158 handle = bo->hw_res_handle; 158 handle = bo->hw_res_handle;
159 if (bo->dumb) { 159 if (bo->dumb) {
160 virtio_gpu_cmd_transfer_to_host_2d 160 virtio_gpu_cmd_transfer_to_host_2d
@@ -208,7 +208,7 @@ static void virtio_gpu_cursor_plane_update(struct drm_plane *plane,
208 208
209 if (plane->state->fb) { 209 if (plane->state->fb) {
210 vgfb = to_virtio_gpu_framebuffer(plane->state->fb); 210 vgfb = to_virtio_gpu_framebuffer(plane->state->fb);
211 bo = gem_to_virtio_gpu_obj(vgfb->obj); 211 bo = gem_to_virtio_gpu_obj(vgfb->base.obj[0]);
212 handle = bo->hw_res_handle; 212 handle = bo->hw_res_handle;
213 } else { 213 } else {
214 handle = 0; 214 handle = 0;
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_fb.c b/drivers/gpu/drm/vmwgfx/vmwgfx_fb.c
index 54e300365a5c..9b7e0aca5f84 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_fb.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_fb.c
@@ -439,38 +439,13 @@ static int vmw_fb_compute_depth(struct fb_var_screeninfo *var,
439static int vmwgfx_set_config_internal(struct drm_mode_set *set) 439static int vmwgfx_set_config_internal(struct drm_mode_set *set)
440{ 440{
441 struct drm_crtc *crtc = set->crtc; 441 struct drm_crtc *crtc = set->crtc;
442 struct drm_framebuffer *fb;
443 struct drm_crtc *tmp;
444 struct drm_device *dev = set->crtc->dev;
445 struct drm_modeset_acquire_ctx ctx; 442 struct drm_modeset_acquire_ctx ctx;
446 int ret; 443 int ret;
447 444
448 drm_modeset_acquire_init(&ctx, 0); 445 drm_modeset_acquire_init(&ctx, 0);
449 446
450restart: 447restart:
451 /*
452 * NOTE: ->set_config can also disable other crtcs (if we steal all
453 * connectors from it), hence we need to refcount the fbs across all
454 * crtcs. Atomic modeset will have saner semantics ...
455 */
456 drm_for_each_crtc(tmp, dev)
457 tmp->primary->old_fb = tmp->primary->fb;
458
459 fb = set->fb;
460
461 ret = crtc->funcs->set_config(set, &ctx); 448 ret = crtc->funcs->set_config(set, &ctx);
462 if (ret == 0) {
463 crtc->primary->crtc = crtc;
464 crtc->primary->fb = fb;
465 }
466
467 drm_for_each_crtc(tmp, dev) {
468 if (tmp->primary->fb)
469 drm_framebuffer_get(tmp->primary->fb);
470 if (tmp->primary->old_fb)
471 drm_framebuffer_put(tmp->primary->old_fb);
472 tmp->primary->old_fb = NULL;
473 }
474 449
475 if (ret == -EDEADLK) { 450 if (ret == -EDEADLK) {
476 drm_modeset_backoff(&ctx); 451 drm_modeset_backoff(&ctx);
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_kms.c b/drivers/gpu/drm/vmwgfx/vmwgfx_kms.c
index 01f2dc9e6f52..ef96ba7432ad 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_kms.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_kms.c
@@ -1536,9 +1536,13 @@ vmw_kms_atomic_check_modeset(struct drm_device *dev,
1536 unsigned long requested_bb_mem = 0; 1536 unsigned long requested_bb_mem = 0;
1537 1537
1538 if (dev_priv->active_display_unit == vmw_du_screen_target) { 1538 if (dev_priv->active_display_unit == vmw_du_screen_target) {
1539 if (crtc->primary->fb) { 1539 struct drm_plane *plane = crtc->primary;
1540 int cpp = crtc->primary->fb->pitches[0] / 1540 struct drm_plane_state *plane_state;
1541 crtc->primary->fb->width; 1541
1542 plane_state = drm_atomic_get_new_plane_state(state, plane);
1543
1544 if (plane_state && plane_state->fb) {
1545 int cpp = plane_state->fb->format->cpp[0];
1542 1546
1543 requested_bb_mem += crtc->mode.hdisplay * cpp * 1547 requested_bb_mem += crtc->mode.hdisplay * cpp *
1544 crtc->mode.vdisplay; 1548 crtc->mode.vdisplay;
@@ -2322,9 +2326,10 @@ int vmw_kms_helper_dirty(struct vmw_private *dev_priv,
2322 } else { 2326 } else {
2323 list_for_each_entry(crtc, &dev_priv->dev->mode_config.crtc_list, 2327 list_for_each_entry(crtc, &dev_priv->dev->mode_config.crtc_list,
2324 head) { 2328 head) {
2325 if (crtc->primary->fb != &framebuffer->base) 2329 struct drm_plane *plane = crtc->primary;
2326 continue; 2330
2327 units[num_units++] = vmw_crtc_to_du(crtc); 2331 if (plane->state->fb == &framebuffer->base)
2332 units[num_units++] = vmw_crtc_to_du(crtc);
2328 } 2333 }
2329 } 2334 }
2330 2335
@@ -2806,6 +2811,7 @@ void vmw_kms_update_implicit_fb(struct vmw_private *dev_priv,
2806 struct drm_crtc *crtc) 2811 struct drm_crtc *crtc)
2807{ 2812{
2808 struct vmw_display_unit *du = vmw_crtc_to_du(crtc); 2813 struct vmw_display_unit *du = vmw_crtc_to_du(crtc);
2814 struct drm_plane *plane = crtc->primary;
2809 struct vmw_framebuffer *vfb; 2815 struct vmw_framebuffer *vfb;
2810 2816
2811 mutex_lock(&dev_priv->global_kms_state_mutex); 2817 mutex_lock(&dev_priv->global_kms_state_mutex);
@@ -2813,7 +2819,7 @@ void vmw_kms_update_implicit_fb(struct vmw_private *dev_priv,
2813 if (!du->is_implicit) 2819 if (!du->is_implicit)
2814 goto out_unlock; 2820 goto out_unlock;
2815 2821
2816 vfb = vmw_framebuffer_to_vfb(crtc->primary->fb); 2822 vfb = vmw_framebuffer_to_vfb(plane->state->fb);
2817 WARN_ON_ONCE(dev_priv->num_implicit != 1 && 2823 WARN_ON_ONCE(dev_priv->num_implicit != 1 &&
2818 dev_priv->implicit_fb != vfb); 2824 dev_priv->implicit_fb != vfb);
2819 2825
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_prime.c b/drivers/gpu/drm/vmwgfx/vmwgfx_prime.c
index 0d42a46521fc..373bc6da2f84 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_prime.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_prime.c
@@ -40,7 +40,6 @@
40 */ 40 */
41 41
42static int vmw_prime_map_attach(struct dma_buf *dma_buf, 42static int vmw_prime_map_attach(struct dma_buf *dma_buf,
43 struct device *target_dev,
44 struct dma_buf_attachment *attach) 43 struct dma_buf_attachment *attach)
45{ 44{
46 return -ENOSYS; 45 return -ENOSYS;
@@ -72,17 +71,6 @@ static void vmw_prime_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr)
72{ 71{
73} 72}
74 73
75static void *vmw_prime_dmabuf_kmap_atomic(struct dma_buf *dma_buf,
76 unsigned long page_num)
77{
78 return NULL;
79}
80
81static void vmw_prime_dmabuf_kunmap_atomic(struct dma_buf *dma_buf,
82 unsigned long page_num, void *addr)
83{
84
85}
86static void *vmw_prime_dmabuf_kmap(struct dma_buf *dma_buf, 74static void *vmw_prime_dmabuf_kmap(struct dma_buf *dma_buf,
87 unsigned long page_num) 75 unsigned long page_num)
88{ 76{
@@ -109,9 +97,7 @@ const struct dma_buf_ops vmw_prime_dmabuf_ops = {
109 .unmap_dma_buf = vmw_prime_unmap_dma_buf, 97 .unmap_dma_buf = vmw_prime_unmap_dma_buf,
110 .release = NULL, 98 .release = NULL,
111 .map = vmw_prime_dmabuf_kmap, 99 .map = vmw_prime_dmabuf_kmap,
112 .map_atomic = vmw_prime_dmabuf_kmap_atomic,
113 .unmap = vmw_prime_dmabuf_kunmap, 100 .unmap = vmw_prime_dmabuf_kunmap,
114 .unmap_atomic = vmw_prime_dmabuf_kunmap_atomic,
115 .mmap = vmw_prime_dmabuf_mmap, 101 .mmap = vmw_prime_dmabuf_mmap,
116 .vmap = vmw_prime_dmabuf_vmap, 102 .vmap = vmw_prime_dmabuf_vmap,
117 .vunmap = vmw_prime_dmabuf_vunmap, 103 .vunmap = vmw_prime_dmabuf_vunmap,
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c b/drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c
index 3d667e903beb..9798640cbfcd 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c
@@ -527,8 +527,6 @@ vmw_sou_primary_plane_atomic_update(struct drm_plane *plane,
527 */ 527 */
528 if (ret != 0) 528 if (ret != 0)
529 DRM_ERROR("Failed to update screen.\n"); 529 DRM_ERROR("Failed to update screen.\n");
530
531 crtc->primary->fb = plane->state->fb;
532 } else { 530 } else {
533 /* 531 /*
534 * When disabling a plane, CRTC and FB should always be NULL 532 * When disabling a plane, CRTC and FB should always be NULL
diff --git a/drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c b/drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c
index 67331f01ef32..152e96cb1c01 100644
--- a/drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c
+++ b/drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c
@@ -414,6 +414,7 @@ static void vmw_stdu_crtc_helper_prepare(struct drm_crtc *crtc)
414static void vmw_stdu_crtc_atomic_enable(struct drm_crtc *crtc, 414static void vmw_stdu_crtc_atomic_enable(struct drm_crtc *crtc,
415 struct drm_crtc_state *old_state) 415 struct drm_crtc_state *old_state)
416{ 416{
417 struct drm_plane_state *plane_state = crtc->primary->state;
417 struct vmw_private *dev_priv; 418 struct vmw_private *dev_priv;
418 struct vmw_screen_target_display_unit *stdu; 419 struct vmw_screen_target_display_unit *stdu;
419 struct vmw_framebuffer *vfb; 420 struct vmw_framebuffer *vfb;
@@ -422,7 +423,7 @@ static void vmw_stdu_crtc_atomic_enable(struct drm_crtc *crtc,
422 423
423 stdu = vmw_crtc_to_stdu(crtc); 424 stdu = vmw_crtc_to_stdu(crtc);
424 dev_priv = vmw_priv(crtc->dev); 425 dev_priv = vmw_priv(crtc->dev);
425 fb = crtc->primary->fb; 426 fb = plane_state->fb;
426 427
427 vfb = (fb) ? vmw_framebuffer_to_vfb(fb) : NULL; 428 vfb = (fb) ? vmw_framebuffer_to_vfb(fb) : NULL;
428 429
@@ -1285,8 +1286,6 @@ vmw_stdu_primary_plane_atomic_update(struct drm_plane *plane,
1285 1, 1, NULL, crtc); 1286 1, 1, NULL, crtc);
1286 if (ret) 1287 if (ret)
1287 DRM_ERROR("Failed to update STDU.\n"); 1288 DRM_ERROR("Failed to update STDU.\n");
1288
1289 crtc->primary->fb = plane->state->fb;
1290 } else { 1289 } else {
1291 crtc = old_state->crtc; 1290 crtc = old_state->crtc;
1292 stdu = vmw_crtc_to_stdu(crtc); 1291 stdu = vmw_crtc_to_stdu(crtc);
diff --git a/drivers/gpu/drm/xen/xen_drm_front.c b/drivers/gpu/drm/xen/xen_drm_front.c
index b3786c1a4e80..6b6d5ab82ec3 100644
--- a/drivers/gpu/drm/xen/xen_drm_front.c
+++ b/drivers/gpu/drm/xen/xen_drm_front.c
@@ -623,7 +623,7 @@ static int displback_initwait(struct xen_drm_front_info *front_info)
623 if (ret < 0) 623 if (ret < 0)
624 return ret; 624 return ret;
625 625
626 DRM_INFO("Have %d conector(s)\n", cfg->num_connectors); 626 DRM_INFO("Have %d connector(s)\n", cfg->num_connectors);
627 /* Create event channels for all connectors and publish */ 627 /* Create event channels for all connectors and publish */
628 ret = xen_drm_front_evtchnl_create_all(front_info); 628 ret = xen_drm_front_evtchnl_create_all(front_info);
629 if (ret < 0) 629 if (ret < 0)
diff --git a/drivers/gpu/drm/xen/xen_drm_front.h b/drivers/gpu/drm/xen/xen_drm_front.h
index 2c2479b571ae..5693b4a4b02b 100644
--- a/drivers/gpu/drm/xen/xen_drm_front.h
+++ b/drivers/gpu/drm/xen/xen_drm_front.h
@@ -126,12 +126,12 @@ struct xen_drm_front_drm_info {
126 126
127static inline u64 xen_drm_front_fb_to_cookie(struct drm_framebuffer *fb) 127static inline u64 xen_drm_front_fb_to_cookie(struct drm_framebuffer *fb)
128{ 128{
129 return (u64)fb; 129 return (uintptr_t)fb;
130} 130}
131 131
132static inline u64 xen_drm_front_dbuf_to_cookie(struct drm_gem_object *gem_obj) 132static inline u64 xen_drm_front_dbuf_to_cookie(struct drm_gem_object *gem_obj)
133{ 133{
134 return (u64)gem_obj; 134 return (uintptr_t)gem_obj;
135} 135}
136 136
137int xen_drm_front_mode_set(struct xen_drm_front_drm_pipeline *pipeline, 137int xen_drm_front_mode_set(struct xen_drm_front_drm_pipeline *pipeline,
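
The two cookie helpers above now go through uintptr_t: a direct pointer-to-u64 cast warns on 32-bit builds because the integer type is wider than the pointer. A tiny sketch of the portable form:

	static inline u64 ptr_to_cookie(const void *ptr)
	{
		/* uintptr_t matches the pointer width and then widens cleanly to u64 */
		return (uintptr_t)ptr;
	}
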
diff --git a/drivers/gpu/drm/xen/xen_drm_front_shbuf.c b/drivers/gpu/drm/xen/xen_drm_front_shbuf.c
index 8099cb343ae3..d333b67cc1a0 100644
--- a/drivers/gpu/drm/xen/xen_drm_front_shbuf.c
+++ b/drivers/gpu/drm/xen/xen_drm_front_shbuf.c
@@ -122,7 +122,7 @@ static void guest_calc_num_grefs(struct xen_drm_front_shbuf *buf)
122} 122}
123 123
124#define xen_page_to_vaddr(page) \ 124#define xen_page_to_vaddr(page) \
125 ((phys_addr_t)pfn_to_kaddr(page_to_xen_pfn(page))) 125 ((uintptr_t)pfn_to_kaddr(page_to_xen_pfn(page)))
126 126
127static int backend_unmap(struct xen_drm_front_shbuf *buf) 127static int backend_unmap(struct xen_drm_front_shbuf *buf)
128{ 128{
diff --git a/drivers/media/common/videobuf2/videobuf2-dma-contig.c b/drivers/media/common/videobuf2/videobuf2-dma-contig.c
index f1178f6f434d..aff0ab7bf83d 100644
--- a/drivers/media/common/videobuf2/videobuf2-dma-contig.c
+++ b/drivers/media/common/videobuf2/videobuf2-dma-contig.c
@@ -222,7 +222,7 @@ struct vb2_dc_attachment {
222 enum dma_data_direction dma_dir; 222 enum dma_data_direction dma_dir;
223}; 223};
224 224
225static int vb2_dc_dmabuf_ops_attach(struct dma_buf *dbuf, struct device *dev, 225static int vb2_dc_dmabuf_ops_attach(struct dma_buf *dbuf,
226 struct dma_buf_attachment *dbuf_attach) 226 struct dma_buf_attachment *dbuf_attach)
227{ 227{
228 struct vb2_dc_attachment *attach; 228 struct vb2_dc_attachment *attach;
@@ -358,7 +358,6 @@ static const struct dma_buf_ops vb2_dc_dmabuf_ops = {
358 .map_dma_buf = vb2_dc_dmabuf_ops_map, 358 .map_dma_buf = vb2_dc_dmabuf_ops_map,
359 .unmap_dma_buf = vb2_dc_dmabuf_ops_unmap, 359 .unmap_dma_buf = vb2_dc_dmabuf_ops_unmap,
360 .map = vb2_dc_dmabuf_ops_kmap, 360 .map = vb2_dc_dmabuf_ops_kmap,
361 .map_atomic = vb2_dc_dmabuf_ops_kmap,
362 .vmap = vb2_dc_dmabuf_ops_vmap, 361 .vmap = vb2_dc_dmabuf_ops_vmap,
363 .mmap = vb2_dc_dmabuf_ops_mmap, 362 .mmap = vb2_dc_dmabuf_ops_mmap,
364 .release = vb2_dc_dmabuf_ops_release, 363 .release = vb2_dc_dmabuf_ops_release,
diff --git a/drivers/media/common/videobuf2/videobuf2-dma-sg.c b/drivers/media/common/videobuf2/videobuf2-dma-sg.c
index 753ed3138dcc..015e737095cd 100644
--- a/drivers/media/common/videobuf2/videobuf2-dma-sg.c
+++ b/drivers/media/common/videobuf2/videobuf2-dma-sg.c
@@ -371,7 +371,7 @@ struct vb2_dma_sg_attachment {
371 enum dma_data_direction dma_dir; 371 enum dma_data_direction dma_dir;
372}; 372};
373 373
374static int vb2_dma_sg_dmabuf_ops_attach(struct dma_buf *dbuf, struct device *dev, 374static int vb2_dma_sg_dmabuf_ops_attach(struct dma_buf *dbuf,
375 struct dma_buf_attachment *dbuf_attach) 375 struct dma_buf_attachment *dbuf_attach)
376{ 376{
377 struct vb2_dma_sg_attachment *attach; 377 struct vb2_dma_sg_attachment *attach;
@@ -507,7 +507,6 @@ static const struct dma_buf_ops vb2_dma_sg_dmabuf_ops = {
507 .map_dma_buf = vb2_dma_sg_dmabuf_ops_map, 507 .map_dma_buf = vb2_dma_sg_dmabuf_ops_map,
508 .unmap_dma_buf = vb2_dma_sg_dmabuf_ops_unmap, 508 .unmap_dma_buf = vb2_dma_sg_dmabuf_ops_unmap,
509 .map = vb2_dma_sg_dmabuf_ops_kmap, 509 .map = vb2_dma_sg_dmabuf_ops_kmap,
510 .map_atomic = vb2_dma_sg_dmabuf_ops_kmap,
511 .vmap = vb2_dma_sg_dmabuf_ops_vmap, 510 .vmap = vb2_dma_sg_dmabuf_ops_vmap,
512 .mmap = vb2_dma_sg_dmabuf_ops_mmap, 511 .mmap = vb2_dma_sg_dmabuf_ops_mmap,
513 .release = vb2_dma_sg_dmabuf_ops_release, 512 .release = vb2_dma_sg_dmabuf_ops_release,
diff --git a/drivers/media/common/videobuf2/videobuf2-vmalloc.c b/drivers/media/common/videobuf2/videobuf2-vmalloc.c
index 359fb9804d16..6dfbd5b05907 100644
--- a/drivers/media/common/videobuf2/videobuf2-vmalloc.c
+++ b/drivers/media/common/videobuf2/videobuf2-vmalloc.c
@@ -209,7 +209,7 @@ struct vb2_vmalloc_attachment {
209 enum dma_data_direction dma_dir; 209 enum dma_data_direction dma_dir;
210}; 210};
211 211
212static int vb2_vmalloc_dmabuf_ops_attach(struct dma_buf *dbuf, struct device *dev, 212static int vb2_vmalloc_dmabuf_ops_attach(struct dma_buf *dbuf,
213 struct dma_buf_attachment *dbuf_attach) 213 struct dma_buf_attachment *dbuf_attach)
214{ 214{
215 struct vb2_vmalloc_attachment *attach; 215 struct vb2_vmalloc_attachment *attach;
@@ -346,7 +346,6 @@ static const struct dma_buf_ops vb2_vmalloc_dmabuf_ops = {
346 .map_dma_buf = vb2_vmalloc_dmabuf_ops_map, 346 .map_dma_buf = vb2_vmalloc_dmabuf_ops_map,
347 .unmap_dma_buf = vb2_vmalloc_dmabuf_ops_unmap, 347 .unmap_dma_buf = vb2_vmalloc_dmabuf_ops_unmap,
348 .map = vb2_vmalloc_dmabuf_ops_kmap, 348 .map = vb2_vmalloc_dmabuf_ops_kmap,
349 .map_atomic = vb2_vmalloc_dmabuf_ops_kmap,
350 .vmap = vb2_vmalloc_dmabuf_ops_vmap, 349 .vmap = vb2_vmalloc_dmabuf_ops_vmap,
351 .mmap = vb2_vmalloc_dmabuf_ops_mmap, 350 .mmap = vb2_vmalloc_dmabuf_ops_mmap,
352 .release = vb2_vmalloc_dmabuf_ops_release, 351 .release = vb2_vmalloc_dmabuf_ops_release,
diff --git a/drivers/staging/android/ion/ion.c b/drivers/staging/android/ion/ion.c
index 9d1109e43ed4..99073325b0c0 100644
--- a/drivers/staging/android/ion/ion.c
+++ b/drivers/staging/android/ion/ion.c
@@ -201,7 +201,7 @@ struct ion_dma_buf_attachment {
201 struct list_head list; 201 struct list_head list;
202}; 202};
203 203
204static int ion_dma_buf_attach(struct dma_buf *dmabuf, struct device *dev, 204static int ion_dma_buf_attach(struct dma_buf *dmabuf,
205 struct dma_buf_attachment *attachment) 205 struct dma_buf_attachment *attachment)
206{ 206{
207 struct ion_dma_buf_attachment *a; 207 struct ion_dma_buf_attachment *a;
@@ -219,7 +219,7 @@ static int ion_dma_buf_attach(struct dma_buf *dmabuf, struct device *dev,
219 } 219 }
220 220
221 a->table = table; 221 a->table = table;
222 a->dev = dev; 222 a->dev = attachment->dev;
223 INIT_LIST_HEAD(&a->list); 223 INIT_LIST_HEAD(&a->list);
224 224
225 attachment->priv = a; 225 attachment->priv = a;
@@ -375,8 +375,6 @@ static const struct dma_buf_ops dma_buf_ops = {
375 .detach = ion_dma_buf_detatch, 375 .detach = ion_dma_buf_detatch,
376 .begin_cpu_access = ion_dma_buf_begin_cpu_access, 376 .begin_cpu_access = ion_dma_buf_begin_cpu_access,
377 .end_cpu_access = ion_dma_buf_end_cpu_access, 377 .end_cpu_access = ion_dma_buf_end_cpu_access,
378 .map_atomic = ion_dma_buf_kmap,
379 .unmap_atomic = ion_dma_buf_kunmap,
380 .map = ion_dma_buf_kmap, 378 .map = ion_dma_buf_kmap,
381 .unmap = ion_dma_buf_kunmap, 379 .unmap = ion_dma_buf_kunmap,
382}; 380};
diff --git a/drivers/tee/tee_shm.c b/drivers/tee/tee_shm.c
index 07d3be6f0780..0b9ab1d0dd45 100644
--- a/drivers/tee/tee_shm.c
+++ b/drivers/tee/tee_shm.c
@@ -80,11 +80,6 @@ static void tee_shm_op_release(struct dma_buf *dmabuf)
80 tee_shm_release(shm); 80 tee_shm_release(shm);
81} 81}
82 82
83static void *tee_shm_op_map_atomic(struct dma_buf *dmabuf, unsigned long pgnum)
84{
85 return NULL;
86}
87
88static void *tee_shm_op_map(struct dma_buf *dmabuf, unsigned long pgnum) 83static void *tee_shm_op_map(struct dma_buf *dmabuf, unsigned long pgnum)
89{ 84{
90 return NULL; 85 return NULL;
@@ -107,7 +102,6 @@ static const struct dma_buf_ops tee_shm_dma_buf_ops = {
107 .map_dma_buf = tee_shm_op_map_dma_buf, 102 .map_dma_buf = tee_shm_op_map_dma_buf,
108 .unmap_dma_buf = tee_shm_op_unmap_dma_buf, 103 .unmap_dma_buf = tee_shm_op_unmap_dma_buf,
109 .release = tee_shm_op_release, 104 .release = tee_shm_op_release,
110 .map_atomic = tee_shm_op_map_atomic,
111 .map = tee_shm_op_map, 105 .map = tee_shm_op_map,
112 .mmap = tee_shm_op_mmap, 106 .mmap = tee_shm_op_mmap,
113}; 107};
diff --git a/include/drm/drm_atomic.h b/include/drm/drm_atomic.h
index a57a8aa90ffb..da9d95a19580 100644
--- a/include/drm/drm_atomic.h
+++ b/include/drm/drm_atomic.h
@@ -160,6 +160,14 @@ struct __drm_crtcs_state {
160struct __drm_connnectors_state { 160struct __drm_connnectors_state {
161 struct drm_connector *ptr; 161 struct drm_connector *ptr;
162 struct drm_connector_state *state, *old_state, *new_state; 162 struct drm_connector_state *state, *old_state, *new_state;
163 /**
164 * @out_fence_ptr:
165 *
166 * User-provided pointer which the kernel uses to return a sync_file
167 * file descriptor. Used by writeback connectors to signal completion of
168 * the writeback.
169 */
170 s32 __user *out_fence_ptr;
163}; 171};
164 172
165struct drm_private_obj; 173struct drm_private_obj;
@@ -594,6 +602,9 @@ void drm_atomic_set_fence_for_plane(struct drm_plane_state *plane_state,
594int __must_check 602int __must_check
595drm_atomic_set_crtc_for_connector(struct drm_connector_state *conn_state, 603drm_atomic_set_crtc_for_connector(struct drm_connector_state *conn_state,
596 struct drm_crtc *crtc); 604 struct drm_crtc *crtc);
605int drm_atomic_set_writeback_fb_for_connector(
606 struct drm_connector_state *conn_state,
607 struct drm_framebuffer *fb);
597int __must_check 608int __must_check
598drm_atomic_add_affected_connectors(struct drm_atomic_state *state, 609drm_atomic_add_affected_connectors(struct drm_atomic_state *state,
599 struct drm_crtc *crtc); 610 struct drm_crtc *crtc);
@@ -601,9 +612,6 @@ int __must_check
601drm_atomic_add_affected_planes(struct drm_atomic_state *state, 612drm_atomic_add_affected_planes(struct drm_atomic_state *state,
602 struct drm_crtc *crtc); 613 struct drm_crtc *crtc);
603 614
604void
605drm_atomic_clean_old_fb(struct drm_device *dev, unsigned plane_mask, int ret);
606
607int __must_check drm_atomic_check_only(struct drm_atomic_state *state); 615int __must_check drm_atomic_check_only(struct drm_atomic_state *state);
608int __must_check drm_atomic_commit(struct drm_atomic_state *state); 616int __must_check drm_atomic_commit(struct drm_atomic_state *state);
609int __must_check drm_atomic_nonblocking_commit(struct drm_atomic_state *state); 617int __must_check drm_atomic_nonblocking_commit(struct drm_atomic_state *state);
diff --git a/include/drm/drm_bridge.h b/include/drm/drm_bridge.h
index 3270fec46979..70131ab57e8f 100644
--- a/include/drm/drm_bridge.h
+++ b/include/drm/drm_bridge.h
@@ -97,7 +97,7 @@ struct drm_bridge_funcs {
97 /** 97 /**
98 * @mode_fixup: 98 * @mode_fixup:
99 * 99 *
100 * This callback is used to validate and adjust a mode. The paramater 100 * This callback is used to validate and adjust a mode. The parameter
101 * mode is the display mode that should be fed to the next element in 101 * mode is the display mode that should be fed to the next element in
102 * the display chain, either the final &drm_connector or the next 102 * the display chain, either the final &drm_connector or the next
103 * &drm_bridge. The parameter adjusted_mode is the input mode the bridge 103 * &drm_bridge. The parameter adjusted_mode is the input mode the bridge
@@ -178,6 +178,22 @@ struct drm_bridge_funcs {
178 * then this would be &drm_encoder_helper_funcs.mode_set. The display 178 * then this would be &drm_encoder_helper_funcs.mode_set. The display
179 * pipe (i.e. clocks and timing signals) is off when this function is 179 * pipe (i.e. clocks and timing signals) is off when this function is
180 * called. 180 * called.
181 *
182 * The adjusted_mode parameter is the mode output by the CRTC for the
183 * first bridge in the chain. It can be different from the mode
184 * parameter that contains the desired mode for the connector at the end
185 * of the bridges chain, for instance when the first bridge in the chain
186 * performs scaling. The adjusted mode is mostly useful for the first
187 * bridge in the chain and is likely irrelevant for the other bridges.
188 *
189 * For atomic drivers the adjusted_mode is the mode stored in
190 * &drm_crtc_state.adjusted_mode.
191 *
192 * NOTE:
193 *
194 * If a need arises to store and access modes adjusted for other
195 * locations than the connection between the CRTC and the first bridge,
196 * the DRM framework will have to be extended with DRM bridge states.
181 */ 197 */
182 void (*mode_set)(struct drm_bridge *bridge, 198 void (*mode_set)(struct drm_bridge *bridge,
183 struct drm_display_mode *mode, 199 struct drm_display_mode *mode,
@@ -285,15 +301,15 @@ int drm_bridge_attach(struct drm_encoder *encoder, struct drm_bridge *bridge,
285 struct drm_bridge *previous); 301 struct drm_bridge *previous);
286 302
287bool drm_bridge_mode_fixup(struct drm_bridge *bridge, 303bool drm_bridge_mode_fixup(struct drm_bridge *bridge,
288 const struct drm_display_mode *mode, 304 const struct drm_display_mode *mode,
289 struct drm_display_mode *adjusted_mode); 305 struct drm_display_mode *adjusted_mode);
290enum drm_mode_status drm_bridge_mode_valid(struct drm_bridge *bridge, 306enum drm_mode_status drm_bridge_mode_valid(struct drm_bridge *bridge,
291 const struct drm_display_mode *mode); 307 const struct drm_display_mode *mode);
292void drm_bridge_disable(struct drm_bridge *bridge); 308void drm_bridge_disable(struct drm_bridge *bridge);
293void drm_bridge_post_disable(struct drm_bridge *bridge); 309void drm_bridge_post_disable(struct drm_bridge *bridge);
294void drm_bridge_mode_set(struct drm_bridge *bridge, 310void drm_bridge_mode_set(struct drm_bridge *bridge,
295 struct drm_display_mode *mode, 311 struct drm_display_mode *mode,
296 struct drm_display_mode *adjusted_mode); 312 struct drm_display_mode *adjusted_mode);
297void drm_bridge_pre_enable(struct drm_bridge *bridge); 313void drm_bridge_pre_enable(struct drm_bridge *bridge);
298void drm_bridge_enable(struct drm_bridge *bridge); 314void drm_bridge_enable(struct drm_bridge *bridge);
299 315
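
The new kerneldoc above spells out that @mode_set's adjusted_mode is the CRTC output timing for the first bridge in a chain. A hedged sketch of a bridge consuming it (my_bridge and its register layout are hypothetical):

	struct my_bridge {
		struct drm_bridge bridge;
		void __iomem *regs;
	};

	static void my_bridge_mode_set(struct drm_bridge *bridge,
				       struct drm_display_mode *mode,
				       struct drm_display_mode *adjusted_mode)
	{
		struct my_bridge *mb = container_of(bridge, struct my_bridge, bridge);

		/*
		 * Program hardware timings from adjusted_mode (what the CRTC
		 * outputs); 'mode' is what the connector at the end of the
		 * chain should finally see.
		 */
		writel(adjusted_mode->htotal, mb->regs + 0x00);	/* hypothetical reg */
		writel(adjusted_mode->vtotal, mb->regs + 0x04);	/* hypothetical reg */
	}
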
diff --git a/include/drm/drm_connector.h b/include/drm/drm_connector.h
index 675cc3f8cf85..14ab58ade87f 100644
--- a/include/drm/drm_connector.h
+++ b/include/drm/drm_connector.h
@@ -419,6 +419,14 @@ struct drm_connector_state {
419 enum hdmi_picture_aspect picture_aspect_ratio; 419 enum hdmi_picture_aspect picture_aspect_ratio;
420 420
421 /** 421 /**
422 * @content_type: Connector property to control the
423 * HDMI infoframe content type setting.
 424	 * The value stored here must match
 425	 * one of the %DRM_MODE_CONTENT_TYPE_\* values.
426 */
427 unsigned int content_type;
428
429 /**
422 * @scaling_mode: Connector property to control the 430 * @scaling_mode: Connector property to control the
423 * upscaling, mostly used for built-in panels. 431 * upscaling, mostly used for built-in panels.
424 */ 432 */
@@ -429,6 +437,19 @@ struct drm_connector_state {
429 * protection. This is most commonly used for HDCP. 437 * protection. This is most commonly used for HDCP.
430 */ 438 */
431 unsigned int content_protection; 439 unsigned int content_protection;
440
441 /**
442 * @writeback_job: Writeback job for writeback connectors
443 *
444 * Holds the framebuffer and out-fence for a writeback connector. As
445 * the writeback completion may be asynchronous to the normal commit
446 * cycle, the writeback job lifetime is managed separately from the
447 * normal atomic state by this object.
448 *
449 * See also: drm_writeback_queue_job() and
450 * drm_writeback_signal_completion()
451 */
452 struct drm_writeback_job *writeback_job;
432}; 453};
433 454
434/** 455/**
@@ -608,6 +629,8 @@ struct drm_connector_funcs {
608 * cleaned up by calling the @atomic_destroy_state hook in this 629 * cleaned up by calling the @atomic_destroy_state hook in this
609 * structure. 630 * structure.
610 * 631 *
632 * This callback is mandatory for atomic drivers.
633 *
611 * Atomic drivers which don't subclass &struct drm_connector_state should use 634 * Atomic drivers which don't subclass &struct drm_connector_state should use
612 * drm_atomic_helper_connector_duplicate_state(). Drivers that subclass the 635 * drm_atomic_helper_connector_duplicate_state(). Drivers that subclass the
613 * state structure to extend it with driver-private state should use 636 * state structure to extend it with driver-private state should use
@@ -634,6 +657,8 @@ struct drm_connector_funcs {
634 * 657 *
635 * Destroy a state duplicated with @atomic_duplicate_state and release 658 * Destroy a state duplicated with @atomic_duplicate_state and release
636 * or unreference all resources it references 659 * or unreference all resources it references
660 *
661 * This callback is mandatory for atomic drivers.
637 */ 662 */
638 void (*atomic_destroy_state)(struct drm_connector *connector, 663 void (*atomic_destroy_state)(struct drm_connector *connector,
639 struct drm_connector_state *state); 664 struct drm_connector_state *state);
@@ -1089,11 +1114,16 @@ int drm_mode_create_tv_properties(struct drm_device *dev,
1089 unsigned int num_modes, 1114 unsigned int num_modes,
1090 const char * const modes[]); 1115 const char * const modes[]);
1091int drm_mode_create_scaling_mode_property(struct drm_device *dev); 1116int drm_mode_create_scaling_mode_property(struct drm_device *dev);
1117int drm_connector_attach_content_type_property(struct drm_connector *dev);
1092int drm_connector_attach_scaling_mode_property(struct drm_connector *connector, 1118int drm_connector_attach_scaling_mode_property(struct drm_connector *connector,
1093 u32 scaling_mode_mask); 1119 u32 scaling_mode_mask);
1094int drm_connector_attach_content_protection_property( 1120int drm_connector_attach_content_protection_property(
1095 struct drm_connector *connector); 1121 struct drm_connector *connector);
1096int drm_mode_create_aspect_ratio_property(struct drm_device *dev); 1122int drm_mode_create_aspect_ratio_property(struct drm_device *dev);
1123int drm_mode_create_content_type_property(struct drm_device *dev);
1124void drm_hdmi_avi_infoframe_content_type(struct hdmi_avi_infoframe *frame,
1125 const struct drm_connector_state *conn_state);
1126
1097int drm_mode_create_suggested_offset_properties(struct drm_device *dev); 1127int drm_mode_create_suggested_offset_properties(struct drm_device *dev);
1098 1128
1099int drm_mode_connector_set_path_property(struct drm_connector *connector, 1129int drm_mode_connector_set_path_property(struct drm_connector *connector,
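
Together with drm_mode_create_content_type_property(), the declarations added above give drivers a two-step recipe: attach the property at connector init, then translate the committed state into the AVI infoframe. A sketch under the assumption of a generic HDMI driver (my_* names are illustrative):

	static int my_hdmi_connector_init(struct drm_connector *connector)
	{
		/* exposes the "content type" connector property to userspace */
		return drm_connector_attach_content_type_property(connector);
	}

	static void my_hdmi_fill_avi(struct hdmi_avi_infoframe *frame,
				     const struct drm_connector_state *conn_state)
	{
		/* folds conn_state->content_type into the infoframe ITC/CN bits */
		drm_hdmi_avi_infoframe_content_type(frame, conn_state);
	}
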
diff --git a/include/drm/drm_crtc.h b/include/drm/drm_crtc.h
index a2d81d2907a9..23eddbccab10 100644
--- a/include/drm/drm_crtc.h
+++ b/include/drm/drm_crtc.h
@@ -134,10 +134,13 @@ struct drm_crtc_state {
134 * 134 *
135 * Internal display timings which can be used by the driver to handle 135 * Internal display timings which can be used by the driver to handle
136 * differences between the mode requested by userspace in @mode and what 136 * differences between the mode requested by userspace in @mode and what
137 * is actually programmed into the hardware. It is purely driver 137 * is actually programmed into the hardware.
138 * implementation defined what exactly this adjusted mode means. Usually 138 *
139 * it is used to store the hardware display timings used between the 139 * For drivers using drm_bridge, this stores hardware display timings
140 * CRTC and encoder blocks. 140 * used between the CRTC and the first bridge. For other drivers, the
141 * meaning of the adjusted_mode field is purely driver implementation
142 * defined information, and will usually be used to store the hardware
143 * display timings used between the CRTC and encoder blocks.
141 */ 144 */
142 struct drm_display_mode adjusted_mode; 145 struct drm_display_mode adjusted_mode;
143 146
@@ -503,6 +506,8 @@ struct drm_crtc_funcs {
503 * cleaned up by calling the @atomic_destroy_state hook in this 506 * cleaned up by calling the @atomic_destroy_state hook in this
504 * structure. 507 * structure.
505 * 508 *
509 * This callback is mandatory for atomic drivers.
510 *
506 * Atomic drivers which don't subclass &struct drm_crtc_state should use 511 * Atomic drivers which don't subclass &struct drm_crtc_state should use
507 * drm_atomic_helper_crtc_duplicate_state(). Drivers that subclass the 512 * drm_atomic_helper_crtc_duplicate_state(). Drivers that subclass the
508 * state structure to extend it with driver-private state should use 513 * state structure to extend it with driver-private state should use
@@ -529,6 +534,8 @@ struct drm_crtc_funcs {
529 * 534 *
530 * Destroy a state duplicated with @atomic_duplicate_state and release 535 * Destroy a state duplicated with @atomic_duplicate_state and release
531 * or unreference all resources it references 536 * or unreference all resources it references
537 *
538 * This callback is mandatory for atomic drivers.
532 */ 539 */
533 void (*atomic_destroy_state)(struct drm_crtc *crtc, 540 void (*atomic_destroy_state)(struct drm_crtc *crtc,
534 struct drm_crtc_state *state); 541 struct drm_crtc_state *state);
diff --git a/include/drm/drm_file.h b/include/drm/drm_file.h
index 027ac16da3d1..26485acc51d7 100644
--- a/include/drm/drm_file.h
+++ b/include/drm/drm_file.h
@@ -193,6 +193,13 @@ struct drm_file {
193 unsigned aspect_ratio_allowed:1; 193 unsigned aspect_ratio_allowed:1;
194 194
195 /** 195 /**
196 * @writeback_connectors:
197 *
198 * True if client understands writeback connectors
199 */
200 unsigned writeback_connectors:1;
201
202 /**
196 * @is_master: 203 * @is_master:
197 * 204 *
198 * This client is the creator of @master. Protected by struct 205 * This client is the creator of @master. Protected by struct
diff --git a/include/drm/drm_mm.h b/include/drm/drm_mm.h
index 101f566ae43d..2c3bbb43c7d1 100644
--- a/include/drm/drm_mm.h
+++ b/include/drm/drm_mm.h
@@ -109,6 +109,38 @@ enum drm_mm_insert_mode {
109 * Allocates the node from the bottom of the found hole. 109 * Allocates the node from the bottom of the found hole.
110 */ 110 */
111 DRM_MM_INSERT_EVICT, 111 DRM_MM_INSERT_EVICT,
112
113 /**
114 * @DRM_MM_INSERT_ONCE:
115 *
 116	 * Only check the first hole for suitability and report -ENOSPC
117 * immediately otherwise, rather than check every hole until a
118 * suitable one is found. Can only be used in conjunction with another
119 * search method such as DRM_MM_INSERT_HIGH or DRM_MM_INSERT_LOW.
120 */
121 DRM_MM_INSERT_ONCE = BIT(31),
122
123 /**
124 * @DRM_MM_INSERT_HIGHEST:
125 *
126 * Only check the highest hole (the hole with the largest address) and
127 * insert the node at the top of the hole or report -ENOSPC if
128 * unsuitable.
129 *
130 * Does not search all holes.
131 */
132 DRM_MM_INSERT_HIGHEST = DRM_MM_INSERT_HIGH | DRM_MM_INSERT_ONCE,
133
134 /**
135 * @DRM_MM_INSERT_LOWEST:
136 *
137 * Only check the lowest hole (the hole with the smallest address) and
138 * insert the node at the bottom of the hole or report -ENOSPC if
139 * unsuitable.
140 *
141 * Does not search all holes.
142 */
143 DRM_MM_INSERT_LOWEST = DRM_MM_INSERT_LOW | DRM_MM_INSERT_ONCE,
112}; 144};
113 145
114/** 146/**
@@ -173,7 +205,7 @@ struct drm_mm {
173 struct drm_mm_node head_node; 205 struct drm_mm_node head_node;
174 /* Keep an interval_tree for fast lookup of drm_mm_nodes by address. */ 206 /* Keep an interval_tree for fast lookup of drm_mm_nodes by address. */
175 struct rb_root_cached interval_tree; 207 struct rb_root_cached interval_tree;
176 struct rb_root holes_size; 208 struct rb_root_cached holes_size;
177 struct rb_root holes_addr; 209 struct rb_root holes_addr;
178 210
179 unsigned long scan_active; 211 unsigned long scan_active;
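
DRM_MM_INSERT_ONCE and its HIGHEST/LOWEST combinations only ever probe a single hole. A sketch, assuming the drm_mm_insert_node_in_range() signature of this kernel, of grabbing space from the very top of a range or failing fast:

	static int reserve_topmost(struct drm_mm *mm, struct drm_mm_node *node,
				   u64 size, u64 range_end)
	{
		/* probes only the highest hole; -ENOSPC immediately if it doesn't fit */
		return drm_mm_insert_node_in_range(mm, node, size,
						   0 /* alignment */, 0 /* color */,
						   0, range_end,
						   DRM_MM_INSERT_HIGHEST);
	}
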
diff --git a/include/drm/drm_mode_config.h b/include/drm/drm_mode_config.h
index 33b3a96d66d0..f4a173c8d79c 100644
--- a/include/drm/drm_mode_config.h
+++ b/include/drm/drm_mode_config.h
@@ -727,6 +727,11 @@ struct drm_mode_config {
727 */ 727 */
728 struct drm_property *aspect_ratio_property; 728 struct drm_property *aspect_ratio_property;
729 /** 729 /**
730 * @content_type_property: Optional connector property to control the
731 * HDMI infoframe content type setting.
732 */
733 struct drm_property *content_type_property;
734 /**
730 * @degamma_lut_property: Optional CRTC property to set the LUT used to 735 * @degamma_lut_property: Optional CRTC property to set the LUT used to
731 * convert the framebuffer's colors to linear gamma. 736 * convert the framebuffer's colors to linear gamma.
732 */ 737 */
@@ -779,6 +784,29 @@ struct drm_mode_config {
779 */ 784 */
780 struct drm_property *panel_orientation_property; 785 struct drm_property *panel_orientation_property;
781 786
787 /**
788 * @writeback_fb_id_property: Property for writeback connectors, storing
789 * the ID of the output framebuffer.
790 * See also: drm_writeback_connector_init()
791 */
792 struct drm_property *writeback_fb_id_property;
793
794 /**
795 * @writeback_pixel_formats_property: Property for writeback connectors,
796 * storing an array of the supported pixel formats for the writeback
797 * engine (read-only).
798 * See also: drm_writeback_connector_init()
799 */
800 struct drm_property *writeback_pixel_formats_property;
801 /**
802 * @writeback_out_fence_ptr_property: Property for writeback connectors,
803 * fd pointer representing the outgoing fences for a writeback
804 * connector. Userspace should provide a pointer to a value of type s32,
805 * and then cast that pointer to u64.
806 * See also: drm_writeback_connector_init()
807 */
808 struct drm_property *writeback_out_fence_ptr_property;
809
782 /* dumb ioctl parameters */ 810 /* dumb ioctl parameters */
783 uint32_t preferred_depth, prefer_shadow; 811 uint32_t preferred_depth, prefer_shadow;
784 812
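
The out-fence property works by letting userspace hand the kernel the address of an s32, cast to u64 as the comment above says. A userspace sketch with libdrm (the property ID lookup is assumed to have been done elsewhere):

	#include <stdint.h>
	#include <xf86drmMode.h>

	static int add_writeback_out_fence(drmModeAtomicReqPtr req,
					   uint32_t wb_connector_id,
					   uint32_t out_fence_ptr_prop_id,
					   int32_t *out_fence_fd)
	{
		/* on a successful commit the kernel writes a sync_file fd here */
		*out_fence_fd = -1;
		return drmModeAtomicAddProperty(req, wb_connector_id,
						out_fence_ptr_prop_id,
						(uint64_t)(uintptr_t)out_fence_fd);
	}
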
diff --git a/include/drm/drm_modeset_helper_vtables.h b/include/drm/drm_modeset_helper_vtables.h
index 35e2a3a79fc5..3b289773297c 100644
--- a/include/drm/drm_modeset_helper_vtables.h
+++ b/include/drm/drm_modeset_helper_vtables.h
@@ -974,6 +974,17 @@ struct drm_connector_helper_funcs {
974 */ 974 */
975 int (*atomic_check)(struct drm_connector *connector, 975 int (*atomic_check)(struct drm_connector *connector,
976 struct drm_connector_state *state); 976 struct drm_connector_state *state);
977
978 /**
979 * @atomic_commit:
980 *
981 * This hook is to be used by drivers implementing writeback connectors
 982	 * that need a defined point at which to commit the writeback job to the hardware.
983 *
984 * This callback is used by the atomic modeset helpers.
985 */
986 void (*atomic_commit)(struct drm_connector *connector,
987 struct drm_writeback_job *writeback_job);
977}; 988};
978 989
979/** 990/**
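
A hedged sketch of what a driver's @atomic_commit hook for a writeback connector could look like, matching the signature added above (the hardware kick itself is elided; my_* names are hypothetical):

	static void my_wb_atomic_commit(struct drm_connector *connector,
					struct drm_writeback_job *job)
	{
		struct drm_writeback_connector *wb_conn =
			container_of(connector, struct drm_writeback_connector, base);

		if (!job || !job->fb)
			return;

		/* hand the job to the core; it now owns fb and out-fence lifetime */
		drm_writeback_queue_job(wb_conn, job);

		/*
		 * ...then start the write-out engine; call
		 * drm_writeback_signal_completion() from the completion interrupt.
		 */
	}
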
diff --git a/include/drm/drm_panel.h b/include/drm/drm_panel.h
index 14ac240a1f64..26a1b5fd8796 100644
--- a/include/drm/drm_panel.h
+++ b/include/drm/drm_panel.h
@@ -89,6 +89,7 @@ struct drm_panel {
89 struct drm_device *drm; 89 struct drm_device *drm;
90 struct drm_connector *connector; 90 struct drm_connector *connector;
91 struct device *dev; 91 struct device *dev;
92 struct device_link *link;
92 93
93 const struct drm_panel_funcs *funcs; 94 const struct drm_panel_funcs *funcs;
94 95
diff --git a/include/drm/drm_plane.h b/include/drm/drm_plane.h
index 26fa50c2a50e..7d4d6c7f0afd 100644
--- a/include/drm/drm_plane.h
+++ b/include/drm/drm_plane.h
@@ -288,6 +288,8 @@ struct drm_plane_funcs {
288 * cleaned up by calling the @atomic_destroy_state hook in this 288 * cleaned up by calling the @atomic_destroy_state hook in this
289 * structure. 289 * structure.
290 * 290 *
291 * This callback is mandatory for atomic drivers.
292 *
291 * Atomic drivers which don't subclass &struct drm_plane_state should use 293 * Atomic drivers which don't subclass &struct drm_plane_state should use
292 * drm_atomic_helper_plane_duplicate_state(). Drivers that subclass the 294 * drm_atomic_helper_plane_duplicate_state(). Drivers that subclass the
293 * state structure to extend it with driver-private state should use 295 * state structure to extend it with driver-private state should use
@@ -314,6 +316,8 @@ struct drm_plane_funcs {
314 * 316 *
315 * Destroy a state duplicated with @atomic_duplicate_state and release 317 * Destroy a state duplicated with @atomic_duplicate_state and release
316 * or unreference all resources it references 318 * or unreference all resources it references
319 *
320 * This callback is mandatory for atomic drivers.
317 */ 321 */
318 void (*atomic_destroy_state)(struct drm_plane *plane, 322 void (*atomic_destroy_state)(struct drm_plane *plane,
319 struct drm_plane_state *state); 323 struct drm_plane_state *state);
@@ -431,7 +435,10 @@ struct drm_plane_funcs {
431 * This optional hook is used for the DRM to determine if the given 435 * This optional hook is used for the DRM to determine if the given
432 * format/modifier combination is valid for the plane. This allows the 436 * format/modifier combination is valid for the plane. This allows the
433 * DRM to generate the correct format bitmask (which formats apply to 437 * DRM to generate the correct format bitmask (which formats apply to
 434	 * which modifier).	 438	 * which modifier), and to validate modifiers at atomic_check time.
439 *
440 * If not present, then any modifier in the plane's modifier
441 * list is allowed with any of the plane's formats.
435 * 442 *
436 * Returns: 443 * Returns:
437 * 444 *
diff --git a/include/drm/drm_prime.h b/include/drm/drm_prime.h
index 4d5f5d6cf6a6..d716d653b096 100644
--- a/include/drm/drm_prime.h
+++ b/include/drm/drm_prime.h
@@ -82,7 +82,7 @@ int drm_gem_prime_fd_to_handle(struct drm_device *dev,
82struct dma_buf *drm_gem_dmabuf_export(struct drm_device *dev, 82struct dma_buf *drm_gem_dmabuf_export(struct drm_device *dev,
83 struct dma_buf_export_info *exp_info); 83 struct dma_buf_export_info *exp_info);
84void drm_gem_dmabuf_release(struct dma_buf *dma_buf); 84void drm_gem_dmabuf_release(struct dma_buf *dma_buf);
85int drm_gem_map_attach(struct dma_buf *dma_buf, struct device *target_dev, 85int drm_gem_map_attach(struct dma_buf *dma_buf,
86 struct dma_buf_attachment *attach); 86 struct dma_buf_attachment *attach);
87void drm_gem_map_detach(struct dma_buf *dma_buf, 87void drm_gem_map_detach(struct dma_buf *dma_buf,
88 struct dma_buf_attachment *attach); 88 struct dma_buf_attachment *attach);
@@ -93,10 +93,6 @@ void drm_gem_unmap_dma_buf(struct dma_buf_attachment *attach,
93 enum dma_data_direction dir); 93 enum dma_data_direction dir);
94void *drm_gem_dmabuf_vmap(struct dma_buf *dma_buf); 94void *drm_gem_dmabuf_vmap(struct dma_buf *dma_buf);
95void drm_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr); 95void drm_gem_dmabuf_vunmap(struct dma_buf *dma_buf, void *vaddr);
96void *drm_gem_dmabuf_kmap_atomic(struct dma_buf *dma_buf,
97 unsigned long page_num);
98void drm_gem_dmabuf_kunmap_atomic(struct dma_buf *dma_buf,
99 unsigned long page_num, void *addr);
100void *drm_gem_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num); 96void *drm_gem_dmabuf_kmap(struct dma_buf *dma_buf, unsigned long page_num);
101void drm_gem_dmabuf_kunmap(struct dma_buf *dma_buf, unsigned long page_num, 97void drm_gem_dmabuf_kunmap(struct dma_buf *dma_buf, unsigned long page_num,
102 void *addr); 98 void *addr);
diff --git a/include/drm/drm_writeback.h b/include/drm/drm_writeback.h
new file mode 100644
index 000000000000..a10fe556dfd4
--- /dev/null
+++ b/include/drm/drm_writeback.h
@@ -0,0 +1,130 @@
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * (C) COPYRIGHT 2016 ARM Limited. All rights reserved.
4 * Author: Brian Starkey <brian.starkey@arm.com>
5 *
6 * This program is free software and is provided to you under the terms of the
7 * GNU General Public License version 2 as published by the Free Software
8 * Foundation, and any use by you of this program is subject to the terms
9 * of such GNU licence.
10 */
11
12#ifndef __DRM_WRITEBACK_H__
13#define __DRM_WRITEBACK_H__
14#include <drm/drm_connector.h>
15#include <drm/drm_encoder.h>
16#include <linux/workqueue.h>
17
18struct drm_writeback_connector {
19 struct drm_connector base;
20
21 /**
22 * @encoder: Internal encoder used by the connector to fulfill
23 * the DRM framework requirements. The users of the
24 * @drm_writeback_connector control the behaviour of the @encoder
 25	 * by passing the @enc_helper_funcs parameter to the
 26	 * drm_writeback_connector_init() function.
27 */
28 struct drm_encoder encoder;
29
30 /**
31 * @pixel_formats_blob_ptr:
32 *
33 * DRM blob property data for the pixel formats list on writeback
34 * connectors
35 * See also drm_writeback_connector_init()
36 */
37 struct drm_property_blob *pixel_formats_blob_ptr;
38
39 /** @job_lock: Protects job_queue */
40 spinlock_t job_lock;
41
42 /**
43 * @job_queue:
44 *
45 * Holds a list of a connector's writeback jobs; the last item is the
46 * most recent. The first item may be either waiting for the hardware
47 * to begin writing, or currently being written.
48 *
49 * See also: drm_writeback_queue_job() and
50 * drm_writeback_signal_completion()
51 */
52 struct list_head job_queue;
53
54 /**
55 * @fence_context:
56 *
57 * timeline context used for fence operations.
58 */
59 unsigned int fence_context;
60 /**
61 * @fence_lock:
62 *
63 * spinlock to protect the fences in the fence_context.
64 */
65 spinlock_t fence_lock;
66 /**
67 * @fence_seqno:
68 *
69 * Seqno variable used as monotonic counter for the fences
70 * created on the connector's timeline.
71 */
72 unsigned long fence_seqno;
73 /**
74 * @timeline_name:
75 *
76 * The name of the connector's fence timeline.
77 */
78 char timeline_name[32];
79};
80
81struct drm_writeback_job {
82 /**
83 * @cleanup_work:
84 *
85 * Used to allow drm_writeback_signal_completion to defer dropping the
86 * framebuffer reference to a workqueue
87 */
88 struct work_struct cleanup_work;
89
90 /**
91 * @list_entry:
92 *
93 * List item for the writeback connector's @job_queue
94 */
95 struct list_head list_entry;
96
97 /**
98 * @fb:
99 *
100 * Framebuffer to be written to by the writeback connector. Do not set
101 * directly, use drm_atomic_set_writeback_fb_for_connector()
102 */
103 struct drm_framebuffer *fb;
104
105 /**
106 * @out_fence:
107 *
108 * Fence which will signal once the writeback has completed
109 */
110 struct dma_fence *out_fence;
111};
112
113int drm_writeback_connector_init(struct drm_device *dev,
114 struct drm_writeback_connector *wb_connector,
115 const struct drm_connector_funcs *con_funcs,
116 const struct drm_encoder_helper_funcs *enc_helper_funcs,
117 const u32 *formats, int n_formats);
118
119void drm_writeback_queue_job(struct drm_writeback_connector *wb_connector,
120 struct drm_writeback_job *job);
121
122void drm_writeback_cleanup_job(struct drm_writeback_job *job);
123
124void
125drm_writeback_signal_completion(struct drm_writeback_connector *wb_connector,
126 int status);
127
128struct dma_fence *
129drm_writeback_get_out_fence(struct drm_writeback_connector *wb_connector);
130#endif
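
A minimal sketch of a driver registering a writeback connector with the new helper, assuming the driver already provides connector funcs and encoder helper funcs for its write-out path (my_* names and the format list are illustrative):

	static const u32 my_wb_formats[] = {
		DRM_FORMAT_XRGB8888,
		DRM_FORMAT_RGB565,
	};

	static int my_writeback_init(struct drm_device *dev,
				     struct drm_writeback_connector *wb_conn)
	{
		return drm_writeback_connector_init(dev, wb_conn,
						    &my_wb_connector_funcs,
						    &my_wb_encoder_helper_funcs,
						    my_wb_formats,
						    ARRAY_SIZE(my_wb_formats));
	}
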
diff --git a/include/linux/dma-buf.h b/include/linux/dma-buf.h
index 085db2fee2d7..58725f890b5b 100644
--- a/include/linux/dma-buf.h
+++ b/include/linux/dma-buf.h
@@ -39,12 +39,12 @@ struct dma_buf_attachment;
39 39
40/** 40/**
41 * struct dma_buf_ops - operations possible on struct dma_buf 41 * struct dma_buf_ops - operations possible on struct dma_buf
42 * @map_atomic: maps a page from the buffer into kernel address 42 * @map_atomic: [optional] maps a page from the buffer into kernel address
43 * space, users may not block until the subsequent unmap call. 43 * space, users may not block until the subsequent unmap call.
44 * This callback must not sleep. 44 * This callback must not sleep.
45 * @unmap_atomic: [optional] unmaps a atomically mapped page from the buffer. 45 * @unmap_atomic: [optional] unmaps a atomically mapped page from the buffer.
46 * This Callback must not sleep. 46 * This Callback must not sleep.
47 * @map: maps a page from the buffer into kernel address space. 47 * @map: [optional] maps a page from the buffer into kernel address space.
48 * @unmap: [optional] unmaps a page from the buffer. 48 * @unmap: [optional] unmaps a page from the buffer.
49 * @vmap: [optional] creates a virtual mapping for the buffer into kernel 49 * @vmap: [optional] creates a virtual mapping for the buffer into kernel
50 * address space. Same restrictions as for vmap and friends apply. 50 * address space. Same restrictions as for vmap and friends apply.
@@ -55,11 +55,11 @@ struct dma_buf_ops {
55 * @attach: 55 * @attach:
56 * 56 *
57 * This is called from dma_buf_attach() to make sure that a given 57 * This is called from dma_buf_attach() to make sure that a given
58 * &device can access the provided &dma_buf. Exporters which support 58 * &dma_buf_attachment.dev can access the provided &dma_buf. Exporters
59 * buffer objects in special locations like VRAM or device-specific 59 * which support buffer objects in special locations like VRAM or
60 * carveout areas should check whether the buffer could be move to 60 * device-specific carveout areas should check whether the buffer could
 61	 * system memory (or directly accessed by the provided device), and	 61	 * be moved to system memory (or directly accessed by the provided
62 * otherwise need to fail the attach operation. 62 * device), and otherwise need to fail the attach operation.
63 * 63 *
64 * The exporter should also in general check whether the current 64 * The exporter should also in general check whether the current
65 * allocation fullfills the DMA constraints of the new device. If this 65 * allocation fullfills the DMA constraints of the new device. If this
@@ -77,8 +77,7 @@ struct dma_buf_ops {
77 * to signal that backing storage is already allocated and incompatible 77 * to signal that backing storage is already allocated and incompatible
78 * with the requirements of requesting device. 78 * with the requirements of requesting device.
79 */ 79 */
80 int (*attach)(struct dma_buf *, struct device *, 80 int (*attach)(struct dma_buf *, struct dma_buf_attachment *);
81 struct dma_buf_attachment *);
82 81
83 /** 82 /**
84 * @detach: 83 * @detach:
@@ -206,8 +205,6 @@ struct dma_buf_ops {
206 * to be restarted. 205 * to be restarted.
207 */ 206 */
208 int (*end_cpu_access)(struct dma_buf *, enum dma_data_direction); 207 int (*end_cpu_access)(struct dma_buf *, enum dma_data_direction);
209 void *(*map_atomic)(struct dma_buf *, unsigned long);
210 void (*unmap_atomic)(struct dma_buf *, unsigned long, void *);
211 void *(*map)(struct dma_buf *, unsigned long); 208 void *(*map)(struct dma_buf *, unsigned long);
212 void (*unmap)(struct dma_buf *, unsigned long, void *); 209 void (*unmap)(struct dma_buf *, unsigned long, void *);
213 210
@@ -395,8 +392,6 @@ int dma_buf_begin_cpu_access(struct dma_buf *dma_buf,
395 enum dma_data_direction dir); 392 enum dma_data_direction dir);
396int dma_buf_end_cpu_access(struct dma_buf *dma_buf, 393int dma_buf_end_cpu_access(struct dma_buf *dma_buf,
397 enum dma_data_direction dir); 394 enum dma_data_direction dir);
398void *dma_buf_kmap_atomic(struct dma_buf *, unsigned long);
399void dma_buf_kunmap_atomic(struct dma_buf *, unsigned long, void *);
400void *dma_buf_kmap(struct dma_buf *, unsigned long); 395void *dma_buf_kmap(struct dma_buf *, unsigned long);
401void dma_buf_kunmap(struct dma_buf *, unsigned long, void *); 396void dma_buf_kunmap(struct dma_buf *, unsigned long, void *);
402 397
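
With the signature change above, the importing device is reachable through the attachment. A sketch of an exporter's .attach that accepts every importer (hypothetical exporter):

	static int my_exporter_attach(struct dma_buf *dmabuf,
				      struct dma_buf_attachment *attach)
	{
		struct device *dev = attach->dev;	/* the importing device */

		dev_dbg(dev, "attach to dma-buf of %zu bytes\n", dmabuf->size);
		return 0;
	}
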
diff --git a/include/uapi/drm/drm.h b/include/uapi/drm/drm.h
index 9c660e1688ab..300f336633f2 100644
--- a/include/uapi/drm/drm.h
+++ b/include/uapi/drm/drm.h
@@ -687,6 +687,15 @@ struct drm_get_cap {
687 */ 687 */
688#define DRM_CLIENT_CAP_ASPECT_RATIO 4 688#define DRM_CLIENT_CAP_ASPECT_RATIO 4
689 689
690/**
691 * DRM_CLIENT_CAP_WRITEBACK_CONNECTORS
692 *
693 * If set to 1, the DRM core will expose special connectors to be used for
694 * writing back to memory the scene setup in the commit. Depends on client
695 * also supporting DRM_CLIENT_CAP_ATOMIC
696 */
697#define DRM_CLIENT_CAP_WRITEBACK_CONNECTORS 5
698
690/** DRM_IOCTL_SET_CLIENT_CAP ioctl argument type */ 699/** DRM_IOCTL_SET_CLIENT_CAP ioctl argument type */
691struct drm_set_client_cap { 700struct drm_set_client_cap {
692 __u64 capability; 701 __u64 capability;
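
Userspace sketch (libdrm): writeback connectors stay hidden until the client opts in, and the cap depends on atomic as documented above:

	static int enable_writeback(int drm_fd)
	{
		int ret = drmSetClientCap(drm_fd, DRM_CLIENT_CAP_ATOMIC, 1);

		if (ret)
			return ret;
		return drmSetClientCap(drm_fd, DRM_CLIENT_CAP_WRITEBACK_CONNECTORS, 1);
	}
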
diff --git a/include/uapi/drm/drm_fourcc.h b/include/uapi/drm/drm_fourcc.h
index e04613d30a13..64bf67abff7e 100644
--- a/include/uapi/drm/drm_fourcc.h
+++ b/include/uapi/drm/drm_fourcc.h
@@ -385,6 +385,23 @@ extern "C" {
385 fourcc_mod_code(NVIDIA, 0x15) 385 fourcc_mod_code(NVIDIA, 0x15)
386 386
387/* 387/*
388 * Some Broadcom modifiers take parameters, for example the number of
389 * vertical lines in the image. Reserve the lower 32 bits for modifier
390 * type, and the next 24 bits for parameters. Top 8 bits are the
391 * vendor code.
392 */
393#define __fourcc_mod_broadcom_param_shift 8
394#define __fourcc_mod_broadcom_param_bits 48
395#define fourcc_mod_broadcom_code(val, params) \
396 fourcc_mod_code(BROADCOM, ((((__u64)params) << __fourcc_mod_broadcom_param_shift) | val))
397#define fourcc_mod_broadcom_param(m) \
398 ((int)(((m) >> __fourcc_mod_broadcom_param_shift) & \
399 ((1ULL << __fourcc_mod_broadcom_param_bits) - 1)))
400#define fourcc_mod_broadcom_mod(m) \
401 ((m) & ~(((1ULL << __fourcc_mod_broadcom_param_bits) - 1) << \
402 __fourcc_mod_broadcom_param_shift))
403
404/*
388 * Broadcom VC4 "T" format 405 * Broadcom VC4 "T" format
389 * 406 *
390 * This is the primary layout that the V3D GPU can texture from (it 407 * This is the primary layout that the V3D GPU can texture from (it
@@ -405,6 +422,48 @@ extern "C" {
405 */ 422 */
406#define DRM_FORMAT_MOD_BROADCOM_VC4_T_TILED fourcc_mod_code(BROADCOM, 1) 423#define DRM_FORMAT_MOD_BROADCOM_VC4_T_TILED fourcc_mod_code(BROADCOM, 1)
407 424
425/*
426 * Broadcom SAND format
427 *
428 * This is the native format that the H.264 codec block uses. For VC4
429 * HVS, it is only valid for H.264 (NV12/21) and RGBA modes.
430 *
431 * The image can be considered to be split into columns, and the
432 * columns are placed consecutively into memory. The width of those
433 * columns can be either 32, 64, 128, or 256 pixels, but in practice
434 * only 128 pixel columns are used.
435 *
436 * The pitch between the start of each column is set to optimally
437 * switch between SDRAM banks. This is passed as the number of lines
438 * of column width in the modifier (we can't use the stride value due
439 * to various core checks that look at it , so you should set the
440 * stride to width*cpp).
441 *
442 * Note that the column height for this format modifier is the same
443 * for all of the planes, assuming that each column contains both Y
444 * and UV. Some SAND-using hardware stores UV in a separate tiled
445 * image from Y to reduce the column height, which is not supported
446 * with these modifiers.
447 */
448
449#define DRM_FORMAT_MOD_BROADCOM_SAND32_COL_HEIGHT(v) \
450 fourcc_mod_broadcom_code(2, v)
451#define DRM_FORMAT_MOD_BROADCOM_SAND64_COL_HEIGHT(v) \
452 fourcc_mod_broadcom_code(3, v)
453#define DRM_FORMAT_MOD_BROADCOM_SAND128_COL_HEIGHT(v) \
454 fourcc_mod_broadcom_code(4, v)
455#define DRM_FORMAT_MOD_BROADCOM_SAND256_COL_HEIGHT(v) \
456 fourcc_mod_broadcom_code(5, v)
457
458#define DRM_FORMAT_MOD_BROADCOM_SAND32 \
459 DRM_FORMAT_MOD_BROADCOM_SAND32_COL_HEIGHT(0)
460#define DRM_FORMAT_MOD_BROADCOM_SAND64 \
461 DRM_FORMAT_MOD_BROADCOM_SAND64_COL_HEIGHT(0)
462#define DRM_FORMAT_MOD_BROADCOM_SAND128 \
463 DRM_FORMAT_MOD_BROADCOM_SAND128_COL_HEIGHT(0)
464#define DRM_FORMAT_MOD_BROADCOM_SAND256 \
465 DRM_FORMAT_MOD_BROADCOM_SAND256_COL_HEIGHT(0)
466
408#if defined(__cplusplus) 467#if defined(__cplusplus)
409} 468}
410#endif 469#endif
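
A small sketch showing how the parameterised Broadcom macros compose and decompose, encoding a SAND128 modifier with a 96-line column height and pulling the pieces back out:

	static void sand_modifier_example(void)
	{
		uint64_t mod  = DRM_FORMAT_MOD_BROADCOM_SAND128_COL_HEIGHT(96);
		uint64_t base = fourcc_mod_broadcom_mod(mod);	/* == DRM_FORMAT_MOD_BROADCOM_SAND128 */
		int lines     = fourcc_mod_broadcom_param(mod);	/* == 96 */

		(void)base;
		(void)lines;
	}
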
diff --git a/include/uapi/drm/drm_mode.h b/include/uapi/drm/drm_mode.h
index 4b3a1bb58e68..8d67243952f4 100644
--- a/include/uapi/drm/drm_mode.h
+++ b/include/uapi/drm/drm_mode.h
@@ -96,6 +96,13 @@ extern "C" {
96#define DRM_MODE_PICTURE_ASPECT_64_27 3 96#define DRM_MODE_PICTURE_ASPECT_64_27 3
97#define DRM_MODE_PICTURE_ASPECT_256_135 4 97#define DRM_MODE_PICTURE_ASPECT_256_135 4
98 98
99/* Content type options */
100#define DRM_MODE_CONTENT_TYPE_NO_DATA 0
101#define DRM_MODE_CONTENT_TYPE_GRAPHICS 1
102#define DRM_MODE_CONTENT_TYPE_PHOTO 2
103#define DRM_MODE_CONTENT_TYPE_CINEMA 3
104#define DRM_MODE_CONTENT_TYPE_GAME 4
105
99/* Aspect ratio flag bitmask (4 bits 22:19) */ 106/* Aspect ratio flag bitmask (4 bits 22:19) */
100#define DRM_MODE_FLAG_PIC_AR_MASK (0x0F<<19) 107#define DRM_MODE_FLAG_PIC_AR_MASK (0x0F<<19)
101#define DRM_MODE_FLAG_PIC_AR_NONE \ 108#define DRM_MODE_FLAG_PIC_AR_NONE \
@@ -344,6 +351,7 @@ enum drm_mode_subconnector {
344#define DRM_MODE_CONNECTOR_VIRTUAL 15 351#define DRM_MODE_CONNECTOR_VIRTUAL 15
345#define DRM_MODE_CONNECTOR_DSI 16 352#define DRM_MODE_CONNECTOR_DSI 16
346#define DRM_MODE_CONNECTOR_DPI 17 353#define DRM_MODE_CONNECTOR_DPI 17
354#define DRM_MODE_CONNECTOR_WRITEBACK 18
347 355
348struct drm_mode_get_connector { 356struct drm_mode_get_connector {
349 357