author     Linus Torvalds <torvalds@linux-foundation.org>  2010-08-22 14:03:27 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>  2010-08-22 14:03:27 -0400
commit     4238a417a91643e1162a98770288f630e37f0484 (patch)
tree       9a4ec0f4249ebe5c723d3f281f087aa472666c02
parent     bc584c5107bfd97e2aa41c798e3b213bcdd4eae7 (diff)
parent     4fefe435626758b14e6c05d2a5f8d71a997c0ad6 (diff)
Merge branch 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/anholt/drm-intel
* 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/anholt/drm-intel: (58 commits)
  drm/i915,intel_agp: Add support for Sandybridge D0
  drm/i915: fix render pipe control notify on sandybridge
  agp/intel: set 40-bit dma mask on Sandybridge
  drm/i915: Remove the conflicting BUG_ON()
  drm/i915/suspend: s/IS_IRONLAKE/HAS_PCH_SPLIT/
  drm/i915/suspend: Flush register writes before busy-waiting.
  i915: disable DAC on Ironlake also when doing CRT load detection.
  drm/i915: wait for actual vblank, not just 20ms
  drm/i915: make sure eDP PLL is enabled at the right time
  drm/i915: fix VGA plane disable for Ironlake+
  drm/i915: eDP mode set sequence corrections
  drm/i915: add panel reset workaround
  drm/i915: Enable RC6 on Ironlake.
  drm/i915/sdvo: Only set is_lvds if we have a valid fixed mode.
  drm/i915: Set up a render context on Ironlake
  drm/i915 invalidate indirect state pointers at end of ring exec
  drm/i915: Wake-up wait_request() from elapsed hang-check (v2)
  drm/i915: Apply i830 errata for cursor alignment
  drm/i915: Only update i845/i865 CURBASE when disabled (v2)
  drm/i915: FBC is updated within set_base() so remove second call in mode_set()
  ...
-rw-r--r--  drivers/char/agp/intel-agp.c            |   26
-rw-r--r--  drivers/char/agp/intel-agp.h            |    1
-rw-r--r--  drivers/gpu/drm/i915/Makefile           |    2
-rw-r--r--  drivers/gpu/drm/i915/dvo.h              |    7
-rw-r--r--  drivers/gpu/drm/i915/i915_debugfs.c     |    3
-rw-r--r--  drivers/gpu/drm/i915/i915_dma.c         |    7
-rw-r--r--  drivers/gpu/drm/i915/i915_drv.c         |    1
-rw-r--r--  drivers/gpu/drm/i915/i915_drv.h         |   45
-rw-r--r--  drivers/gpu/drm/i915/i915_gem.c         |  354
-rw-r--r--  drivers/gpu/drm/i915/i915_gem_evict.c   |  271
-rw-r--r--  drivers/gpu/drm/i915/i915_irq.c         |   84
-rw-r--r--  drivers/gpu/drm/i915/i915_opregion.c    |   10
-rw-r--r--  drivers/gpu/drm/i915/i915_reg.h         |   14
-rw-r--r--  drivers/gpu/drm/i915/i915_suspend.c     |   97
-rw-r--r--  drivers/gpu/drm/i915/intel_crt.c        |   53
-rw-r--r--  drivers/gpu/drm/i915/intel_display.c    |  519
-rw-r--r--  drivers/gpu/drm/i915/intel_dp.c         |  596
-rw-r--r--  drivers/gpu/drm/i915/intel_drv.h        |   29
-rw-r--r--  drivers/gpu/drm/i915/intel_dvo.c        |  136
-rw-r--r--  drivers/gpu/drm/i915/intel_hdmi.c       |   77
-rw-r--r--  drivers/gpu/drm/i915/intel_lvds.c       |  106
-rw-r--r--  drivers/gpu/drm/i915/intel_overlay.c    |   99
-rw-r--r--  drivers/gpu/drm/i915/intel_panel.c      |  111
-rw-r--r--  drivers/gpu/drm/i915/intel_ringbuffer.c |  103
-rw-r--r--  drivers/gpu/drm/i915/intel_ringbuffer.h |   13
-rw-r--r--  drivers/gpu/drm/i915/intel_sdvo.c       | 2107
-rw-r--r--  drivers/gpu/drm/i915/intel_sdvo_regs.h  |   50
-rw-r--r--  drivers/gpu/drm/i915/intel_tv.c         |  159
28 files changed, 2663 insertions, 2417 deletions
diff --git a/drivers/char/agp/intel-agp.c b/drivers/char/agp/intel-agp.c
index ddf5def1b0da..710af89b176d 100644
--- a/drivers/char/agp/intel-agp.c
+++ b/drivers/char/agp/intel-agp.c
@@ -819,13 +819,16 @@ static const struct intel_driver_description {
819 "Sandybridge", NULL, &intel_gen6_driver }, 819 "Sandybridge", NULL, &intel_gen6_driver },
820 { PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_HB, PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_IG, 820 { PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_HB, PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_IG,
821 "Sandybridge", NULL, &intel_gen6_driver }, 821 "Sandybridge", NULL, &intel_gen6_driver },
822 { PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_HB, PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_D0_IG,
823 "Sandybridge", NULL, &intel_gen6_driver },
822 { 0, 0, NULL, NULL, NULL } 824 { 0, 0, NULL, NULL, NULL }
823}; 825};
824 826
825static int __devinit intel_gmch_probe(struct pci_dev *pdev, 827static int __devinit intel_gmch_probe(struct pci_dev *pdev,
826 struct agp_bridge_data *bridge) 828 struct agp_bridge_data *bridge)
827{ 829{
828 int i; 830 int i, mask;
831
829 bridge->driver = NULL; 832 bridge->driver = NULL;
830 833
831 for (i = 0; intel_agp_chipsets[i].name != NULL; i++) { 834 for (i = 0; intel_agp_chipsets[i].name != NULL; i++) {
@@ -845,14 +848,19 @@ static int __devinit intel_gmch_probe(struct pci_dev *pdev,
845 848
846 dev_info(&pdev->dev, "Intel %s Chipset\n", intel_agp_chipsets[i].name); 849 dev_info(&pdev->dev, "Intel %s Chipset\n", intel_agp_chipsets[i].name);
847 850
848 if (bridge->driver->mask_memory == intel_i965_mask_memory) { 851 if (bridge->driver->mask_memory == intel_gen6_mask_memory)
849 if (pci_set_dma_mask(intel_private.pcidev, DMA_BIT_MASK(36))) 852 mask = 40;
850 dev_err(&intel_private.pcidev->dev, 853 else if (bridge->driver->mask_memory == intel_i965_mask_memory)
851 "set gfx device dma mask 36bit failed!\n"); 854 mask = 36;
852 else 855 else
853 pci_set_consistent_dma_mask(intel_private.pcidev, 856 mask = 32;
854 DMA_BIT_MASK(36)); 857
855 } 858 if (pci_set_dma_mask(intel_private.pcidev, DMA_BIT_MASK(mask)))
859 dev_err(&intel_private.pcidev->dev,
860 "set gfx device dma mask %d-bit failed!\n", mask);
861 else
862 pci_set_consistent_dma_mask(intel_private.pcidev,
863 DMA_BIT_MASK(mask));
856 864
857 return 1; 865 return 1;
858} 866}
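
For readers who want the resulting logic in one place, here is a minimal stand-alone sketch of the DMA-mask selection this hunk introduces. The helper name and the boolean flags are placeholders for the sketch, not driver code; only pci_set_dma_mask(), pci_set_consistent_dma_mask() and DMA_BIT_MASK() are the real kernel calls the patch relies on.

	/* Sketch only: pick a DMA mask width by hardware generation and apply it
	 * to both streaming and coherent DMA, logging a warning on failure.
	 */
	#include <linux/pci.h>
	#include <linux/dma-mapping.h>

	static void example_set_gfx_dma_mask(struct pci_dev *pdev,
					     bool is_gen6, bool is_i965)
	{
		int mask;

		if (is_gen6)
			mask = 40;	/* Sandybridge can address 40 bits */
		else if (is_i965)
			mask = 36;
		else
			mask = 32;

		if (pci_set_dma_mask(pdev, DMA_BIT_MASK(mask)))
			dev_err(&pdev->dev,
				"set gfx device dma mask %d-bit failed!\n", mask);
		else
			pci_set_consistent_dma_mask(pdev, DMA_BIT_MASK(mask));
	}
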
diff --git a/drivers/char/agp/intel-agp.h b/drivers/char/agp/intel-agp.h
index c05e3e518268..08d47532e605 100644
--- a/drivers/char/agp/intel-agp.h
+++ b/drivers/char/agp/intel-agp.h
@@ -204,6 +204,7 @@
 #define PCI_DEVICE_ID_INTEL_SANDYBRIDGE_IG	0x0102
 #define PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_HB	0x0104
 #define PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_IG	0x0106
+#define PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_D0_IG	0x0126
 
 /* cover 915 and 945 variants */
 #define IS_I915 (agp_bridge->dev->device == PCI_DEVICE_ID_INTEL_E7221_HB || \
diff --git a/drivers/gpu/drm/i915/Makefile b/drivers/gpu/drm/i915/Makefile
index da78f2c0d909..5c8e53458edb 100644
--- a/drivers/gpu/drm/i915/Makefile
+++ b/drivers/gpu/drm/i915/Makefile
@@ -8,6 +8,7 @@ i915-y := i915_drv.o i915_dma.o i915_irq.o i915_mem.o \
 	  i915_suspend.o \
 	  i915_gem.o \
 	  i915_gem_debug.o \
+	  i915_gem_evict.o \
 	  i915_gem_tiling.o \
 	  i915_trace_points.o \
 	  intel_display.o \
@@ -18,6 +19,7 @@ i915-y := i915_drv.o i915_dma.o i915_irq.o i915_mem.o \
 	  intel_hdmi.o \
 	  intel_sdvo.o \
 	  intel_modes.o \
+	  intel_panel.o \
 	  intel_i2c.o \
 	  intel_fb.o \
 	  intel_tv.o \
diff --git a/drivers/gpu/drm/i915/dvo.h b/drivers/gpu/drm/i915/dvo.h
index 0d6ff640e1c6..8c2ad014c47f 100644
--- a/drivers/gpu/drm/i915/dvo.h
+++ b/drivers/gpu/drm/i915/dvo.h
@@ -30,20 +30,17 @@
 #include "intel_drv.h"
 
 struct intel_dvo_device {
-	char *name;
+	const char *name;
 	int type;
 	/* DVOA/B/C output register */
 	u32 dvo_reg;
 	/* GPIO register used for i2c bus to control this device */
 	u32 gpio;
 	int slave_addr;
-	struct i2c_adapter *i2c_bus;
 
 	const struct intel_dvo_dev_ops *dev_ops;
 	void *dev_priv;
-
-	struct drm_display_mode *panel_fixed_mode;
-	bool panel_wants_dither;
+	struct i2c_adapter *i2c_bus;
 };
 
 struct intel_dvo_dev_ops {
diff --git a/drivers/gpu/drm/i915/i915_debugfs.c b/drivers/gpu/drm/i915/i915_debugfs.c
index 9214119c0154..92d5605a34d1 100644
--- a/drivers/gpu/drm/i915/i915_debugfs.c
+++ b/drivers/gpu/drm/i915/i915_debugfs.c
@@ -467,6 +467,9 @@ static int i915_error_state(struct seq_file *m, void *unused)
 		}
 	}
 
+	if (error->overlay)
+		intel_overlay_print_error_state(m, error->overlay);
+
 out:
 	spin_unlock_irqrestore(&dev_priv->error_lock, flags);
 
diff --git a/drivers/gpu/drm/i915/i915_dma.c b/drivers/gpu/drm/i915/i915_dma.c
index f19ffe87af3c..44af317731b6 100644
--- a/drivers/gpu/drm/i915/i915_dma.c
+++ b/drivers/gpu/drm/i915/i915_dma.c
@@ -499,6 +499,13 @@ static int i915_dispatch_batchbuffer(struct drm_device * dev,
 		}
 	}
 
+
+	if (IS_G4X(dev) || IS_IRONLAKE(dev)) {
+		BEGIN_LP_RING(2);
+		OUT_RING(MI_FLUSH | MI_NO_WRITE_FLUSH | MI_INVALIDATE_ISP);
+		OUT_RING(MI_NOOP);
+		ADVANCE_LP_RING();
+	}
 	i915_emit_breadcrumb(dev);
 
 	return 0;
diff --git a/drivers/gpu/drm/i915/i915_drv.c b/drivers/gpu/drm/i915/i915_drv.c
index 5044f653e8ea..00befce8fbb7 100644
--- a/drivers/gpu/drm/i915/i915_drv.c
+++ b/drivers/gpu/drm/i915/i915_drv.c
@@ -181,6 +181,7 @@ static const struct pci_device_id pciidlist[] = { /* aka */
 	INTEL_VGA_DEVICE(0x0046, &intel_ironlake_m_info),
 	INTEL_VGA_DEVICE(0x0102, &intel_sandybridge_d_info),
 	INTEL_VGA_DEVICE(0x0106, &intel_sandybridge_m_info),
+	INTEL_VGA_DEVICE(0x0126, &intel_sandybridge_m_info),
 	{0, 0, 0}
 };
 
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index 906663b9929e..047cd7ce7e1b 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -113,6 +113,9 @@ struct intel_opregion {
 	int enabled;
 };
 
+struct intel_overlay;
+struct intel_overlay_error_state;
+
 struct drm_i915_master_private {
 	drm_local_map_t *sarea;
 	struct _drm_i915_sarea *sarea_priv;
@@ -166,6 +169,7 @@ struct drm_i915_error_state {
 		u32 purgeable:1;
 	} *active_bo;
 	u32 active_bo_count;
+	struct intel_overlay_error_state *overlay;
 };
 
 struct drm_i915_display_funcs {
@@ -186,8 +190,6 @@ struct drm_i915_display_funcs {
 	/* clock gating init */
 };
 
-struct intel_overlay;
-
 struct intel_device_info {
 	u8 is_mobile : 1;
 	u8 is_i8xx : 1;
@@ -242,6 +244,7 @@ typedef struct drm_i915_private {
 	struct pci_dev *bridge_dev;
 	struct intel_ring_buffer render_ring;
 	struct intel_ring_buffer bsd_ring;
+	uint32_t next_seqno;
 
 	drm_dma_handle_t *status_page_dmah;
 	void *seqno_page;
@@ -251,6 +254,7 @@ typedef struct drm_i915_private {
 	drm_local_map_t hws_map;
 	struct drm_gem_object *seqno_obj;
 	struct drm_gem_object *pwrctx;
+	struct drm_gem_object *renderctx;
 
 	struct resource mch_res;
 
@@ -285,6 +289,9 @@ typedef struct drm_i915_private {
 	unsigned int sr01, adpa, ppcr, dvob, dvoc, lvds;
 	int vblank_pipe;
 	int num_pipe;
+	u32 flush_rings;
+#define FLUSH_RENDER_RING	0x1
+#define FLUSH_BSD_RING		0x2
 
 	/* For hangcheck timer */
 #define DRM_I915_HANGCHECK_PERIOD 75 /* in jiffies */
@@ -568,8 +575,6 @@ typedef struct drm_i915_private {
 	 */
 	struct delayed_work retire_work;
 
-	uint32_t next_gem_seqno;
-
 	/**
 	 * Waiting sequence number, if any
 	 */
@@ -610,6 +615,8 @@ typedef struct drm_i915_private {
 	struct sdvo_device_mapping sdvo_mappings[2];
 	/* indicate whether the LVDS_BORDER should be enabled or not */
 	unsigned int lvds_border_bits;
+	/* Panel fitter placement and size for Ironlake+ */
+	u32 pch_pf_pos, pch_pf_size;
 
 	struct drm_crtc *plane_to_crtc_mapping[2];
 	struct drm_crtc *pipe_to_crtc_mapping[2];
@@ -669,6 +676,8 @@ struct drm_i915_gem_object {
 	struct list_head list;
 	/** This object's place on GPU write list */
 	struct list_head gpu_write_list;
+	/** This object's place on eviction list */
+	struct list_head evict_list;
 
 	/**
 	 * This is set if the object is on the active or flushing lists
@@ -978,6 +987,7 @@ int i915_gem_init_ringbuffer(struct drm_device *dev);
 void i915_gem_cleanup_ringbuffer(struct drm_device *dev);
 int i915_gem_do_init(struct drm_device *dev, unsigned long start,
 		     unsigned long end);
+int i915_gpu_idle(struct drm_device *dev);
 int i915_gem_idle(struct drm_device *dev);
 uint32_t i915_add_request(struct drm_device *dev,
 			  struct drm_file *file_priv,
@@ -991,7 +1001,9 @@ int i915_gem_object_set_to_gtt_domain(struct drm_gem_object *obj,
 				      int write);
 int i915_gem_object_set_to_display_plane(struct drm_gem_object *obj);
 int i915_gem_attach_phys_object(struct drm_device *dev,
-				struct drm_gem_object *obj, int id);
+				struct drm_gem_object *obj,
+				int id,
+				int align);
 void i915_gem_detach_phys_object(struct drm_device *dev,
 				 struct drm_gem_object *obj);
 void i915_gem_free_all_phys_object(struct drm_device *dev);
@@ -1003,6 +1015,11 @@ int i915_gem_object_flush_write_domain(struct drm_gem_object *obj);
 void i915_gem_shrinker_init(void);
 void i915_gem_shrinker_exit(void);
 
+/* i915_gem_evict.c */
+int i915_gem_evict_something(struct drm_device *dev, int min_size, unsigned alignment);
+int i915_gem_evict_everything(struct drm_device *dev);
+int i915_gem_evict_inactive(struct drm_device *dev);
+
 /* i915_gem_tiling.c */
 void i915_gem_detect_bit_6_swizzle(struct drm_device *dev);
 void i915_gem_object_do_bit_17_swizzle(struct drm_gem_object *obj);
@@ -1066,6 +1083,10 @@ extern bool ironlake_set_drps(struct drm_device *dev, u8 val);
 extern void intel_detect_pch (struct drm_device *dev);
 extern int intel_trans_dp_port_sel (struct drm_crtc *crtc);
 
+/* overlay */
+extern struct intel_overlay_error_state *intel_overlay_capture_error_state(struct drm_device *dev);
+extern void intel_overlay_print_error_state(struct seq_file *m, struct intel_overlay_error_state *error);
+
 /**
  * Lock test for when it's just for synchronization of ring access.
  *
@@ -1092,26 +1113,26 @@ extern int intel_trans_dp_port_sel (struct drm_crtc *crtc);
 #define I915_VERBOSE 0
 
 #define BEGIN_LP_RING(n)  do { \
-	drm_i915_private_t *dev_priv = dev->dev_private; \
+	drm_i915_private_t *dev_priv__ = dev->dev_private; \
 	if (I915_VERBOSE) \
 		DRM_DEBUG("  BEGIN_LP_RING %x\n", (int)(n)); \
-	intel_ring_begin(dev, &dev_priv->render_ring, (n)); \
+	intel_ring_begin(dev, &dev_priv__->render_ring, (n)); \
 } while (0)
 
 
 #define OUT_RING(x) do { \
-	drm_i915_private_t *dev_priv = dev->dev_private; \
+	drm_i915_private_t *dev_priv__ = dev->dev_private; \
 	if (I915_VERBOSE) \
 		DRM_DEBUG("  OUT_RING %x\n", (int)(x)); \
-	intel_ring_emit(dev, &dev_priv->render_ring, x); \
+	intel_ring_emit(dev, &dev_priv__->render_ring, x); \
 } while (0)
 
 #define ADVANCE_LP_RING() do { \
-	drm_i915_private_t *dev_priv = dev->dev_private; \
+	drm_i915_private_t *dev_priv__ = dev->dev_private; \
 	if (I915_VERBOSE) \
 		DRM_DEBUG("ADVANCE_LP_RING %x\n", \
-				dev_priv->render_ring.tail); \
-	intel_ring_advance(dev, &dev_priv->render_ring); \
+				dev_priv__->render_ring.tail); \
+	intel_ring_advance(dev, &dev_priv__->render_ring); \
 } while(0)
 
 /**
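
The dev_priv__ rename in the ring macros above matters because a macro-local variable with the same name as one in the calling function shadows the caller's variable: any macro argument that mentions dev_priv would silently bind to the macro's own local. A tiny stand-alone C illustration of that pitfall (nothing here is kernel code, the names are made up for the example):

	#include <stdio.h>

	/* The macro declares a local named "val"; the argument expression
	 * (x) then refers to that local, not to the caller's "val".
	 */
	#define EMIT(x) do { \
		int val = 0; \
		printf("emitting %d\n", (x)); \
	} while (0)

	int main(void)
	{
		int val = 5;
		EMIT(val);	/* caller means 5, but 0 is printed */
		return 0;
	}

Renaming the macro's local to something unlikely to collide, as the patch does with dev_priv__, sidesteps the problem.
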
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index 0758c7802e6b..df5a7135c261 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -35,6 +35,7 @@
35#include <linux/swap.h> 35#include <linux/swap.h>
36#include <linux/pci.h> 36#include <linux/pci.h>
37 37
38static uint32_t i915_gem_get_gtt_alignment(struct drm_gem_object *obj);
38static int i915_gem_object_flush_gpu_write_domain(struct drm_gem_object *obj); 39static int i915_gem_object_flush_gpu_write_domain(struct drm_gem_object *obj);
39static void i915_gem_object_flush_gtt_write_domain(struct drm_gem_object *obj); 40static void i915_gem_object_flush_gtt_write_domain(struct drm_gem_object *obj);
40static void i915_gem_object_flush_cpu_write_domain(struct drm_gem_object *obj); 41static void i915_gem_object_flush_cpu_write_domain(struct drm_gem_object *obj);
@@ -48,8 +49,6 @@ static int i915_gem_object_wait_rendering(struct drm_gem_object *obj);
48static int i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, 49static int i915_gem_object_bind_to_gtt(struct drm_gem_object *obj,
49 unsigned alignment); 50 unsigned alignment);
50static void i915_gem_clear_fence_reg(struct drm_gem_object *obj); 51static void i915_gem_clear_fence_reg(struct drm_gem_object *obj);
51static int i915_gem_evict_something(struct drm_device *dev, int min_size);
52static int i915_gem_evict_from_inactive_list(struct drm_device *dev);
53static int i915_gem_phys_pwrite(struct drm_device *dev, struct drm_gem_object *obj, 52static int i915_gem_phys_pwrite(struct drm_device *dev, struct drm_gem_object *obj,
54 struct drm_i915_gem_pwrite *args, 53 struct drm_i915_gem_pwrite *args,
55 struct drm_file *file_priv); 54 struct drm_file *file_priv);
@@ -58,6 +57,14 @@ static void i915_gem_free_object_tail(struct drm_gem_object *obj);
58static LIST_HEAD(shrink_list); 57static LIST_HEAD(shrink_list);
59static DEFINE_SPINLOCK(shrink_list_lock); 58static DEFINE_SPINLOCK(shrink_list_lock);
60 59
60static inline bool
61i915_gem_object_is_inactive(struct drm_i915_gem_object *obj_priv)
62{
63 return obj_priv->gtt_space &&
64 !obj_priv->active &&
65 obj_priv->pin_count == 0;
66}
67
61int i915_gem_do_init(struct drm_device *dev, unsigned long start, 68int i915_gem_do_init(struct drm_device *dev, unsigned long start,
62 unsigned long end) 69 unsigned long end)
63{ 70{
@@ -313,7 +320,8 @@ i915_gem_object_get_pages_or_evict(struct drm_gem_object *obj)
313 if (ret == -ENOMEM) { 320 if (ret == -ENOMEM) {
314 struct drm_device *dev = obj->dev; 321 struct drm_device *dev = obj->dev;
315 322
316 ret = i915_gem_evict_something(dev, obj->size); 323 ret = i915_gem_evict_something(dev, obj->size,
324 i915_gem_get_gtt_alignment(obj));
317 if (ret) 325 if (ret)
318 return ret; 326 return ret;
319 327
@@ -1036,6 +1044,11 @@ i915_gem_set_domain_ioctl(struct drm_device *dev, void *data,
1036 ret = i915_gem_object_set_to_cpu_domain(obj, write_domain != 0); 1044 ret = i915_gem_object_set_to_cpu_domain(obj, write_domain != 0);
1037 } 1045 }
1038 1046
1047
1048 /* Maintain LRU order of "inactive" objects */
1049 if (ret == 0 && i915_gem_object_is_inactive(obj_priv))
1050 list_move_tail(&obj_priv->list, &dev_priv->mm.inactive_list);
1051
1039 drm_gem_object_unreference(obj); 1052 drm_gem_object_unreference(obj);
1040 mutex_unlock(&dev->struct_mutex); 1053 mutex_unlock(&dev->struct_mutex);
1041 return ret; 1054 return ret;
@@ -1137,7 +1150,7 @@ int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
1137{ 1150{
1138 struct drm_gem_object *obj = vma->vm_private_data; 1151 struct drm_gem_object *obj = vma->vm_private_data;
1139 struct drm_device *dev = obj->dev; 1152 struct drm_device *dev = obj->dev;
1140 struct drm_i915_private *dev_priv = dev->dev_private; 1153 drm_i915_private_t *dev_priv = dev->dev_private;
1141 struct drm_i915_gem_object *obj_priv = to_intel_bo(obj); 1154 struct drm_i915_gem_object *obj_priv = to_intel_bo(obj);
1142 pgoff_t page_offset; 1155 pgoff_t page_offset;
1143 unsigned long pfn; 1156 unsigned long pfn;
@@ -1155,8 +1168,6 @@ int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
1155 if (ret) 1168 if (ret)
1156 goto unlock; 1169 goto unlock;
1157 1170
1158 list_add_tail(&obj_priv->list, &dev_priv->mm.inactive_list);
1159
1160 ret = i915_gem_object_set_to_gtt_domain(obj, write); 1171 ret = i915_gem_object_set_to_gtt_domain(obj, write);
1161 if (ret) 1172 if (ret)
1162 goto unlock; 1173 goto unlock;
@@ -1169,6 +1180,9 @@ int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
1169 goto unlock; 1180 goto unlock;
1170 } 1181 }
1171 1182
1183 if (i915_gem_object_is_inactive(obj_priv))
1184 list_move_tail(&obj_priv->list, &dev_priv->mm.inactive_list);
1185
1172 pfn = ((dev->agp->base + obj_priv->gtt_offset) >> PAGE_SHIFT) + 1186 pfn = ((dev->agp->base + obj_priv->gtt_offset) >> PAGE_SHIFT) +
1173 page_offset; 1187 page_offset;
1174 1188
@@ -1363,7 +1377,6 @@ i915_gem_mmap_gtt_ioctl(struct drm_device *dev, void *data,
1363 struct drm_file *file_priv) 1377 struct drm_file *file_priv)
1364{ 1378{
1365 struct drm_i915_gem_mmap_gtt *args = data; 1379 struct drm_i915_gem_mmap_gtt *args = data;
1366 struct drm_i915_private *dev_priv = dev->dev_private;
1367 struct drm_gem_object *obj; 1380 struct drm_gem_object *obj;
1368 struct drm_i915_gem_object *obj_priv; 1381 struct drm_i915_gem_object *obj_priv;
1369 int ret; 1382 int ret;
@@ -1409,7 +1422,6 @@ i915_gem_mmap_gtt_ioctl(struct drm_device *dev, void *data,
1409 mutex_unlock(&dev->struct_mutex); 1422 mutex_unlock(&dev->struct_mutex);
1410 return ret; 1423 return ret;
1411 } 1424 }
1412 list_add_tail(&obj_priv->list, &dev_priv->mm.inactive_list);
1413 } 1425 }
1414 1426
1415 drm_gem_object_unreference(obj); 1427 drm_gem_object_unreference(obj);
@@ -1493,9 +1505,16 @@ i915_gem_object_truncate(struct drm_gem_object *obj)
 	struct drm_i915_gem_object *obj_priv = to_intel_bo(obj);
 	struct inode *inode;
 
+	/* Our goal here is to return as much of the memory as
+	 * is possible back to the system as we are called from OOM.
+	 * To do this we must instruct the shmfs to drop all of its
+	 * backing pages, *now*. Here we mirror the actions taken
+	 * when by shmem_delete_inode() to release the backing store.
+	 */
 	inode = obj->filp->f_path.dentry->d_inode;
-	if (inode->i_op->truncate)
-		inode->i_op->truncate (inode);
+	truncate_inode_pages(inode->i_mapping, 0);
+	if (inode->i_op->truncate_range)
+		inode->i_op->truncate_range(inode, 0, (loff_t)-1);
 
 	obj_priv->madv = __I915_MADV_PURGED;
 }
@@ -1887,19 +1906,6 @@ i915_gem_flush(struct drm_device *dev,
1887 flush_domains); 1906 flush_domains);
1888} 1907}
1889 1908
1890static void
1891i915_gem_flush_ring(struct drm_device *dev,
1892 uint32_t invalidate_domains,
1893 uint32_t flush_domains,
1894 struct intel_ring_buffer *ring)
1895{
1896 if (flush_domains & I915_GEM_DOMAIN_CPU)
1897 drm_agp_chipset_flush(dev);
1898 ring->flush(dev, ring,
1899 invalidate_domains,
1900 flush_domains);
1901}
1902
1903/** 1909/**
1904 * Ensures that all rendering to the object has completed and the object is 1910 * Ensures that all rendering to the object has completed and the object is
1905 * safe to unbind from the GTT or access from the CPU. 1911 * safe to unbind from the GTT or access from the CPU.
@@ -1973,8 +1979,6 @@ i915_gem_object_unbind(struct drm_gem_object *obj)
1973 * cause memory corruption through use-after-free. 1979 * cause memory corruption through use-after-free.
1974 */ 1980 */
1975 1981
1976 BUG_ON(obj_priv->active);
1977
1978 /* release the fence reg _after_ flushing */ 1982 /* release the fence reg _after_ flushing */
1979 if (obj_priv->fence_reg != I915_FENCE_REG_NONE) 1983 if (obj_priv->fence_reg != I915_FENCE_REG_NONE)
1980 i915_gem_clear_fence_reg(obj); 1984 i915_gem_clear_fence_reg(obj);
@@ -2010,34 +2014,7 @@ i915_gem_object_unbind(struct drm_gem_object *obj)
2010 return ret; 2014 return ret;
2011} 2015}
2012 2016
2013static struct drm_gem_object * 2017int
2014i915_gem_find_inactive_object(struct drm_device *dev, int min_size)
2015{
2016 drm_i915_private_t *dev_priv = dev->dev_private;
2017 struct drm_i915_gem_object *obj_priv;
2018 struct drm_gem_object *best = NULL;
2019 struct drm_gem_object *first = NULL;
2020
2021 /* Try to find the smallest clean object */
2022 list_for_each_entry(obj_priv, &dev_priv->mm.inactive_list, list) {
2023 struct drm_gem_object *obj = &obj_priv->base;
2024 if (obj->size >= min_size) {
2025 if ((!obj_priv->dirty ||
2026 i915_gem_object_is_purgeable(obj_priv)) &&
2027 (!best || obj->size < best->size)) {
2028 best = obj;
2029 if (best->size == min_size)
2030 return best;
2031 }
2032 if (!first)
2033 first = obj;
2034 }
2035 }
2036
2037 return best ? best : first;
2038}
2039
2040static int
2041i915_gpu_idle(struct drm_device *dev) 2018i915_gpu_idle(struct drm_device *dev)
2042{ 2019{
2043 drm_i915_private_t *dev_priv = dev->dev_private; 2020 drm_i915_private_t *dev_priv = dev->dev_private;
@@ -2078,155 +2055,6 @@ i915_gpu_idle(struct drm_device *dev)
2078 return ret; 2055 return ret;
2079} 2056}
2080 2057
2081static int
2082i915_gem_evict_everything(struct drm_device *dev)
2083{
2084 drm_i915_private_t *dev_priv = dev->dev_private;
2085 int ret;
2086 bool lists_empty;
2087
2088 spin_lock(&dev_priv->mm.active_list_lock);
2089 lists_empty = (list_empty(&dev_priv->mm.inactive_list) &&
2090 list_empty(&dev_priv->mm.flushing_list) &&
2091 list_empty(&dev_priv->render_ring.active_list) &&
2092 (!HAS_BSD(dev)
2093 || list_empty(&dev_priv->bsd_ring.active_list)));
2094 spin_unlock(&dev_priv->mm.active_list_lock);
2095
2096 if (lists_empty)
2097 return -ENOSPC;
2098
2099 /* Flush everything (on to the inactive lists) and evict */
2100 ret = i915_gpu_idle(dev);
2101 if (ret)
2102 return ret;
2103
2104 BUG_ON(!list_empty(&dev_priv->mm.flushing_list));
2105
2106 ret = i915_gem_evict_from_inactive_list(dev);
2107 if (ret)
2108 return ret;
2109
2110 spin_lock(&dev_priv->mm.active_list_lock);
2111 lists_empty = (list_empty(&dev_priv->mm.inactive_list) &&
2112 list_empty(&dev_priv->mm.flushing_list) &&
2113 list_empty(&dev_priv->render_ring.active_list) &&
2114 (!HAS_BSD(dev)
2115 || list_empty(&dev_priv->bsd_ring.active_list)));
2116 spin_unlock(&dev_priv->mm.active_list_lock);
2117 BUG_ON(!lists_empty);
2118
2119 return 0;
2120}
2121
2122static int
2123i915_gem_evict_something(struct drm_device *dev, int min_size)
2124{
2125 drm_i915_private_t *dev_priv = dev->dev_private;
2126 struct drm_gem_object *obj;
2127 int ret;
2128
2129 struct intel_ring_buffer *render_ring = &dev_priv->render_ring;
2130 struct intel_ring_buffer *bsd_ring = &dev_priv->bsd_ring;
2131 for (;;) {
2132 i915_gem_retire_requests(dev);
2133
2134 /* If there's an inactive buffer available now, grab it
2135 * and be done.
2136 */
2137 obj = i915_gem_find_inactive_object(dev, min_size);
2138 if (obj) {
2139 struct drm_i915_gem_object *obj_priv;
2140
2141#if WATCH_LRU
2142 DRM_INFO("%s: evicting %p\n", __func__, obj);
2143#endif
2144 obj_priv = to_intel_bo(obj);
2145 BUG_ON(obj_priv->pin_count != 0);
2146 BUG_ON(obj_priv->active);
2147
2148 /* Wait on the rendering and unbind the buffer. */
2149 return i915_gem_object_unbind(obj);
2150 }
2151
2152 /* If we didn't get anything, but the ring is still processing
2153 * things, wait for the next to finish and hopefully leave us
2154 * a buffer to evict.
2155 */
2156 if (!list_empty(&render_ring->request_list)) {
2157 struct drm_i915_gem_request *request;
2158
2159 request = list_first_entry(&render_ring->request_list,
2160 struct drm_i915_gem_request,
2161 list);
2162
2163 ret = i915_wait_request(dev,
2164 request->seqno, request->ring);
2165 if (ret)
2166 return ret;
2167
2168 continue;
2169 }
2170
2171 if (HAS_BSD(dev) && !list_empty(&bsd_ring->request_list)) {
2172 struct drm_i915_gem_request *request;
2173
2174 request = list_first_entry(&bsd_ring->request_list,
2175 struct drm_i915_gem_request,
2176 list);
2177
2178 ret = i915_wait_request(dev,
2179 request->seqno, request->ring);
2180 if (ret)
2181 return ret;
2182
2183 continue;
2184 }
2185
2186 /* If we didn't have anything on the request list but there
2187 * are buffers awaiting a flush, emit one and try again.
2188 * When we wait on it, those buffers waiting for that flush
2189 * will get moved to inactive.
2190 */
2191 if (!list_empty(&dev_priv->mm.flushing_list)) {
2192 struct drm_i915_gem_object *obj_priv;
2193
2194 /* Find an object that we can immediately reuse */
2195 list_for_each_entry(obj_priv, &dev_priv->mm.flushing_list, list) {
2196 obj = &obj_priv->base;
2197 if (obj->size >= min_size)
2198 break;
2199
2200 obj = NULL;
2201 }
2202
2203 if (obj != NULL) {
2204 uint32_t seqno;
2205
2206 i915_gem_flush_ring(dev,
2207 obj->write_domain,
2208 obj->write_domain,
2209 obj_priv->ring);
2210 seqno = i915_add_request(dev, NULL,
2211 obj->write_domain,
2212 obj_priv->ring);
2213 if (seqno == 0)
2214 return -ENOMEM;
2215 continue;
2216 }
2217 }
2218
2219 /* If we didn't do any of the above, there's no single buffer
2220 * large enough to swap out for the new one, so just evict
2221 * everything and start again. (This should be rare.)
2222 */
2223 if (!list_empty (&dev_priv->mm.inactive_list))
2224 return i915_gem_evict_from_inactive_list(dev);
2225 else
2226 return i915_gem_evict_everything(dev);
2227 }
2228}
2229
2230int 2058int
2231i915_gem_object_get_pages(struct drm_gem_object *obj, 2059i915_gem_object_get_pages(struct drm_gem_object *obj,
2232 gfp_t gfpmask) 2060 gfp_t gfpmask)
@@ -2666,7 +2494,7 @@ i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, unsigned alignment)
2666#if WATCH_LRU 2494#if WATCH_LRU
2667 DRM_INFO("%s: GTT full, evicting something\n", __func__); 2495 DRM_INFO("%s: GTT full, evicting something\n", __func__);
2668#endif 2496#endif
2669 ret = i915_gem_evict_something(dev, obj->size); 2497 ret = i915_gem_evict_something(dev, obj->size, alignment);
2670 if (ret) 2498 if (ret)
2671 return ret; 2499 return ret;
2672 2500
@@ -2684,7 +2512,8 @@ i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, unsigned alignment)
2684 2512
2685 if (ret == -ENOMEM) { 2513 if (ret == -ENOMEM) {
2686 /* first try to clear up some space from the GTT */ 2514 /* first try to clear up some space from the GTT */
2687 ret = i915_gem_evict_something(dev, obj->size); 2515 ret = i915_gem_evict_something(dev, obj->size,
2516 alignment);
2688 if (ret) { 2517 if (ret) {
2689 /* now try to shrink everyone else */ 2518 /* now try to shrink everyone else */
2690 if (gfpmask) { 2519 if (gfpmask) {
@@ -2714,7 +2543,7 @@ i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, unsigned alignment)
2714 drm_mm_put_block(obj_priv->gtt_space); 2543 drm_mm_put_block(obj_priv->gtt_space);
2715 obj_priv->gtt_space = NULL; 2544 obj_priv->gtt_space = NULL;
2716 2545
2717 ret = i915_gem_evict_something(dev, obj->size); 2546 ret = i915_gem_evict_something(dev, obj->size, alignment);
2718 if (ret) 2547 if (ret)
2719 return ret; 2548 return ret;
2720 2549
@@ -2723,6 +2552,9 @@ i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, unsigned alignment)
2723 atomic_inc(&dev->gtt_count); 2552 atomic_inc(&dev->gtt_count);
2724 atomic_add(obj->size, &dev->gtt_memory); 2553 atomic_add(obj->size, &dev->gtt_memory);
2725 2554
2555 /* keep track of bounds object by adding it to the inactive list */
2556 list_add_tail(&obj_priv->list, &dev_priv->mm.inactive_list);
2557
2726 /* Assert that the object is not currently in any GPU domain. As it 2558 /* Assert that the object is not currently in any GPU domain. As it
2727 * wasn't in the GTT, there shouldn't be any way it could have been in 2559 * wasn't in the GTT, there shouldn't be any way it could have been in
2728 * a GPU cache 2560 * a GPU cache
@@ -3117,6 +2949,7 @@ static void
3117i915_gem_object_set_to_gpu_domain(struct drm_gem_object *obj) 2949i915_gem_object_set_to_gpu_domain(struct drm_gem_object *obj)
3118{ 2950{
3119 struct drm_device *dev = obj->dev; 2951 struct drm_device *dev = obj->dev;
2952 drm_i915_private_t *dev_priv = dev->dev_private;
3120 struct drm_i915_gem_object *obj_priv = to_intel_bo(obj); 2953 struct drm_i915_gem_object *obj_priv = to_intel_bo(obj);
3121 uint32_t invalidate_domains = 0; 2954 uint32_t invalidate_domains = 0;
3122 uint32_t flush_domains = 0; 2955 uint32_t flush_domains = 0;
@@ -3179,6 +3012,13 @@ i915_gem_object_set_to_gpu_domain(struct drm_gem_object *obj)
3179 obj->pending_write_domain = obj->write_domain; 3012 obj->pending_write_domain = obj->write_domain;
3180 obj->read_domains = obj->pending_read_domains; 3013 obj->read_domains = obj->pending_read_domains;
3181 3014
3015 if (flush_domains & I915_GEM_GPU_DOMAINS) {
3016 if (obj_priv->ring == &dev_priv->render_ring)
3017 dev_priv->flush_rings |= FLUSH_RENDER_RING;
3018 else if (obj_priv->ring == &dev_priv->bsd_ring)
3019 dev_priv->flush_rings |= FLUSH_BSD_RING;
3020 }
3021
3182 dev->invalidate_domains |= invalidate_domains; 3022 dev->invalidate_domains |= invalidate_domains;
3183 dev->flush_domains |= flush_domains; 3023 dev->flush_domains |= flush_domains;
3184#if WATCH_BUF 3024#if WATCH_BUF
@@ -3718,7 +3558,6 @@ i915_gem_do_execbuffer(struct drm_device *dev, void *data,
3718 ring = &dev_priv->render_ring; 3558 ring = &dev_priv->render_ring;
3719 } 3559 }
3720 3560
3721
3722 if (args->buffer_count < 1) { 3561 if (args->buffer_count < 1) {
3723 DRM_ERROR("execbuf with %d buffers\n", args->buffer_count); 3562 DRM_ERROR("execbuf with %d buffers\n", args->buffer_count);
3724 return -EINVAL; 3563 return -EINVAL;
@@ -3892,6 +3731,7 @@ i915_gem_do_execbuffer(struct drm_device *dev, void *data,
3892 */ 3731 */
3893 dev->invalidate_domains = 0; 3732 dev->invalidate_domains = 0;
3894 dev->flush_domains = 0; 3733 dev->flush_domains = 0;
3734 dev_priv->flush_rings = 0;
3895 3735
3896 for (i = 0; i < args->buffer_count; i++) { 3736 for (i = 0; i < args->buffer_count; i++) {
3897 struct drm_gem_object *obj = object_list[i]; 3737 struct drm_gem_object *obj = object_list[i];
@@ -3912,16 +3752,14 @@ i915_gem_do_execbuffer(struct drm_device *dev, void *data,
3912 i915_gem_flush(dev, 3752 i915_gem_flush(dev,
3913 dev->invalidate_domains, 3753 dev->invalidate_domains,
3914 dev->flush_domains); 3754 dev->flush_domains);
3915 if (dev->flush_domains & I915_GEM_GPU_DOMAINS) { 3755 if (dev_priv->flush_rings & FLUSH_RENDER_RING)
3916 (void)i915_add_request(dev, file_priv, 3756 (void)i915_add_request(dev, file_priv,
3917 dev->flush_domains, 3757 dev->flush_domains,
3918 &dev_priv->render_ring); 3758 &dev_priv->render_ring);
3919 3759 if (dev_priv->flush_rings & FLUSH_BSD_RING)
3920 if (HAS_BSD(dev)) 3760 (void)i915_add_request(dev, file_priv,
3921 (void)i915_add_request(dev, file_priv, 3761 dev->flush_domains,
3922 dev->flush_domains, 3762 &dev_priv->bsd_ring);
3923 &dev_priv->bsd_ring);
3924 }
3925 } 3763 }
3926 3764
3927 for (i = 0; i < args->buffer_count; i++) { 3765 for (i = 0; i < args->buffer_count; i++) {
@@ -4192,6 +4030,10 @@ i915_gem_object_pin(struct drm_gem_object *obj, uint32_t alignment)
4192 if (alignment == 0) 4030 if (alignment == 0)
4193 alignment = i915_gem_get_gtt_alignment(obj); 4031 alignment = i915_gem_get_gtt_alignment(obj);
4194 if (obj_priv->gtt_offset & (alignment - 1)) { 4032 if (obj_priv->gtt_offset & (alignment - 1)) {
4033 WARN(obj_priv->pin_count,
4034 "bo is already pinned with incorrect alignment:"
4035 " offset=%x, req.alignment=%x\n",
4036 obj_priv->gtt_offset, alignment);
4195 ret = i915_gem_object_unbind(obj); 4037 ret = i915_gem_object_unbind(obj);
4196 if (ret) 4038 if (ret)
4197 return ret; 4039 return ret;
@@ -4213,8 +4055,7 @@ i915_gem_object_pin(struct drm_gem_object *obj, uint32_t alignment)
4213 atomic_inc(&dev->pin_count); 4055 atomic_inc(&dev->pin_count);
4214 atomic_add(obj->size, &dev->pin_memory); 4056 atomic_add(obj->size, &dev->pin_memory);
4215 if (!obj_priv->active && 4057 if (!obj_priv->active &&
4216 (obj->write_domain & I915_GEM_GPU_DOMAINS) == 0 && 4058 (obj->write_domain & I915_GEM_GPU_DOMAINS) == 0)
4217 !list_empty(&obj_priv->list))
4218 list_del_init(&obj_priv->list); 4059 list_del_init(&obj_priv->list);
4219 } 4060 }
4220 i915_verify_inactive(dev, __FILE__, __LINE__); 4061 i915_verify_inactive(dev, __FILE__, __LINE__);
@@ -4359,22 +4200,34 @@ i915_gem_busy_ioctl(struct drm_device *dev, void *data,
 	}
 
 	mutex_lock(&dev->struct_mutex);
-	/* Update the active list for the hardware's current position.
-	 * Otherwise this only updates on a delayed timer or when irqs are
-	 * actually unmasked, and our working set ends up being larger than
-	 * required.
-	 */
-	i915_gem_retire_requests(dev);
 
-	obj_priv = to_intel_bo(obj);
-	/* Don't count being on the flushing list against the object being
-	 * done. Otherwise, a buffer left on the flushing list but not getting
-	 * flushed (because nobody's flushing that domain) won't ever return
-	 * unbusy and get reused by libdrm's bo cache. The other expected
-	 * consumer of this interface, OpenGL's occlusion queries, also specs
-	 * that the objects get unbusy "eventually" without any interference.
+	/* Count all active objects as busy, even if they are currently not used
+	 * by the gpu. Users of this interface expect objects to eventually
+	 * become non-busy without any further actions, therefore emit any
+	 * necessary flushes here.
 	 */
-	args->busy = obj_priv->active && obj_priv->last_rendering_seqno != 0;
+	obj_priv = to_intel_bo(obj);
+	args->busy = obj_priv->active;
+	if (args->busy) {
+		/* Unconditionally flush objects, even when the gpu still uses this
+		 * object. Userspace calling this function indicates that it wants to
+		 * use this buffer rather sooner than later, so issuing the required
+		 * flush earlier is beneficial.
+		 */
+		if (obj->write_domain) {
+			i915_gem_flush(dev, 0, obj->write_domain);
+			(void)i915_add_request(dev, file_priv, obj->write_domain, obj_priv->ring);
+		}
+
+		/* Update the active list for the hardware's current position.
+		 * Otherwise this only updates on a delayed timer or when irqs
+		 * are actually unmasked, and our working set ends up being
+		 * larger than required.
+		 */
+		i915_gem_retire_requests_ring(dev, obj_priv->ring);
+
+		args->busy = obj_priv->active;
+	}
 
 	drm_gem_object_unreference(obj);
 	mutex_unlock(&dev->struct_mutex);
@@ -4514,30 +4367,6 @@ void i915_gem_free_object(struct drm_gem_object *obj)
4514 i915_gem_free_object_tail(obj); 4367 i915_gem_free_object_tail(obj);
4515} 4368}
4516 4369
4517/** Unbinds all inactive objects. */
4518static int
4519i915_gem_evict_from_inactive_list(struct drm_device *dev)
4520{
4521 drm_i915_private_t *dev_priv = dev->dev_private;
4522
4523 while (!list_empty(&dev_priv->mm.inactive_list)) {
4524 struct drm_gem_object *obj;
4525 int ret;
4526
4527 obj = &list_first_entry(&dev_priv->mm.inactive_list,
4528 struct drm_i915_gem_object,
4529 list)->base;
4530
4531 ret = i915_gem_object_unbind(obj);
4532 if (ret != 0) {
4533 DRM_ERROR("Error unbinding object: %d\n", ret);
4534 return ret;
4535 }
4536 }
4537
4538 return 0;
4539}
4540
4541int 4370int
4542i915_gem_idle(struct drm_device *dev) 4371i915_gem_idle(struct drm_device *dev)
4543{ 4372{
@@ -4562,7 +4391,7 @@ i915_gem_idle(struct drm_device *dev)
4562 4391
4563 /* Under UMS, be paranoid and evict. */ 4392 /* Under UMS, be paranoid and evict. */
4564 if (!drm_core_check_feature(dev, DRIVER_MODESET)) { 4393 if (!drm_core_check_feature(dev, DRIVER_MODESET)) {
4565 ret = i915_gem_evict_from_inactive_list(dev); 4394 ret = i915_gem_evict_inactive(dev);
4566 if (ret) { 4395 if (ret) {
4567 mutex_unlock(&dev->struct_mutex); 4396 mutex_unlock(&dev->struct_mutex);
4568 return ret; 4397 return ret;
@@ -4680,6 +4509,8 @@ i915_gem_init_ringbuffer(struct drm_device *dev)
4680 goto cleanup_render_ring; 4509 goto cleanup_render_ring;
4681 } 4510 }
4682 4511
4512 dev_priv->next_seqno = 1;
4513
4683 return 0; 4514 return 0;
4684 4515
4685cleanup_render_ring: 4516cleanup_render_ring:
@@ -4841,7 +4672,7 @@ i915_gem_load(struct drm_device *dev)
4841 * e.g. for cursor + overlay regs 4672 * e.g. for cursor + overlay regs
4842 */ 4673 */
4843int i915_gem_init_phys_object(struct drm_device *dev, 4674int i915_gem_init_phys_object(struct drm_device *dev,
4844 int id, int size) 4675 int id, int size, int align)
4845{ 4676{
4846 drm_i915_private_t *dev_priv = dev->dev_private; 4677 drm_i915_private_t *dev_priv = dev->dev_private;
4847 struct drm_i915_gem_phys_object *phys_obj; 4678 struct drm_i915_gem_phys_object *phys_obj;
@@ -4856,7 +4687,7 @@ int i915_gem_init_phys_object(struct drm_device *dev,
4856 4687
4857 phys_obj->id = id; 4688 phys_obj->id = id;
4858 4689
4859 phys_obj->handle = drm_pci_alloc(dev, size, 0); 4690 phys_obj->handle = drm_pci_alloc(dev, size, align);
4860 if (!phys_obj->handle) { 4691 if (!phys_obj->handle) {
4861 ret = -ENOMEM; 4692 ret = -ENOMEM;
4862 goto kfree_obj; 4693 goto kfree_obj;
@@ -4938,7 +4769,9 @@ out:
4938 4769
4939int 4770int
4940i915_gem_attach_phys_object(struct drm_device *dev, 4771i915_gem_attach_phys_object(struct drm_device *dev,
4941 struct drm_gem_object *obj, int id) 4772 struct drm_gem_object *obj,
4773 int id,
4774 int align)
4942{ 4775{
4943 drm_i915_private_t *dev_priv = dev->dev_private; 4776 drm_i915_private_t *dev_priv = dev->dev_private;
4944 struct drm_i915_gem_object *obj_priv; 4777 struct drm_i915_gem_object *obj_priv;
@@ -4957,11 +4790,10 @@ i915_gem_attach_phys_object(struct drm_device *dev,
4957 i915_gem_detach_phys_object(dev, obj); 4790 i915_gem_detach_phys_object(dev, obj);
4958 } 4791 }
4959 4792
4960
4961 /* create a new object */ 4793 /* create a new object */
4962 if (!dev_priv->mm.phys_objs[id - 1]) { 4794 if (!dev_priv->mm.phys_objs[id - 1]) {
4963 ret = i915_gem_init_phys_object(dev, id, 4795 ret = i915_gem_init_phys_object(dev, id,
4964 obj->size); 4796 obj->size, align);
4965 if (ret) { 4797 if (ret) {
4966 DRM_ERROR("failed to init phys object %d size: %zu\n", id, obj->size); 4798 DRM_ERROR("failed to init phys object %d size: %zu\n", id, obj->size);
4967 goto out; 4799 goto out;
diff --git a/drivers/gpu/drm/i915/i915_gem_evict.c b/drivers/gpu/drm/i915/i915_gem_evict.c
new file mode 100644
index 000000000000..72cae3cccad8
--- /dev/null
+++ b/drivers/gpu/drm/i915/i915_gem_evict.c
@@ -0,0 +1,271 @@
1/*
2 * Copyright © 2008-2010 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Eric Anholt <eric@anholt.net>
25 * Chris Wilson <chris@chris-wilson.co.uuk>
26 *
27 */
28
29#include "drmP.h"
30#include "drm.h"
31#include "i915_drv.h"
32#include "i915_drm.h"
33
34static struct drm_i915_gem_object *
35i915_gem_next_active_object(struct drm_device *dev,
36 struct list_head **render_iter,
37 struct list_head **bsd_iter)
38{
39 drm_i915_private_t *dev_priv = dev->dev_private;
40 struct drm_i915_gem_object *render_obj = NULL, *bsd_obj = NULL;
41
42 if (*render_iter != &dev_priv->render_ring.active_list)
43 render_obj = list_entry(*render_iter,
44 struct drm_i915_gem_object,
45 list);
46
47 if (HAS_BSD(dev)) {
48 if (*bsd_iter != &dev_priv->bsd_ring.active_list)
49 bsd_obj = list_entry(*bsd_iter,
50 struct drm_i915_gem_object,
51 list);
52
53 if (render_obj == NULL) {
54 *bsd_iter = (*bsd_iter)->next;
55 return bsd_obj;
56 }
57
58 if (bsd_obj == NULL) {
59 *render_iter = (*render_iter)->next;
60 return render_obj;
61 }
62
63 /* XXX can we handle seqno wrapping? */
64 if (render_obj->last_rendering_seqno < bsd_obj->last_rendering_seqno) {
65 *render_iter = (*render_iter)->next;
66 return render_obj;
67 } else {
68 *bsd_iter = (*bsd_iter)->next;
69 return bsd_obj;
70 }
71 } else {
72 *render_iter = (*render_iter)->next;
73 return render_obj;
74 }
75}
76
77static bool
78mark_free(struct drm_i915_gem_object *obj_priv,
79 struct list_head *unwind)
80{
81 list_add(&obj_priv->evict_list, unwind);
82 return drm_mm_scan_add_block(obj_priv->gtt_space);
83}
84
85#define i915_for_each_active_object(OBJ, R, B) \
86 *(R) = dev_priv->render_ring.active_list.next; \
87 *(B) = dev_priv->bsd_ring.active_list.next; \
88 while (((OBJ) = i915_gem_next_active_object(dev, (R), (B))) != NULL)
89
90int
91i915_gem_evict_something(struct drm_device *dev, int min_size, unsigned alignment)
92{
93 drm_i915_private_t *dev_priv = dev->dev_private;
94 struct list_head eviction_list, unwind_list;
95 struct drm_i915_gem_object *obj_priv, *tmp_obj_priv;
96 struct list_head *render_iter, *bsd_iter;
97 int ret = 0;
98
99 i915_gem_retire_requests(dev);
100
101 /* Re-check for free space after retiring requests */
102 if (drm_mm_search_free(&dev_priv->mm.gtt_space,
103 min_size, alignment, 0))
104 return 0;
105
106 /*
107 * The goal is to evict objects and amalgamate space in LRU order.
108 * The oldest idle objects reside on the inactive list, which is in
109 * retirement order. The next objects to retire are those on the (per
110 * ring) active list that do not have an outstanding flush. Once the
111 * hardware reports completion (the seqno is updated after the
112 * batchbuffer has been finished) the clean buffer objects would
113 * be retired to the inactive list. Any dirty objects would be added
114 * to the tail of the flushing list. So after processing the clean
115 * active objects we need to emit a MI_FLUSH to retire the flushing
116 * list, hence the retirement order of the flushing list is in
117 * advance of the dirty objects on the active lists.
118 *
119 * The retirement sequence is thus:
120 * 1. Inactive objects (already retired)
121 * 2. Clean active objects
122 * 3. Flushing list
123 * 4. Dirty active objects.
124 *
125 * On each list, the oldest objects lie at the HEAD with the freshest
126 * object on the TAIL.
127 */
128
129 INIT_LIST_HEAD(&unwind_list);
130 drm_mm_init_scan(&dev_priv->mm.gtt_space, min_size, alignment);
131
132 /* First see if there is a large enough contiguous idle region... */
133 list_for_each_entry(obj_priv, &dev_priv->mm.inactive_list, list) {
134 if (mark_free(obj_priv, &unwind_list))
135 goto found;
136 }
137
138 /* Now merge in the soon-to-be-expired objects... */
139 i915_for_each_active_object(obj_priv, &render_iter, &bsd_iter) {
140 /* Does the object require an outstanding flush? */
141 if (obj_priv->base.write_domain || obj_priv->pin_count)
142 continue;
143
144 if (mark_free(obj_priv, &unwind_list))
145 goto found;
146 }
147
148 /* Finally add anything with a pending flush (in order of retirement) */
149 list_for_each_entry(obj_priv, &dev_priv->mm.flushing_list, list) {
150 if (obj_priv->pin_count)
151 continue;
152
153 if (mark_free(obj_priv, &unwind_list))
154 goto found;
155 }
156 i915_for_each_active_object(obj_priv, &render_iter, &bsd_iter) {
157 if (! obj_priv->base.write_domain || obj_priv->pin_count)
158 continue;
159
160 if (mark_free(obj_priv, &unwind_list))
161 goto found;
162 }
163
164 /* Nothing found, clean up and bail out! */
165 list_for_each_entry(obj_priv, &unwind_list, evict_list) {
166 ret = drm_mm_scan_remove_block(obj_priv->gtt_space);
167 BUG_ON(ret);
168 }
169
170 /* We expect the caller to unpin, evict all and try again, or give up.
171 * So calling i915_gem_evict_everything() is unnecessary.
172 */
173 return -ENOSPC;
174
175found:
176 INIT_LIST_HEAD(&eviction_list);
177 list_for_each_entry_safe(obj_priv, tmp_obj_priv,
178 &unwind_list, evict_list) {
179 if (drm_mm_scan_remove_block(obj_priv->gtt_space)) {
180 /* drm_mm doesn't allow any other other operations while
181 * scanning, therefore store to be evicted objects on a
182 * temporary list. */
183 list_move(&obj_priv->evict_list, &eviction_list);
184 }
185 }
186
187 /* Unbinding will emit any required flushes */
188 list_for_each_entry_safe(obj_priv, tmp_obj_priv,
189 &eviction_list, evict_list) {
190#if WATCH_LRU
191 DRM_INFO("%s: evicting %p\n", __func__, obj);
192#endif
193 ret = i915_gem_object_unbind(&obj_priv->base);
194 if (ret)
195 return ret;
196 }
197
198 /* The just created free hole should be on the top of the free stack
199 * maintained by drm_mm, so this BUG_ON actually executes in O(1).
200 * Furthermore all accessed data has just recently been used, so it
201 * should be really fast, too. */
202 BUG_ON(!drm_mm_search_free(&dev_priv->mm.gtt_space, min_size,
203 alignment, 0));
204
205 return 0;
206}
207
208int
209i915_gem_evict_everything(struct drm_device *dev)
210{
211 drm_i915_private_t *dev_priv = dev->dev_private;
212 int ret;
213 bool lists_empty;
214
215 spin_lock(&dev_priv->mm.active_list_lock);
216 lists_empty = (list_empty(&dev_priv->mm.inactive_list) &&
217 list_empty(&dev_priv->mm.flushing_list) &&
218 list_empty(&dev_priv->render_ring.active_list) &&
219 (!HAS_BSD(dev)
220 || list_empty(&dev_priv->bsd_ring.active_list)));
221 spin_unlock(&dev_priv->mm.active_list_lock);
222
223 if (lists_empty)
224 return -ENOSPC;
225
226 /* Flush everything (on to the inactive lists) and evict */
227 ret = i915_gpu_idle(dev);
228 if (ret)
229 return ret;
230
231 BUG_ON(!list_empty(&dev_priv->mm.flushing_list));
232
233 ret = i915_gem_evict_inactive(dev);
234 if (ret)
235 return ret;
236
237 spin_lock(&dev_priv->mm.active_list_lock);
238 lists_empty = (list_empty(&dev_priv->mm.inactive_list) &&
239 list_empty(&dev_priv->mm.flushing_list) &&
240 list_empty(&dev_priv->render_ring.active_list) &&
241 (!HAS_BSD(dev)
242 || list_empty(&dev_priv->bsd_ring.active_list)));
243 spin_unlock(&dev_priv->mm.active_list_lock);
244 BUG_ON(!lists_empty);
245
246 return 0;
247}
248
249/** Unbinds all inactive objects. */
250int
251i915_gem_evict_inactive(struct drm_device *dev)
252{
253 drm_i915_private_t *dev_priv = dev->dev_private;
254
255 while (!list_empty(&dev_priv->mm.inactive_list)) {
256 struct drm_gem_object *obj;
257 int ret;
258
259 obj = &list_first_entry(&dev_priv->mm.inactive_list,
260 struct drm_i915_gem_object,
261 list)->base;
262
263 ret = i915_gem_object_unbind(obj);
264 if (ret != 0) {
265 DRM_ERROR("Error unbinding object: %d\n", ret);
266 return ret;
267 }
268 }
269
270 return 0;
271}
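
The new eviction code above works in two passes: it walks the inactive, active and flushing lists in expected-retirement order, provisionally marking objects via mark_free() until drm_mm reports that the marked blocks would amalgamate into a large-enough hole, then rolls back every candidate that is not part of that hole and unbinds only the rest. A stand-alone sketch of that mark/roll-back control flow, using a plain array instead of the drm_mm scan API (all names here are illustrative, not driver code):

	#include <stdbool.h>
	#include <stdio.h>

	struct obj { int size; bool marked; };

	/* Visit objects in LRU order, marking candidates until the combined
	 * size covers the request; return false if even marking everything
	 * is not enough (the caller would give up with -ENOSPC).
	 */
	static bool scan_for_space(struct obj *objs, int n, int needed)
	{
		int found = 0;

		for (int i = 0; i < n && found < needed; i++) {
			objs[i].marked = true;	/* provisional candidate */
			found += objs[i].size;
		}
		return found >= needed;
	}

	int main(void)
	{
		struct obj objs[] = { { 4, false }, { 8, false },
				      { 16, false }, { 32, false } };

		if (!scan_for_space(objs, 4, 10))
			return 1;		/* nothing suitable: bail out */

		/* Second pass: evict marked objects, leave the rest bound. */
		for (int i = 0; i < 4; i++)
			printf("%s object %d (size %d)\n",
			       objs[i].marked ? "evict" : "keep ",
			       i, objs[i].size);
		return 0;
	}

In the real code the decision of which marked candidates actually form the hole is made by drm_mm_scan_remove_block(), which is why every marked object is taken out of the scan (the unwind list) before anything is unbound.
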
diff --git a/drivers/gpu/drm/i915/i915_irq.c b/drivers/gpu/drm/i915/i915_irq.c
index 85785a8844ed..16861b800fee 100644
--- a/drivers/gpu/drm/i915/i915_irq.c
+++ b/drivers/gpu/drm/i915/i915_irq.c
@@ -425,9 +425,11 @@ static struct drm_i915_error_object *
425i915_error_object_create(struct drm_device *dev, 425i915_error_object_create(struct drm_device *dev,
426 struct drm_gem_object *src) 426 struct drm_gem_object *src)
427{ 427{
428 drm_i915_private_t *dev_priv = dev->dev_private;
428 struct drm_i915_error_object *dst; 429 struct drm_i915_error_object *dst;
429 struct drm_i915_gem_object *src_priv; 430 struct drm_i915_gem_object *src_priv;
430 int page, page_count; 431 int page, page_count;
432 u32 reloc_offset;
431 433
432 if (src == NULL) 434 if (src == NULL)
433 return NULL; 435 return NULL;
@@ -442,18 +444,27 @@ i915_error_object_create(struct drm_device *dev,
442 if (dst == NULL) 444 if (dst == NULL)
443 return NULL; 445 return NULL;
444 446
447 reloc_offset = src_priv->gtt_offset;
445 for (page = 0; page < page_count; page++) { 448 for (page = 0; page < page_count; page++) {
446 void *s, *d = kmalloc(PAGE_SIZE, GFP_ATOMIC);
447 unsigned long flags; 449 unsigned long flags;
450 void __iomem *s;
451 void *d;
448 452
453 d = kmalloc(PAGE_SIZE, GFP_ATOMIC);
449 if (d == NULL) 454 if (d == NULL)
450 goto unwind; 455 goto unwind;
456
451 local_irq_save(flags); 457 local_irq_save(flags);
452 s = kmap_atomic(src_priv->pages[page], KM_IRQ0); 458 s = io_mapping_map_atomic_wc(dev_priv->mm.gtt_mapping,
453 memcpy(d, s, PAGE_SIZE); 459 reloc_offset,
454 kunmap_atomic(s, KM_IRQ0); 460 KM_IRQ0);
461 memcpy_fromio(d, s, PAGE_SIZE);
462 io_mapping_unmap_atomic(s, KM_IRQ0);
455 local_irq_restore(flags); 463 local_irq_restore(flags);
464
456 dst->pages[page] = d; 465 dst->pages[page] = d;
466
467 reloc_offset += PAGE_SIZE;
457 } 468 }
458 dst->page_count = page_count; 469 dst->page_count = page_count;
459 dst->gtt_offset = src_priv->gtt_offset; 470 dst->gtt_offset = src_priv->gtt_offset;
@@ -489,6 +500,7 @@ i915_error_state_free(struct drm_device *dev,
489 i915_error_object_free(error->batchbuffer[1]); 500 i915_error_object_free(error->batchbuffer[1]);
490 i915_error_object_free(error->ringbuffer); 501 i915_error_object_free(error->ringbuffer);
491 kfree(error->active_bo); 502 kfree(error->active_bo);
503 kfree(error->overlay);
492 kfree(error); 504 kfree(error);
493} 505}
494 506
@@ -612,18 +624,57 @@ static void i915_capture_error_state(struct drm_device *dev)
612 624
613 if (batchbuffer[1] == NULL && 625 if (batchbuffer[1] == NULL &&
614 error->acthd >= obj_priv->gtt_offset && 626 error->acthd >= obj_priv->gtt_offset &&
615 error->acthd < obj_priv->gtt_offset + obj->size && 627 error->acthd < obj_priv->gtt_offset + obj->size)
616 batchbuffer[0] != obj)
617 batchbuffer[1] = obj; 628 batchbuffer[1] = obj;
618 629
619 count++; 630 count++;
620 } 631 }
632 /* Scan the other lists for completeness for those bizarre errors. */
633 if (batchbuffer[0] == NULL || batchbuffer[1] == NULL) {
634 list_for_each_entry(obj_priv, &dev_priv->mm.flushing_list, list) {
635 struct drm_gem_object *obj = &obj_priv->base;
636
637 if (batchbuffer[0] == NULL &&
638 bbaddr >= obj_priv->gtt_offset &&
639 bbaddr < obj_priv->gtt_offset + obj->size)
640 batchbuffer[0] = obj;
641
642 if (batchbuffer[1] == NULL &&
643 error->acthd >= obj_priv->gtt_offset &&
644 error->acthd < obj_priv->gtt_offset + obj->size)
645 batchbuffer[1] = obj;
646
647 if (batchbuffer[0] && batchbuffer[1])
648 break;
649 }
650 }
651 if (batchbuffer[0] == NULL || batchbuffer[1] == NULL) {
652 list_for_each_entry(obj_priv, &dev_priv->mm.inactive_list, list) {
653 struct drm_gem_object *obj = &obj_priv->base;
654
655 if (batchbuffer[0] == NULL &&
656 bbaddr >= obj_priv->gtt_offset &&
657 bbaddr < obj_priv->gtt_offset + obj->size)
658 batchbuffer[0] = obj;
659
660 if (batchbuffer[1] == NULL &&
661 error->acthd >= obj_priv->gtt_offset &&
662 error->acthd < obj_priv->gtt_offset + obj->size)
663 batchbuffer[1] = obj;
664
665 if (batchbuffer[0] && batchbuffer[1])
666 break;
667 }
668 }
621 669
622 /* We need to copy these to an anonymous buffer as the simplest 670 /* We need to copy these to an anonymous buffer as the simplest
623 * method to avoid being overwritten by userspace. 671 * method to avoid being overwritten by userspace.
624 */ 672 */
625 error->batchbuffer[0] = i915_error_object_create(dev, batchbuffer[0]); 673 error->batchbuffer[0] = i915_error_object_create(dev, batchbuffer[0]);
626 error->batchbuffer[1] = i915_error_object_create(dev, batchbuffer[1]); 674 if (batchbuffer[1] != batchbuffer[0])
675 error->batchbuffer[1] = i915_error_object_create(dev, batchbuffer[1]);
676 else
677 error->batchbuffer[1] = NULL;
627 678
628 /* Record the ringbuffer */ 679 /* Record the ringbuffer */
629 error->ringbuffer = i915_error_object_create(dev, 680 error->ringbuffer = i915_error_object_create(dev,
@@ -667,6 +718,8 @@ static void i915_capture_error_state(struct drm_device *dev)
667 718
668 do_gettimeofday(&error->time); 719 do_gettimeofday(&error->time);
669 720
721 error->overlay = intel_overlay_capture_error_state(dev);
722
670 spin_lock_irqsave(&dev_priv->error_lock, flags); 723 spin_lock_irqsave(&dev_priv->error_lock, flags);
671 if (dev_priv->first_error == NULL) { 724 if (dev_priv->first_error == NULL) {
672 dev_priv->first_error = error; 725 dev_priv->first_error = error;
@@ -1251,6 +1304,16 @@ void i915_hangcheck_elapsed(unsigned long data)
1251 &dev_priv->render_ring), 1304 &dev_priv->render_ring),
1252 i915_get_tail_request(dev)->seqno)) { 1305 i915_get_tail_request(dev)->seqno)) {
1253 dev_priv->hangcheck_count = 0; 1306 dev_priv->hangcheck_count = 0;
1307
1308 /* Issue a wake-up to catch stuck h/w. */
1309 if (dev_priv->render_ring.waiting_gem_seqno |
1310 dev_priv->bsd_ring.waiting_gem_seqno) {
1311 DRM_ERROR("Hangcheck timer elapsed... GPU idle, missed IRQ.\n");
1312 if (dev_priv->render_ring.waiting_gem_seqno)
1313 DRM_WAKEUP(&dev_priv->render_ring.irq_queue);
1314 if (dev_priv->bsd_ring.waiting_gem_seqno)
1315 DRM_WAKEUP(&dev_priv->bsd_ring.irq_queue);
1316 }
1254 return; 1317 return;
1255 } 1318 }
1256 1319
@@ -1318,12 +1381,17 @@ static int ironlake_irq_postinstall(struct drm_device *dev)
1318 I915_WRITE(DEIER, dev_priv->de_irq_enable_reg); 1381 I915_WRITE(DEIER, dev_priv->de_irq_enable_reg);
1319 (void) I915_READ(DEIER); 1382 (void) I915_READ(DEIER);
1320 1383
1321 /* user interrupt should be enabled, but masked initial */ 1384 /* Gen6 only needs render pipe_control now */
1385 if (IS_GEN6(dev))
1386 render_mask = GT_PIPE_NOTIFY;
1387
1322 dev_priv->gt_irq_mask_reg = ~render_mask; 1388 dev_priv->gt_irq_mask_reg = ~render_mask;
1323 dev_priv->gt_irq_enable_reg = render_mask; 1389 dev_priv->gt_irq_enable_reg = render_mask;
1324 1390
1325 I915_WRITE(GTIIR, I915_READ(GTIIR)); 1391 I915_WRITE(GTIIR, I915_READ(GTIIR));
1326 I915_WRITE(GTIMR, dev_priv->gt_irq_mask_reg); 1392 I915_WRITE(GTIMR, dev_priv->gt_irq_mask_reg);
1393 if (IS_GEN6(dev))
1394 I915_WRITE(GEN6_RENDER_IMR, ~GEN6_RENDER_PIPE_CONTROL_NOTIFY_INTERRUPT);
1327 I915_WRITE(GTIER, dev_priv->gt_irq_enable_reg); 1395 I915_WRITE(GTIER, dev_priv->gt_irq_enable_reg);
1328 (void) I915_READ(GTIER); 1396 (void) I915_READ(GTIER);
1329 1397
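
Note on the i915_irq.c changes above: i915_error_object_create() no longer kmaps the object's backing pages and instead snapshots them through the GTT aperture, one page at a time, using io_mapping_map_atomic_wc()/memcpy_fromio() and a running reloc_offset. Below is a rough user-space analogue of that loop; gtt_map_page()/gtt_unmap_page() are invented stand-ins for the io-mapping calls and plain memcpy stands in for memcpy_fromio().

/* Illustrative sketch only -- not part of the patch. */
#include <stdlib.h>
#include <string.h>

#define PAGE_SIZE 4096

static void *gtt_map_page(unsigned char *aperture, size_t offset)
{
	return aperture + offset;	/* driver: io_mapping_map_atomic_wc() */
}

static void gtt_unmap_page(void *p)
{
	(void)p;			/* driver: io_mapping_unmap_atomic() */
}

/* Snapshot page_count pages starting at gtt_offset into freshly allocated
 * pages; on allocation failure everything is freed and NULL is returned,
 * mirroring the "unwind" path in the hunk above. */
static void **snapshot_object(unsigned char *aperture, size_t gtt_offset,
			      int page_count)
{
	void **pages = calloc(page_count, sizeof(*pages));
	size_t reloc_offset = gtt_offset;

	if (!pages)
		return NULL;

	for (int page = 0; page < page_count; page++) {
		void *d = malloc(PAGE_SIZE);
		void *s;

		if (!d)
			goto unwind;

		s = gtt_map_page(aperture, reloc_offset);
		memcpy(d, s, PAGE_SIZE);	/* driver: memcpy_fromio() */
		gtt_unmap_page(s);

		pages[page] = d;
		reloc_offset += PAGE_SIZE;
	}
	return pages;

unwind:
	for (int page = 0; page < page_count; page++)
		free(pages[page]);
	free(pages);
	return NULL;
}

int main(void)
{
	unsigned char *aperture = calloc(16, PAGE_SIZE);
	void **snap = snapshot_object(aperture, 2 * PAGE_SIZE, 4);

	if (snap) {
		for (int i = 0; i < 4; i++)
			free(snap[i]);
		free(snap);
	}
	free(aperture);
	return 0;
}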
diff --git a/drivers/gpu/drm/i915/i915_opregion.c b/drivers/gpu/drm/i915/i915_opregion.c
index d1bf92b99788..ea5d3fea4b61 100644
--- a/drivers/gpu/drm/i915/i915_opregion.c
+++ b/drivers/gpu/drm/i915/i915_opregion.c
@@ -114,10 +114,6 @@ struct opregion_asle {
114#define ASLE_REQ_MSK 0xf 114#define ASLE_REQ_MSK 0xf
115 115
116/* response bits of ASLE irq request */ 116/* response bits of ASLE irq request */
117#define ASLE_ALS_ILLUM_FAIL (2<<10)
118#define ASLE_BACKLIGHT_FAIL (2<<12)
119#define ASLE_PFIT_FAIL (2<<14)
120#define ASLE_PWM_FREQ_FAIL (2<<16)
121#define ASLE_ALS_ILLUM_FAILED (1<<10) 117#define ASLE_ALS_ILLUM_FAILED (1<<10)
122#define ASLE_BACKLIGHT_FAILED (1<<12) 118#define ASLE_BACKLIGHT_FAILED (1<<12)
123#define ASLE_PFIT_FAILED (1<<14) 119#define ASLE_PFIT_FAILED (1<<14)
@@ -155,11 +151,11 @@ static u32 asle_set_backlight(struct drm_device *dev, u32 bclp)
155 u32 max_backlight, level, shift; 151 u32 max_backlight, level, shift;
156 152
157 if (!(bclp & ASLE_BCLP_VALID)) 153 if (!(bclp & ASLE_BCLP_VALID))
158 return ASLE_BACKLIGHT_FAIL; 154 return ASLE_BACKLIGHT_FAILED;
159 155
160 bclp &= ASLE_BCLP_MSK; 156 bclp &= ASLE_BCLP_MSK;
161 if (bclp < 0 || bclp > 255) 157 if (bclp < 0 || bclp > 255)
162 return ASLE_BACKLIGHT_FAIL; 158 return ASLE_BACKLIGHT_FAILED;
163 159
164 blc_pwm_ctl = I915_READ(BLC_PWM_CTL); 160 blc_pwm_ctl = I915_READ(BLC_PWM_CTL);
165 blc_pwm_ctl2 = I915_READ(BLC_PWM_CTL2); 161 blc_pwm_ctl2 = I915_READ(BLC_PWM_CTL2);
@@ -211,7 +207,7 @@ static u32 asle_set_pfit(struct drm_device *dev, u32 pfit)
211 /* Panel fitting is currently controlled by the X code, so this is a 207 /* Panel fitting is currently controlled by the X code, so this is a
212 noop until modesetting support works fully */ 208 noop until modesetting support works fully */
213 if (!(pfit & ASLE_PFIT_VALID)) 209 if (!(pfit & ASLE_PFIT_VALID))
214 return ASLE_PFIT_FAIL; 210 return ASLE_PFIT_FAILED;
215 return 0; 211 return 0;
216} 212}
217 213
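
Note on the i915_opregion.c hunk above: the duplicate (2<<n) ASLE_*_FAIL definitions are dropped and the return values in asle_set_backlight()/asle_set_pfit() switch to the (1<<n) ASLE_*_FAILED variants. The two encodings name different bits, since 2<<n equals 1<<(n+1), so only one set can match what the firmware expects; the patch standardises on the (1<<n) definitions. A quick check of the arithmetic (nothing here is driver code):

#include <stdio.h>

int main(void)
{
	printf("old: 2<<10 = 0x%x   kept: 1<<10 = 0x%x\n", 2 << 10, 1 << 10); /* 0x800 vs 0x400 */
	printf("old: 2<<12 = 0x%x   kept: 1<<12 = 0x%x\n", 2 << 12, 1 << 12); /* 0x2000 vs 0x1000 */
	printf("old: 2<<14 = 0x%x   kept: 1<<14 = 0x%x\n", 2 << 14, 1 << 14); /* 0x8000 vs 0x4000 */
	return 0;
}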
diff --git a/drivers/gpu/drm/i915/i915_reg.h b/drivers/gpu/drm/i915/i915_reg.h
index 281db6e5403a..67e3ec1a6af9 100644
--- a/drivers/gpu/drm/i915/i915_reg.h
+++ b/drivers/gpu/drm/i915/i915_reg.h
@@ -170,6 +170,7 @@
170#define MI_NO_WRITE_FLUSH (1 << 2) 170#define MI_NO_WRITE_FLUSH (1 << 2)
171#define MI_SCENE_COUNT (1 << 3) /* just increment scene count */ 171#define MI_SCENE_COUNT (1 << 3) /* just increment scene count */
172#define MI_END_SCENE (1 << 4) /* flush binner and incr scene count */ 172#define MI_END_SCENE (1 << 4) /* flush binner and incr scene count */
173#define MI_INVALIDATE_ISP (1 << 5) /* invalidate indirect state pointers */
173#define MI_BATCH_BUFFER_END MI_INSTR(0x0a, 0) 174#define MI_BATCH_BUFFER_END MI_INSTR(0x0a, 0)
174#define MI_REPORT_HEAD MI_INSTR(0x07, 0) 175#define MI_REPORT_HEAD MI_INSTR(0x07, 0)
175#define MI_OVERLAY_FLIP MI_INSTR(0x11,0) 176#define MI_OVERLAY_FLIP MI_INSTR(0x11,0)
@@ -180,6 +181,12 @@
180#define MI_DISPLAY_FLIP MI_INSTR(0x14, 2) 181#define MI_DISPLAY_FLIP MI_INSTR(0x14, 2)
181#define MI_DISPLAY_FLIP_I915 MI_INSTR(0x14, 1) 182#define MI_DISPLAY_FLIP_I915 MI_INSTR(0x14, 1)
182#define MI_DISPLAY_FLIP_PLANE(n) ((n) << 20) 183#define MI_DISPLAY_FLIP_PLANE(n) ((n) << 20)
184#define MI_SET_CONTEXT MI_INSTR(0x18, 0)
185#define MI_MM_SPACE_GTT (1<<8)
186#define MI_MM_SPACE_PHYSICAL (0<<8)
187#define MI_SAVE_EXT_STATE_EN (1<<3)
188#define MI_RESTORE_EXT_STATE_EN (1<<2)
189#define MI_RESTORE_INHIBIT (1<<0)
183#define MI_STORE_DWORD_IMM MI_INSTR(0x20, 1) 190#define MI_STORE_DWORD_IMM MI_INSTR(0x20, 1)
184#define MI_MEM_VIRTUAL (1 << 22) /* 965+ only */ 191#define MI_MEM_VIRTUAL (1 << 22) /* 965+ only */
185#define MI_STORE_DWORD_INDEX MI_INSTR(0x21, 1) 192#define MI_STORE_DWORD_INDEX MI_INSTR(0x21, 1)
@@ -1100,6 +1107,11 @@
1100#define PEG_BAND_GAP_DATA 0x14d68 1107#define PEG_BAND_GAP_DATA 0x14d68
1101 1108
1102/* 1109/*
1110 * Logical Context regs
1111 */
1112#define CCID 0x2180
1113#define CCID_EN (1<<0)
1114/*
1103 * Overlay regs 1115 * Overlay regs
1104 */ 1116 */
1105 1117
@@ -2069,6 +2081,7 @@
2069#define PIPE_DITHER_TYPE_ST01 (1 << 2) 2081#define PIPE_DITHER_TYPE_ST01 (1 << 2)
2070/* Pipe A */ 2082/* Pipe A */
2071#define PIPEADSL 0x70000 2083#define PIPEADSL 0x70000
2084#define DSL_LINEMASK 0x00000fff
2072#define PIPEACONF 0x70008 2085#define PIPEACONF 0x70008
2073#define PIPEACONF_ENABLE (1<<31) 2086#define PIPEACONF_ENABLE (1<<31)
2074#define PIPEACONF_DISABLE 0 2087#define PIPEACONF_DISABLE 0
@@ -2928,6 +2941,7 @@
2928#define TRANS_DP_VSYNC_ACTIVE_LOW 0 2941#define TRANS_DP_VSYNC_ACTIVE_LOW 0
2929#define TRANS_DP_HSYNC_ACTIVE_HIGH (1<<3) 2942#define TRANS_DP_HSYNC_ACTIVE_HIGH (1<<3)
2930#define TRANS_DP_HSYNC_ACTIVE_LOW 0 2943#define TRANS_DP_HSYNC_ACTIVE_LOW 0
2944#define TRANS_DP_SYNC_MASK (3<<3)
2931 2945
2932/* SNB eDP training params */ 2946/* SNB eDP training params */
2933/* SNB A-stepping */ 2947/* SNB A-stepping */
diff --git a/drivers/gpu/drm/i915/i915_suspend.c b/drivers/gpu/drm/i915/i915_suspend.c
index 6e2025274db5..2c6b98f2440e 100644
--- a/drivers/gpu/drm/i915/i915_suspend.c
+++ b/drivers/gpu/drm/i915/i915_suspend.c
@@ -34,7 +34,7 @@ static bool i915_pipe_enabled(struct drm_device *dev, enum pipe pipe)
34 struct drm_i915_private *dev_priv = dev->dev_private; 34 struct drm_i915_private *dev_priv = dev->dev_private;
35 u32 dpll_reg; 35 u32 dpll_reg;
36 36
37 if (IS_IRONLAKE(dev)) { 37 if (HAS_PCH_SPLIT(dev)) {
38 dpll_reg = (pipe == PIPE_A) ? PCH_DPLL_A: PCH_DPLL_B; 38 dpll_reg = (pipe == PIPE_A) ? PCH_DPLL_A: PCH_DPLL_B;
39 } else { 39 } else {
40 dpll_reg = (pipe == PIPE_A) ? DPLL_A: DPLL_B; 40 dpll_reg = (pipe == PIPE_A) ? DPLL_A: DPLL_B;
@@ -53,7 +53,7 @@ static void i915_save_palette(struct drm_device *dev, enum pipe pipe)
53 if (!i915_pipe_enabled(dev, pipe)) 53 if (!i915_pipe_enabled(dev, pipe))
54 return; 54 return;
55 55
56 if (IS_IRONLAKE(dev)) 56 if (HAS_PCH_SPLIT(dev))
57 reg = (pipe == PIPE_A) ? LGC_PALETTE_A : LGC_PALETTE_B; 57 reg = (pipe == PIPE_A) ? LGC_PALETTE_A : LGC_PALETTE_B;
58 58
59 if (pipe == PIPE_A) 59 if (pipe == PIPE_A)
@@ -75,7 +75,7 @@ static void i915_restore_palette(struct drm_device *dev, enum pipe pipe)
75 if (!i915_pipe_enabled(dev, pipe)) 75 if (!i915_pipe_enabled(dev, pipe))
76 return; 76 return;
77 77
78 if (IS_IRONLAKE(dev)) 78 if (HAS_PCH_SPLIT(dev))
79 reg = (pipe == PIPE_A) ? LGC_PALETTE_A : LGC_PALETTE_B; 79 reg = (pipe == PIPE_A) ? LGC_PALETTE_A : LGC_PALETTE_B;
80 80
81 if (pipe == PIPE_A) 81 if (pipe == PIPE_A)
@@ -239,7 +239,7 @@ static void i915_save_modeset_reg(struct drm_device *dev)
239 if (drm_core_check_feature(dev, DRIVER_MODESET)) 239 if (drm_core_check_feature(dev, DRIVER_MODESET))
240 return; 240 return;
241 241
242 if (IS_IRONLAKE(dev)) { 242 if (HAS_PCH_SPLIT(dev)) {
243 dev_priv->savePCH_DREF_CONTROL = I915_READ(PCH_DREF_CONTROL); 243 dev_priv->savePCH_DREF_CONTROL = I915_READ(PCH_DREF_CONTROL);
244 dev_priv->saveDISP_ARB_CTL = I915_READ(DISP_ARB_CTL); 244 dev_priv->saveDISP_ARB_CTL = I915_READ(DISP_ARB_CTL);
245 } 245 }
@@ -247,7 +247,7 @@ static void i915_save_modeset_reg(struct drm_device *dev)
247 /* Pipe & plane A info */ 247 /* Pipe & plane A info */
248 dev_priv->savePIPEACONF = I915_READ(PIPEACONF); 248 dev_priv->savePIPEACONF = I915_READ(PIPEACONF);
249 dev_priv->savePIPEASRC = I915_READ(PIPEASRC); 249 dev_priv->savePIPEASRC = I915_READ(PIPEASRC);
250 if (IS_IRONLAKE(dev)) { 250 if (HAS_PCH_SPLIT(dev)) {
251 dev_priv->saveFPA0 = I915_READ(PCH_FPA0); 251 dev_priv->saveFPA0 = I915_READ(PCH_FPA0);
252 dev_priv->saveFPA1 = I915_READ(PCH_FPA1); 252 dev_priv->saveFPA1 = I915_READ(PCH_FPA1);
253 dev_priv->saveDPLL_A = I915_READ(PCH_DPLL_A); 253 dev_priv->saveDPLL_A = I915_READ(PCH_DPLL_A);
@@ -256,7 +256,7 @@ static void i915_save_modeset_reg(struct drm_device *dev)
256 dev_priv->saveFPA1 = I915_READ(FPA1); 256 dev_priv->saveFPA1 = I915_READ(FPA1);
257 dev_priv->saveDPLL_A = I915_READ(DPLL_A); 257 dev_priv->saveDPLL_A = I915_READ(DPLL_A);
258 } 258 }
259 if (IS_I965G(dev) && !IS_IRONLAKE(dev)) 259 if (IS_I965G(dev) && !HAS_PCH_SPLIT(dev))
260 dev_priv->saveDPLL_A_MD = I915_READ(DPLL_A_MD); 260 dev_priv->saveDPLL_A_MD = I915_READ(DPLL_A_MD);
261 dev_priv->saveHTOTAL_A = I915_READ(HTOTAL_A); 261 dev_priv->saveHTOTAL_A = I915_READ(HTOTAL_A);
262 dev_priv->saveHBLANK_A = I915_READ(HBLANK_A); 262 dev_priv->saveHBLANK_A = I915_READ(HBLANK_A);
@@ -264,10 +264,10 @@ static void i915_save_modeset_reg(struct drm_device *dev)
264 dev_priv->saveVTOTAL_A = I915_READ(VTOTAL_A); 264 dev_priv->saveVTOTAL_A = I915_READ(VTOTAL_A);
265 dev_priv->saveVBLANK_A = I915_READ(VBLANK_A); 265 dev_priv->saveVBLANK_A = I915_READ(VBLANK_A);
266 dev_priv->saveVSYNC_A = I915_READ(VSYNC_A); 266 dev_priv->saveVSYNC_A = I915_READ(VSYNC_A);
267 if (!IS_IRONLAKE(dev)) 267 if (!HAS_PCH_SPLIT(dev))
268 dev_priv->saveBCLRPAT_A = I915_READ(BCLRPAT_A); 268 dev_priv->saveBCLRPAT_A = I915_READ(BCLRPAT_A);
269 269
270 if (IS_IRONLAKE(dev)) { 270 if (HAS_PCH_SPLIT(dev)) {
271 dev_priv->savePIPEA_DATA_M1 = I915_READ(PIPEA_DATA_M1); 271 dev_priv->savePIPEA_DATA_M1 = I915_READ(PIPEA_DATA_M1);
272 dev_priv->savePIPEA_DATA_N1 = I915_READ(PIPEA_DATA_N1); 272 dev_priv->savePIPEA_DATA_N1 = I915_READ(PIPEA_DATA_N1);
273 dev_priv->savePIPEA_LINK_M1 = I915_READ(PIPEA_LINK_M1); 273 dev_priv->savePIPEA_LINK_M1 = I915_READ(PIPEA_LINK_M1);
@@ -304,7 +304,7 @@ static void i915_save_modeset_reg(struct drm_device *dev)
304 /* Pipe & plane B info */ 304 /* Pipe & plane B info */
305 dev_priv->savePIPEBCONF = I915_READ(PIPEBCONF); 305 dev_priv->savePIPEBCONF = I915_READ(PIPEBCONF);
306 dev_priv->savePIPEBSRC = I915_READ(PIPEBSRC); 306 dev_priv->savePIPEBSRC = I915_READ(PIPEBSRC);
307 if (IS_IRONLAKE(dev)) { 307 if (HAS_PCH_SPLIT(dev)) {
308 dev_priv->saveFPB0 = I915_READ(PCH_FPB0); 308 dev_priv->saveFPB0 = I915_READ(PCH_FPB0);
309 dev_priv->saveFPB1 = I915_READ(PCH_FPB1); 309 dev_priv->saveFPB1 = I915_READ(PCH_FPB1);
310 dev_priv->saveDPLL_B = I915_READ(PCH_DPLL_B); 310 dev_priv->saveDPLL_B = I915_READ(PCH_DPLL_B);
@@ -313,7 +313,7 @@ static void i915_save_modeset_reg(struct drm_device *dev)
313 dev_priv->saveFPB1 = I915_READ(FPB1); 313 dev_priv->saveFPB1 = I915_READ(FPB1);
314 dev_priv->saveDPLL_B = I915_READ(DPLL_B); 314 dev_priv->saveDPLL_B = I915_READ(DPLL_B);
315 } 315 }
316 if (IS_I965G(dev) && !IS_IRONLAKE(dev)) 316 if (IS_I965G(dev) && !HAS_PCH_SPLIT(dev))
317 dev_priv->saveDPLL_B_MD = I915_READ(DPLL_B_MD); 317 dev_priv->saveDPLL_B_MD = I915_READ(DPLL_B_MD);
318 dev_priv->saveHTOTAL_B = I915_READ(HTOTAL_B); 318 dev_priv->saveHTOTAL_B = I915_READ(HTOTAL_B);
319 dev_priv->saveHBLANK_B = I915_READ(HBLANK_B); 319 dev_priv->saveHBLANK_B = I915_READ(HBLANK_B);
@@ -321,10 +321,10 @@ static void i915_save_modeset_reg(struct drm_device *dev)
321 dev_priv->saveVTOTAL_B = I915_READ(VTOTAL_B); 321 dev_priv->saveVTOTAL_B = I915_READ(VTOTAL_B);
322 dev_priv->saveVBLANK_B = I915_READ(VBLANK_B); 322 dev_priv->saveVBLANK_B = I915_READ(VBLANK_B);
323 dev_priv->saveVSYNC_B = I915_READ(VSYNC_B); 323 dev_priv->saveVSYNC_B = I915_READ(VSYNC_B);
324 if (!IS_IRONLAKE(dev)) 324 if (!HAS_PCH_SPLIT(dev))
325 dev_priv->saveBCLRPAT_B = I915_READ(BCLRPAT_B); 325 dev_priv->saveBCLRPAT_B = I915_READ(BCLRPAT_B);
326 326
327 if (IS_IRONLAKE(dev)) { 327 if (HAS_PCH_SPLIT(dev)) {
328 dev_priv->savePIPEB_DATA_M1 = I915_READ(PIPEB_DATA_M1); 328 dev_priv->savePIPEB_DATA_M1 = I915_READ(PIPEB_DATA_M1);
329 dev_priv->savePIPEB_DATA_N1 = I915_READ(PIPEB_DATA_N1); 329 dev_priv->savePIPEB_DATA_N1 = I915_READ(PIPEB_DATA_N1);
330 dev_priv->savePIPEB_LINK_M1 = I915_READ(PIPEB_LINK_M1); 330 dev_priv->savePIPEB_LINK_M1 = I915_READ(PIPEB_LINK_M1);
@@ -369,7 +369,7 @@ static void i915_restore_modeset_reg(struct drm_device *dev)
369 if (drm_core_check_feature(dev, DRIVER_MODESET)) 369 if (drm_core_check_feature(dev, DRIVER_MODESET))
370 return; 370 return;
371 371
372 if (IS_IRONLAKE(dev)) { 372 if (HAS_PCH_SPLIT(dev)) {
373 dpll_a_reg = PCH_DPLL_A; 373 dpll_a_reg = PCH_DPLL_A;
374 dpll_b_reg = PCH_DPLL_B; 374 dpll_b_reg = PCH_DPLL_B;
375 fpa0_reg = PCH_FPA0; 375 fpa0_reg = PCH_FPA0;
@@ -385,7 +385,7 @@ static void i915_restore_modeset_reg(struct drm_device *dev)
385 fpb1_reg = FPB1; 385 fpb1_reg = FPB1;
386 } 386 }
387 387
388 if (IS_IRONLAKE(dev)) { 388 if (HAS_PCH_SPLIT(dev)) {
389 I915_WRITE(PCH_DREF_CONTROL, dev_priv->savePCH_DREF_CONTROL); 389 I915_WRITE(PCH_DREF_CONTROL, dev_priv->savePCH_DREF_CONTROL);
390 I915_WRITE(DISP_ARB_CTL, dev_priv->saveDISP_ARB_CTL); 390 I915_WRITE(DISP_ARB_CTL, dev_priv->saveDISP_ARB_CTL);
391 } 391 }
@@ -395,16 +395,20 @@ static void i915_restore_modeset_reg(struct drm_device *dev)
395 if (dev_priv->saveDPLL_A & DPLL_VCO_ENABLE) { 395 if (dev_priv->saveDPLL_A & DPLL_VCO_ENABLE) {
396 I915_WRITE(dpll_a_reg, dev_priv->saveDPLL_A & 396 I915_WRITE(dpll_a_reg, dev_priv->saveDPLL_A &
397 ~DPLL_VCO_ENABLE); 397 ~DPLL_VCO_ENABLE);
398 DRM_UDELAY(150); 398 POSTING_READ(dpll_a_reg);
399 udelay(150);
399 } 400 }
400 I915_WRITE(fpa0_reg, dev_priv->saveFPA0); 401 I915_WRITE(fpa0_reg, dev_priv->saveFPA0);
401 I915_WRITE(fpa1_reg, dev_priv->saveFPA1); 402 I915_WRITE(fpa1_reg, dev_priv->saveFPA1);
402 /* Actually enable it */ 403 /* Actually enable it */
403 I915_WRITE(dpll_a_reg, dev_priv->saveDPLL_A); 404 I915_WRITE(dpll_a_reg, dev_priv->saveDPLL_A);
404 DRM_UDELAY(150); 405 POSTING_READ(dpll_a_reg);
405 if (IS_I965G(dev) && !IS_IRONLAKE(dev)) 406 udelay(150);
407 if (IS_I965G(dev) && !HAS_PCH_SPLIT(dev)) {
406 I915_WRITE(DPLL_A_MD, dev_priv->saveDPLL_A_MD); 408 I915_WRITE(DPLL_A_MD, dev_priv->saveDPLL_A_MD);
407 DRM_UDELAY(150); 409 POSTING_READ(DPLL_A_MD);
410 }
411 udelay(150);
408 412
409 /* Restore mode */ 413 /* Restore mode */
410 I915_WRITE(HTOTAL_A, dev_priv->saveHTOTAL_A); 414 I915_WRITE(HTOTAL_A, dev_priv->saveHTOTAL_A);
@@ -413,10 +417,10 @@ static void i915_restore_modeset_reg(struct drm_device *dev)
413 I915_WRITE(VTOTAL_A, dev_priv->saveVTOTAL_A); 417 I915_WRITE(VTOTAL_A, dev_priv->saveVTOTAL_A);
414 I915_WRITE(VBLANK_A, dev_priv->saveVBLANK_A); 418 I915_WRITE(VBLANK_A, dev_priv->saveVBLANK_A);
415 I915_WRITE(VSYNC_A, dev_priv->saveVSYNC_A); 419 I915_WRITE(VSYNC_A, dev_priv->saveVSYNC_A);
416 if (!IS_IRONLAKE(dev)) 420 if (!HAS_PCH_SPLIT(dev))
417 I915_WRITE(BCLRPAT_A, dev_priv->saveBCLRPAT_A); 421 I915_WRITE(BCLRPAT_A, dev_priv->saveBCLRPAT_A);
418 422
419 if (IS_IRONLAKE(dev)) { 423 if (HAS_PCH_SPLIT(dev)) {
420 I915_WRITE(PIPEA_DATA_M1, dev_priv->savePIPEA_DATA_M1); 424 I915_WRITE(PIPEA_DATA_M1, dev_priv->savePIPEA_DATA_M1);
421 I915_WRITE(PIPEA_DATA_N1, dev_priv->savePIPEA_DATA_N1); 425 I915_WRITE(PIPEA_DATA_N1, dev_priv->savePIPEA_DATA_N1);
422 I915_WRITE(PIPEA_LINK_M1, dev_priv->savePIPEA_LINK_M1); 426 I915_WRITE(PIPEA_LINK_M1, dev_priv->savePIPEA_LINK_M1);
@@ -460,16 +464,20 @@ static void i915_restore_modeset_reg(struct drm_device *dev)
460 if (dev_priv->saveDPLL_B & DPLL_VCO_ENABLE) { 464 if (dev_priv->saveDPLL_B & DPLL_VCO_ENABLE) {
461 I915_WRITE(dpll_b_reg, dev_priv->saveDPLL_B & 465 I915_WRITE(dpll_b_reg, dev_priv->saveDPLL_B &
462 ~DPLL_VCO_ENABLE); 466 ~DPLL_VCO_ENABLE);
463 DRM_UDELAY(150); 467 POSTING_READ(dpll_b_reg);
468 udelay(150);
464 } 469 }
465 I915_WRITE(fpb0_reg, dev_priv->saveFPB0); 470 I915_WRITE(fpb0_reg, dev_priv->saveFPB0);
466 I915_WRITE(fpb1_reg, dev_priv->saveFPB1); 471 I915_WRITE(fpb1_reg, dev_priv->saveFPB1);
467 /* Actually enable it */ 472 /* Actually enable it */
468 I915_WRITE(dpll_b_reg, dev_priv->saveDPLL_B); 473 I915_WRITE(dpll_b_reg, dev_priv->saveDPLL_B);
469 DRM_UDELAY(150); 474 POSTING_READ(dpll_b_reg);
470 if (IS_I965G(dev) && !IS_IRONLAKE(dev)) 475 udelay(150);
476 if (IS_I965G(dev) && !HAS_PCH_SPLIT(dev)) {
471 I915_WRITE(DPLL_B_MD, dev_priv->saveDPLL_B_MD); 477 I915_WRITE(DPLL_B_MD, dev_priv->saveDPLL_B_MD);
472 DRM_UDELAY(150); 478 POSTING_READ(DPLL_B_MD);
479 }
480 udelay(150);
473 481
474 /* Restore mode */ 482 /* Restore mode */
475 I915_WRITE(HTOTAL_B, dev_priv->saveHTOTAL_B); 483 I915_WRITE(HTOTAL_B, dev_priv->saveHTOTAL_B);
@@ -478,10 +486,10 @@ static void i915_restore_modeset_reg(struct drm_device *dev)
478 I915_WRITE(VTOTAL_B, dev_priv->saveVTOTAL_B); 486 I915_WRITE(VTOTAL_B, dev_priv->saveVTOTAL_B);
479 I915_WRITE(VBLANK_B, dev_priv->saveVBLANK_B); 487 I915_WRITE(VBLANK_B, dev_priv->saveVBLANK_B);
480 I915_WRITE(VSYNC_B, dev_priv->saveVSYNC_B); 488 I915_WRITE(VSYNC_B, dev_priv->saveVSYNC_B);
481 if (!IS_IRONLAKE(dev)) 489 if (!HAS_PCH_SPLIT(dev))
482 I915_WRITE(BCLRPAT_B, dev_priv->saveBCLRPAT_B); 490 I915_WRITE(BCLRPAT_B, dev_priv->saveBCLRPAT_B);
483 491
484 if (IS_IRONLAKE(dev)) { 492 if (HAS_PCH_SPLIT(dev)) {
485 I915_WRITE(PIPEB_DATA_M1, dev_priv->savePIPEB_DATA_M1); 493 I915_WRITE(PIPEB_DATA_M1, dev_priv->savePIPEB_DATA_M1);
486 I915_WRITE(PIPEB_DATA_N1, dev_priv->savePIPEB_DATA_N1); 494 I915_WRITE(PIPEB_DATA_N1, dev_priv->savePIPEB_DATA_N1);
487 I915_WRITE(PIPEB_LINK_M1, dev_priv->savePIPEB_LINK_M1); 495 I915_WRITE(PIPEB_LINK_M1, dev_priv->savePIPEB_LINK_M1);
@@ -546,14 +554,14 @@ void i915_save_display(struct drm_device *dev)
546 dev_priv->saveCURSIZE = I915_READ(CURSIZE); 554 dev_priv->saveCURSIZE = I915_READ(CURSIZE);
547 555
548 /* CRT state */ 556 /* CRT state */
549 if (IS_IRONLAKE(dev)) { 557 if (HAS_PCH_SPLIT(dev)) {
550 dev_priv->saveADPA = I915_READ(PCH_ADPA); 558 dev_priv->saveADPA = I915_READ(PCH_ADPA);
551 } else { 559 } else {
552 dev_priv->saveADPA = I915_READ(ADPA); 560 dev_priv->saveADPA = I915_READ(ADPA);
553 } 561 }
554 562
555 /* LVDS state */ 563 /* LVDS state */
556 if (IS_IRONLAKE(dev)) { 564 if (HAS_PCH_SPLIT(dev)) {
557 dev_priv->savePP_CONTROL = I915_READ(PCH_PP_CONTROL); 565 dev_priv->savePP_CONTROL = I915_READ(PCH_PP_CONTROL);
558 dev_priv->saveBLC_PWM_CTL = I915_READ(BLC_PWM_PCH_CTL1); 566 dev_priv->saveBLC_PWM_CTL = I915_READ(BLC_PWM_PCH_CTL1);
559 dev_priv->saveBLC_PWM_CTL2 = I915_READ(BLC_PWM_PCH_CTL2); 567 dev_priv->saveBLC_PWM_CTL2 = I915_READ(BLC_PWM_PCH_CTL2);
@@ -571,10 +579,10 @@ void i915_save_display(struct drm_device *dev)
571 dev_priv->saveLVDS = I915_READ(LVDS); 579 dev_priv->saveLVDS = I915_READ(LVDS);
572 } 580 }
573 581
574 if (!IS_I830(dev) && !IS_845G(dev) && !IS_IRONLAKE(dev)) 582 if (!IS_I830(dev) && !IS_845G(dev) && !HAS_PCH_SPLIT(dev))
575 dev_priv->savePFIT_CONTROL = I915_READ(PFIT_CONTROL); 583 dev_priv->savePFIT_CONTROL = I915_READ(PFIT_CONTROL);
576 584
577 if (IS_IRONLAKE(dev)) { 585 if (HAS_PCH_SPLIT(dev)) {
578 dev_priv->savePP_ON_DELAYS = I915_READ(PCH_PP_ON_DELAYS); 586 dev_priv->savePP_ON_DELAYS = I915_READ(PCH_PP_ON_DELAYS);
579 dev_priv->savePP_OFF_DELAYS = I915_READ(PCH_PP_OFF_DELAYS); 587 dev_priv->savePP_OFF_DELAYS = I915_READ(PCH_PP_OFF_DELAYS);
580 dev_priv->savePP_DIVISOR = I915_READ(PCH_PP_DIVISOR); 588 dev_priv->savePP_DIVISOR = I915_READ(PCH_PP_DIVISOR);
@@ -602,7 +610,7 @@ void i915_save_display(struct drm_device *dev)
602 610
603 /* Only save FBC state on the platform that supports FBC */ 611 /* Only save FBC state on the platform that supports FBC */
604 if (I915_HAS_FBC(dev)) { 612 if (I915_HAS_FBC(dev)) {
605 if (IS_IRONLAKE_M(dev)) { 613 if (HAS_PCH_SPLIT(dev)) {
606 dev_priv->saveDPFC_CB_BASE = I915_READ(ILK_DPFC_CB_BASE); 614 dev_priv->saveDPFC_CB_BASE = I915_READ(ILK_DPFC_CB_BASE);
607 } else if (IS_GM45(dev)) { 615 } else if (IS_GM45(dev)) {
608 dev_priv->saveDPFC_CB_BASE = I915_READ(DPFC_CB_BASE); 616 dev_priv->saveDPFC_CB_BASE = I915_READ(DPFC_CB_BASE);
@@ -618,7 +626,7 @@ void i915_save_display(struct drm_device *dev)
618 dev_priv->saveVGA0 = I915_READ(VGA0); 626 dev_priv->saveVGA0 = I915_READ(VGA0);
619 dev_priv->saveVGA1 = I915_READ(VGA1); 627 dev_priv->saveVGA1 = I915_READ(VGA1);
620 dev_priv->saveVGA_PD = I915_READ(VGA_PD); 628 dev_priv->saveVGA_PD = I915_READ(VGA_PD);
621 if (IS_IRONLAKE(dev)) 629 if (HAS_PCH_SPLIT(dev))
622 dev_priv->saveVGACNTRL = I915_READ(CPU_VGACNTRL); 630 dev_priv->saveVGACNTRL = I915_READ(CPU_VGACNTRL);
623 else 631 else
624 dev_priv->saveVGACNTRL = I915_READ(VGACNTRL); 632 dev_priv->saveVGACNTRL = I915_READ(VGACNTRL);
@@ -660,24 +668,24 @@ void i915_restore_display(struct drm_device *dev)
660 I915_WRITE(CURSIZE, dev_priv->saveCURSIZE); 668 I915_WRITE(CURSIZE, dev_priv->saveCURSIZE);
661 669
662 /* CRT state */ 670 /* CRT state */
663 if (IS_IRONLAKE(dev)) 671 if (HAS_PCH_SPLIT(dev))
664 I915_WRITE(PCH_ADPA, dev_priv->saveADPA); 672 I915_WRITE(PCH_ADPA, dev_priv->saveADPA);
665 else 673 else
666 I915_WRITE(ADPA, dev_priv->saveADPA); 674 I915_WRITE(ADPA, dev_priv->saveADPA);
667 675
668 /* LVDS state */ 676 /* LVDS state */
669 if (IS_I965G(dev) && !IS_IRONLAKE(dev)) 677 if (IS_I965G(dev) && !HAS_PCH_SPLIT(dev))
670 I915_WRITE(BLC_PWM_CTL2, dev_priv->saveBLC_PWM_CTL2); 678 I915_WRITE(BLC_PWM_CTL2, dev_priv->saveBLC_PWM_CTL2);
671 679
672 if (IS_IRONLAKE(dev)) { 680 if (HAS_PCH_SPLIT(dev)) {
673 I915_WRITE(PCH_LVDS, dev_priv->saveLVDS); 681 I915_WRITE(PCH_LVDS, dev_priv->saveLVDS);
674 } else if (IS_MOBILE(dev) && !IS_I830(dev)) 682 } else if (IS_MOBILE(dev) && !IS_I830(dev))
675 I915_WRITE(LVDS, dev_priv->saveLVDS); 683 I915_WRITE(LVDS, dev_priv->saveLVDS);
676 684
677 if (!IS_I830(dev) && !IS_845G(dev) && !IS_IRONLAKE(dev)) 685 if (!IS_I830(dev) && !IS_845G(dev) && !HAS_PCH_SPLIT(dev))
678 I915_WRITE(PFIT_CONTROL, dev_priv->savePFIT_CONTROL); 686 I915_WRITE(PFIT_CONTROL, dev_priv->savePFIT_CONTROL);
679 687
680 if (IS_IRONLAKE(dev)) { 688 if (HAS_PCH_SPLIT(dev)) {
681 I915_WRITE(BLC_PWM_PCH_CTL1, dev_priv->saveBLC_PWM_CTL); 689 I915_WRITE(BLC_PWM_PCH_CTL1, dev_priv->saveBLC_PWM_CTL);
682 I915_WRITE(BLC_PWM_PCH_CTL2, dev_priv->saveBLC_PWM_CTL2); 690 I915_WRITE(BLC_PWM_PCH_CTL2, dev_priv->saveBLC_PWM_CTL2);
683 I915_WRITE(BLC_PWM_CPU_CTL, dev_priv->saveBLC_CPU_PWM_CTL); 691 I915_WRITE(BLC_PWM_CPU_CTL, dev_priv->saveBLC_CPU_PWM_CTL);
@@ -708,7 +716,7 @@ void i915_restore_display(struct drm_device *dev)
708 716
709 /* only restore FBC info on the platform that supports FBC*/ 717 /* only restore FBC info on the platform that supports FBC*/
710 if (I915_HAS_FBC(dev)) { 718 if (I915_HAS_FBC(dev)) {
711 if (IS_IRONLAKE_M(dev)) { 719 if (HAS_PCH_SPLIT(dev)) {
712 ironlake_disable_fbc(dev); 720 ironlake_disable_fbc(dev);
713 I915_WRITE(ILK_DPFC_CB_BASE, dev_priv->saveDPFC_CB_BASE); 721 I915_WRITE(ILK_DPFC_CB_BASE, dev_priv->saveDPFC_CB_BASE);
714 } else if (IS_GM45(dev)) { 722 } else if (IS_GM45(dev)) {
@@ -723,14 +731,15 @@ void i915_restore_display(struct drm_device *dev)
723 } 731 }
724 } 732 }
725 /* VGA state */ 733 /* VGA state */
726 if (IS_IRONLAKE(dev)) 734 if (HAS_PCH_SPLIT(dev))
727 I915_WRITE(CPU_VGACNTRL, dev_priv->saveVGACNTRL); 735 I915_WRITE(CPU_VGACNTRL, dev_priv->saveVGACNTRL);
728 else 736 else
729 I915_WRITE(VGACNTRL, dev_priv->saveVGACNTRL); 737 I915_WRITE(VGACNTRL, dev_priv->saveVGACNTRL);
730 I915_WRITE(VGA0, dev_priv->saveVGA0); 738 I915_WRITE(VGA0, dev_priv->saveVGA0);
731 I915_WRITE(VGA1, dev_priv->saveVGA1); 739 I915_WRITE(VGA1, dev_priv->saveVGA1);
732 I915_WRITE(VGA_PD, dev_priv->saveVGA_PD); 740 I915_WRITE(VGA_PD, dev_priv->saveVGA_PD);
733 DRM_UDELAY(150); 741 POSTING_READ(VGA_PD);
742 udelay(150);
734 743
735 i915_restore_vga(dev); 744 i915_restore_vga(dev);
736} 745}
@@ -748,7 +757,7 @@ int i915_save_state(struct drm_device *dev)
748 i915_save_display(dev); 757 i915_save_display(dev);
749 758
750 /* Interrupt state */ 759 /* Interrupt state */
751 if (IS_IRONLAKE(dev)) { 760 if (HAS_PCH_SPLIT(dev)) {
752 dev_priv->saveDEIER = I915_READ(DEIER); 761 dev_priv->saveDEIER = I915_READ(DEIER);
753 dev_priv->saveDEIMR = I915_READ(DEIMR); 762 dev_priv->saveDEIMR = I915_READ(DEIMR);
754 dev_priv->saveGTIER = I915_READ(GTIER); 763 dev_priv->saveGTIER = I915_READ(GTIER);
@@ -762,7 +771,7 @@ int i915_save_state(struct drm_device *dev)
762 dev_priv->saveIMR = I915_READ(IMR); 771 dev_priv->saveIMR = I915_READ(IMR);
763 } 772 }
764 773
765 if (IS_IRONLAKE_M(dev)) 774 if (HAS_PCH_SPLIT(dev))
766 ironlake_disable_drps(dev); 775 ironlake_disable_drps(dev);
767 776
768 /* Cache mode state */ 777 /* Cache mode state */
@@ -820,7 +829,7 @@ int i915_restore_state(struct drm_device *dev)
820 i915_restore_display(dev); 829 i915_restore_display(dev);
821 830
822 /* Interrupt state */ 831 /* Interrupt state */
823 if (IS_IRONLAKE(dev)) { 832 if (HAS_PCH_SPLIT(dev)) {
824 I915_WRITE(DEIER, dev_priv->saveDEIER); 833 I915_WRITE(DEIER, dev_priv->saveDEIER);
825 I915_WRITE(DEIMR, dev_priv->saveDEIMR); 834 I915_WRITE(DEIMR, dev_priv->saveDEIMR);
826 I915_WRITE(GTIER, dev_priv->saveGTIER); 835 I915_WRITE(GTIER, dev_priv->saveGTIER);
@@ -835,7 +844,7 @@ int i915_restore_state(struct drm_device *dev)
835 /* Clock gating state */ 844 /* Clock gating state */
836 intel_init_clock_gating(dev); 845 intel_init_clock_gating(dev);
837 846
838 if (IS_IRONLAKE_M(dev)) 847 if (HAS_PCH_SPLIT(dev))
839 ironlake_enable_drps(dev); 848 ironlake_enable_drps(dev);
840 849
841 /* Cache mode state */ 850 /* Cache mode state */
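
Note on the i915_suspend.c changes above: besides switching the platform checks from IS_IRONLAKE()/IS_IRONLAKE_M() to HAS_PCH_SPLIT(), the DPLL restore paths replace a bare DRM_UDELAY(150) after each register write with POSTING_READ() of that register followed by udelay(150), so the write is flushed to the hardware before the settle delay starts. The sketch below shows that write/flush/delay ordering with stubbed register accessors; the helpers and register offsets are local stand-ins, not the driver macros.

/* Illustrative sketch only -- not part of the patch. */
#include <stdio.h>
#include <stdint.h>

static uint32_t regs[0x10000];

static void reg_write(uint32_t reg, uint32_t val) { regs[reg] = val; }	/* stands in for I915_WRITE() */
static uint32_t reg_read(uint32_t reg)            { return regs[reg]; }	/* stands in for I915_READ()/POSTING_READ() */
static void settle_us(unsigned us)                { (void)us; }		/* stands in for udelay() */

#define DPLL_VCO_ENABLE (1u << 31)

/* Restore a saved DPLL: program the dividers with the VCO off, flush, wait,
 * then enable it and wait again, mirroring i915_restore_modeset_reg(). */
static void restore_dpll(uint32_t dpll_reg, uint32_t fp0_reg, uint32_t fp1_reg,
			 uint32_t saved_dpll, uint32_t saved_fp0, uint32_t saved_fp1)
{
	if (saved_dpll & DPLL_VCO_ENABLE) {
		reg_write(dpll_reg, saved_dpll & ~DPLL_VCO_ENABLE);
		(void)reg_read(dpll_reg);	/* posting read flushes the write */
		settle_us(150);
	}
	reg_write(fp0_reg, saved_fp0);
	reg_write(fp1_reg, saved_fp1);

	reg_write(dpll_reg, saved_dpll);	/* actually enable it */
	(void)reg_read(dpll_reg);
	settle_us(150);
}

int main(void)
{
	restore_dpll(0x6014, 0x6040, 0x6044, DPLL_VCO_ENABLE | 0x1234, 0x11, 0x22);
	printf("DPLL = 0x%x\n", reg_read(0x6014));
	return 0;
}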
diff --git a/drivers/gpu/drm/i915/intel_crt.c b/drivers/gpu/drm/i915/intel_crt.c
index ee0732b222a1..4b7735196cd5 100644
--- a/drivers/gpu/drm/i915/intel_crt.c
+++ b/drivers/gpu/drm/i915/intel_crt.c
@@ -160,19 +160,20 @@ static bool intel_ironlake_crt_detect_hotplug(struct drm_connector *connector)
160 struct drm_i915_private *dev_priv = dev->dev_private; 160 struct drm_i915_private *dev_priv = dev->dev_private;
161 u32 adpa, temp; 161 u32 adpa, temp;
162 bool ret; 162 bool ret;
163 bool turn_off_dac = false;
163 164
164 temp = adpa = I915_READ(PCH_ADPA); 165 temp = adpa = I915_READ(PCH_ADPA);
165 166
166 if (HAS_PCH_CPT(dev)) { 167 if (HAS_PCH_SPLIT(dev))
167 /* Disable DAC before force detect */ 168 turn_off_dac = true;
168 I915_WRITE(PCH_ADPA, adpa & ~ADPA_DAC_ENABLE); 169
169 (void)I915_READ(PCH_ADPA); 170 adpa &= ~ADPA_CRT_HOTPLUG_MASK;
170 } else { 171 if (turn_off_dac)
171 adpa &= ~ADPA_CRT_HOTPLUG_MASK; 172 adpa &= ~ADPA_DAC_ENABLE;
172 /* disable HPD first */ 173
173 I915_WRITE(PCH_ADPA, adpa); 174 /* disable HPD first */
174 (void)I915_READ(PCH_ADPA); 175 I915_WRITE(PCH_ADPA, adpa);
175 } 176 (void)I915_READ(PCH_ADPA);
176 177
177 adpa |= (ADPA_CRT_HOTPLUG_PERIOD_128 | 178 adpa |= (ADPA_CRT_HOTPLUG_PERIOD_128 |
178 ADPA_CRT_HOTPLUG_WARMUP_10MS | 179 ADPA_CRT_HOTPLUG_WARMUP_10MS |
@@ -185,10 +186,11 @@ static bool intel_ironlake_crt_detect_hotplug(struct drm_connector *connector)
185 DRM_DEBUG_KMS("pch crt adpa 0x%x", adpa); 186 DRM_DEBUG_KMS("pch crt adpa 0x%x", adpa);
186 I915_WRITE(PCH_ADPA, adpa); 187 I915_WRITE(PCH_ADPA, adpa);
187 188
188 while ((I915_READ(PCH_ADPA) & ADPA_CRT_HOTPLUG_FORCE_TRIGGER) != 0) 189 if (wait_for((I915_READ(PCH_ADPA) & ADPA_CRT_HOTPLUG_FORCE_TRIGGER) == 0,
189 ; 190 1000, 1))
191 DRM_ERROR("timed out waiting for FORCE_TRIGGER");
190 192
191 if (HAS_PCH_CPT(dev)) { 193 if (turn_off_dac) {
192 I915_WRITE(PCH_ADPA, temp); 194 I915_WRITE(PCH_ADPA, temp);
193 (void)I915_READ(PCH_ADPA); 195 (void)I915_READ(PCH_ADPA);
194 } 196 }
@@ -237,17 +239,13 @@ static bool intel_crt_detect_hotplug(struct drm_connector *connector)
237 hotplug_en |= CRT_HOTPLUG_FORCE_DETECT; 239 hotplug_en |= CRT_HOTPLUG_FORCE_DETECT;
238 240
239 for (i = 0; i < tries ; i++) { 241 for (i = 0; i < tries ; i++) {
240 unsigned long timeout;
241 /* turn on the FORCE_DETECT */ 242 /* turn on the FORCE_DETECT */
242 I915_WRITE(PORT_HOTPLUG_EN, hotplug_en); 243 I915_WRITE(PORT_HOTPLUG_EN, hotplug_en);
243 timeout = jiffies + msecs_to_jiffies(1000);
244 /* wait for FORCE_DETECT to go off */ 244 /* wait for FORCE_DETECT to go off */
245 do { 245 if (wait_for((I915_READ(PORT_HOTPLUG_EN) &
246 if (!(I915_READ(PORT_HOTPLUG_EN) & 246 CRT_HOTPLUG_FORCE_DETECT) == 0,
247 CRT_HOTPLUG_FORCE_DETECT)) 247 1000, 1))
248 break; 248 DRM_ERROR("timed out waiting for FORCE_DETECT to go off");
249 msleep(1);
250 } while (time_after(timeout, jiffies));
251 } 249 }
252 250
253 stat = I915_READ(PORT_HOTPLUG_STAT); 251 stat = I915_READ(PORT_HOTPLUG_STAT);
@@ -331,7 +329,7 @@ intel_crt_load_detect(struct drm_crtc *crtc, struct intel_encoder *intel_encoder
331 I915_WRITE(pipeconf_reg, pipeconf | PIPECONF_FORCE_BORDER); 329 I915_WRITE(pipeconf_reg, pipeconf | PIPECONF_FORCE_BORDER);
332 /* Wait for next Vblank to substitute 330
333 * border color for Color info */ 331 * border color for Color info */
334 intel_wait_for_vblank(dev); 332 intel_wait_for_vblank(dev, pipe);
335 st00 = I915_READ8(VGA_MSR_WRITE); 333 st00 = I915_READ8(VGA_MSR_WRITE);
336 status = ((st00 & (1 << 4)) != 0) ? 334 status = ((st00 & (1 << 4)) != 0) ?
337 connector_status_connected : 335 connector_status_connected :
@@ -508,17 +506,8 @@ static const struct drm_connector_helper_funcs intel_crt_connector_helper_funcs
508 .best_encoder = intel_attached_encoder, 506 .best_encoder = intel_attached_encoder,
509}; 507};
510 508
511static void intel_crt_enc_destroy(struct drm_encoder *encoder)
512{
513 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
514
515 intel_i2c_destroy(intel_encoder->ddc_bus);
516 drm_encoder_cleanup(encoder);
517 kfree(intel_encoder);
518}
519
520static const struct drm_encoder_funcs intel_crt_enc_funcs = { 509static const struct drm_encoder_funcs intel_crt_enc_funcs = {
521 .destroy = intel_crt_enc_destroy, 510 .destroy = intel_encoder_destroy,
522}; 511};
523 512
524void intel_crt_init(struct drm_device *dev) 513void intel_crt_init(struct drm_device *dev)
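
Note on the intel_crt.c changes above (and the intel_display.c ones below): the open-coded busy-wait loops are replaced by a wait_for(COND, MS, W) helper that polls a condition with a timeout and reports failure instead of spinning forever. Its real definition lives in intel_drv.h and is not part of this diff; the user-space sketch below only assumes the behaviour visible from the call sites: evaluate COND until it holds or MS milliseconds elapse, optionally sleeping between polls, and return non-zero on timeout.

/* Illustrative sketch only -- the driver's wait_for() may differ in detail. */
#include <stdio.h>
#include <time.h>
#include <unistd.h>

static long now_ms(void)
{
	struct timespec ts;
	clock_gettime(CLOCK_MONOTONIC, &ts);
	return ts.tv_sec * 1000L + ts.tv_nsec / 1000000L;
}

/* Assumed semantics: poll COND for up to MS milliseconds; if W is non-zero,
 * sleep about a millisecond between polls; return 0 on success, -1 on timeout. */
#define wait_for(COND, MS, W) ({					\
	long timeout__ = now_ms() + (MS);				\
	int ret__ = 0;							\
	while (!(COND)) {						\
		if (now_ms() > timeout__) { ret__ = -1; break; }	\
		if (W)							\
			usleep(1000);					\
	}								\
	ret__;								\
})

static volatile int force_trigger;	/* stand-in for a hardware status bit */

int main(void)
{
	force_trigger = 0;		/* pretend the hardware finished immediately */
	if (wait_for(force_trigger == 0, 1000, 1))
		fprintf(stderr, "timed out waiting for FORCE_TRIGGER\n");
	else
		printf("condition met\n");
	return 0;
}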
diff --git a/drivers/gpu/drm/i915/intel_display.c b/drivers/gpu/drm/i915/intel_display.c
index 5ec10e02341b..23157e1de3be 100644
--- a/drivers/gpu/drm/i915/intel_display.c
+++ b/drivers/gpu/drm/i915/intel_display.c
@@ -29,6 +29,7 @@
29#include <linux/i2c.h> 29#include <linux/i2c.h>
30#include <linux/kernel.h> 30#include <linux/kernel.h>
31#include <linux/slab.h> 31#include <linux/slab.h>
32#include <linux/vgaarb.h>
32#include "drmP.h" 33#include "drmP.h"
33#include "intel_drv.h" 34#include "intel_drv.h"
34#include "i915_drm.h" 35#include "i915_drm.h"
@@ -976,14 +977,54 @@ intel_find_pll_g4x_dp(const intel_limit_t *limit, struct drm_crtc *crtc,
976 return true; 977 return true;
977} 978}
978 979
979void 980/**
980intel_wait_for_vblank(struct drm_device *dev) 981 * intel_wait_for_vblank - wait for vblank on a given pipe
982 * @dev: drm device
983 * @pipe: pipe to wait for
984 *
985 * Wait for vblank to occur on a given pipe. Needed for various bits of
986 * mode setting code.
987 */
988void intel_wait_for_vblank(struct drm_device *dev, int pipe)
981{ 989{
982 /* Wait for 20ms, i.e. one cycle at 50hz. */ 990 struct drm_i915_private *dev_priv = dev->dev_private;
983 if (in_dbg_master()) 991 int pipestat_reg = (pipe == 0 ? PIPEASTAT : PIPEBSTAT);
984 mdelay(20); /* The kernel debugger cannot call msleep() */ 992
985 else 993 /* Wait for vblank interrupt bit to set */
986 msleep(20); 994 if (wait_for((I915_READ(pipestat_reg) &
995 PIPE_VBLANK_INTERRUPT_STATUS) == 0,
996 50, 0))
997 DRM_DEBUG_KMS("vblank wait timed out\n");
998}
999
1000/**
1001 * intel_wait_for_vblank_off - wait for vblank after disabling a pipe
1002 * @dev: drm device
1003 * @pipe: pipe to wait for
1004 *
1005 * After disabling a pipe, we can't wait for vblank in the usual way,
1006 * spinning on the vblank interrupt status bit, since we won't actually
1007 * see an interrupt when the pipe is disabled.
1008 *
1009 * So this function waits for the display line value to settle (it
1010 * usually ends up stopping at the start of the next frame).
1011 */
1012void intel_wait_for_vblank_off(struct drm_device *dev, int pipe)
1013{
1014 struct drm_i915_private *dev_priv = dev->dev_private;
1015 int pipedsl_reg = (pipe == 0 ? PIPEADSL : PIPEBDSL);
1016 unsigned long timeout = jiffies + msecs_to_jiffies(100);
1017 u32 last_line;
1018
1019 /* Wait for the display line to settle */
1020 do {
1021 last_line = I915_READ(pipedsl_reg) & DSL_LINEMASK;
1022 mdelay(5);
1023 } while (((I915_READ(pipedsl_reg) & DSL_LINEMASK) != last_line) &&
1024 time_after(timeout, jiffies));
1025
1026 if (time_after(jiffies, timeout))
1027 DRM_DEBUG_KMS("vblank wait timed out\n");
987} 1028}
988 1029
989/* Parameters have changed, update FBC info */ 1030/* Parameters have changed, update FBC info */
@@ -1037,7 +1078,6 @@ static void i8xx_enable_fbc(struct drm_crtc *crtc, unsigned long interval)
1037void i8xx_disable_fbc(struct drm_device *dev) 1078void i8xx_disable_fbc(struct drm_device *dev)
1038{ 1079{
1039 struct drm_i915_private *dev_priv = dev->dev_private; 1080 struct drm_i915_private *dev_priv = dev->dev_private;
1040 unsigned long timeout = jiffies + msecs_to_jiffies(1);
1041 u32 fbc_ctl; 1081 u32 fbc_ctl;
1042 1082
1043 if (!I915_HAS_FBC(dev)) 1083 if (!I915_HAS_FBC(dev))
@@ -1052,16 +1092,11 @@ void i8xx_disable_fbc(struct drm_device *dev)
1052 I915_WRITE(FBC_CONTROL, fbc_ctl); 1092 I915_WRITE(FBC_CONTROL, fbc_ctl);
1053 1093
1054 /* Wait for compressing bit to clear */ 1094 /* Wait for compressing bit to clear */
1055 while (I915_READ(FBC_STATUS) & FBC_STAT_COMPRESSING) { 1095 if (wait_for((I915_READ(FBC_STATUS) & FBC_STAT_COMPRESSING) == 0, 10, 0)) {
1056 if (time_after(jiffies, timeout)) { 1096 DRM_DEBUG_KMS("FBC idle timed out\n");
1057 DRM_DEBUG_DRIVER("FBC idle timed out\n"); 1097 return;
1058 break;
1059 }
1060 ; /* do nothing */
1061 } 1098 }
1062 1099
1063 intel_wait_for_vblank(dev);
1064
1065 DRM_DEBUG_KMS("disabled FBC\n"); 1100 DRM_DEBUG_KMS("disabled FBC\n");
1066} 1101}
1067 1102
@@ -1118,7 +1153,6 @@ void g4x_disable_fbc(struct drm_device *dev)
1118 dpfc_ctl = I915_READ(DPFC_CONTROL); 1153 dpfc_ctl = I915_READ(DPFC_CONTROL);
1119 dpfc_ctl &= ~DPFC_CTL_EN; 1154 dpfc_ctl &= ~DPFC_CTL_EN;
1120 I915_WRITE(DPFC_CONTROL, dpfc_ctl); 1155 I915_WRITE(DPFC_CONTROL, dpfc_ctl);
1121 intel_wait_for_vblank(dev);
1122 1156
1123 DRM_DEBUG_KMS("disabled FBC\n"); 1157 DRM_DEBUG_KMS("disabled FBC\n");
1124} 1158}
@@ -1179,7 +1213,6 @@ void ironlake_disable_fbc(struct drm_device *dev)
1179 dpfc_ctl = I915_READ(ILK_DPFC_CONTROL); 1213 dpfc_ctl = I915_READ(ILK_DPFC_CONTROL);
1180 dpfc_ctl &= ~DPFC_CTL_EN; 1214 dpfc_ctl &= ~DPFC_CTL_EN;
1181 I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl); 1215 I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl);
1182 intel_wait_for_vblank(dev);
1183 1216
1184 DRM_DEBUG_KMS("disabled FBC\n"); 1217 DRM_DEBUG_KMS("disabled FBC\n");
1185} 1218}
@@ -1478,7 +1511,7 @@ intel_pipe_set_base_atomic(struct drm_crtc *crtc, struct drm_framebuffer *fb,
1478 if ((IS_I965G(dev) || plane == 0)) 1511 if ((IS_I965G(dev) || plane == 0))
1479 intel_update_fbc(crtc, &crtc->mode); 1512 intel_update_fbc(crtc, &crtc->mode);
1480 1513
1481 intel_wait_for_vblank(dev); 1514 intel_wait_for_vblank(dev, intel_crtc->pipe);
1482 intel_increase_pllclock(crtc, true); 1515 intel_increase_pllclock(crtc, true);
1483 1516
1484 return 0; 1517 return 0;
@@ -1585,20 +1618,18 @@ intel_pipe_set_base(struct drm_crtc *crtc, int x, int y,
1585 Start, Offset, x, y, crtc->fb->pitch); 1618 Start, Offset, x, y, crtc->fb->pitch);
1586 I915_WRITE(dspstride, crtc->fb->pitch); 1619 I915_WRITE(dspstride, crtc->fb->pitch);
1587 if (IS_I965G(dev)) { 1620 if (IS_I965G(dev)) {
1588 I915_WRITE(dspbase, Offset);
1589 I915_READ(dspbase);
1590 I915_WRITE(dspsurf, Start); 1621 I915_WRITE(dspsurf, Start);
1591 I915_READ(dspsurf);
1592 I915_WRITE(dsptileoff, (y << 16) | x); 1622 I915_WRITE(dsptileoff, (y << 16) | x);
1623 I915_WRITE(dspbase, Offset);
1593 } else { 1624 } else {
1594 I915_WRITE(dspbase, Start + Offset); 1625 I915_WRITE(dspbase, Start + Offset);
1595 I915_READ(dspbase);
1596 } 1626 }
1627 POSTING_READ(dspbase);
1597 1628
1598 if ((IS_I965G(dev) || plane == 0)) 1629 if ((IS_I965G(dev) || plane == 0))
1599 intel_update_fbc(crtc, &crtc->mode); 1630 intel_update_fbc(crtc, &crtc->mode);
1600 1631
1601 intel_wait_for_vblank(dev); 1632 intel_wait_for_vblank(dev, pipe);
1602 1633
1603 if (old_fb) { 1634 if (old_fb) {
1604 intel_fb = to_intel_framebuffer(old_fb); 1635 intel_fb = to_intel_framebuffer(old_fb);
@@ -1627,54 +1658,6 @@ intel_pipe_set_base(struct drm_crtc *crtc, int x, int y,
1627 return 0; 1658 return 0;
1628} 1659}
1629 1660
1630/* Disable the VGA plane that we never use */
1631static void i915_disable_vga (struct drm_device *dev)
1632{
1633 struct drm_i915_private *dev_priv = dev->dev_private;
1634 u8 sr1;
1635 u32 vga_reg;
1636
1637 if (HAS_PCH_SPLIT(dev))
1638 vga_reg = CPU_VGACNTRL;
1639 else
1640 vga_reg = VGACNTRL;
1641
1642 if (I915_READ(vga_reg) & VGA_DISP_DISABLE)
1643 return;
1644
1645 I915_WRITE8(VGA_SR_INDEX, 1);
1646 sr1 = I915_READ8(VGA_SR_DATA);
1647 I915_WRITE8(VGA_SR_DATA, sr1 | (1 << 5));
1648 udelay(100);
1649
1650 I915_WRITE(vga_reg, VGA_DISP_DISABLE);
1651}
1652
1653static void ironlake_disable_pll_edp (struct drm_crtc *crtc)
1654{
1655 struct drm_device *dev = crtc->dev;
1656 struct drm_i915_private *dev_priv = dev->dev_private;
1657 u32 dpa_ctl;
1658
1659 DRM_DEBUG_KMS("\n");
1660 dpa_ctl = I915_READ(DP_A);
1661 dpa_ctl &= ~DP_PLL_ENABLE;
1662 I915_WRITE(DP_A, dpa_ctl);
1663}
1664
1665static void ironlake_enable_pll_edp (struct drm_crtc *crtc)
1666{
1667 struct drm_device *dev = crtc->dev;
1668 struct drm_i915_private *dev_priv = dev->dev_private;
1669 u32 dpa_ctl;
1670
1671 dpa_ctl = I915_READ(DP_A);
1672 dpa_ctl |= DP_PLL_ENABLE;
1673 I915_WRITE(DP_A, dpa_ctl);
1674 udelay(200);
1675}
1676
1677
1678static void ironlake_set_pll_edp (struct drm_crtc *crtc, int clock) 1661static void ironlake_set_pll_edp (struct drm_crtc *crtc, int clock)
1679{ 1662{
1680 struct drm_device *dev = crtc->dev; 1663 struct drm_device *dev = crtc->dev;
@@ -1945,7 +1928,6 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
1945 int trans_vsync_reg = (pipe == 0) ? TRANS_VSYNC_A : TRANS_VSYNC_B; 1928 int trans_vsync_reg = (pipe == 0) ? TRANS_VSYNC_A : TRANS_VSYNC_B;
1946 int trans_dpll_sel = (pipe == 0) ? 0 : 1; 1929 int trans_dpll_sel = (pipe == 0) ? 0 : 1;
1947 u32 temp; 1930 u32 temp;
1948 int n;
1949 u32 pipe_bpc; 1931 u32 pipe_bpc;
1950 1932
1951 temp = I915_READ(pipeconf_reg); 1933 temp = I915_READ(pipeconf_reg);
@@ -1958,7 +1940,7 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
1958 case DRM_MODE_DPMS_ON: 1940 case DRM_MODE_DPMS_ON:
1959 case DRM_MODE_DPMS_STANDBY: 1941 case DRM_MODE_DPMS_STANDBY:
1960 case DRM_MODE_DPMS_SUSPEND: 1942 case DRM_MODE_DPMS_SUSPEND:
1961 DRM_DEBUG_KMS("crtc %d dpms on\n", pipe); 1943 DRM_DEBUG_KMS("crtc %d/%d dpms on\n", pipe, plane);
1962 1944
1963 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)) { 1945 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)) {
1964 temp = I915_READ(PCH_LVDS); 1946 temp = I915_READ(PCH_LVDS);
@@ -1968,10 +1950,7 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
1968 } 1950 }
1969 } 1951 }
1970 1952
1971 if (HAS_eDP) { 1953 if (!HAS_eDP) {
1972 /* enable eDP PLL */
1973 ironlake_enable_pll_edp(crtc);
1974 } else {
1975 1954
1976 /* enable PCH FDI RX PLL, wait warmup plus DMI latency */ 1955 /* enable PCH FDI RX PLL, wait warmup plus DMI latency */
1977 temp = I915_READ(fdi_rx_reg); 1956 temp = I915_READ(fdi_rx_reg);
@@ -2005,15 +1984,13 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
2005 /* Enable panel fitting for LVDS */ 1984 /* Enable panel fitting for LVDS */
2006 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS) 1985 if (intel_pipe_has_type(crtc, INTEL_OUTPUT_LVDS)
2007 || HAS_eDP || intel_pch_has_edp(crtc)) { 1986 || HAS_eDP || intel_pch_has_edp(crtc)) {
2008 temp = I915_READ(pf_ctl_reg); 1987 if (dev_priv->pch_pf_size) {
2009 I915_WRITE(pf_ctl_reg, temp | PF_ENABLE | PF_FILTER_MED_3x3); 1988 temp = I915_READ(pf_ctl_reg);
2010 1989 I915_WRITE(pf_ctl_reg, temp | PF_ENABLE | PF_FILTER_MED_3x3);
2011 /* currently full aspect */ 1990 I915_WRITE(pf_win_pos, dev_priv->pch_pf_pos);
2012 I915_WRITE(pf_win_pos, 0); 1991 I915_WRITE(pf_win_size, dev_priv->pch_pf_size);
2013 1992 } else
2014 I915_WRITE(pf_win_size, 1993 I915_WRITE(pf_ctl_reg, temp & ~PF_ENABLE);
2015 (dev_priv->panel_fixed_mode->hdisplay << 16) |
2016 (dev_priv->panel_fixed_mode->vdisplay));
2017 } 1994 }
2018 1995
2019 /* Enable CPU pipe */ 1996 /* Enable CPU pipe */
@@ -2097,9 +2074,10 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
2097 int reg; 2074 int reg;
2098 2075
2099 reg = I915_READ(trans_dp_ctl); 2076 reg = I915_READ(trans_dp_ctl);
2100 reg &= ~TRANS_DP_PORT_SEL_MASK; 2077 reg &= ~(TRANS_DP_PORT_SEL_MASK |
2101 reg = TRANS_DP_OUTPUT_ENABLE | 2078 TRANS_DP_SYNC_MASK);
2102 TRANS_DP_ENH_FRAMING; 2079 reg |= (TRANS_DP_OUTPUT_ENABLE |
2080 TRANS_DP_ENH_FRAMING);
2103 2081
2104 if (crtc->mode.flags & DRM_MODE_FLAG_PHSYNC) 2082 if (crtc->mode.flags & DRM_MODE_FLAG_PHSYNC)
2105 reg |= TRANS_DP_HSYNC_ACTIVE_HIGH; 2083 reg |= TRANS_DP_HSYNC_ACTIVE_HIGH;
@@ -2137,18 +2115,17 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
2137 I915_WRITE(transconf_reg, temp | TRANS_ENABLE); 2115 I915_WRITE(transconf_reg, temp | TRANS_ENABLE);
2138 I915_READ(transconf_reg); 2116 I915_READ(transconf_reg);
2139 2117
2140 while ((I915_READ(transconf_reg) & TRANS_STATE_ENABLE) == 0) 2118 if (wait_for(I915_READ(transconf_reg) & TRANS_STATE_ENABLE, 10, 0))
2141 ; 2119 DRM_ERROR("failed to enable transcoder\n");
2142
2143 } 2120 }
2144 2121
2145 intel_crtc_load_lut(crtc); 2122 intel_crtc_load_lut(crtc);
2146 2123
2147 intel_update_fbc(crtc, &crtc->mode); 2124 intel_update_fbc(crtc, &crtc->mode);
2125 break;
2148 2126
2149 break;
2150 case DRM_MODE_DPMS_OFF: 2127 case DRM_MODE_DPMS_OFF:
2151 DRM_DEBUG_KMS("crtc %d dpms off\n", pipe); 2128 DRM_DEBUG_KMS("crtc %d/%d dpms off\n", pipe, plane);
2152 2129
2153 drm_vblank_off(dev, pipe); 2130 drm_vblank_off(dev, pipe);
2154 /* Disable display plane */ 2131 /* Disable display plane */
@@ -2164,26 +2141,14 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
2164 dev_priv->display.disable_fbc) 2141 dev_priv->display.disable_fbc)
2165 dev_priv->display.disable_fbc(dev); 2142 dev_priv->display.disable_fbc(dev);
2166 2143
2167 i915_disable_vga(dev);
2168
2169 /* disable cpu pipe, disable after all planes disabled */ 2144 /* disable cpu pipe, disable after all planes disabled */
2170 temp = I915_READ(pipeconf_reg); 2145 temp = I915_READ(pipeconf_reg);
2171 if ((temp & PIPEACONF_ENABLE) != 0) { 2146 if ((temp & PIPEACONF_ENABLE) != 0) {
2172 I915_WRITE(pipeconf_reg, temp & ~PIPEACONF_ENABLE); 2147 I915_WRITE(pipeconf_reg, temp & ~PIPEACONF_ENABLE);
2173 I915_READ(pipeconf_reg); 2148
2174 n = 0;
2175 /* wait for cpu pipe off, pipe state */ 2149 /* wait for cpu pipe off, pipe state */
2176 while ((I915_READ(pipeconf_reg) & I965_PIPECONF_ACTIVE) != 0) { 2150 if (wait_for((I915_READ(pipeconf_reg) & I965_PIPECONF_ACTIVE) == 0, 50, 1))
2177 n++; 2151 DRM_ERROR("failed to turn off cpu pipe\n");
2178 if (n < 60) {
2179 udelay(500);
2180 continue;
2181 } else {
2182 DRM_DEBUG_KMS("pipe %d off delay\n",
2183 pipe);
2184 break;
2185 }
2186 }
2187 } else 2152 } else
2188 DRM_DEBUG_KMS("crtc %d is disabled\n", pipe); 2153 DRM_DEBUG_KMS("crtc %d is disabled\n", pipe);
2189 2154
@@ -2244,20 +2209,10 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
2244 temp = I915_READ(transconf_reg); 2209 temp = I915_READ(transconf_reg);
2245 if ((temp & TRANS_ENABLE) != 0) { 2210 if ((temp & TRANS_ENABLE) != 0) {
2246 I915_WRITE(transconf_reg, temp & ~TRANS_ENABLE); 2211 I915_WRITE(transconf_reg, temp & ~TRANS_ENABLE);
2247 I915_READ(transconf_reg); 2212
2248 n = 0;
2249 /* wait for PCH transcoder off, transcoder state */ 2213 /* wait for PCH transcoder off, transcoder state */
2250 while ((I915_READ(transconf_reg) & TRANS_STATE_ENABLE) != 0) { 2214 if (wait_for((I915_READ(transconf_reg) & TRANS_STATE_ENABLE) == 0, 50, 1))
2251 n++; 2215 DRM_ERROR("failed to disable transcoder\n");
2252 if (n < 60) {
2253 udelay(500);
2254 continue;
2255 } else {
2256 DRM_DEBUG_KMS("transcoder %d off "
2257 "delay\n", pipe);
2258 break;
2259 }
2260 }
2261 } 2216 }
2262 2217
2263 temp = I915_READ(transconf_reg); 2218 temp = I915_READ(transconf_reg);
@@ -2294,10 +2249,6 @@ static void ironlake_crtc_dpms(struct drm_crtc *crtc, int mode)
2294 I915_WRITE(pch_dpll_reg, temp & ~DPLL_VCO_ENABLE); 2249 I915_WRITE(pch_dpll_reg, temp & ~DPLL_VCO_ENABLE);
2295 I915_READ(pch_dpll_reg); 2250 I915_READ(pch_dpll_reg);
2296 2251
2297 if (HAS_eDP) {
2298 ironlake_disable_pll_edp(crtc);
2299 }
2300
2301 /* Switch from PCDclk to Rawclk */ 2252 /* Switch from PCDclk to Rawclk */
2302 temp = I915_READ(fdi_rx_reg); 2253 temp = I915_READ(fdi_rx_reg);
2303 temp &= ~FDI_SEL_PCDCLK; 2254 temp &= ~FDI_SEL_PCDCLK;
@@ -2372,8 +2323,6 @@ static void i9xx_crtc_dpms(struct drm_crtc *crtc, int mode)
2372 case DRM_MODE_DPMS_ON: 2323 case DRM_MODE_DPMS_ON:
2373 case DRM_MODE_DPMS_STANDBY: 2324 case DRM_MODE_DPMS_STANDBY:
2374 case DRM_MODE_DPMS_SUSPEND: 2325 case DRM_MODE_DPMS_SUSPEND:
2375 intel_update_watermarks(dev);
2376
2377 /* Enable the DPLL */ 2326 /* Enable the DPLL */
2378 temp = I915_READ(dpll_reg); 2327 temp = I915_READ(dpll_reg);
2379 if ((temp & DPLL_VCO_ENABLE) == 0) { 2328 if ((temp & DPLL_VCO_ENABLE) == 0) {
@@ -2413,8 +2362,6 @@ static void i9xx_crtc_dpms(struct drm_crtc *crtc, int mode)
2413 intel_crtc_dpms_overlay(intel_crtc, true); 2362 intel_crtc_dpms_overlay(intel_crtc, true);
2414 break; 2363 break;
2415 case DRM_MODE_DPMS_OFF: 2364 case DRM_MODE_DPMS_OFF:
2416 intel_update_watermarks(dev);
2417
2418 /* Give the overlay scaler a chance to disable if it's on this pipe */ 2365 /* Give the overlay scaler a chance to disable if it's on this pipe */
2419 intel_crtc_dpms_overlay(intel_crtc, false); 2366 intel_crtc_dpms_overlay(intel_crtc, false);
2420 drm_vblank_off(dev, pipe); 2367 drm_vblank_off(dev, pipe);
@@ -2423,9 +2370,6 @@ static void i9xx_crtc_dpms(struct drm_crtc *crtc, int mode)
2423 dev_priv->display.disable_fbc) 2370 dev_priv->display.disable_fbc)
2424 dev_priv->display.disable_fbc(dev); 2371 dev_priv->display.disable_fbc(dev);
2425 2372
2426 /* Disable the VGA plane that we never use */
2427 i915_disable_vga(dev);
2428
2429 /* Disable display plane */ 2373 /* Disable display plane */
2430 temp = I915_READ(dspcntr_reg); 2374 temp = I915_READ(dspcntr_reg);
2431 if ((temp & DISPLAY_PLANE_ENABLE) != 0) { 2375 if ((temp & DISPLAY_PLANE_ENABLE) != 0) {
@@ -2435,10 +2379,8 @@ static void i9xx_crtc_dpms(struct drm_crtc *crtc, int mode)
2435 I915_READ(dspbase_reg); 2379 I915_READ(dspbase_reg);
2436 } 2380 }
2437 2381
2438 if (!IS_I9XX(dev)) { 2382 /* Wait for vblank for the disable to take effect */
2439 /* Wait for vblank for the disable to take effect */ 2383 intel_wait_for_vblank_off(dev, pipe);
2440 intel_wait_for_vblank(dev);
2441 }
2442 2384
2443 /* Don't disable pipe A or pipe A PLLs if needed */ 2385 /* Don't disable pipe A or pipe A PLLs if needed */
2444 if (pipeconf_reg == PIPEACONF && 2386 if (pipeconf_reg == PIPEACONF &&
@@ -2453,7 +2395,7 @@ static void i9xx_crtc_dpms(struct drm_crtc *crtc, int mode)
2453 } 2395 }
2454 2396
2455 /* Wait for vblank for the disable to take effect. */ 2397 /* Wait for vblank for the disable to take effect. */
2456 intel_wait_for_vblank(dev); 2398 intel_wait_for_vblank_off(dev, pipe);
2457 2399
2458 temp = I915_READ(dpll_reg); 2400 temp = I915_READ(dpll_reg);
2459 if ((temp & DPLL_VCO_ENABLE) != 0) { 2401 if ((temp & DPLL_VCO_ENABLE) != 0) {
@@ -2469,9 +2411,6 @@ static void i9xx_crtc_dpms(struct drm_crtc *crtc, int mode)
2469 2411
2470/** 2412/**
2471 * Sets the power management mode of the pipe and plane. 2413 * Sets the power management mode of the pipe and plane.
2472 *
2473 * This code should probably grow support for turning the cursor off and back
2474 * on appropriately at the same time as we're turning the pipe off/on.
2475 */ 2414 */
2476static void intel_crtc_dpms(struct drm_crtc *crtc, int mode) 2415static void intel_crtc_dpms(struct drm_crtc *crtc, int mode)
2477{ 2416{
@@ -2482,9 +2421,26 @@ static void intel_crtc_dpms(struct drm_crtc *crtc, int mode)
2482 int pipe = intel_crtc->pipe; 2421 int pipe = intel_crtc->pipe;
2483 bool enabled; 2422 bool enabled;
2484 2423
2424 intel_crtc->dpms_mode = mode;
2425 intel_crtc->cursor_on = mode == DRM_MODE_DPMS_ON;
2426
2427 /* When switching on the display, ensure that SR is disabled
2428 * with multiple pipes prior to enabling the new pipe.
2429 *
2430 * When switching off the display, make sure the cursor is
2431 * properly hidden prior to disabling the pipe.
2432 */
2433 if (mode == DRM_MODE_DPMS_ON)
2434 intel_update_watermarks(dev);
2435 else
2436 intel_crtc_update_cursor(crtc);
2437
2485 dev_priv->display.dpms(crtc, mode); 2438 dev_priv->display.dpms(crtc, mode);
2486 2439
2487 intel_crtc->dpms_mode = mode; 2440 if (mode == DRM_MODE_DPMS_ON)
2441 intel_crtc_update_cursor(crtc);
2442 else
2443 intel_update_watermarks(dev);
2488 2444
2489 if (!dev->primary->master) 2445 if (!dev->primary->master)
2490 return; 2446 return;
@@ -2536,6 +2492,20 @@ void intel_encoder_commit (struct drm_encoder *encoder)
2536 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_ON); 2492 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_ON);
2537} 2493}
2538 2494
2495void intel_encoder_destroy(struct drm_encoder *encoder)
2496{
2497 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
2498
2499 if (intel_encoder->ddc_bus)
2500 intel_i2c_destroy(intel_encoder->ddc_bus);
2501
2502 if (intel_encoder->i2c_bus)
2503 intel_i2c_destroy(intel_encoder->i2c_bus);
2504
2505 drm_encoder_cleanup(encoder);
2506 kfree(intel_encoder);
2507}
2508
2539static bool intel_crtc_mode_fixup(struct drm_crtc *crtc, 2509static bool intel_crtc_mode_fixup(struct drm_crtc *crtc,
2540 struct drm_display_mode *mode, 2510 struct drm_display_mode *mode,
2541 struct drm_display_mode *adjusted_mode) 2511 struct drm_display_mode *adjusted_mode)
@@ -2867,7 +2837,7 @@ struct cxsr_latency {
2867 unsigned long cursor_hpll_disable; 2837 unsigned long cursor_hpll_disable;
2868}; 2838};
2869 2839
2870static struct cxsr_latency cxsr_latency_table[] = { 2840static const struct cxsr_latency cxsr_latency_table[] = {
2871 {1, 0, 800, 400, 3382, 33382, 3983, 33983}, /* DDR2-400 SC */ 2841 {1, 0, 800, 400, 3382, 33382, 3983, 33983}, /* DDR2-400 SC */
2872 {1, 0, 800, 667, 3354, 33354, 3807, 33807}, /* DDR2-667 SC */ 2842 {1, 0, 800, 667, 3354, 33354, 3807, 33807}, /* DDR2-667 SC */
2873 {1, 0, 800, 800, 3347, 33347, 3763, 33763}, /* DDR2-800 SC */ 2843 {1, 0, 800, 800, 3347, 33347, 3763, 33763}, /* DDR2-800 SC */
@@ -2905,11 +2875,13 @@ static struct cxsr_latency cxsr_latency_table[] = {
2905 {0, 1, 400, 800, 6042, 36042, 6584, 36584}, /* DDR3-800 SC */ 2875 {0, 1, 400, 800, 6042, 36042, 6584, 36584}, /* DDR3-800 SC */
2906}; 2876};
2907 2877
2908static struct cxsr_latency *intel_get_cxsr_latency(int is_desktop, int is_ddr3, 2878static const struct cxsr_latency *intel_get_cxsr_latency(int is_desktop,
2909 int fsb, int mem) 2879 int is_ddr3,
2880 int fsb,
2881 int mem)
2910{ 2882{
2883 const struct cxsr_latency *latency;
2911 int i; 2884 int i;
2912 struct cxsr_latency *latency;
2913 2885
2914 if (fsb == 0 || mem == 0) 2886 if (fsb == 0 || mem == 0)
2915 return NULL; 2887 return NULL;
@@ -2930,13 +2902,9 @@ static struct cxsr_latency *intel_get_cxsr_latency(int is_desktop, int is_ddr3,
2930static void pineview_disable_cxsr(struct drm_device *dev) 2902static void pineview_disable_cxsr(struct drm_device *dev)
2931{ 2903{
2932 struct drm_i915_private *dev_priv = dev->dev_private; 2904 struct drm_i915_private *dev_priv = dev->dev_private;
2933 u32 reg;
2934 2905
2935 /* deactivate cxsr */ 2906 /* deactivate cxsr */
2936 reg = I915_READ(DSPFW3); 2907 I915_WRITE(DSPFW3, I915_READ(DSPFW3) & ~PINEVIEW_SELF_REFRESH_EN);
2937 reg &= ~(PINEVIEW_SELF_REFRESH_EN);
2938 I915_WRITE(DSPFW3, reg);
2939 DRM_INFO("Big FIFO is disabled\n");
2940} 2908}
2941 2909
2942/* 2910/*
@@ -3024,12 +2992,12 @@ static void pineview_update_wm(struct drm_device *dev, int planea_clock,
3024 int pixel_size) 2992 int pixel_size)
3025{ 2993{
3026 struct drm_i915_private *dev_priv = dev->dev_private; 2994 struct drm_i915_private *dev_priv = dev->dev_private;
2995 const struct cxsr_latency *latency;
3027 u32 reg; 2996 u32 reg;
3028 unsigned long wm; 2997 unsigned long wm;
3029 struct cxsr_latency *latency;
3030 int sr_clock; 2998 int sr_clock;
3031 2999
3032 latency = intel_get_cxsr_latency(IS_PINEVIEW_G(dev), dev_priv->is_ddr3, 3000 latency = intel_get_cxsr_latency(IS_PINEVIEW_G(dev), dev_priv->is_ddr3,
3033 dev_priv->fsb_freq, dev_priv->mem_freq); 3001 dev_priv->fsb_freq, dev_priv->mem_freq);
3034 if (!latency) { 3002 if (!latency) {
3035 DRM_DEBUG_KMS("Unknown FSB/MEM found, disable CxSR\n"); 3003 DRM_DEBUG_KMS("Unknown FSB/MEM found, disable CxSR\n");
@@ -3075,9 +3043,8 @@ static void pineview_update_wm(struct drm_device *dev, int planea_clock,
3075 DRM_DEBUG_KMS("DSPFW3 register is %x\n", reg); 3043 DRM_DEBUG_KMS("DSPFW3 register is %x\n", reg);
3076 3044
3077 /* activate cxsr */ 3045 /* activate cxsr */
3078 reg = I915_READ(DSPFW3); 3046 I915_WRITE(DSPFW3,
3079 reg |= PINEVIEW_SELF_REFRESH_EN; 3047 I915_READ(DSPFW3) | PINEVIEW_SELF_REFRESH_EN);
3080 I915_WRITE(DSPFW3, reg);
3081 DRM_DEBUG_KMS("Self-refresh is enabled\n"); 3048 DRM_DEBUG_KMS("Self-refresh is enabled\n");
3082 } else { 3049 } else {
3083 pineview_disable_cxsr(dev); 3050 pineview_disable_cxsr(dev);
@@ -3354,12 +3321,11 @@ static void ironlake_update_wm(struct drm_device *dev, int planea_clock,
3354 int line_count; 3321 int line_count;
3355 int planea_htotal = 0, planeb_htotal = 0; 3322 int planea_htotal = 0, planeb_htotal = 0;
3356 struct drm_crtc *crtc; 3323 struct drm_crtc *crtc;
3357 struct intel_crtc *intel_crtc;
3358 3324
3359 /* Need htotal for all active display plane */ 3325 /* Need htotal for all active display plane */
3360 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 3326 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
3361 intel_crtc = to_intel_crtc(crtc); 3327 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3362 if (crtc->enabled) { 3328 if (intel_crtc->dpms_mode == DRM_MODE_DPMS_ON) {
3363 if (intel_crtc->plane == 0) 3329 if (intel_crtc->plane == 0)
3364 planea_htotal = crtc->mode.htotal; 3330 planea_htotal = crtc->mode.htotal;
3365 else 3331 else
@@ -3519,7 +3485,6 @@ static void intel_update_watermarks(struct drm_device *dev)
3519{ 3485{
3520 struct drm_i915_private *dev_priv = dev->dev_private; 3486 struct drm_i915_private *dev_priv = dev->dev_private;
3521 struct drm_crtc *crtc; 3487 struct drm_crtc *crtc;
3522 struct intel_crtc *intel_crtc;
3523 int sr_hdisplay = 0; 3488 int sr_hdisplay = 0;
3524 unsigned long planea_clock = 0, planeb_clock = 0, sr_clock = 0; 3489 unsigned long planea_clock = 0, planeb_clock = 0, sr_clock = 0;
3525 int enabled = 0, pixel_size = 0; 3490 int enabled = 0, pixel_size = 0;
@@ -3530,8 +3495,8 @@ static void intel_update_watermarks(struct drm_device *dev)
3530 3495
3531 /* Get the clock config from both planes */ 3496 /* Get the clock config from both planes */
3532 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 3497 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
3533 intel_crtc = to_intel_crtc(crtc); 3498 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3534 if (crtc->enabled) { 3499 if (intel_crtc->dpms_mode == DRM_MODE_DPMS_ON) {
3535 enabled++; 3500 enabled++;
3536 if (intel_crtc->plane == 0) { 3501 if (intel_crtc->plane == 0) {
3537 DRM_DEBUG_KMS("plane A (pipe %d) clock: %d\n", 3502 DRM_DEBUG_KMS("plane A (pipe %d) clock: %d\n",
@@ -3966,9 +3931,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
3966 dpll_reg = pch_dpll_reg; 3931 dpll_reg = pch_dpll_reg;
3967 } 3932 }
3968 3933
3969 if (is_edp) { 3934 if (!is_edp) {
3970 ironlake_disable_pll_edp(crtc);
3971 } else if ((dpll & DPLL_VCO_ENABLE)) {
3972 I915_WRITE(fp_reg, fp); 3935 I915_WRITE(fp_reg, fp);
3973 I915_WRITE(dpll_reg, dpll & ~DPLL_VCO_ENABLE); 3936 I915_WRITE(dpll_reg, dpll & ~DPLL_VCO_ENABLE);
3974 I915_READ(dpll_reg); 3937 I915_READ(dpll_reg);
@@ -4167,7 +4130,7 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
4167 I915_WRITE(pipeconf_reg, pipeconf); 4130 I915_WRITE(pipeconf_reg, pipeconf);
4168 I915_READ(pipeconf_reg); 4131 I915_READ(pipeconf_reg);
4169 4132
4170 intel_wait_for_vblank(dev); 4133 intel_wait_for_vblank(dev, pipe);
4171 4134
4172 if (IS_IRONLAKE(dev)) { 4135 if (IS_IRONLAKE(dev)) {
4173 /* enable address swizzle for tiling buffer */ 4136 /* enable address swizzle for tiling buffer */
@@ -4180,9 +4143,6 @@ static int intel_crtc_mode_set(struct drm_crtc *crtc,
4180 /* Flush the plane changes */ 4143 /* Flush the plane changes */
4181 ret = intel_pipe_set_base(crtc, x, y, old_fb); 4144 ret = intel_pipe_set_base(crtc, x, y, old_fb);
4182 4145
4183 if ((IS_I965G(dev) || plane == 0))
4184 intel_update_fbc(crtc, &crtc->mode);
4185
4186 intel_update_watermarks(dev); 4146 intel_update_watermarks(dev);
4187 4147
4188 drm_vblank_post_modeset(dev, pipe); 4148 drm_vblank_post_modeset(dev, pipe);
@@ -4216,6 +4176,62 @@ void intel_crtc_load_lut(struct drm_crtc *crtc)
4216 } 4176 }
4217} 4177}
4218 4178
4179static void i845_update_cursor(struct drm_crtc *crtc, u32 base)
4180{
4181 struct drm_device *dev = crtc->dev;
4182 struct drm_i915_private *dev_priv = dev->dev_private;
4183 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
4184 bool visible = base != 0;
4185 u32 cntl;
4186
4187 if (intel_crtc->cursor_visible == visible)
4188 return;
4189
4190 cntl = I915_READ(CURACNTR);
4191 if (visible) {
4192 /* On these chipsets we can only modify the base whilst
4193 * the cursor is disabled.
4194 */
4195 I915_WRITE(CURABASE, base);
4196
4197 cntl &= ~(CURSOR_FORMAT_MASK);
4198 /* XXX width must be 64, stride 256 => 0x00 << 28 */
4199 cntl |= CURSOR_ENABLE |
4200 CURSOR_GAMMA_ENABLE |
4201 CURSOR_FORMAT_ARGB;
4202 } else
4203 cntl &= ~(CURSOR_ENABLE | CURSOR_GAMMA_ENABLE);
4204 I915_WRITE(CURACNTR, cntl);
4205
4206 intel_crtc->cursor_visible = visible;
4207}
4208
4209static void i9xx_update_cursor(struct drm_crtc *crtc, u32 base)
4210{
4211 struct drm_device *dev = crtc->dev;
4212 struct drm_i915_private *dev_priv = dev->dev_private;
4213 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
4214 int pipe = intel_crtc->pipe;
4215 bool visible = base != 0;
4216
4217 if (intel_crtc->cursor_visible != visible) {
4218 uint32_t cntl = I915_READ(pipe == 0 ? CURACNTR : CURBCNTR);
4219 if (base) {
4220 cntl &= ~(CURSOR_MODE | MCURSOR_PIPE_SELECT);
4221 cntl |= CURSOR_MODE_64_ARGB_AX | MCURSOR_GAMMA_ENABLE;
4222 cntl |= pipe << 28; /* Connect to correct pipe */
4223 } else {
4224 cntl &= ~(CURSOR_MODE | MCURSOR_GAMMA_ENABLE);
4225 cntl |= CURSOR_MODE_DISABLE;
4226 }
4227 I915_WRITE(pipe == 0 ? CURACNTR : CURBCNTR, cntl);
4228
4229 intel_crtc->cursor_visible = visible;
4230 }
4231 /* and commit changes on next vblank */
4232 I915_WRITE(pipe == 0 ? CURABASE : CURBBASE, base);
4233}
4234
4219/* If no-part of the cursor is visible on the framebuffer, then the GPU may hang... */ 4235/* If no-part of the cursor is visible on the framebuffer, then the GPU may hang... */
4220static void intel_crtc_update_cursor(struct drm_crtc *crtc) 4236static void intel_crtc_update_cursor(struct drm_crtc *crtc)
4221{ 4237{
@@ -4225,12 +4241,12 @@ static void intel_crtc_update_cursor(struct drm_crtc *crtc)
4225 int pipe = intel_crtc->pipe; 4241 int pipe = intel_crtc->pipe;
4226 int x = intel_crtc->cursor_x; 4242 int x = intel_crtc->cursor_x;
4227 int y = intel_crtc->cursor_y; 4243 int y = intel_crtc->cursor_y;
4228 uint32_t base, pos; 4244 u32 base, pos;
4229 bool visible; 4245 bool visible;
4230 4246
4231 pos = 0; 4247 pos = 0;
4232 4248
4233 if (crtc->fb) { 4249 if (intel_crtc->cursor_on && crtc->fb) {
4234 base = intel_crtc->cursor_addr; 4250 base = intel_crtc->cursor_addr;
4235 if (x > (int) crtc->fb->width) 4251 if (x > (int) crtc->fb->width)
4236 base = 0; 4252 base = 0;
@@ -4259,37 +4275,14 @@ static void intel_crtc_update_cursor(struct drm_crtc *crtc)
4259 pos |= y << CURSOR_Y_SHIFT; 4275 pos |= y << CURSOR_Y_SHIFT;
4260 4276
4261 visible = base != 0; 4277 visible = base != 0;
4262 if (!visible && !intel_crtc->cursor_visble) 4278 if (!visible && !intel_crtc->cursor_visible)
4263 return; 4279 return;
4264 4280
4265 I915_WRITE(pipe == 0 ? CURAPOS : CURBPOS, pos); 4281 I915_WRITE(pipe == 0 ? CURAPOS : CURBPOS, pos);
4266 if (intel_crtc->cursor_visble != visible) { 4282 if (IS_845G(dev) || IS_I865G(dev))
4267 uint32_t cntl = I915_READ(pipe == 0 ? CURACNTR : CURBCNTR); 4283 i845_update_cursor(crtc, base);
4268 if (base) { 4284 else
4269 /* Hooray for CUR*CNTR differences */ 4285 i9xx_update_cursor(crtc, base);
4270 if (IS_MOBILE(dev) || IS_I9XX(dev)) {
4271 cntl &= ~(CURSOR_MODE | MCURSOR_PIPE_SELECT);
4272 cntl |= CURSOR_MODE_64_ARGB_AX | MCURSOR_GAMMA_ENABLE;
4273 cntl |= pipe << 28; /* Connect to correct pipe */
4274 } else {
4275 cntl &= ~(CURSOR_FORMAT_MASK);
4276 cntl |= CURSOR_ENABLE;
4277 cntl |= CURSOR_FORMAT_ARGB | CURSOR_GAMMA_ENABLE;
4278 }
4279 } else {
4280 if (IS_MOBILE(dev) || IS_I9XX(dev)) {
4281 cntl &= ~(CURSOR_MODE | MCURSOR_GAMMA_ENABLE);
4282 cntl |= CURSOR_MODE_DISABLE;
4283 } else {
4284 cntl &= ~(CURSOR_ENABLE | CURSOR_GAMMA_ENABLE);
4285 }
4286 }
4287 I915_WRITE(pipe == 0 ? CURACNTR : CURBCNTR, cntl);
4288
4289 intel_crtc->cursor_visble = visible;
4290 }
4291 /* and commit changes on next vblank */
4292 I915_WRITE(pipe == 0 ? CURABASE : CURBBASE, base);
4293 4286
4294 if (visible) 4287 if (visible)
4295 intel_mark_busy(dev, to_intel_framebuffer(crtc->fb)->obj); 4288 intel_mark_busy(dev, to_intel_framebuffer(crtc->fb)->obj);
@@ -4354,8 +4347,10 @@ static int intel_crtc_cursor_set(struct drm_crtc *crtc,
4354 4347
4355 addr = obj_priv->gtt_offset; 4348 addr = obj_priv->gtt_offset;
4356 } else { 4349 } else {
4350 int align = IS_I830(dev) ? 16 * 1024 : 256;
4357 ret = i915_gem_attach_phys_object(dev, bo, 4351 ret = i915_gem_attach_phys_object(dev, bo,
4358 (intel_crtc->pipe == 0) ? I915_GEM_PHYS_CURSOR_0 : I915_GEM_PHYS_CURSOR_1); 4352 (intel_crtc->pipe == 0) ? I915_GEM_PHYS_CURSOR_0 : I915_GEM_PHYS_CURSOR_1,
4353 align);
4359 if (ret) { 4354 if (ret) {
4360 DRM_ERROR("failed to attach phys object\n"); 4355 DRM_ERROR("failed to attach phys object\n");
4361 goto fail_locked; 4356 goto fail_locked;
@@ -4544,7 +4539,7 @@ struct drm_crtc *intel_get_load_detect_pipe(struct intel_encoder *intel_encoder,
4544 encoder_funcs->commit(encoder); 4539 encoder_funcs->commit(encoder);
4545 } 4540 }
4546 /* let the connector get through one full cycle before testing */ 4541 /* let the connector get through one full cycle before testing */
4547 intel_wait_for_vblank(dev); 4542 intel_wait_for_vblank(dev, intel_crtc->pipe);
4548 4543
4549 return crtc; 4544 return crtc;
4550} 4545}
@@ -4749,7 +4744,7 @@ static void intel_increase_pllclock(struct drm_crtc *crtc, bool schedule)
4749 dpll &= ~DISPLAY_RATE_SELECT_FPA1; 4744 dpll &= ~DISPLAY_RATE_SELECT_FPA1;
4750 I915_WRITE(dpll_reg, dpll); 4745 I915_WRITE(dpll_reg, dpll);
4751 dpll = I915_READ(dpll_reg); 4746 dpll = I915_READ(dpll_reg);
4752 intel_wait_for_vblank(dev); 4747 intel_wait_for_vblank(dev, pipe);
4753 dpll = I915_READ(dpll_reg); 4748 dpll = I915_READ(dpll_reg);
4754 if (dpll & DISPLAY_RATE_SELECT_FPA1) 4749 if (dpll & DISPLAY_RATE_SELECT_FPA1)
4755 DRM_DEBUG_DRIVER("failed to upclock LVDS!\n"); 4750 DRM_DEBUG_DRIVER("failed to upclock LVDS!\n");
@@ -4793,7 +4788,7 @@ static void intel_decrease_pllclock(struct drm_crtc *crtc)
4793 dpll |= DISPLAY_RATE_SELECT_FPA1; 4788 dpll |= DISPLAY_RATE_SELECT_FPA1;
4794 I915_WRITE(dpll_reg, dpll); 4789 I915_WRITE(dpll_reg, dpll);
4795 dpll = I915_READ(dpll_reg); 4790 dpll = I915_READ(dpll_reg);
4796 intel_wait_for_vblank(dev); 4791 intel_wait_for_vblank(dev, pipe);
4797 dpll = I915_READ(dpll_reg); 4792 dpll = I915_READ(dpll_reg);
4798 if (!(dpll & DISPLAY_RATE_SELECT_FPA1)) 4793 if (!(dpll & DISPLAY_RATE_SELECT_FPA1))
4799 DRM_DEBUG_DRIVER("failed to downclock LVDS!\n"); 4794 DRM_DEBUG_DRIVER("failed to downclock LVDS!\n");
@@ -5083,14 +5078,16 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
5083 work->pending_flip_obj = obj; 5078 work->pending_flip_obj = obj;
5084 5079
5085 if (intel_crtc->plane) 5080 if (intel_crtc->plane)
5086 flip_mask = I915_DISPLAY_PLANE_B_FLIP_PENDING_INTERRUPT; 5081 flip_mask = MI_WAIT_FOR_PLANE_B_FLIP;
5087 else 5082 else
5088 flip_mask = I915_DISPLAY_PLANE_A_FLIP_PENDING_INTERRUPT; 5083 flip_mask = MI_WAIT_FOR_PLANE_A_FLIP;
5089 5084
5090 /* Wait for any previous flip to finish */ 5085 if (IS_GEN3(dev) || IS_GEN2(dev)) {
5091 if (IS_GEN3(dev)) 5086 BEGIN_LP_RING(2);
5092 while (I915_READ(ISR) & flip_mask) 5087 OUT_RING(MI_WAIT_FOR_EVENT | flip_mask);
5093 ; 5088 OUT_RING(0);
5089 ADVANCE_LP_RING();
5090 }
5094 5091
5095 /* Offset into the new buffer for cases of shared fbs between CRTCs */ 5092 /* Offset into the new buffer for cases of shared fbs between CRTCs */
5096 offset = obj_priv->gtt_offset; 5093 offset = obj_priv->gtt_offset;
@@ -5104,12 +5101,18 @@ static int intel_crtc_page_flip(struct drm_crtc *crtc,
5104 OUT_RING(offset | obj_priv->tiling_mode); 5101 OUT_RING(offset | obj_priv->tiling_mode);
5105 pipesrc = I915_READ(pipesrc_reg); 5102 pipesrc = I915_READ(pipesrc_reg);
5106 OUT_RING(pipesrc & 0x0fff0fff); 5103 OUT_RING(pipesrc & 0x0fff0fff);
5107 } else { 5104 } else if (IS_GEN3(dev)) {
5108 OUT_RING(MI_DISPLAY_FLIP_I915 | 5105 OUT_RING(MI_DISPLAY_FLIP_I915 |
5109 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane)); 5106 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
5110 OUT_RING(fb->pitch); 5107 OUT_RING(fb->pitch);
5111 OUT_RING(offset); 5108 OUT_RING(offset);
5112 OUT_RING(MI_NOOP); 5109 OUT_RING(MI_NOOP);
5110 } else {
5111 OUT_RING(MI_DISPLAY_FLIP |
5112 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane));
5113 OUT_RING(fb->pitch);
5114 OUT_RING(offset);
5115 OUT_RING(MI_NOOP);
5113 } 5116 }
5114 ADVANCE_LP_RING(); 5117 ADVANCE_LP_RING();
5115 5118
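The flip path above stops busy-waiting on ISR from the CPU and instead queues an MI_WAIT_FOR_EVENT into the ring, so the GPU itself stalls until the previous flip completes. A rough standalone sketch of what the BEGIN_LP_RING/OUT_RING/ADVANCE_LP_RING emission pattern amounts to conceptually; the ring structure, size and the "hardware tail write" below are illustrative stand-ins, not the driver's ringbuffer code:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define RING_DWORDS 1024                          /* illustrative size, power of two */

struct ring {
        uint32_t buf[RING_DWORDS];
        unsigned int tail;                        /* next slot to write, in dwords */
};

/* reserve space; a real driver would also check against the hardware head */
static void ring_begin(struct ring *r, unsigned int n)
{
        (void)r;
        (void)n;                                  /* space check elided in this sketch */
}

static void ring_emit(struct ring *r, uint32_t dword)
{
        r->buf[r->tail] = dword;
        r->tail = (r->tail + 1) % RING_DWORDS;
}

/* "advance": publish the new tail so the hardware executes what was queued */
static void ring_advance(struct ring *r)
{
        printf("tail now at dword %u\n", r->tail); /* stand-in for an MMIO tail write */
}

int main(void)
{
        struct ring r;
        memset(&r, 0, sizeof(r));

        /* queue a two-dword command, the shape BEGIN_LP_RING(2)/OUT_RING/ADVANCE has */
        ring_begin(&r, 2);
        ring_emit(&r, 0x12345678u);               /* command dword (illustrative) */
        ring_emit(&r, 0);                         /* argument/padding dword */
        ring_advance(&r);
        return 0;
}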
@@ -5432,37 +5435,37 @@ static const struct drm_mode_config_funcs intel_mode_funcs = {
5432}; 5435};
5433 5436
5434static struct drm_gem_object * 5437static struct drm_gem_object *
5435intel_alloc_power_context(struct drm_device *dev) 5438intel_alloc_context_page(struct drm_device *dev)
5436{ 5439{
5437 struct drm_gem_object *pwrctx; 5440 struct drm_gem_object *ctx;
5438 int ret; 5441 int ret;
5439 5442
5440 pwrctx = i915_gem_alloc_object(dev, 4096); 5443 ctx = i915_gem_alloc_object(dev, 4096);
5441 if (!pwrctx) { 5444 if (!ctx) {
5442 DRM_DEBUG("failed to alloc power context, RC6 disabled\n"); 5445 DRM_DEBUG("failed to alloc power context, RC6 disabled\n");
5443 return NULL; 5446 return NULL;
5444 } 5447 }
5445 5448
5446 mutex_lock(&dev->struct_mutex); 5449 mutex_lock(&dev->struct_mutex);
5447 ret = i915_gem_object_pin(pwrctx, 4096); 5450 ret = i915_gem_object_pin(ctx, 4096);
5448 if (ret) { 5451 if (ret) {
5449 DRM_ERROR("failed to pin power context: %d\n", ret); 5452 DRM_ERROR("failed to pin power context: %d\n", ret);
5450 goto err_unref; 5453 goto err_unref;
5451 } 5454 }
5452 5455
5453 ret = i915_gem_object_set_to_gtt_domain(pwrctx, 1); 5456 ret = i915_gem_object_set_to_gtt_domain(ctx, 1);
5454 if (ret) { 5457 if (ret) {
5455 DRM_ERROR("failed to set-domain on power context: %d\n", ret); 5458 DRM_ERROR("failed to set-domain on power context: %d\n", ret);
5456 goto err_unpin; 5459 goto err_unpin;
5457 } 5460 }
5458 mutex_unlock(&dev->struct_mutex); 5461 mutex_unlock(&dev->struct_mutex);
5459 5462
5460 return pwrctx; 5463 return ctx;
5461 5464
5462err_unpin: 5465err_unpin:
5463 i915_gem_object_unpin(pwrctx); 5466 i915_gem_object_unpin(ctx);
5464err_unref: 5467err_unref:
5465 drm_gem_object_unreference(pwrctx); 5468 drm_gem_object_unreference(ctx);
5466 mutex_unlock(&dev->struct_mutex); 5469 mutex_unlock(&dev->struct_mutex);
5467 return NULL; 5470 return NULL;
5468} 5471}
@@ -5494,7 +5497,6 @@ void ironlake_enable_drps(struct drm_device *dev)
5494 struct drm_i915_private *dev_priv = dev->dev_private; 5497 struct drm_i915_private *dev_priv = dev->dev_private;
5495 u32 rgvmodectl = I915_READ(MEMMODECTL); 5498 u32 rgvmodectl = I915_READ(MEMMODECTL);
5496 u8 fmax, fmin, fstart, vstart; 5499 u8 fmax, fmin, fstart, vstart;
5497 int i = 0;
5498 5500
5499 /* 100ms RC evaluation intervals */ 5501 /* 100ms RC evaluation intervals */
5500 I915_WRITE(RCUPEI, 100000); 5502 I915_WRITE(RCUPEI, 100000);
@@ -5538,13 +5540,8 @@ void ironlake_enable_drps(struct drm_device *dev)
5538 rgvmodectl |= MEMMODE_SWMODE_EN; 5540 rgvmodectl |= MEMMODE_SWMODE_EN;
5539 I915_WRITE(MEMMODECTL, rgvmodectl); 5541 I915_WRITE(MEMMODECTL, rgvmodectl);
5540 5542
5541 while (I915_READ(MEMSWCTL) & MEMCTL_CMD_STS) { 5543 if (wait_for((I915_READ(MEMSWCTL) & MEMCTL_CMD_STS) == 0, 1, 0))
5542 if (i++ > 100) { 5544 DRM_ERROR("stuck trying to change perf mode\n");
5543 DRM_ERROR("stuck trying to change perf mode\n");
5544 break;
5545 }
5546 msleep(1);
5547 }
5548 msleep(1); 5545 msleep(1);
5549 5546
5550 ironlake_set_drps(dev, fstart); 5547 ironlake_set_drps(dev, fstart);
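wait_for(COND, MS, W), used above in place of the open-coded msleep() loop, polls COND until it holds or MS milliseconds pass, sleeping W milliseconds between polls, and returns non-zero on timeout. A minimal standalone sketch with that shape, assuming those semantics (the real macro lives elsewhere in the driver; the clock helpers here are illustrative):

#include <stdio.h>
#include <time.h>

/* millisecond monotonic clock, stand-in for jiffies */
static long now_ms(void)
{
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return ts.tv_sec * 1000L + ts.tv_nsec / 1000000L;
}

static void sleep_ms(long ms)
{
        struct timespec ts = { ms / 1000, (ms % 1000) * 1000000L };
        nanosleep(&ts, NULL);
}

/* GNU C statement expression, kernel-macro style: 0 if COND held in time, non-zero on timeout */
#define wait_for_sketch(COND, MS, W) ({                                  \
        long timeout__ = now_ms() + (MS);                                \
        int ret__ = 0;                                                   \
        while (!(COND)) {                                                \
                if (now_ms() > timeout__) { ret__ = -1; break; }         \
                if (W) sleep_ms(W);                                      \
        }                                                                \
        ret__;                                                           \
})

int main(void)
{
        long start = now_ms();

        /* condition that becomes true after roughly 50ms */
        if (wait_for_sketch(now_ms() - start > 50, 1000, 10))
                printf("timed out\n");
        else
                printf("condition met\n");
        return 0;
}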
@@ -5725,7 +5722,8 @@ void intel_init_clock_gating(struct drm_device *dev)
5725 ILK_DPFC_DIS2 | 5722 ILK_DPFC_DIS2 |
5726 ILK_CLK_FBC); 5723 ILK_CLK_FBC);
5727 } 5724 }
5728 return; 5725 if (IS_GEN6(dev))
5726 return;
5729 } else if (IS_G4X(dev)) { 5727 } else if (IS_G4X(dev)) {
5730 uint32_t dspclk_gate; 5728 uint32_t dspclk_gate;
5731 I915_WRITE(RENCLK_GATE_D1, 0); 5729 I915_WRITE(RENCLK_GATE_D1, 0);
@@ -5768,6 +5766,31 @@ void intel_init_clock_gating(struct drm_device *dev)
5768 * GPU can automatically power down the render unit if given a page 5766 * GPU can automatically power down the render unit if given a page
5769 * to save state. 5767 * to save state.
5770 */ 5768 */
5769 if (IS_IRONLAKE_M(dev)) {
5770 if (dev_priv->renderctx == NULL)
5771 dev_priv->renderctx = intel_alloc_context_page(dev);
5772 if (dev_priv->renderctx) {
5773 struct drm_i915_gem_object *obj_priv;
5774 obj_priv = to_intel_bo(dev_priv->renderctx);
5775 if (obj_priv) {
5776 BEGIN_LP_RING(4);
5777 OUT_RING(MI_SET_CONTEXT);
5778 OUT_RING(obj_priv->gtt_offset |
5779 MI_MM_SPACE_GTT |
5780 MI_SAVE_EXT_STATE_EN |
5781 MI_RESTORE_EXT_STATE_EN |
5782 MI_RESTORE_INHIBIT);
5783 OUT_RING(MI_NOOP);
5784 OUT_RING(MI_FLUSH);
5785 ADVANCE_LP_RING();
5786 }
5787 } else {
5788 DRM_DEBUG_KMS("Failed to allocate render context."
5789 "Disable RC6\n");
5790 return;
5791 }
5792 }
5793
5771 if (I915_HAS_RC6(dev) && drm_core_check_feature(dev, DRIVER_MODESET)) { 5794 if (I915_HAS_RC6(dev) && drm_core_check_feature(dev, DRIVER_MODESET)) {
5772 struct drm_i915_gem_object *obj_priv = NULL; 5795 struct drm_i915_gem_object *obj_priv = NULL;
5773 5796
@@ -5776,7 +5799,7 @@ void intel_init_clock_gating(struct drm_device *dev)
5776 } else { 5799 } else {
5777 struct drm_gem_object *pwrctx; 5800 struct drm_gem_object *pwrctx;
5778 5801
5779 pwrctx = intel_alloc_power_context(dev); 5802 pwrctx = intel_alloc_context_page(dev);
5780 if (pwrctx) { 5803 if (pwrctx) {
5781 dev_priv->pwrctx = pwrctx; 5804 dev_priv->pwrctx = pwrctx;
5782 obj_priv = to_intel_bo(pwrctx); 5805 obj_priv = to_intel_bo(pwrctx);
@@ -5948,6 +5971,29 @@ static void intel_init_quirks(struct drm_device *dev)
5948 } 5971 }
5949} 5972}
5950 5973
5974/* Disable the VGA plane that we never use */
5975static void i915_disable_vga(struct drm_device *dev)
5976{
5977 struct drm_i915_private *dev_priv = dev->dev_private;
5978 u8 sr1;
5979 u32 vga_reg;
5980
5981 if (HAS_PCH_SPLIT(dev))
5982 vga_reg = CPU_VGACNTRL;
5983 else
5984 vga_reg = VGACNTRL;
5985
5986 vga_get_uninterruptible(dev->pdev, VGA_RSRC_LEGACY_IO);
5987 outb(1, VGA_SR_INDEX);
5988 sr1 = inb(VGA_SR_DATA);
5989 outb(sr1 | 1<<5, VGA_SR_DATA);
5990 vga_put(dev->pdev, VGA_RSRC_LEGACY_IO);
5991 udelay(300);
5992
5993 I915_WRITE(vga_reg, VGA_DISP_DISABLE);
5994 POSTING_READ(vga_reg);
5995}
5996
5951void intel_modeset_init(struct drm_device *dev) 5997void intel_modeset_init(struct drm_device *dev)
5952{ 5998{
5953 struct drm_i915_private *dev_priv = dev->dev_private; 5999 struct drm_i915_private *dev_priv = dev->dev_private;
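The new i915_disable_vga() above uses the legacy VGA sequencer's index/data port pair: write a register index to VGA_SR_INDEX, then access that register through VGA_SR_DATA (bit 5 of SR01 is the screen-off bit). A small standalone sketch of that indexed-access pattern with stubbed port I/O; the port numbers are the standard VGA sequencer ports, while the fake register file and stub functions are illustrative:

#include <stdint.h>
#include <stdio.h>

#define VGA_SR_INDEX 0x3c4   /* sequencer index port */
#define VGA_SR_DATA  0x3c5   /* sequencer data port */

static uint8_t sr_regs[8];   /* fake sequencer register file */
static uint8_t sr_index;

/* stand-ins for real port I/O (outb/inb) */
static void fake_outb(uint8_t val, uint16_t port)
{
        if (port == VGA_SR_INDEX)
                sr_index = val & 7;
        else if (port == VGA_SR_DATA)
                sr_regs[sr_index] = val;
}

static uint8_t fake_inb(uint16_t port)
{
        return port == VGA_SR_DATA ? sr_regs[sr_index] : 0xff;
}

int main(void)
{
        /* select SR01, then set bit 5 ("screen off") via read-modify-write */
        fake_outb(1, VGA_SR_INDEX);
        uint8_t sr1 = fake_inb(VGA_SR_DATA);
        fake_outb(sr1 | (1 << 5), VGA_SR_DATA);

        printf("SR01 = 0x%02x\n", sr_regs[1]);
        return 0;
}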
@@ -5996,6 +6042,9 @@ void intel_modeset_init(struct drm_device *dev)
5996 6042
5997 intel_init_clock_gating(dev); 6043 intel_init_clock_gating(dev);
5998 6044
6045 /* Just disable it once at startup */
6046 i915_disable_vga(dev);
6047
5999 if (IS_IRONLAKE_M(dev)) { 6048 if (IS_IRONLAKE_M(dev)) {
6000 ironlake_enable_drps(dev); 6049 ironlake_enable_drps(dev);
6001 intel_init_emon(dev); 6050 intel_init_emon(dev);
@@ -6034,6 +6083,16 @@ void intel_modeset_cleanup(struct drm_device *dev)
6034 if (dev_priv->display.disable_fbc) 6083 if (dev_priv->display.disable_fbc)
6035 dev_priv->display.disable_fbc(dev); 6084 dev_priv->display.disable_fbc(dev);
6036 6085
6086 if (dev_priv->renderctx) {
6087 struct drm_i915_gem_object *obj_priv;
6088
6089 obj_priv = to_intel_bo(dev_priv->renderctx);
6090 I915_WRITE(CCID, obj_priv->gtt_offset &~ CCID_EN);
6091 I915_READ(CCID);
6092 i915_gem_object_unpin(dev_priv->renderctx);
6093 drm_gem_object_unreference(dev_priv->renderctx);
6094 }
6095
6037 if (dev_priv->pwrctx) { 6096 if (dev_priv->pwrctx) {
6038 struct drm_i915_gem_object *obj_priv; 6097 struct drm_i915_gem_object *obj_priv;
6039 6098
diff --git a/drivers/gpu/drm/i915/intel_dp.c b/drivers/gpu/drm/i915/intel_dp.c
index 40be1fa65be1..9caccd03dccb 100644
--- a/drivers/gpu/drm/i915/intel_dp.c
+++ b/drivers/gpu/drm/i915/intel_dp.c
@@ -42,10 +42,11 @@
42 42
43#define DP_LINK_CONFIGURATION_SIZE 9 43#define DP_LINK_CONFIGURATION_SIZE 9
44 44
45#define IS_eDP(i) ((i)->type == INTEL_OUTPUT_EDP) 45#define IS_eDP(i) ((i)->base.type == INTEL_OUTPUT_EDP)
46#define IS_PCH_eDP(dp_priv) ((dp_priv)->is_pch_edp) 46#define IS_PCH_eDP(i) ((i)->is_pch_edp)
47 47
48struct intel_dp_priv { 48struct intel_dp {
49 struct intel_encoder base;
49 uint32_t output_reg; 50 uint32_t output_reg;
50 uint32_t DP; 51 uint32_t DP;
51 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE]; 52 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
@@ -54,40 +55,39 @@ struct intel_dp_priv {
54 uint8_t link_bw; 55 uint8_t link_bw;
55 uint8_t lane_count; 56 uint8_t lane_count;
56 uint8_t dpcd[4]; 57 uint8_t dpcd[4];
57 struct intel_encoder *intel_encoder;
58 struct i2c_adapter adapter; 58 struct i2c_adapter adapter;
59 struct i2c_algo_dp_aux_data algo; 59 struct i2c_algo_dp_aux_data algo;
60 bool is_pch_edp; 60 bool is_pch_edp;
61}; 61};
62 62
63static void 63static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
64intel_dp_link_train(struct intel_encoder *intel_encoder, uint32_t DP, 64{
65 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE]); 65 return container_of(enc_to_intel_encoder(encoder), struct intel_dp, base);
66}
66 67
67static void 68static void intel_dp_link_train(struct intel_dp *intel_dp);
68intel_dp_link_down(struct intel_encoder *intel_encoder, uint32_t DP); 69static void intel_dp_link_down(struct intel_dp *intel_dp);
69 70
70void 71void
71intel_edp_link_config (struct intel_encoder *intel_encoder, 72intel_edp_link_config (struct intel_encoder *intel_encoder,
72 int *lane_num, int *link_bw) 73 int *lane_num, int *link_bw)
73{ 74{
74 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 75 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
75 76
76 *lane_num = dp_priv->lane_count; 77 *lane_num = intel_dp->lane_count;
77 if (dp_priv->link_bw == DP_LINK_BW_1_62) 78 if (intel_dp->link_bw == DP_LINK_BW_1_62)
78 *link_bw = 162000; 79 *link_bw = 162000;
79 else if (dp_priv->link_bw == DP_LINK_BW_2_7) 80 else if (intel_dp->link_bw == DP_LINK_BW_2_7)
80 *link_bw = 270000; 81 *link_bw = 270000;
81} 82}
82 83
83static int 84static int
84intel_dp_max_lane_count(struct intel_encoder *intel_encoder) 85intel_dp_max_lane_count(struct intel_dp *intel_dp)
85{ 86{
86 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
87 int max_lane_count = 4; 87 int max_lane_count = 4;
88 88
89 if (dp_priv->dpcd[0] >= 0x11) { 89 if (intel_dp->dpcd[0] >= 0x11) {
90 max_lane_count = dp_priv->dpcd[2] & 0x1f; 90 max_lane_count = intel_dp->dpcd[2] & 0x1f;
91 switch (max_lane_count) { 91 switch (max_lane_count) {
92 case 1: case 2: case 4: 92 case 1: case 2: case 4:
93 break; 93 break;
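struct intel_dp now embeds its encoder as the first member ("base"), and enc_to_intel_dp() recovers the containing structure from the embedded encoder with container_of. A self-contained sketch of that embedding pattern; the structure members and names below are illustrative, not the driver's:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct encoder {             /* stands in for struct drm_encoder */
        int id;
};

struct dp_encoder {          /* stands in for struct intel_dp */
        struct encoder base; /* embedded member that container_of walks back from */
        int lane_count;
};

/* upcast helper, the same shape as enc_to_intel_dp() */
static struct dp_encoder *enc_to_dp(struct encoder *enc)
{
        return container_of(enc, struct dp_encoder, base);
}

int main(void)
{
        struct dp_encoder dp = { .base = { .id = 7 }, .lane_count = 4 };
        struct encoder *enc = &dp.base;      /* other code only sees this pointer */

        printf("lanes = %d\n", enc_to_dp(enc)->lane_count);
        return 0;
}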
@@ -99,10 +99,9 @@ intel_dp_max_lane_count(struct intel_encoder *intel_encoder)
99} 99}
100 100
101static int 101static int
102intel_dp_max_link_bw(struct intel_encoder *intel_encoder) 102intel_dp_max_link_bw(struct intel_dp *intel_dp)
103{ 103{
104 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 104 int max_link_bw = intel_dp->dpcd[1];
105 int max_link_bw = dp_priv->dpcd[1];
106 105
107 switch (max_link_bw) { 106 switch (max_link_bw) {
108 case DP_LINK_BW_1_62: 107 case DP_LINK_BW_1_62:
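intel_dp_max_lane_count() and intel_dp_max_link_bw() above read the sink capabilities cached in intel_dp->dpcd: byte 0 holds the DPCD revision, byte 1 the maximum link rate code, and byte 2 the maximum lane count in its low bits along with the enhanced-framing bit. A small sketch of parsing such a cached capability block, using the field layout these functions rely on; the struct and function names are illustrative:

#include <stdint.h>
#include <stdio.h>

#define DP_LINK_BW_1_62        0x06   /* 1.62 Gbps per lane */
#define DP_LINK_BW_2_7         0x0a   /* 2.70 Gbps per lane */
#define DP_MAX_LANE_COUNT_MASK 0x1f
#define DP_ENHANCED_FRAME_CAP  (1 << 7)

struct dp_caps {
        int rev_major, rev_minor;
        int max_lanes;
        int max_link_bw;
        int enhanced_framing;
};

static void parse_dpcd(const uint8_t dpcd[4], struct dp_caps *caps)
{
        caps->rev_major        = dpcd[0] >> 4;
        caps->rev_minor        = dpcd[0] & 0xf;
        caps->max_link_bw      = dpcd[1];
        caps->max_lanes        = dpcd[2] & DP_MAX_LANE_COUNT_MASK;
        caps->enhanced_framing = (dpcd[0] >= 0x11) &&
                                 (dpcd[2] & DP_ENHANCED_FRAME_CAP);
}

int main(void)
{
        const uint8_t dpcd[4] = { 0x11, DP_LINK_BW_2_7, 0x84, 0x00 };
        struct dp_caps caps;

        parse_dpcd(dpcd, &caps);
        printf("DPCD %d.%d, %d lanes, bw code 0x%02x, enhanced framing %d\n",
               caps.rev_major, caps.rev_minor, caps.max_lanes,
               caps.max_link_bw, caps.enhanced_framing);
        return 0;
}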
@@ -126,13 +125,11 @@ intel_dp_link_clock(uint8_t link_bw)
126 125
127/* I think this is a fiction */ 126/* I think this is a fiction */
128static int 127static int
129intel_dp_link_required(struct drm_device *dev, 128intel_dp_link_required(struct drm_device *dev, struct intel_dp *intel_dp, int pixel_clock)
130 struct intel_encoder *intel_encoder, int pixel_clock)
131{ 129{
132 struct drm_i915_private *dev_priv = dev->dev_private; 130 struct drm_i915_private *dev_priv = dev->dev_private;
133 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
134 131
135 if (IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) 132 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
136 return (pixel_clock * dev_priv->edp_bpp) / 8; 133 return (pixel_clock * dev_priv->edp_bpp) / 8;
137 else 134 else
138 return pixel_clock * 3; 135 return pixel_clock * 3;
@@ -149,14 +146,13 @@ intel_dp_mode_valid(struct drm_connector *connector,
149 struct drm_display_mode *mode) 146 struct drm_display_mode *mode)
150{ 147{
151 struct drm_encoder *encoder = intel_attached_encoder(connector); 148 struct drm_encoder *encoder = intel_attached_encoder(connector);
152 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 149 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
153 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
154 struct drm_device *dev = connector->dev; 150 struct drm_device *dev = connector->dev;
155 struct drm_i915_private *dev_priv = dev->dev_private; 151 struct drm_i915_private *dev_priv = dev->dev_private;
156 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_encoder)); 152 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
157 int max_lanes = intel_dp_max_lane_count(intel_encoder); 153 int max_lanes = intel_dp_max_lane_count(intel_dp);
158 154
159 if ((IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) && 155 if ((IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) &&
160 dev_priv->panel_fixed_mode) { 156 dev_priv->panel_fixed_mode) {
161 if (mode->hdisplay > dev_priv->panel_fixed_mode->hdisplay) 157 if (mode->hdisplay > dev_priv->panel_fixed_mode->hdisplay)
162 return MODE_PANEL; 158 return MODE_PANEL;
@@ -167,8 +163,8 @@ intel_dp_mode_valid(struct drm_connector *connector,
167 163
168 /* only refuse the mode on non eDP since we have seen some wierd eDP panels 164 /* only refuse the mode on non eDP since we have seen some wierd eDP panels
169 which are outside spec tolerances but somehow work by magic */ 165 which are outside spec tolerances but somehow work by magic */
170 if (!IS_eDP(intel_encoder) && 166 if (!IS_eDP(intel_dp) &&
171 (intel_dp_link_required(connector->dev, intel_encoder, mode->clock) 167 (intel_dp_link_required(connector->dev, intel_dp, mode->clock)
172 > intel_dp_max_data_rate(max_link_clock, max_lanes))) 168 > intel_dp_max_data_rate(max_link_clock, max_lanes)))
173 return MODE_CLOCK_HIGH; 169 return MODE_CLOCK_HIGH;
174 170
@@ -232,13 +228,12 @@ intel_hrawclk(struct drm_device *dev)
232} 228}
233 229
234static int 230static int
235intel_dp_aux_ch(struct intel_encoder *intel_encoder, 231intel_dp_aux_ch(struct intel_dp *intel_dp,
236 uint8_t *send, int send_bytes, 232 uint8_t *send, int send_bytes,
237 uint8_t *recv, int recv_size) 233 uint8_t *recv, int recv_size)
238{ 234{
239 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 235 uint32_t output_reg = intel_dp->output_reg;
240 uint32_t output_reg = dp_priv->output_reg; 236 struct drm_device *dev = intel_dp->base.enc.dev;
241 struct drm_device *dev = intel_encoder->enc.dev;
242 struct drm_i915_private *dev_priv = dev->dev_private; 237 struct drm_i915_private *dev_priv = dev->dev_private;
243 uint32_t ch_ctl = output_reg + 0x10; 238 uint32_t ch_ctl = output_reg + 0x10;
244 uint32_t ch_data = ch_ctl + 4; 239 uint32_t ch_data = ch_ctl + 4;
@@ -253,7 +248,7 @@ intel_dp_aux_ch(struct intel_encoder *intel_encoder,
253 * and would like to run at 2MHz. So, take the 248 * and would like to run at 2MHz. So, take the
254 * hrawclk value and divide by 2 and use that 249 * hrawclk value and divide by 2 and use that
255 */ 250 */
256 if (IS_eDP(intel_encoder)) { 251 if (IS_eDP(intel_dp)) {
257 if (IS_GEN6(dev)) 252 if (IS_GEN6(dev))
258 aux_clock_divider = 200; /* SNB eDP input clock at 400Mhz */ 253 aux_clock_divider = 200; /* SNB eDP input clock at 400Mhz */
259 else 254 else
@@ -344,7 +339,7 @@ intel_dp_aux_ch(struct intel_encoder *intel_encoder,
344 339
345/* Write data to the aux channel in native mode */ 340/* Write data to the aux channel in native mode */
346static int 341static int
347intel_dp_aux_native_write(struct intel_encoder *intel_encoder, 342intel_dp_aux_native_write(struct intel_dp *intel_dp,
348 uint16_t address, uint8_t *send, int send_bytes) 343 uint16_t address, uint8_t *send, int send_bytes)
349{ 344{
350 int ret; 345 int ret;
@@ -361,7 +356,7 @@ intel_dp_aux_native_write(struct intel_encoder *intel_encoder,
361 memcpy(&msg[4], send, send_bytes); 356 memcpy(&msg[4], send, send_bytes);
362 msg_bytes = send_bytes + 4; 357 msg_bytes = send_bytes + 4;
363 for (;;) { 358 for (;;) {
364 ret = intel_dp_aux_ch(intel_encoder, msg, msg_bytes, &ack, 1); 359 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
365 if (ret < 0) 360 if (ret < 0)
366 return ret; 361 return ret;
367 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 362 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
@@ -376,15 +371,15 @@ intel_dp_aux_native_write(struct intel_encoder *intel_encoder,
376 371
377/* Write a single byte to the aux channel in native mode */ 372/* Write a single byte to the aux channel in native mode */
378static int 373static int
379intel_dp_aux_native_write_1(struct intel_encoder *intel_encoder, 374intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
380 uint16_t address, uint8_t byte) 375 uint16_t address, uint8_t byte)
381{ 376{
382 return intel_dp_aux_native_write(intel_encoder, address, &byte, 1); 377 return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
383} 378}
384 379
385/* read bytes from a native aux channel */ 380/* read bytes from a native aux channel */
386static int 381static int
387intel_dp_aux_native_read(struct intel_encoder *intel_encoder, 382intel_dp_aux_native_read(struct intel_dp *intel_dp,
388 uint16_t address, uint8_t *recv, int recv_bytes) 383 uint16_t address, uint8_t *recv, int recv_bytes)
389{ 384{
390 uint8_t msg[4]; 385 uint8_t msg[4];
@@ -403,7 +398,7 @@ intel_dp_aux_native_read(struct intel_encoder *intel_encoder,
403 reply_bytes = recv_bytes + 1; 398 reply_bytes = recv_bytes + 1;
404 399
405 for (;;) { 400 for (;;) {
406 ret = intel_dp_aux_ch(intel_encoder, msg, msg_bytes, 401 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
407 reply, reply_bytes); 402 reply, reply_bytes);
408 if (ret == 0) 403 if (ret == 0)
409 return -EPROTO; 404 return -EPROTO;
@@ -426,10 +421,9 @@ intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
426 uint8_t write_byte, uint8_t *read_byte) 421 uint8_t write_byte, uint8_t *read_byte)
427{ 422{
428 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 423 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
429 struct intel_dp_priv *dp_priv = container_of(adapter, 424 struct intel_dp *intel_dp = container_of(adapter,
430 struct intel_dp_priv, 425 struct intel_dp,
431 adapter); 426 adapter);
432 struct intel_encoder *intel_encoder = dp_priv->intel_encoder;
433 uint16_t address = algo_data->address; 427 uint16_t address = algo_data->address;
434 uint8_t msg[5]; 428 uint8_t msg[5];
435 uint8_t reply[2]; 429 uint8_t reply[2];
@@ -468,7 +462,7 @@ intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
468 } 462 }
469 463
470 for (;;) { 464 for (;;) {
471 ret = intel_dp_aux_ch(intel_encoder, 465 ret = intel_dp_aux_ch(intel_dp,
472 msg, msg_bytes, 466 msg, msg_bytes,
473 reply, reply_bytes); 467 reply, reply_bytes);
474 if (ret < 0) { 468 if (ret < 0) {
@@ -496,57 +490,42 @@ intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
496} 490}
497 491
498static int 492static int
499intel_dp_i2c_init(struct intel_encoder *intel_encoder, 493intel_dp_i2c_init(struct intel_dp *intel_dp,
500 struct intel_connector *intel_connector, const char *name) 494 struct intel_connector *intel_connector, const char *name)
501{ 495{
502 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
503
504 DRM_DEBUG_KMS("i2c_init %s\n", name); 496 DRM_DEBUG_KMS("i2c_init %s\n", name);
505 dp_priv->algo.running = false; 497 intel_dp->algo.running = false;
506 dp_priv->algo.address = 0; 498 intel_dp->algo.address = 0;
507 dp_priv->algo.aux_ch = intel_dp_i2c_aux_ch; 499 intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;
508 500
509 memset(&dp_priv->adapter, '\0', sizeof (dp_priv->adapter)); 501 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
510 dp_priv->adapter.owner = THIS_MODULE; 502 intel_dp->adapter.owner = THIS_MODULE;
511 dp_priv->adapter.class = I2C_CLASS_DDC; 503 intel_dp->adapter.class = I2C_CLASS_DDC;
512 strncpy (dp_priv->adapter.name, name, sizeof(dp_priv->adapter.name) - 1); 504 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
513 dp_priv->adapter.name[sizeof(dp_priv->adapter.name) - 1] = '\0'; 505 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
514 dp_priv->adapter.algo_data = &dp_priv->algo; 506 intel_dp->adapter.algo_data = &intel_dp->algo;
515 dp_priv->adapter.dev.parent = &intel_connector->base.kdev; 507 intel_dp->adapter.dev.parent = &intel_connector->base.kdev;
516 508
517 return i2c_dp_aux_add_bus(&dp_priv->adapter); 509 return i2c_dp_aux_add_bus(&intel_dp->adapter);
518} 510}
519 511
520static bool 512static bool
521intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode, 513intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
522 struct drm_display_mode *adjusted_mode) 514 struct drm_display_mode *adjusted_mode)
523{ 515{
524 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
525 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
526 struct drm_device *dev = encoder->dev; 516 struct drm_device *dev = encoder->dev;
527 struct drm_i915_private *dev_priv = dev->dev_private; 517 struct drm_i915_private *dev_priv = dev->dev_private;
518 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
528 int lane_count, clock; 519 int lane_count, clock;
529 int max_lane_count = intel_dp_max_lane_count(intel_encoder); 520 int max_lane_count = intel_dp_max_lane_count(intel_dp);
530 int max_clock = intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0; 521 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
531 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; 522 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
532 523
533 if ((IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) && 524 if ((IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) &&
534 dev_priv->panel_fixed_mode) { 525 dev_priv->panel_fixed_mode) {
535 struct drm_display_mode *fixed_mode = dev_priv->panel_fixed_mode; 526 intel_fixed_panel_mode(dev_priv->panel_fixed_mode, adjusted_mode);
536 527 intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
537 adjusted_mode->hdisplay = fixed_mode->hdisplay; 528 mode, adjusted_mode);
538 adjusted_mode->hsync_start = fixed_mode->hsync_start;
539 adjusted_mode->hsync_end = fixed_mode->hsync_end;
540 adjusted_mode->htotal = fixed_mode->htotal;
541
542 adjusted_mode->vdisplay = fixed_mode->vdisplay;
543 adjusted_mode->vsync_start = fixed_mode->vsync_start;
544 adjusted_mode->vsync_end = fixed_mode->vsync_end;
545 adjusted_mode->vtotal = fixed_mode->vtotal;
546
547 adjusted_mode->clock = fixed_mode->clock;
548 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
549
550 /* 529 /*
551 * the mode->clock is used to calculate the Data&Link M/N 530 * the mode->clock is used to calculate the Data&Link M/N
552 * of the pipe. For the eDP the fixed clock should be used. 531 * of the pipe. For the eDP the fixed clock should be used.
@@ -558,31 +537,33 @@ intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
558 for (clock = 0; clock <= max_clock; clock++) { 537 for (clock = 0; clock <= max_clock; clock++) {
559 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count); 538 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);
560 539
561 if (intel_dp_link_required(encoder->dev, intel_encoder, mode->clock) 540 if (intel_dp_link_required(encoder->dev, intel_dp, mode->clock)
562 <= link_avail) { 541 <= link_avail) {
563 dp_priv->link_bw = bws[clock]; 542 intel_dp->link_bw = bws[clock];
564 dp_priv->lane_count = lane_count; 543 intel_dp->lane_count = lane_count;
565 adjusted_mode->clock = intel_dp_link_clock(dp_priv->link_bw); 544 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
566 DRM_DEBUG_KMS("Display port link bw %02x lane " 545 DRM_DEBUG_KMS("Display port link bw %02x lane "
567 "count %d clock %d\n", 546 "count %d clock %d\n",
568 dp_priv->link_bw, dp_priv->lane_count, 547 intel_dp->link_bw, intel_dp->lane_count,
569 adjusted_mode->clock); 548 adjusted_mode->clock);
570 return true; 549 return true;
571 } 550 }
572 } 551 }
573 } 552 }
574 553
575 if (IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) { 554 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) {
576 /* okay we failed just pick the highest */ 555 /* okay we failed just pick the highest */
577 dp_priv->lane_count = max_lane_count; 556 intel_dp->lane_count = max_lane_count;
578 dp_priv->link_bw = bws[max_clock]; 557 intel_dp->link_bw = bws[max_clock];
579 adjusted_mode->clock = intel_dp_link_clock(dp_priv->link_bw); 558 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
580 DRM_DEBUG_KMS("Force picking display port link bw %02x lane " 559 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
581 "count %d clock %d\n", 560 "count %d clock %d\n",
582 dp_priv->link_bw, dp_priv->lane_count, 561 intel_dp->link_bw, intel_dp->lane_count,
583 adjusted_mode->clock); 562 adjusted_mode->clock);
563
584 return true; 564 return true;
585 } 565 }
566
586 return false; 567 return false;
587} 568}
588 569
@@ -626,17 +607,14 @@ bool intel_pch_has_edp(struct drm_crtc *crtc)
626 struct drm_encoder *encoder; 607 struct drm_encoder *encoder;
627 608
628 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 609 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
629 struct intel_encoder *intel_encoder; 610 struct intel_dp *intel_dp;
630 struct intel_dp_priv *dp_priv;
631 611
632 if (!encoder || encoder->crtc != crtc) 612 if (encoder->crtc != crtc)
633 continue; 613 continue;
634 614
635 intel_encoder = enc_to_intel_encoder(encoder); 615 intel_dp = enc_to_intel_dp(encoder);
636 dp_priv = intel_encoder->dev_priv; 616 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
637 617 return intel_dp->is_pch_edp;
638 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT)
639 return dp_priv->is_pch_edp;
640 } 618 }
641 return false; 619 return false;
642} 620}
@@ -657,18 +635,15 @@ intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
657 * Find the lane count in the intel_encoder private 635 * Find the lane count in the intel_encoder private
658 */ 636 */
659 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 637 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
660 struct intel_encoder *intel_encoder; 638 struct intel_dp *intel_dp;
661 struct intel_dp_priv *dp_priv;
662 639
663 if (encoder->crtc != crtc) 640 if (encoder->crtc != crtc)
664 continue; 641 continue;
665 642
666 intel_encoder = enc_to_intel_encoder(encoder); 643 intel_dp = enc_to_intel_dp(encoder);
667 dp_priv = intel_encoder->dev_priv; 644 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT) {
668 645 lane_count = intel_dp->lane_count;
669 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) { 646 if (IS_PCH_eDP(intel_dp))
670 lane_count = dp_priv->lane_count;
671 if (IS_PCH_eDP(dp_priv))
672 bpp = dev_priv->edp_bpp; 647 bpp = dev_priv->edp_bpp;
673 break; 648 break;
674 } 649 }
@@ -724,107 +699,114 @@ intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
724 struct drm_display_mode *adjusted_mode) 699 struct drm_display_mode *adjusted_mode)
725{ 700{
726 struct drm_device *dev = encoder->dev; 701 struct drm_device *dev = encoder->dev;
727 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 702 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
728 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 703 struct drm_crtc *crtc = intel_dp->base.enc.crtc;
729 struct drm_crtc *crtc = intel_encoder->enc.crtc;
730 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 704 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
731 705
732 dp_priv->DP = (DP_VOLTAGE_0_4 | 706 intel_dp->DP = (DP_VOLTAGE_0_4 |
733 DP_PRE_EMPHASIS_0); 707 DP_PRE_EMPHASIS_0);
734 708
735 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 709 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
736 dp_priv->DP |= DP_SYNC_HS_HIGH; 710 intel_dp->DP |= DP_SYNC_HS_HIGH;
737 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 711 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
738 dp_priv->DP |= DP_SYNC_VS_HIGH; 712 intel_dp->DP |= DP_SYNC_VS_HIGH;
739 713
740 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_encoder)) 714 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
741 dp_priv->DP |= DP_LINK_TRAIN_OFF_CPT; 715 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
742 else 716 else
743 dp_priv->DP |= DP_LINK_TRAIN_OFF; 717 intel_dp->DP |= DP_LINK_TRAIN_OFF;
744 718
745 switch (dp_priv->lane_count) { 719 switch (intel_dp->lane_count) {
746 case 1: 720 case 1:
747 dp_priv->DP |= DP_PORT_WIDTH_1; 721 intel_dp->DP |= DP_PORT_WIDTH_1;
748 break; 722 break;
749 case 2: 723 case 2:
750 dp_priv->DP |= DP_PORT_WIDTH_2; 724 intel_dp->DP |= DP_PORT_WIDTH_2;
751 break; 725 break;
752 case 4: 726 case 4:
753 dp_priv->DP |= DP_PORT_WIDTH_4; 727 intel_dp->DP |= DP_PORT_WIDTH_4;
754 break; 728 break;
755 } 729 }
756 if (dp_priv->has_audio) 730 if (intel_dp->has_audio)
757 dp_priv->DP |= DP_AUDIO_OUTPUT_ENABLE; 731 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
758 732
759 memset(dp_priv->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 733 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
760 dp_priv->link_configuration[0] = dp_priv->link_bw; 734 intel_dp->link_configuration[0] = intel_dp->link_bw;
761 dp_priv->link_configuration[1] = dp_priv->lane_count; 735 intel_dp->link_configuration[1] = intel_dp->lane_count;
762 736
763 /* 737 /*
764 * Check for DPCD version > 1.1 and enhanced framing support 738 * Check for DPCD version > 1.1 and enhanced framing support
765 */ 739 */
766 if (dp_priv->dpcd[0] >= 0x11 && (dp_priv->dpcd[2] & DP_ENHANCED_FRAME_CAP)) { 740 if (intel_dp->dpcd[0] >= 0x11 && (intel_dp->dpcd[2] & DP_ENHANCED_FRAME_CAP)) {
767 dp_priv->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 741 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
768 dp_priv->DP |= DP_ENHANCED_FRAMING; 742 intel_dp->DP |= DP_ENHANCED_FRAMING;
769 } 743 }
770 744
771 /* CPT DP's pipe select is decided in TRANS_DP_CTL */ 745 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
772 if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev)) 746 if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev))
773 dp_priv->DP |= DP_PIPEB_SELECT; 747 intel_dp->DP |= DP_PIPEB_SELECT;
774 748
775 if (IS_eDP(intel_encoder)) { 749 if (IS_eDP(intel_dp)) {
776 /* don't miss out required setting for eDP */ 750 /* don't miss out required setting for eDP */
777 dp_priv->DP |= DP_PLL_ENABLE; 751 intel_dp->DP |= DP_PLL_ENABLE;
778 if (adjusted_mode->clock < 200000) 752 if (adjusted_mode->clock < 200000)
779 dp_priv->DP |= DP_PLL_FREQ_160MHZ; 753 intel_dp->DP |= DP_PLL_FREQ_160MHZ;
780 else 754 else
781 dp_priv->DP |= DP_PLL_FREQ_270MHZ; 755 intel_dp->DP |= DP_PLL_FREQ_270MHZ;
782 } 756 }
783} 757}
784 758
785static void ironlake_edp_panel_on (struct drm_device *dev) 759static void ironlake_edp_panel_on (struct drm_device *dev)
786{ 760{
787 struct drm_i915_private *dev_priv = dev->dev_private; 761 struct drm_i915_private *dev_priv = dev->dev_private;
788 unsigned long timeout = jiffies + msecs_to_jiffies(5000); 762 u32 pp;
789 u32 pp, pp_status;
790 763
791 pp_status = I915_READ(PCH_PP_STATUS); 764 if (I915_READ(PCH_PP_STATUS) & PP_ON)
792 if (pp_status & PP_ON)
793 return; 765 return;
794 766
795 pp = I915_READ(PCH_PP_CONTROL); 767 pp = I915_READ(PCH_PP_CONTROL);
768
769 /* ILK workaround: disable reset around power sequence */
770 pp &= ~PANEL_POWER_RESET;
771 I915_WRITE(PCH_PP_CONTROL, pp);
772 POSTING_READ(PCH_PP_CONTROL);
773
796 pp |= PANEL_UNLOCK_REGS | POWER_TARGET_ON; 774 pp |= PANEL_UNLOCK_REGS | POWER_TARGET_ON;
797 I915_WRITE(PCH_PP_CONTROL, pp); 775 I915_WRITE(PCH_PP_CONTROL, pp);
798 do {
799 pp_status = I915_READ(PCH_PP_STATUS);
800 } while (((pp_status & PP_ON) == 0) && !time_after(jiffies, timeout));
801 776
802 if (time_after(jiffies, timeout)) 777 if (wait_for(I915_READ(PCH_PP_STATUS) & PP_ON, 5000, 10))
803 DRM_DEBUG_KMS("panel on wait timed out: 0x%08x\n", pp_status); 778 DRM_ERROR("panel on wait timed out: 0x%08x\n",
779 I915_READ(PCH_PP_STATUS));
804 780
805 pp &= ~(PANEL_UNLOCK_REGS | EDP_FORCE_VDD); 781 pp &= ~(PANEL_UNLOCK_REGS | EDP_FORCE_VDD);
782 pp |= PANEL_POWER_RESET; /* restore panel reset bit */
806 I915_WRITE(PCH_PP_CONTROL, pp); 783 I915_WRITE(PCH_PP_CONTROL, pp);
784 POSTING_READ(PCH_PP_CONTROL);
807} 785}
808 786
809static void ironlake_edp_panel_off (struct drm_device *dev) 787static void ironlake_edp_panel_off (struct drm_device *dev)
810{ 788{
811 struct drm_i915_private *dev_priv = dev->dev_private; 789 struct drm_i915_private *dev_priv = dev->dev_private;
812 unsigned long timeout = jiffies + msecs_to_jiffies(5000); 790 u32 pp;
813 u32 pp, pp_status;
814 791
815 pp = I915_READ(PCH_PP_CONTROL); 792 pp = I915_READ(PCH_PP_CONTROL);
793
794 /* ILK workaround: disable reset around power sequence */
795 pp &= ~PANEL_POWER_RESET;
796 I915_WRITE(PCH_PP_CONTROL, pp);
797 POSTING_READ(PCH_PP_CONTROL);
798
816 pp &= ~POWER_TARGET_ON; 799 pp &= ~POWER_TARGET_ON;
817 I915_WRITE(PCH_PP_CONTROL, pp); 800 I915_WRITE(PCH_PP_CONTROL, pp);
818 do {
819 pp_status = I915_READ(PCH_PP_STATUS);
820 } while ((pp_status & PP_ON) && !time_after(jiffies, timeout));
821 801
822 if (time_after(jiffies, timeout)) 802 if (wait_for((I915_READ(PCH_PP_STATUS) & PP_ON) == 0, 5000, 10))
823 DRM_DEBUG_KMS("panel off wait timed out\n"); 803 DRM_ERROR("panel off wait timed out: 0x%08x\n",
804 I915_READ(PCH_PP_STATUS));
824 805
825 /* Make sure VDD is enabled so DP AUX will work */ 806 /* Make sure VDD is enabled so DP AUX will work */
826 pp |= EDP_FORCE_VDD; 807 pp |= EDP_FORCE_VDD | PANEL_POWER_RESET; /* restore panel reset bit */
827 I915_WRITE(PCH_PP_CONTROL, pp); 808 I915_WRITE(PCH_PP_CONTROL, pp);
809 POSTING_READ(PCH_PP_CONTROL);
828} 810}
829 811
830static void ironlake_edp_backlight_on (struct drm_device *dev) 812static void ironlake_edp_backlight_on (struct drm_device *dev)
@@ -849,33 +831,87 @@ static void ironlake_edp_backlight_off (struct drm_device *dev)
849 I915_WRITE(PCH_PP_CONTROL, pp); 831 I915_WRITE(PCH_PP_CONTROL, pp);
850} 832}
851 833
834static void ironlake_edp_pll_on(struct drm_encoder *encoder)
835{
836 struct drm_device *dev = encoder->dev;
837 struct drm_i915_private *dev_priv = dev->dev_private;
838 u32 dpa_ctl;
839
840 DRM_DEBUG_KMS("\n");
841 dpa_ctl = I915_READ(DP_A);
842 dpa_ctl &= ~DP_PLL_ENABLE;
843 I915_WRITE(DP_A, dpa_ctl);
844}
845
846static void ironlake_edp_pll_off(struct drm_encoder *encoder)
847{
848 struct drm_device *dev = encoder->dev;
849 struct drm_i915_private *dev_priv = dev->dev_private;
850 u32 dpa_ctl;
851
852 dpa_ctl = I915_READ(DP_A);
853 dpa_ctl |= DP_PLL_ENABLE;
854 I915_WRITE(DP_A, dpa_ctl);
855 udelay(200);
856}
857
858static void intel_dp_prepare(struct drm_encoder *encoder)
859{
860 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
861 struct drm_device *dev = encoder->dev;
862 struct drm_i915_private *dev_priv = dev->dev_private;
863 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
864
865 if (IS_eDP(intel_dp)) {
866 ironlake_edp_backlight_off(dev);
867 ironlake_edp_panel_on(dev);
868 ironlake_edp_pll_on(encoder);
869 }
870 if (dp_reg & DP_PORT_EN)
871 intel_dp_link_down(intel_dp);
872}
873
874static void intel_dp_commit(struct drm_encoder *encoder)
875{
876 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
877 struct drm_device *dev = encoder->dev;
878 struct drm_i915_private *dev_priv = dev->dev_private;
879 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
880
881 if (!(dp_reg & DP_PORT_EN)) {
882 intel_dp_link_train(intel_dp);
883 }
884 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
885 ironlake_edp_backlight_on(dev);
886}
887
852static void 888static void
853intel_dp_dpms(struct drm_encoder *encoder, int mode) 889intel_dp_dpms(struct drm_encoder *encoder, int mode)
854{ 890{
855 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 891 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
856 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
857 struct drm_device *dev = encoder->dev; 892 struct drm_device *dev = encoder->dev;
858 struct drm_i915_private *dev_priv = dev->dev_private; 893 struct drm_i915_private *dev_priv = dev->dev_private;
859 uint32_t dp_reg = I915_READ(dp_priv->output_reg); 894 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
860 895
861 if (mode != DRM_MODE_DPMS_ON) { 896 if (mode != DRM_MODE_DPMS_ON) {
862 if (dp_reg & DP_PORT_EN) { 897 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) {
863 intel_dp_link_down(intel_encoder, dp_priv->DP); 898 ironlake_edp_backlight_off(dev);
864 if (IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) { 899 ironlake_edp_panel_off(dev);
865 ironlake_edp_backlight_off(dev);
866 ironlake_edp_panel_off(dev);
867 }
868 } 900 }
901 if (dp_reg & DP_PORT_EN)
902 intel_dp_link_down(intel_dp);
903 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
904 ironlake_edp_pll_off(encoder);
869 } else { 905 } else {
870 if (!(dp_reg & DP_PORT_EN)) { 906 if (!(dp_reg & DP_PORT_EN)) {
871 intel_dp_link_train(intel_encoder, dp_priv->DP, dp_priv->link_configuration); 907 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
872 if (IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) {
873 ironlake_edp_panel_on(dev); 908 ironlake_edp_panel_on(dev);
909 intel_dp_link_train(intel_dp);
910 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
874 ironlake_edp_backlight_on(dev); 911 ironlake_edp_backlight_on(dev);
875 }
876 } 912 }
877 } 913 }
878 dp_priv->dpms_mode = mode; 914 intel_dp->dpms_mode = mode;
879} 915}
880 916
881/* 917/*
@@ -883,12 +919,12 @@ intel_dp_dpms(struct drm_encoder *encoder, int mode)
883 * link status information 919 * link status information
884 */ 920 */
885static bool 921static bool
886intel_dp_get_link_status(struct intel_encoder *intel_encoder, 922intel_dp_get_link_status(struct intel_dp *intel_dp,
887 uint8_t link_status[DP_LINK_STATUS_SIZE]) 923 uint8_t link_status[DP_LINK_STATUS_SIZE])
888{ 924{
889 int ret; 925 int ret;
890 926
891 ret = intel_dp_aux_native_read(intel_encoder, 927 ret = intel_dp_aux_native_read(intel_dp,
892 DP_LANE0_1_STATUS, 928 DP_LANE0_1_STATUS,
893 link_status, DP_LINK_STATUS_SIZE); 929 link_status, DP_LINK_STATUS_SIZE);
894 if (ret != DP_LINK_STATUS_SIZE) 930 if (ret != DP_LINK_STATUS_SIZE)
@@ -965,7 +1001,7 @@ intel_dp_pre_emphasis_max(uint8_t voltage_swing)
965} 1001}
966 1002
967static void 1003static void
968intel_get_adjust_train(struct intel_encoder *intel_encoder, 1004intel_get_adjust_train(struct intel_dp *intel_dp,
969 uint8_t link_status[DP_LINK_STATUS_SIZE], 1005 uint8_t link_status[DP_LINK_STATUS_SIZE],
970 int lane_count, 1006 int lane_count,
971 uint8_t train_set[4]) 1007 uint8_t train_set[4])
@@ -1101,27 +1137,27 @@ intel_channel_eq_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1101} 1137}
1102 1138
1103static bool 1139static bool
1104intel_dp_set_link_train(struct intel_encoder *intel_encoder, 1140intel_dp_set_link_train(struct intel_dp *intel_dp,
1105 uint32_t dp_reg_value, 1141 uint32_t dp_reg_value,
1106 uint8_t dp_train_pat, 1142 uint8_t dp_train_pat,
1107 uint8_t train_set[4], 1143 uint8_t train_set[4],
1108 bool first) 1144 bool first)
1109{ 1145{
1110 struct drm_device *dev = intel_encoder->enc.dev; 1146 struct drm_device *dev = intel_dp->base.enc.dev;
1111 struct drm_i915_private *dev_priv = dev->dev_private; 1147 struct drm_i915_private *dev_priv = dev->dev_private;
1112 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 1148 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.enc.crtc);
1113 int ret; 1149 int ret;
1114 1150
1115 I915_WRITE(dp_priv->output_reg, dp_reg_value); 1151 I915_WRITE(intel_dp->output_reg, dp_reg_value);
1116 POSTING_READ(dp_priv->output_reg); 1152 POSTING_READ(intel_dp->output_reg);
1117 if (first) 1153 if (first)
1118 intel_wait_for_vblank(dev); 1154 intel_wait_for_vblank(dev, intel_crtc->pipe);
1119 1155
1120 intel_dp_aux_native_write_1(intel_encoder, 1156 intel_dp_aux_native_write_1(intel_dp,
1121 DP_TRAINING_PATTERN_SET, 1157 DP_TRAINING_PATTERN_SET,
1122 dp_train_pat); 1158 dp_train_pat);
1123 1159
1124 ret = intel_dp_aux_native_write(intel_encoder, 1160 ret = intel_dp_aux_native_write(intel_dp,
1125 DP_TRAINING_LANE0_SET, train_set, 4); 1161 DP_TRAINING_LANE0_SET, train_set, 4);
1126 if (ret != 4) 1162 if (ret != 4)
1127 return false; 1163 return false;
@@ -1130,12 +1166,10 @@ intel_dp_set_link_train(struct intel_encoder *intel_encoder,
1130} 1166}
1131 1167
1132static void 1168static void
1133intel_dp_link_train(struct intel_encoder *intel_encoder, uint32_t DP, 1169intel_dp_link_train(struct intel_dp *intel_dp)
1134 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE])
1135{ 1170{
1136 struct drm_device *dev = intel_encoder->enc.dev; 1171 struct drm_device *dev = intel_dp->base.enc.dev;
1137 struct drm_i915_private *dev_priv = dev->dev_private; 1172 struct drm_i915_private *dev_priv = dev->dev_private;
1138 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
1139 uint8_t train_set[4]; 1173 uint8_t train_set[4];
1140 uint8_t link_status[DP_LINK_STATUS_SIZE]; 1174 uint8_t link_status[DP_LINK_STATUS_SIZE];
1141 int i; 1175 int i;
@@ -1145,13 +1179,15 @@ intel_dp_link_train(struct intel_encoder *intel_encoder, uint32_t DP,
1145 bool first = true; 1179 bool first = true;
1146 int tries; 1180 int tries;
1147 u32 reg; 1181 u32 reg;
1182 uint32_t DP = intel_dp->DP;
1148 1183
1149 /* Write the link configuration data */ 1184 /* Write the link configuration data */
1150 intel_dp_aux_native_write(intel_encoder, DP_LINK_BW_SET, 1185 intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
1151 link_configuration, DP_LINK_CONFIGURATION_SIZE); 1186 intel_dp->link_configuration,
1187 DP_LINK_CONFIGURATION_SIZE);
1152 1188
1153 DP |= DP_PORT_EN; 1189 DP |= DP_PORT_EN;
1154 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_encoder)) 1190 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1155 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1191 DP &= ~DP_LINK_TRAIN_MASK_CPT;
1156 else 1192 else
1157 DP &= ~DP_LINK_TRAIN_MASK; 1193 DP &= ~DP_LINK_TRAIN_MASK;
@@ -1162,39 +1198,39 @@ intel_dp_link_train(struct intel_encoder *intel_encoder, uint32_t DP,
1162 for (;;) { 1198 for (;;) {
1163 /* Use train_set[0] to set the voltage and pre emphasis values */ 1199 /* Use train_set[0] to set the voltage and pre emphasis values */
1164 uint32_t signal_levels; 1200 uint32_t signal_levels;
1165 if (IS_GEN6(dev) && IS_eDP(intel_encoder)) { 1201 if (IS_GEN6(dev) && IS_eDP(intel_dp)) {
1166 signal_levels = intel_gen6_edp_signal_levels(train_set[0]); 1202 signal_levels = intel_gen6_edp_signal_levels(train_set[0]);
1167 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1203 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1168 } else { 1204 } else {
1169 signal_levels = intel_dp_signal_levels(train_set[0], dp_priv->lane_count); 1205 signal_levels = intel_dp_signal_levels(train_set[0], intel_dp->lane_count);
1170 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1206 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1171 } 1207 }
1172 1208
1173 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_encoder)) 1209 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1174 reg = DP | DP_LINK_TRAIN_PAT_1_CPT; 1210 reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
1175 else 1211 else
1176 reg = DP | DP_LINK_TRAIN_PAT_1; 1212 reg = DP | DP_LINK_TRAIN_PAT_1;
1177 1213
1178 if (!intel_dp_set_link_train(intel_encoder, reg, 1214 if (!intel_dp_set_link_train(intel_dp, reg,
1179 DP_TRAINING_PATTERN_1, train_set, first)) 1215 DP_TRAINING_PATTERN_1, train_set, first))
1180 break; 1216 break;
1181 first = false; 1217 first = false;
1182 /* Set training pattern 1 */ 1218 /* Set training pattern 1 */
1183 1219
1184 udelay(100); 1220 udelay(100);
1185 if (!intel_dp_get_link_status(intel_encoder, link_status)) 1221 if (!intel_dp_get_link_status(intel_dp, link_status))
1186 break; 1222 break;
1187 1223
1188 if (intel_clock_recovery_ok(link_status, dp_priv->lane_count)) { 1224 if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
1189 clock_recovery = true; 1225 clock_recovery = true;
1190 break; 1226 break;
1191 } 1227 }
1192 1228
1193 /* Check to see if we've tried the max voltage */ 1229 /* Check to see if we've tried the max voltage */
1194 for (i = 0; i < dp_priv->lane_count; i++) 1230 for (i = 0; i < intel_dp->lane_count; i++)
1195 if ((train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 1231 if ((train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1196 break; 1232 break;
1197 if (i == dp_priv->lane_count) 1233 if (i == intel_dp->lane_count)
1198 break; 1234 break;
1199 1235
1200 /* Check to see if we've tried the same voltage 5 times */ 1236 /* Check to see if we've tried the same voltage 5 times */
@@ -1207,7 +1243,7 @@ intel_dp_link_train(struct intel_encoder *intel_encoder, uint32_t DP,
1207 voltage = train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 1243 voltage = train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1208 1244
1209 /* Compute new train_set as requested by target */ 1245 /* Compute new train_set as requested by target */
1210 intel_get_adjust_train(intel_encoder, link_status, dp_priv->lane_count, train_set); 1246 intel_get_adjust_train(intel_dp, link_status, intel_dp->lane_count, train_set);
1211 } 1247 }
1212 1248
1213 /* channel equalization */ 1249 /* channel equalization */
@@ -1217,30 +1253,30 @@ intel_dp_link_train(struct intel_encoder *intel_encoder, uint32_t DP,
1217 /* Use train_set[0] to set the voltage and pre emphasis values */ 1253 /* Use train_set[0] to set the voltage and pre emphasis values */
1218 uint32_t signal_levels; 1254 uint32_t signal_levels;
1219 1255
1220 if (IS_GEN6(dev) && IS_eDP(intel_encoder)) { 1256 if (IS_GEN6(dev) && IS_eDP(intel_dp)) {
1221 signal_levels = intel_gen6_edp_signal_levels(train_set[0]); 1257 signal_levels = intel_gen6_edp_signal_levels(train_set[0]);
1222 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1258 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1223 } else { 1259 } else {
1224 signal_levels = intel_dp_signal_levels(train_set[0], dp_priv->lane_count); 1260 signal_levels = intel_dp_signal_levels(train_set[0], intel_dp->lane_count);
1225 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1261 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1226 } 1262 }
1227 1263
1228 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_encoder)) 1264 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1229 reg = DP | DP_LINK_TRAIN_PAT_2_CPT; 1265 reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
1230 else 1266 else
1231 reg = DP | DP_LINK_TRAIN_PAT_2; 1267 reg = DP | DP_LINK_TRAIN_PAT_2;
1232 1268
1233 /* channel eq pattern */ 1269 /* channel eq pattern */
1234 if (!intel_dp_set_link_train(intel_encoder, reg, 1270 if (!intel_dp_set_link_train(intel_dp, reg,
1235 DP_TRAINING_PATTERN_2, train_set, 1271 DP_TRAINING_PATTERN_2, train_set,
1236 false)) 1272 false))
1237 break; 1273 break;
1238 1274
1239 udelay(400); 1275 udelay(400);
1240 if (!intel_dp_get_link_status(intel_encoder, link_status)) 1276 if (!intel_dp_get_link_status(intel_dp, link_status))
1241 break; 1277 break;
1242 1278
1243 if (intel_channel_eq_ok(link_status, dp_priv->lane_count)) { 1279 if (intel_channel_eq_ok(link_status, intel_dp->lane_count)) {
1244 channel_eq = true; 1280 channel_eq = true;
1245 break; 1281 break;
1246 } 1282 }
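The hunks above only change how intel_dp_link_train() reaches its state (struct intel_dp instead of intel_encoder->dev_priv); the training flow itself is unchanged: a clock-recovery loop driving training pattern 1, then a channel-equalization loop driving training pattern 2, with drive levels re-read from the sink each round. A condensed, illustrative sketch of that flow using the helpers visible in this diff; the pattern-select register bits, the voltage/tries bookkeeping and the CPT/eDP variants of the real loop are deliberately omitted:

static void intel_dp_train_sketch(struct intel_dp *intel_dp, uint32_t reg)
{
        uint8_t train_set[4] = { 0 };
        uint8_t link_status[DP_LINK_STATUS_SIZE];

        /* Phase 1: clock recovery on training pattern 1. */
        while (intel_dp_set_link_train(intel_dp, reg, DP_TRAINING_PATTERN_1,
                                       train_set, false)) {
                udelay(100);
                if (!intel_dp_get_link_status(intel_dp, link_status))
                        break;
                if (intel_clock_recovery_ok(link_status, intel_dp->lane_count))
                        break;
                /* Sink requests new voltage swing / pre-emphasis values. */
                intel_get_adjust_train(intel_dp, link_status,
                                       intel_dp->lane_count, train_set);
        }

        /* Phase 2: channel equalization on training pattern 2. */
        while (intel_dp_set_link_train(intel_dp, reg, DP_TRAINING_PATTERN_2,
                                       train_set, false)) {
                udelay(400);
                if (!intel_dp_get_link_status(intel_dp, link_status))
                        break;
                if (intel_channel_eq_ok(link_status, intel_dp->lane_count))
                        break;
                intel_get_adjust_train(intel_dp, link_status,
                                       intel_dp->lane_count, train_set);
        }
}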
@@ -1250,53 +1286,53 @@ intel_dp_link_train(struct intel_encoder *intel_encoder, uint32_t DP,
1250 break; 1286 break;
1251 1287
1252 /* Compute new train_set as requested by target */ 1288 /* Compute new train_set as requested by target */
1253 intel_get_adjust_train(intel_encoder, link_status, dp_priv->lane_count, train_set); 1289 intel_get_adjust_train(intel_dp, link_status, intel_dp->lane_count, train_set);
1254 ++tries; 1290 ++tries;
1255 } 1291 }
1256 1292
1257 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_encoder)) 1293 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1258 reg = DP | DP_LINK_TRAIN_OFF_CPT; 1294 reg = DP | DP_LINK_TRAIN_OFF_CPT;
1259 else 1295 else
1260 reg = DP | DP_LINK_TRAIN_OFF; 1296 reg = DP | DP_LINK_TRAIN_OFF;
1261 1297
1262 I915_WRITE(dp_priv->output_reg, reg); 1298 I915_WRITE(intel_dp->output_reg, reg);
1263 POSTING_READ(dp_priv->output_reg); 1299 POSTING_READ(intel_dp->output_reg);
1264 intel_dp_aux_native_write_1(intel_encoder, 1300 intel_dp_aux_native_write_1(intel_dp,
1265 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE); 1301 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1266} 1302}
1267 1303
1268static void 1304static void
1269intel_dp_link_down(struct intel_encoder *intel_encoder, uint32_t DP) 1305intel_dp_link_down(struct intel_dp *intel_dp)
1270{ 1306{
1271 struct drm_device *dev = intel_encoder->enc.dev; 1307 struct drm_device *dev = intel_dp->base.enc.dev;
1272 struct drm_i915_private *dev_priv = dev->dev_private; 1308 struct drm_i915_private *dev_priv = dev->dev_private;
1273 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 1309 uint32_t DP = intel_dp->DP;
1274 1310
1275 DRM_DEBUG_KMS("\n"); 1311 DRM_DEBUG_KMS("\n");
1276 1312
1277 if (IS_eDP(intel_encoder)) { 1313 if (IS_eDP(intel_dp)) {
1278 DP &= ~DP_PLL_ENABLE; 1314 DP &= ~DP_PLL_ENABLE;
1279 I915_WRITE(dp_priv->output_reg, DP); 1315 I915_WRITE(intel_dp->output_reg, DP);
1280 POSTING_READ(dp_priv->output_reg); 1316 POSTING_READ(intel_dp->output_reg);
1281 udelay(100); 1317 udelay(100);
1282 } 1318 }
1283 1319
1284 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_encoder)) { 1320 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp)) {
1285 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1321 DP &= ~DP_LINK_TRAIN_MASK_CPT;
1286 I915_WRITE(dp_priv->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT); 1322 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
1287 POSTING_READ(dp_priv->output_reg); 1323 POSTING_READ(intel_dp->output_reg);
1288 } else { 1324 } else {
1289 DP &= ~DP_LINK_TRAIN_MASK; 1325 DP &= ~DP_LINK_TRAIN_MASK;
1290 I915_WRITE(dp_priv->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE); 1326 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1291 POSTING_READ(dp_priv->output_reg); 1327 POSTING_READ(intel_dp->output_reg);
1292 } 1328 }
1293 1329
1294 udelay(17000); 1330 udelay(17000);
1295 1331
1296 if (IS_eDP(intel_encoder)) 1332 if (IS_eDP(intel_dp))
1297 DP |= DP_LINK_TRAIN_OFF; 1333 DP |= DP_LINK_TRAIN_OFF;
1298 I915_WRITE(dp_priv->output_reg, DP & ~DP_PORT_EN); 1334 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1299 POSTING_READ(dp_priv->output_reg); 1335 POSTING_READ(intel_dp->output_reg);
1300} 1336}
1301 1337
1302/* 1338/*
@@ -1309,41 +1345,39 @@ intel_dp_link_down(struct intel_encoder *intel_encoder, uint32_t DP)
1309 */ 1345 */
1310 1346
1311static void 1347static void
1312intel_dp_check_link_status(struct intel_encoder *intel_encoder) 1348intel_dp_check_link_status(struct intel_dp *intel_dp)
1313{ 1349{
1314 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
1315 uint8_t link_status[DP_LINK_STATUS_SIZE]; 1350 uint8_t link_status[DP_LINK_STATUS_SIZE];
1316 1351
1317 if (!intel_encoder->enc.crtc) 1352 if (!intel_dp->base.enc.crtc)
1318 return; 1353 return;
1319 1354
1320 if (!intel_dp_get_link_status(intel_encoder, link_status)) { 1355 if (!intel_dp_get_link_status(intel_dp, link_status)) {
1321 intel_dp_link_down(intel_encoder, dp_priv->DP); 1356 intel_dp_link_down(intel_dp);
1322 return; 1357 return;
1323 } 1358 }
1324 1359
1325 if (!intel_channel_eq_ok(link_status, dp_priv->lane_count)) 1360 if (!intel_channel_eq_ok(link_status, intel_dp->lane_count))
1326 intel_dp_link_train(intel_encoder, dp_priv->DP, dp_priv->link_configuration); 1361 intel_dp_link_train(intel_dp);
1327} 1362}
1328 1363
1329static enum drm_connector_status 1364static enum drm_connector_status
1330ironlake_dp_detect(struct drm_connector *connector) 1365ironlake_dp_detect(struct drm_connector *connector)
1331{ 1366{
1332 struct drm_encoder *encoder = intel_attached_encoder(connector); 1367 struct drm_encoder *encoder = intel_attached_encoder(connector);
1333 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1368 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1334 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
1335 enum drm_connector_status status; 1369 enum drm_connector_status status;
1336 1370
1337 status = connector_status_disconnected; 1371 status = connector_status_disconnected;
1338 if (intel_dp_aux_native_read(intel_encoder, 1372 if (intel_dp_aux_native_read(intel_dp,
1339 0x000, dp_priv->dpcd, 1373 0x000, intel_dp->dpcd,
1340 sizeof (dp_priv->dpcd)) == sizeof (dp_priv->dpcd)) 1374 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1341 { 1375 {
1342 if (dp_priv->dpcd[0] != 0) 1376 if (intel_dp->dpcd[0] != 0)
1343 status = connector_status_connected; 1377 status = connector_status_connected;
1344 } 1378 }
1345 DRM_DEBUG_KMS("DPCD: %hx%hx%hx%hx\n", dp_priv->dpcd[0], 1379 DRM_DEBUG_KMS("DPCD: %hx%hx%hx%hx\n", intel_dp->dpcd[0],
1346 dp_priv->dpcd[1], dp_priv->dpcd[2], dp_priv->dpcd[3]); 1380 intel_dp->dpcd[1], intel_dp->dpcd[2], intel_dp->dpcd[3]);
1347 return status; 1381 return status;
1348} 1382}
1349 1383
@@ -1357,19 +1391,18 @@ static enum drm_connector_status
1357intel_dp_detect(struct drm_connector *connector) 1391intel_dp_detect(struct drm_connector *connector)
1358{ 1392{
1359 struct drm_encoder *encoder = intel_attached_encoder(connector); 1393 struct drm_encoder *encoder = intel_attached_encoder(connector);
1360 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1394 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1361 struct drm_device *dev = intel_encoder->enc.dev; 1395 struct drm_device *dev = intel_dp->base.enc.dev;
1362 struct drm_i915_private *dev_priv = dev->dev_private; 1396 struct drm_i915_private *dev_priv = dev->dev_private;
1363 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
1364 uint32_t temp, bit; 1397 uint32_t temp, bit;
1365 enum drm_connector_status status; 1398 enum drm_connector_status status;
1366 1399
1367 dp_priv->has_audio = false; 1400 intel_dp->has_audio = false;
1368 1401
1369 if (HAS_PCH_SPLIT(dev)) 1402 if (HAS_PCH_SPLIT(dev))
1370 return ironlake_dp_detect(connector); 1403 return ironlake_dp_detect(connector);
1371 1404
1372 switch (dp_priv->output_reg) { 1405 switch (intel_dp->output_reg) {
1373 case DP_B: 1406 case DP_B:
1374 bit = DPB_HOTPLUG_INT_STATUS; 1407 bit = DPB_HOTPLUG_INT_STATUS;
1375 break; 1408 break;
@@ -1389,11 +1422,11 @@ intel_dp_detect(struct drm_connector *connector)
1389 return connector_status_disconnected; 1422 return connector_status_disconnected;
1390 1423
1391 status = connector_status_disconnected; 1424 status = connector_status_disconnected;
1392 if (intel_dp_aux_native_read(intel_encoder, 1425 if (intel_dp_aux_native_read(intel_dp,
1393 0x000, dp_priv->dpcd, 1426 0x000, intel_dp->dpcd,
1394 sizeof (dp_priv->dpcd)) == sizeof (dp_priv->dpcd)) 1427 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1395 { 1428 {
1396 if (dp_priv->dpcd[0] != 0) 1429 if (intel_dp->dpcd[0] != 0)
1397 status = connector_status_connected; 1430 status = connector_status_connected;
1398 } 1431 }
1399 return status; 1432 return status;
@@ -1402,18 +1435,17 @@ intel_dp_detect(struct drm_connector *connector)
1402static int intel_dp_get_modes(struct drm_connector *connector) 1435static int intel_dp_get_modes(struct drm_connector *connector)
1403{ 1436{
1404 struct drm_encoder *encoder = intel_attached_encoder(connector); 1437 struct drm_encoder *encoder = intel_attached_encoder(connector);
1405 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1438 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1406 struct drm_device *dev = intel_encoder->enc.dev; 1439 struct drm_device *dev = intel_dp->base.enc.dev;
1407 struct drm_i915_private *dev_priv = dev->dev_private; 1440 struct drm_i915_private *dev_priv = dev->dev_private;
1408 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv;
1409 int ret; 1441 int ret;
1410 1442
1411 /* We should parse the EDID data and find out if it has an audio sink 1443 /* We should parse the EDID data and find out if it has an audio sink
1412 */ 1444 */
1413 1445
1414 ret = intel_ddc_get_modes(connector, intel_encoder->ddc_bus); 1446 ret = intel_ddc_get_modes(connector, intel_dp->base.ddc_bus);
1415 if (ret) { 1447 if (ret) {
1416 if ((IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) && 1448 if ((IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) &&
1417 !dev_priv->panel_fixed_mode) { 1449 !dev_priv->panel_fixed_mode) {
1418 struct drm_display_mode *newmode; 1450 struct drm_display_mode *newmode;
1419 list_for_each_entry(newmode, &connector->probed_modes, 1451 list_for_each_entry(newmode, &connector->probed_modes,
@@ -1430,7 +1462,7 @@ static int intel_dp_get_modes(struct drm_connector *connector)
1430 } 1462 }
1431 1463
1432 /* if eDP has no EDID, try to use fixed panel mode from VBT */ 1464 /* if eDP has no EDID, try to use fixed panel mode from VBT */
1433 if (IS_eDP(intel_encoder) || IS_PCH_eDP(dp_priv)) { 1465 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) {
1434 if (dev_priv->panel_fixed_mode != NULL) { 1466 if (dev_priv->panel_fixed_mode != NULL) {
1435 struct drm_display_mode *mode; 1467 struct drm_display_mode *mode;
1436 mode = drm_mode_duplicate(dev, dev_priv->panel_fixed_mode); 1468 mode = drm_mode_duplicate(dev, dev_priv->panel_fixed_mode);
@@ -1452,9 +1484,9 @@ intel_dp_destroy (struct drm_connector *connector)
1452static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = { 1484static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
1453 .dpms = intel_dp_dpms, 1485 .dpms = intel_dp_dpms,
1454 .mode_fixup = intel_dp_mode_fixup, 1486 .mode_fixup = intel_dp_mode_fixup,
1455 .prepare = intel_encoder_prepare, 1487 .prepare = intel_dp_prepare,
1456 .mode_set = intel_dp_mode_set, 1488 .mode_set = intel_dp_mode_set,
1457 .commit = intel_encoder_commit, 1489 .commit = intel_dp_commit,
1458}; 1490};
1459 1491
1460static const struct drm_connector_funcs intel_dp_connector_funcs = { 1492static const struct drm_connector_funcs intel_dp_connector_funcs = {
@@ -1470,27 +1502,17 @@ static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs =
1470 .best_encoder = intel_attached_encoder, 1502 .best_encoder = intel_attached_encoder,
1471}; 1503};
1472 1504
1473static void intel_dp_enc_destroy(struct drm_encoder *encoder)
1474{
1475 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
1476
1477 if (intel_encoder->i2c_bus)
1478 intel_i2c_destroy(intel_encoder->i2c_bus);
1479 drm_encoder_cleanup(encoder);
1480 kfree(intel_encoder);
1481}
1482
1483static const struct drm_encoder_funcs intel_dp_enc_funcs = { 1505static const struct drm_encoder_funcs intel_dp_enc_funcs = {
1484 .destroy = intel_dp_enc_destroy, 1506 .destroy = intel_encoder_destroy,
1485}; 1507};
1486 1508
1487void 1509void
1488intel_dp_hot_plug(struct intel_encoder *intel_encoder) 1510intel_dp_hot_plug(struct intel_encoder *intel_encoder)
1489{ 1511{
1490 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 1512 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
1491 1513
1492 if (dp_priv->dpms_mode == DRM_MODE_DPMS_ON) 1514 if (intel_dp->dpms_mode == DRM_MODE_DPMS_ON)
1493 intel_dp_check_link_status(intel_encoder); 1515 intel_dp_check_link_status(intel_dp);
1494} 1516}
1495 1517
1496/* Return which DP Port should be selected for Transcoder DP control */ 1518/* Return which DP Port should be selected for Transcoder DP control */
@@ -1500,18 +1522,18 @@ intel_trans_dp_port_sel (struct drm_crtc *crtc)
1500 struct drm_device *dev = crtc->dev; 1522 struct drm_device *dev = crtc->dev;
1501 struct drm_mode_config *mode_config = &dev->mode_config; 1523 struct drm_mode_config *mode_config = &dev->mode_config;
1502 struct drm_encoder *encoder; 1524 struct drm_encoder *encoder;
1503 struct intel_encoder *intel_encoder = NULL;
1504 1525
1505 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 1526 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
1527 struct intel_dp *intel_dp;
1528
1506 if (encoder->crtc != crtc) 1529 if (encoder->crtc != crtc)
1507 continue; 1530 continue;
1508 1531
1509 intel_encoder = enc_to_intel_encoder(encoder); 1532 intel_dp = enc_to_intel_dp(encoder);
1510 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) { 1533 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
1511 struct intel_dp_priv *dp_priv = intel_encoder->dev_priv; 1534 return intel_dp->output_reg;
1512 return dp_priv->output_reg;
1513 }
1514 } 1535 }
1536
1515 return -1; 1537 return -1;
1516} 1538}
1517 1539
@@ -1540,30 +1562,28 @@ intel_dp_init(struct drm_device *dev, int output_reg)
1540{ 1562{
1541 struct drm_i915_private *dev_priv = dev->dev_private; 1563 struct drm_i915_private *dev_priv = dev->dev_private;
1542 struct drm_connector *connector; 1564 struct drm_connector *connector;
1565 struct intel_dp *intel_dp;
1543 struct intel_encoder *intel_encoder; 1566 struct intel_encoder *intel_encoder;
1544 struct intel_connector *intel_connector; 1567 struct intel_connector *intel_connector;
1545 struct intel_dp_priv *dp_priv;
1546 const char *name = NULL; 1568 const char *name = NULL;
1547 int type; 1569 int type;
1548 1570
1549 intel_encoder = kcalloc(sizeof(struct intel_encoder) + 1571 intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
1550 sizeof(struct intel_dp_priv), 1, GFP_KERNEL); 1572 if (!intel_dp)
1551 if (!intel_encoder)
1552 return; 1573 return;
1553 1574
1554 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); 1575 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
1555 if (!intel_connector) { 1576 if (!intel_connector) {
1556 kfree(intel_encoder); 1577 kfree(intel_dp);
1557 return; 1578 return;
1558 } 1579 }
1580 intel_encoder = &intel_dp->base;
1559 1581
1560 dp_priv = (struct intel_dp_priv *)(intel_encoder + 1); 1582 if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
1561
1562 if (HAS_PCH_SPLIT(dev) && (output_reg == PCH_DP_D))
1563 if (intel_dpd_is_edp(dev)) 1583 if (intel_dpd_is_edp(dev))
1564 dp_priv->is_pch_edp = true; 1584 intel_dp->is_pch_edp = true;
1565 1585
1566 if (output_reg == DP_A || IS_PCH_eDP(dp_priv)) { 1586 if (output_reg == DP_A || IS_PCH_eDP(intel_dp)) {
1567 type = DRM_MODE_CONNECTOR_eDP; 1587 type = DRM_MODE_CONNECTOR_eDP;
1568 intel_encoder->type = INTEL_OUTPUT_EDP; 1588 intel_encoder->type = INTEL_OUTPUT_EDP;
1569 } else { 1589 } else {
@@ -1584,18 +1604,16 @@ intel_dp_init(struct drm_device *dev, int output_reg)
1584 else if (output_reg == DP_D || output_reg == PCH_DP_D) 1604 else if (output_reg == DP_D || output_reg == PCH_DP_D)
1585 intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT); 1605 intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);
1586 1606
1587 if (IS_eDP(intel_encoder)) 1607 if (IS_eDP(intel_dp))
1588 intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT); 1608 intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
1589 1609
1590 intel_encoder->crtc_mask = (1 << 0) | (1 << 1); 1610 intel_encoder->crtc_mask = (1 << 0) | (1 << 1);
1591 connector->interlace_allowed = true; 1611 connector->interlace_allowed = true;
1592 connector->doublescan_allowed = 0; 1612 connector->doublescan_allowed = 0;
1593 1613
1594 dp_priv->intel_encoder = intel_encoder; 1614 intel_dp->output_reg = output_reg;
1595 dp_priv->output_reg = output_reg; 1615 intel_dp->has_audio = false;
1596 dp_priv->has_audio = false; 1616 intel_dp->dpms_mode = DRM_MODE_DPMS_ON;
1597 dp_priv->dpms_mode = DRM_MODE_DPMS_ON;
1598 intel_encoder->dev_priv = dp_priv;
1599 1617
1600 drm_encoder_init(dev, &intel_encoder->enc, &intel_dp_enc_funcs, 1618 drm_encoder_init(dev, &intel_encoder->enc, &intel_dp_enc_funcs,
1601 DRM_MODE_ENCODER_TMDS); 1619 DRM_MODE_ENCODER_TMDS);
@@ -1630,12 +1648,12 @@ intel_dp_init(struct drm_device *dev, int output_reg)
1630 break; 1648 break;
1631 } 1649 }
1632 1650
1633 intel_dp_i2c_init(intel_encoder, intel_connector, name); 1651 intel_dp_i2c_init(intel_dp, intel_connector, name);
1634 1652
1635 intel_encoder->ddc_bus = &dp_priv->adapter; 1653 intel_encoder->ddc_bus = &intel_dp->adapter;
1636 intel_encoder->hot_plug = intel_dp_hot_plug; 1654 intel_encoder->hot_plug = intel_dp_hot_plug;
1637 1655
1638 if (output_reg == DP_A || IS_PCH_eDP(dp_priv)) { 1656 if (output_reg == DP_A || IS_PCH_eDP(intel_dp)) {
1639 /* initialize panel mode from VBT if available for eDP */ 1657 /* initialize panel mode from VBT if available for eDP */
1640 if (dev_priv->lfp_lvds_vbt_mode) { 1658 if (dev_priv->lfp_lvds_vbt_mode) {
1641 dev_priv->panel_fixed_mode = 1659 dev_priv->panel_fixed_mode =
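Taken together, the intel_dp.c changes replace the kcalloc'd intel_encoder with a trailing intel_dp_priv by a single struct intel_dp that embeds the encoder, and every helper now recovers the containing struct with container_of(). A minimal sketch of the resulting layout; the field names come from this diff, but their exact types and ordering are abridged guesses:

struct intel_dp {
        struct intel_encoder base;      /* embedded base, recovered via container_of() */

        uint32_t output_reg;
        uint32_t DP;
        uint8_t  link_configuration[DP_LINK_CONFIGURATION_SIZE];
        bool     has_audio;
        int      dpms_mode;
        uint8_t  lane_count;
        uint8_t  dpcd[4];
        bool     is_pch_edp;
        struct i2c_adapter adapter;
};

static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
        /* Recover the containing struct from the embedded encoder. */
        return container_of(enc_to_intel_encoder(encoder),
                            struct intel_dp, base);
}

The same shape is used by the enc_to_intel_dvo() and enc_to_intel_hdmi() helpers added further down in this commit.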
diff --git a/drivers/gpu/drm/i915/intel_drv.h b/drivers/gpu/drm/i915/intel_drv.h
index b2190148703a..0e92aa07b382 100644
--- a/drivers/gpu/drm/i915/intel_drv.h
+++ b/drivers/gpu/drm/i915/intel_drv.h
@@ -32,6 +32,20 @@
32#include "drm_crtc.h" 32#include "drm_crtc.h"
33 33
34#include "drm_crtc_helper.h" 34#include "drm_crtc_helper.h"
35
36#define wait_for(COND, MS, W) ({ \
37 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
38 int ret__ = 0; \
39 while (! (COND)) { \
40 if (time_after(jiffies, timeout__)) { \
41 ret__ = -ETIMEDOUT; \
42 break; \
43 } \
44 if (W) msleep(W); \
45 } \
46 ret__; \
47})
48
35/* 49/*
36 * Display related stuff 50 * Display related stuff
37 */ 51 */
@@ -102,7 +116,6 @@ struct intel_encoder {
102 struct i2c_adapter *ddc_bus; 116 struct i2c_adapter *ddc_bus;
103 bool load_detect_temp; 117 bool load_detect_temp;
104 bool needs_tv_clock; 118 bool needs_tv_clock;
105 void *dev_priv;
106 void (*hot_plug)(struct intel_encoder *); 119 void (*hot_plug)(struct intel_encoder *);
107 int crtc_mask; 120 int crtc_mask;
108 int clone_mask; 121 int clone_mask;
@@ -110,7 +123,6 @@ struct intel_encoder {
110 123
111struct intel_connector { 124struct intel_connector {
112 struct drm_connector base; 125 struct drm_connector base;
113 void *dev_priv;
114}; 126};
115 127
116struct intel_crtc; 128struct intel_crtc;
@@ -156,7 +168,7 @@ struct intel_crtc {
156 uint32_t cursor_addr; 168 uint32_t cursor_addr;
157 int16_t cursor_x, cursor_y; 169 int16_t cursor_x, cursor_y;
158 int16_t cursor_width, cursor_height; 170 int16_t cursor_width, cursor_height;
159 bool cursor_visble; 171 bool cursor_visible, cursor_on;
160}; 172};
161 173
162#define to_intel_crtc(x) container_of(x, struct intel_crtc, base) 174#define to_intel_crtc(x) container_of(x, struct intel_crtc, base)
@@ -188,10 +200,18 @@ extern bool intel_dpd_is_edp(struct drm_device *dev);
188extern void intel_edp_link_config (struct intel_encoder *, int *, int *); 200extern void intel_edp_link_config (struct intel_encoder *, int *, int *);
189 201
190 202
203extern void intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
204 struct drm_display_mode *adjusted_mode);
205extern void intel_pch_panel_fitting(struct drm_device *dev,
206 int fitting_mode,
207 struct drm_display_mode *mode,
208 struct drm_display_mode *adjusted_mode);
209
191extern int intel_panel_fitter_pipe (struct drm_device *dev); 210extern int intel_panel_fitter_pipe (struct drm_device *dev);
192extern void intel_crtc_load_lut(struct drm_crtc *crtc); 211extern void intel_crtc_load_lut(struct drm_crtc *crtc);
193extern void intel_encoder_prepare (struct drm_encoder *encoder); 212extern void intel_encoder_prepare (struct drm_encoder *encoder);
194extern void intel_encoder_commit (struct drm_encoder *encoder); 213extern void intel_encoder_commit (struct drm_encoder *encoder);
214extern void intel_encoder_destroy(struct drm_encoder *encoder);
195 215
196extern struct drm_encoder *intel_attached_encoder(struct drm_connector *connector); 216extern struct drm_encoder *intel_attached_encoder(struct drm_connector *connector);
197 217
@@ -199,7 +219,8 @@ extern struct drm_display_mode *intel_crtc_mode_get(struct drm_device *dev,
199 struct drm_crtc *crtc); 219 struct drm_crtc *crtc);
200int intel_get_pipe_from_crtc_id(struct drm_device *dev, void *data, 220int intel_get_pipe_from_crtc_id(struct drm_device *dev, void *data,
201 struct drm_file *file_priv); 221 struct drm_file *file_priv);
202extern void intel_wait_for_vblank(struct drm_device *dev); 222extern void intel_wait_for_vblank_off(struct drm_device *dev, int pipe);
223extern void intel_wait_for_vblank(struct drm_device *dev, int pipe);
203extern struct drm_crtc *intel_get_crtc_from_pipe(struct drm_device *dev, int pipe); 224extern struct drm_crtc *intel_get_crtc_from_pipe(struct drm_device *dev, int pipe);
204extern struct drm_crtc *intel_get_load_detect_pipe(struct intel_encoder *intel_encoder, 225extern struct drm_crtc *intel_get_load_detect_pipe(struct intel_encoder *intel_encoder,
205 struct drm_connector *connector, 226 struct drm_connector *connector,
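Besides dropping the dev_priv back-pointers, intel_drv.h gains the wait_for(COND, MS, W) helper: it polls COND for up to MS milliseconds, optionally sleeping W ms between polls, and evaluates to 0 on success or -ETIMEDOUT on timeout. A hedged usage sketch; the register and message are illustrative, the real callers are the LVDS power-sequencing changes below:

        /* Wait up to a second for panel power to come up, with no sleep
         * between reads; report a timeout instead of spinning forever.
         */
        if (wait_for(I915_READ(PP_STATUS) & PP_ON, 1000, 0))
                DRM_ERROR("panel power sequencing timed out\n");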
diff --git a/drivers/gpu/drm/i915/intel_dvo.c b/drivers/gpu/drm/i915/intel_dvo.c
index 227feca7cf8d..a399f4b2c1c5 100644
--- a/drivers/gpu/drm/i915/intel_dvo.c
+++ b/drivers/gpu/drm/i915/intel_dvo.c
@@ -38,7 +38,7 @@
38#define CH7xxx_ADDR 0x76 38#define CH7xxx_ADDR 0x76
39#define TFP410_ADDR 0x38 39#define TFP410_ADDR 0x38
40 40
41static struct intel_dvo_device intel_dvo_devices[] = { 41static const struct intel_dvo_device intel_dvo_devices[] = {
42 { 42 {
43 .type = INTEL_DVO_CHIP_TMDS, 43 .type = INTEL_DVO_CHIP_TMDS,
44 .name = "sil164", 44 .name = "sil164",
@@ -77,20 +77,33 @@ static struct intel_dvo_device intel_dvo_devices[] = {
77 } 77 }
78}; 78};
79 79
80struct intel_dvo {
81 struct intel_encoder base;
82
83 struct intel_dvo_device dev;
84
85 struct drm_display_mode *panel_fixed_mode;
86 bool panel_wants_dither;
87};
88
89static struct intel_dvo *enc_to_intel_dvo(struct drm_encoder *encoder)
90{
91 return container_of(enc_to_intel_encoder(encoder), struct intel_dvo, base);
92}
93
80static void intel_dvo_dpms(struct drm_encoder *encoder, int mode) 94static void intel_dvo_dpms(struct drm_encoder *encoder, int mode)
81{ 95{
82 struct drm_i915_private *dev_priv = encoder->dev->dev_private; 96 struct drm_i915_private *dev_priv = encoder->dev->dev_private;
83 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 97 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
84 struct intel_dvo_device *dvo = intel_encoder->dev_priv; 98 u32 dvo_reg = intel_dvo->dev.dvo_reg;
85 u32 dvo_reg = dvo->dvo_reg;
86 u32 temp = I915_READ(dvo_reg); 99 u32 temp = I915_READ(dvo_reg);
87 100
88 if (mode == DRM_MODE_DPMS_ON) { 101 if (mode == DRM_MODE_DPMS_ON) {
89 I915_WRITE(dvo_reg, temp | DVO_ENABLE); 102 I915_WRITE(dvo_reg, temp | DVO_ENABLE);
90 I915_READ(dvo_reg); 103 I915_READ(dvo_reg);
91 dvo->dev_ops->dpms(dvo, mode); 104 intel_dvo->dev.dev_ops->dpms(&intel_dvo->dev, mode);
92 } else { 105 } else {
93 dvo->dev_ops->dpms(dvo, mode); 106 intel_dvo->dev.dev_ops->dpms(&intel_dvo->dev, mode);
94 I915_WRITE(dvo_reg, temp & ~DVO_ENABLE); 107 I915_WRITE(dvo_reg, temp & ~DVO_ENABLE);
95 I915_READ(dvo_reg); 108 I915_READ(dvo_reg);
96 } 109 }
@@ -100,38 +113,36 @@ static int intel_dvo_mode_valid(struct drm_connector *connector,
100 struct drm_display_mode *mode) 113 struct drm_display_mode *mode)
101{ 114{
102 struct drm_encoder *encoder = intel_attached_encoder(connector); 115 struct drm_encoder *encoder = intel_attached_encoder(connector);
103 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 116 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
104 struct intel_dvo_device *dvo = intel_encoder->dev_priv;
105 117
106 if (mode->flags & DRM_MODE_FLAG_DBLSCAN) 118 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
107 return MODE_NO_DBLESCAN; 119 return MODE_NO_DBLESCAN;
108 120
109 /* XXX: Validate clock range */ 121 /* XXX: Validate clock range */
110 122
111 if (dvo->panel_fixed_mode) { 123 if (intel_dvo->panel_fixed_mode) {
112 if (mode->hdisplay > dvo->panel_fixed_mode->hdisplay) 124 if (mode->hdisplay > intel_dvo->panel_fixed_mode->hdisplay)
113 return MODE_PANEL; 125 return MODE_PANEL;
114 if (mode->vdisplay > dvo->panel_fixed_mode->vdisplay) 126 if (mode->vdisplay > intel_dvo->panel_fixed_mode->vdisplay)
115 return MODE_PANEL; 127 return MODE_PANEL;
116 } 128 }
117 129
118 return dvo->dev_ops->mode_valid(dvo, mode); 130 return intel_dvo->dev.dev_ops->mode_valid(&intel_dvo->dev, mode);
119} 131}
120 132
121static bool intel_dvo_mode_fixup(struct drm_encoder *encoder, 133static bool intel_dvo_mode_fixup(struct drm_encoder *encoder,
122 struct drm_display_mode *mode, 134 struct drm_display_mode *mode,
123 struct drm_display_mode *adjusted_mode) 135 struct drm_display_mode *adjusted_mode)
124{ 136{
125 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 137 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
126 struct intel_dvo_device *dvo = intel_encoder->dev_priv;
127 138
128 /* If we have timings from the BIOS for the panel, put them in 139 /* If we have timings from the BIOS for the panel, put them in
129 * to the adjusted mode. The CRTC will be set up for this mode, 140 * to the adjusted mode. The CRTC will be set up for this mode,
130 * with the panel scaling set up to source from the H/VDisplay 141 * with the panel scaling set up to source from the H/VDisplay
131 * of the original mode. 142 * of the original mode.
132 */ 143 */
133 if (dvo->panel_fixed_mode != NULL) { 144 if (intel_dvo->panel_fixed_mode != NULL) {
134#define C(x) adjusted_mode->x = dvo->panel_fixed_mode->x 145#define C(x) adjusted_mode->x = intel_dvo->panel_fixed_mode->x
135 C(hdisplay); 146 C(hdisplay);
136 C(hsync_start); 147 C(hsync_start);
137 C(hsync_end); 148 C(hsync_end);
@@ -145,8 +156,8 @@ static bool intel_dvo_mode_fixup(struct drm_encoder *encoder,
145#undef C 156#undef C
146 } 157 }
147 158
148 if (dvo->dev_ops->mode_fixup) 159 if (intel_dvo->dev.dev_ops->mode_fixup)
149 return dvo->dev_ops->mode_fixup(dvo, mode, adjusted_mode); 160 return intel_dvo->dev.dev_ops->mode_fixup(&intel_dvo->dev, mode, adjusted_mode);
150 161
151 return true; 162 return true;
152} 163}
@@ -158,11 +169,10 @@ static void intel_dvo_mode_set(struct drm_encoder *encoder,
158 struct drm_device *dev = encoder->dev; 169 struct drm_device *dev = encoder->dev;
159 struct drm_i915_private *dev_priv = dev->dev_private; 170 struct drm_i915_private *dev_priv = dev->dev_private;
160 struct intel_crtc *intel_crtc = to_intel_crtc(encoder->crtc); 171 struct intel_crtc *intel_crtc = to_intel_crtc(encoder->crtc);
161 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 172 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
162 struct intel_dvo_device *dvo = intel_encoder->dev_priv;
163 int pipe = intel_crtc->pipe; 173 int pipe = intel_crtc->pipe;
164 u32 dvo_val; 174 u32 dvo_val;
165 u32 dvo_reg = dvo->dvo_reg, dvo_srcdim_reg; 175 u32 dvo_reg = intel_dvo->dev.dvo_reg, dvo_srcdim_reg;
166 int dpll_reg = (pipe == 0) ? DPLL_A : DPLL_B; 176 int dpll_reg = (pipe == 0) ? DPLL_A : DPLL_B;
167 177
168 switch (dvo_reg) { 178 switch (dvo_reg) {
@@ -178,7 +188,7 @@ static void intel_dvo_mode_set(struct drm_encoder *encoder,
178 break; 188 break;
179 } 189 }
180 190
181 dvo->dev_ops->mode_set(dvo, mode, adjusted_mode); 191 intel_dvo->dev.dev_ops->mode_set(&intel_dvo->dev, mode, adjusted_mode);
182 192
183 /* Save the data order, since I don't know what it should be set to. */ 193 /* Save the data order, since I don't know what it should be set to. */
184 dvo_val = I915_READ(dvo_reg) & 194 dvo_val = I915_READ(dvo_reg) &
@@ -214,40 +224,38 @@ static void intel_dvo_mode_set(struct drm_encoder *encoder,
214static enum drm_connector_status intel_dvo_detect(struct drm_connector *connector) 224static enum drm_connector_status intel_dvo_detect(struct drm_connector *connector)
215{ 225{
216 struct drm_encoder *encoder = intel_attached_encoder(connector); 226 struct drm_encoder *encoder = intel_attached_encoder(connector);
217 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 227 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
218 struct intel_dvo_device *dvo = intel_encoder->dev_priv;
219 228
220 return dvo->dev_ops->detect(dvo); 229 return intel_dvo->dev.dev_ops->detect(&intel_dvo->dev);
221} 230}
222 231
223static int intel_dvo_get_modes(struct drm_connector *connector) 232static int intel_dvo_get_modes(struct drm_connector *connector)
224{ 233{
225 struct drm_encoder *encoder = intel_attached_encoder(connector); 234 struct drm_encoder *encoder = intel_attached_encoder(connector);
226 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 235 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
227 struct intel_dvo_device *dvo = intel_encoder->dev_priv;
228 236
229 /* We should probably have an i2c driver get_modes function for those 237 /* We should probably have an i2c driver get_modes function for those
230 * devices which will have a fixed set of modes determined by the chip 238 * devices which will have a fixed set of modes determined by the chip
231 * (TV-out, for example), but for now with just TMDS and LVDS, 239 * (TV-out, for example), but for now with just TMDS and LVDS,
232 * that's not the case. 240 * that's not the case.
233 */ 241 */
234 intel_ddc_get_modes(connector, intel_encoder->ddc_bus); 242 intel_ddc_get_modes(connector, intel_dvo->base.ddc_bus);
235 if (!list_empty(&connector->probed_modes)) 243 if (!list_empty(&connector->probed_modes))
236 return 1; 244 return 1;
237 245
238 246 if (intel_dvo->panel_fixed_mode != NULL) {
239 if (dvo->panel_fixed_mode != NULL) {
240 struct drm_display_mode *mode; 247 struct drm_display_mode *mode;
241 mode = drm_mode_duplicate(connector->dev, dvo->panel_fixed_mode); 248 mode = drm_mode_duplicate(connector->dev, intel_dvo->panel_fixed_mode);
242 if (mode) { 249 if (mode) {
243 drm_mode_probed_add(connector, mode); 250 drm_mode_probed_add(connector, mode);
244 return 1; 251 return 1;
245 } 252 }
246 } 253 }
254
247 return 0; 255 return 0;
248} 256}
249 257
250static void intel_dvo_destroy (struct drm_connector *connector) 258static void intel_dvo_destroy(struct drm_connector *connector)
251{ 259{
252 drm_sysfs_connector_remove(connector); 260 drm_sysfs_connector_remove(connector);
253 drm_connector_cleanup(connector); 261 drm_connector_cleanup(connector);
@@ -277,28 +285,20 @@ static const struct drm_connector_helper_funcs intel_dvo_connector_helper_funcs
277 285
278static void intel_dvo_enc_destroy(struct drm_encoder *encoder) 286static void intel_dvo_enc_destroy(struct drm_encoder *encoder)
279{ 287{
280 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 288 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
281 struct intel_dvo_device *dvo = intel_encoder->dev_priv; 289
282 290 if (intel_dvo->dev.dev_ops->destroy)
283 if (dvo) { 291 intel_dvo->dev.dev_ops->destroy(&intel_dvo->dev);
284 if (dvo->dev_ops->destroy) 292
285 dvo->dev_ops->destroy(dvo); 293 kfree(intel_dvo->panel_fixed_mode);
286 if (dvo->panel_fixed_mode) 294
287 kfree(dvo->panel_fixed_mode); 295 intel_encoder_destroy(encoder);
288 }
289 if (intel_encoder->i2c_bus)
290 intel_i2c_destroy(intel_encoder->i2c_bus);
291 if (intel_encoder->ddc_bus)
292 intel_i2c_destroy(intel_encoder->ddc_bus);
293 drm_encoder_cleanup(encoder);
294 kfree(intel_encoder);
295} 296}
296 297
297static const struct drm_encoder_funcs intel_dvo_enc_funcs = { 298static const struct drm_encoder_funcs intel_dvo_enc_funcs = {
298 .destroy = intel_dvo_enc_destroy, 299 .destroy = intel_dvo_enc_destroy,
299}; 300};
300 301
301
302/** 302/**
303 * Attempts to get a fixed panel timing for LVDS (currently only the i830). 303 * Attempts to get a fixed panel timing for LVDS (currently only the i830).
304 * 304 *
@@ -306,15 +306,13 @@ static const struct drm_encoder_funcs intel_dvo_enc_funcs = {
306 * chip being on DVOB/C and having multiple pipes. 306 * chip being on DVOB/C and having multiple pipes.
307 */ 307 */
308static struct drm_display_mode * 308static struct drm_display_mode *
309intel_dvo_get_current_mode (struct drm_connector *connector) 309intel_dvo_get_current_mode(struct drm_connector *connector)
310{ 310{
311 struct drm_device *dev = connector->dev; 311 struct drm_device *dev = connector->dev;
312 struct drm_i915_private *dev_priv = dev->dev_private; 312 struct drm_i915_private *dev_priv = dev->dev_private;
313 struct drm_encoder *encoder = intel_attached_encoder(connector); 313 struct drm_encoder *encoder = intel_attached_encoder(connector);
314 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 314 struct intel_dvo *intel_dvo = enc_to_intel_dvo(encoder);
315 struct intel_dvo_device *dvo = intel_encoder->dev_priv; 315 uint32_t dvo_val = I915_READ(intel_dvo->dev.dvo_reg);
316 uint32_t dvo_reg = dvo->dvo_reg;
317 uint32_t dvo_val = I915_READ(dvo_reg);
318 struct drm_display_mode *mode = NULL; 316 struct drm_display_mode *mode = NULL;
319 317
320 /* If the DVO port is active, that'll be the LVDS, so we can pull out 318 /* If the DVO port is active, that'll be the LVDS, so we can pull out
@@ -327,7 +325,6 @@ intel_dvo_get_current_mode (struct drm_connector *connector)
327 crtc = intel_get_crtc_from_pipe(dev, pipe); 325 crtc = intel_get_crtc_from_pipe(dev, pipe);
328 if (crtc) { 326 if (crtc) {
329 mode = intel_crtc_mode_get(dev, crtc); 327 mode = intel_crtc_mode_get(dev, crtc);
330
331 if (mode) { 328 if (mode) {
332 mode->type |= DRM_MODE_TYPE_PREFERRED; 329 mode->type |= DRM_MODE_TYPE_PREFERRED;
333 if (dvo_val & DVO_HSYNC_ACTIVE_HIGH) 330 if (dvo_val & DVO_HSYNC_ACTIVE_HIGH)
@@ -337,28 +334,32 @@ intel_dvo_get_current_mode (struct drm_connector *connector)
337 } 334 }
338 } 335 }
339 } 336 }
337
340 return mode; 338 return mode;
341} 339}
342 340
343void intel_dvo_init(struct drm_device *dev) 341void intel_dvo_init(struct drm_device *dev)
344{ 342{
345 struct intel_encoder *intel_encoder; 343 struct intel_encoder *intel_encoder;
344 struct intel_dvo *intel_dvo;
346 struct intel_connector *intel_connector; 345 struct intel_connector *intel_connector;
347 struct intel_dvo_device *dvo;
348 struct i2c_adapter *i2cbus = NULL; 346 struct i2c_adapter *i2cbus = NULL;
349 int ret = 0; 347 int ret = 0;
350 int i; 348 int i;
351 int encoder_type = DRM_MODE_ENCODER_NONE; 349 int encoder_type = DRM_MODE_ENCODER_NONE;
352 intel_encoder = kzalloc (sizeof(struct intel_encoder), GFP_KERNEL); 350
353 if (!intel_encoder) 351 intel_dvo = kzalloc(sizeof(struct intel_dvo), GFP_KERNEL);
352 if (!intel_dvo)
354 return; 353 return;
355 354
356 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); 355 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
357 if (!intel_connector) { 356 if (!intel_connector) {
358 kfree(intel_encoder); 357 kfree(intel_dvo);
359 return; 358 return;
360 } 359 }
361 360
361 intel_encoder = &intel_dvo->base;
362
362 /* Set up the DDC bus */ 363 /* Set up the DDC bus */
363 intel_encoder->ddc_bus = intel_i2c_create(dev, GPIOD, "DVODDC_D"); 364 intel_encoder->ddc_bus = intel_i2c_create(dev, GPIOD, "DVODDC_D");
364 if (!intel_encoder->ddc_bus) 365 if (!intel_encoder->ddc_bus)
@@ -367,10 +368,9 @@ void intel_dvo_init(struct drm_device *dev)
367 /* Now, try to find a controller */ 368 /* Now, try to find a controller */
368 for (i = 0; i < ARRAY_SIZE(intel_dvo_devices); i++) { 369 for (i = 0; i < ARRAY_SIZE(intel_dvo_devices); i++) {
369 struct drm_connector *connector = &intel_connector->base; 370 struct drm_connector *connector = &intel_connector->base;
371 const struct intel_dvo_device *dvo = &intel_dvo_devices[i];
370 int gpio; 372 int gpio;
371 373
372 dvo = &intel_dvo_devices[i];
373
374 /* Allow the I2C driver info to specify the GPIO to be used in 374 /* Allow the I2C driver info to specify the GPIO to be used in
375 * special cases, but otherwise default to what's defined 375 * special cases, but otherwise default to what's defined
376 * in the spec. 376 * in the spec.
@@ -393,11 +393,8 @@ void intel_dvo_init(struct drm_device *dev)
393 continue; 393 continue;
394 } 394 }
395 395
396 if (dvo->dev_ops!= NULL) 396 intel_dvo->dev = *dvo;
397 ret = dvo->dev_ops->init(dvo, i2cbus); 397 ret = dvo->dev_ops->init(&intel_dvo->dev, i2cbus);
398 else
399 ret = false;
400
401 if (!ret) 398 if (!ret)
402 continue; 399 continue;
403 400
@@ -429,9 +426,6 @@ void intel_dvo_init(struct drm_device *dev)
429 connector->interlace_allowed = false; 426 connector->interlace_allowed = false;
430 connector->doublescan_allowed = false; 427 connector->doublescan_allowed = false;
431 428
432 intel_encoder->dev_priv = dvo;
433 intel_encoder->i2c_bus = i2cbus;
434
435 drm_encoder_init(dev, &intel_encoder->enc, 429 drm_encoder_init(dev, &intel_encoder->enc,
436 &intel_dvo_enc_funcs, encoder_type); 430 &intel_dvo_enc_funcs, encoder_type);
437 drm_encoder_helper_add(&intel_encoder->enc, 431 drm_encoder_helper_add(&intel_encoder->enc,
@@ -447,9 +441,9 @@ void intel_dvo_init(struct drm_device *dev)
447 * headers, likely), so for now, just get the current 441 * headers, likely), so for now, just get the current
448 * mode being output through DVO. 442 * mode being output through DVO.
449 */ 443 */
450 dvo->panel_fixed_mode = 444 intel_dvo->panel_fixed_mode =
451 intel_dvo_get_current_mode(connector); 445 intel_dvo_get_current_mode(connector);
452 dvo->panel_wants_dither = true; 446 intel_dvo->panel_wants_dither = true;
453 } 447 }
454 448
455 drm_sysfs_connector_add(connector); 449 drm_sysfs_connector_add(connector);
@@ -461,6 +455,6 @@ void intel_dvo_init(struct drm_device *dev)
461 if (i2cbus != NULL) 455 if (i2cbus != NULL)
462 intel_i2c_destroy(i2cbus); 456 intel_i2c_destroy(i2cbus);
463free_intel: 457free_intel:
464 kfree(intel_encoder); 458 kfree(intel_dvo);
465 kfree(intel_connector); 459 kfree(intel_connector);
466} 460}
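intel_dvo.c gets the same treatment: struct intel_dvo embeds the encoder plus a by-value copy of the probed intel_dvo_device, which is what lets the intel_dvo_devices[] table become const. An abridged sketch of the probe step as it looks after this change; error handling and the connector setup follow the loop shown above:

        for (i = 0; i < ARRAY_SIZE(intel_dvo_devices); i++) {
                const struct intel_dvo_device *dvo = &intel_dvo_devices[i];

                /* Copy the const table entry, then initialise the copy. */
                intel_dvo->dev = *dvo;
                if (!dvo->dev_ops->init(&intel_dvo->dev, i2cbus))
                        continue;       /* not this controller, try the next */

                /* Controller found: register the encoder/connector as above. */
                break;
        }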
diff --git a/drivers/gpu/drm/i915/intel_hdmi.c b/drivers/gpu/drm/i915/intel_hdmi.c
index 197887ed1823..ccd4c97e6524 100644
--- a/drivers/gpu/drm/i915/intel_hdmi.c
+++ b/drivers/gpu/drm/i915/intel_hdmi.c
@@ -37,11 +37,17 @@
37#include "i915_drm.h" 37#include "i915_drm.h"
38#include "i915_drv.h" 38#include "i915_drv.h"
39 39
40struct intel_hdmi_priv { 40struct intel_hdmi {
41 struct intel_encoder base;
41 u32 sdvox_reg; 42 u32 sdvox_reg;
42 bool has_hdmi_sink; 43 bool has_hdmi_sink;
43}; 44};
44 45
46static struct intel_hdmi *enc_to_intel_hdmi(struct drm_encoder *encoder)
47{
48 return container_of(enc_to_intel_encoder(encoder), struct intel_hdmi, base);
49}
50
45static void intel_hdmi_mode_set(struct drm_encoder *encoder, 51static void intel_hdmi_mode_set(struct drm_encoder *encoder,
46 struct drm_display_mode *mode, 52 struct drm_display_mode *mode,
47 struct drm_display_mode *adjusted_mode) 53 struct drm_display_mode *adjusted_mode)
@@ -50,8 +56,7 @@ static void intel_hdmi_mode_set(struct drm_encoder *encoder,
50 struct drm_i915_private *dev_priv = dev->dev_private; 56 struct drm_i915_private *dev_priv = dev->dev_private;
51 struct drm_crtc *crtc = encoder->crtc; 57 struct drm_crtc *crtc = encoder->crtc;
52 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 58 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
53 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 59 struct intel_hdmi *intel_hdmi = enc_to_intel_hdmi(encoder);
54 struct intel_hdmi_priv *hdmi_priv = intel_encoder->dev_priv;
55 u32 sdvox; 60 u32 sdvox;
56 61
57 sdvox = SDVO_ENCODING_HDMI | SDVO_BORDER_ENABLE; 62 sdvox = SDVO_ENCODING_HDMI | SDVO_BORDER_ENABLE;
@@ -60,7 +65,7 @@ static void intel_hdmi_mode_set(struct drm_encoder *encoder,
60 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 65 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
61 sdvox |= SDVO_HSYNC_ACTIVE_HIGH; 66 sdvox |= SDVO_HSYNC_ACTIVE_HIGH;
62 67
63 if (hdmi_priv->has_hdmi_sink) { 68 if (intel_hdmi->has_hdmi_sink) {
64 sdvox |= SDVO_AUDIO_ENABLE; 69 sdvox |= SDVO_AUDIO_ENABLE;
65 if (HAS_PCH_CPT(dev)) 70 if (HAS_PCH_CPT(dev))
66 sdvox |= HDMI_MODE_SELECT; 71 sdvox |= HDMI_MODE_SELECT;
@@ -73,26 +78,25 @@ static void intel_hdmi_mode_set(struct drm_encoder *encoder,
73 sdvox |= SDVO_PIPE_B_SELECT; 78 sdvox |= SDVO_PIPE_B_SELECT;
74 } 79 }
75 80
76 I915_WRITE(hdmi_priv->sdvox_reg, sdvox); 81 I915_WRITE(intel_hdmi->sdvox_reg, sdvox);
77 POSTING_READ(hdmi_priv->sdvox_reg); 82 POSTING_READ(intel_hdmi->sdvox_reg);
78} 83}
79 84
80static void intel_hdmi_dpms(struct drm_encoder *encoder, int mode) 85static void intel_hdmi_dpms(struct drm_encoder *encoder, int mode)
81{ 86{
82 struct drm_device *dev = encoder->dev; 87 struct drm_device *dev = encoder->dev;
83 struct drm_i915_private *dev_priv = dev->dev_private; 88 struct drm_i915_private *dev_priv = dev->dev_private;
84 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 89 struct intel_hdmi *intel_hdmi = enc_to_intel_hdmi(encoder);
85 struct intel_hdmi_priv *hdmi_priv = intel_encoder->dev_priv;
86 u32 temp; 90 u32 temp;
87 91
88 temp = I915_READ(hdmi_priv->sdvox_reg); 92 temp = I915_READ(intel_hdmi->sdvox_reg);
89 93
90 /* HW workaround, need to toggle enable bit off and on for 12bpc, but 94 /* HW workaround, need to toggle enable bit off and on for 12bpc, but
91 * we do this anyway which shows more stable in testing. 95 * we do this anyway which shows more stable in testing.
92 */ 96 */
93 if (HAS_PCH_SPLIT(dev)) { 97 if (HAS_PCH_SPLIT(dev)) {
94 I915_WRITE(hdmi_priv->sdvox_reg, temp & ~SDVO_ENABLE); 98 I915_WRITE(intel_hdmi->sdvox_reg, temp & ~SDVO_ENABLE);
95 POSTING_READ(hdmi_priv->sdvox_reg); 99 POSTING_READ(intel_hdmi->sdvox_reg);
96 } 100 }
97 101
98 if (mode != DRM_MODE_DPMS_ON) { 102 if (mode != DRM_MODE_DPMS_ON) {
@@ -101,15 +105,15 @@ static void intel_hdmi_dpms(struct drm_encoder *encoder, int mode)
101 temp |= SDVO_ENABLE; 105 temp |= SDVO_ENABLE;
102 } 106 }
103 107
104 I915_WRITE(hdmi_priv->sdvox_reg, temp); 108 I915_WRITE(intel_hdmi->sdvox_reg, temp);
105 POSTING_READ(hdmi_priv->sdvox_reg); 109 POSTING_READ(intel_hdmi->sdvox_reg);
106 110
107 /* HW workaround, need to write this twice for issue that may result 111 /* HW workaround, need to write this twice for issue that may result
108 * in first write getting masked. 112 * in first write getting masked.
109 */ 113 */
110 if (HAS_PCH_SPLIT(dev)) { 114 if (HAS_PCH_SPLIT(dev)) {
111 I915_WRITE(hdmi_priv->sdvox_reg, temp); 115 I915_WRITE(intel_hdmi->sdvox_reg, temp);
112 POSTING_READ(hdmi_priv->sdvox_reg); 116 POSTING_READ(intel_hdmi->sdvox_reg);
113 } 117 }
114} 118}
115 119
@@ -138,19 +142,17 @@ static enum drm_connector_status
138intel_hdmi_detect(struct drm_connector *connector) 142intel_hdmi_detect(struct drm_connector *connector)
139{ 143{
140 struct drm_encoder *encoder = intel_attached_encoder(connector); 144 struct drm_encoder *encoder = intel_attached_encoder(connector);
141 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 145 struct intel_hdmi *intel_hdmi = enc_to_intel_hdmi(encoder);
142 struct intel_hdmi_priv *hdmi_priv = intel_encoder->dev_priv;
143 struct edid *edid = NULL; 146 struct edid *edid = NULL;
144 enum drm_connector_status status = connector_status_disconnected; 147 enum drm_connector_status status = connector_status_disconnected;
145 148
146 hdmi_priv->has_hdmi_sink = false; 149 intel_hdmi->has_hdmi_sink = false;
147 edid = drm_get_edid(connector, 150 edid = drm_get_edid(connector, intel_hdmi->base.ddc_bus);
148 intel_encoder->ddc_bus);
149 151
150 if (edid) { 152 if (edid) {
151 if (edid->input & DRM_EDID_INPUT_DIGITAL) { 153 if (edid->input & DRM_EDID_INPUT_DIGITAL) {
152 status = connector_status_connected; 154 status = connector_status_connected;
153 hdmi_priv->has_hdmi_sink = drm_detect_hdmi_monitor(edid); 155 intel_hdmi->has_hdmi_sink = drm_detect_hdmi_monitor(edid);
154 } 156 }
155 connector->display_info.raw_edid = NULL; 157 connector->display_info.raw_edid = NULL;
156 kfree(edid); 158 kfree(edid);
@@ -162,13 +164,13 @@ intel_hdmi_detect(struct drm_connector *connector)
162static int intel_hdmi_get_modes(struct drm_connector *connector) 164static int intel_hdmi_get_modes(struct drm_connector *connector)
163{ 165{
164 struct drm_encoder *encoder = intel_attached_encoder(connector); 166 struct drm_encoder *encoder = intel_attached_encoder(connector);
165 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 167 struct intel_hdmi *intel_hdmi = enc_to_intel_hdmi(encoder);
166 168
167 /* We should parse the EDID data and find out if it's an HDMI sink so 169 /* We should parse the EDID data and find out if it's an HDMI sink so
168 * we can send audio to it. 170 * we can send audio to it.
169 */ 171 */
170 172
171 return intel_ddc_get_modes(connector, intel_encoder->ddc_bus); 173 return intel_ddc_get_modes(connector, intel_hdmi->base.ddc_bus);
172} 174}
173 175
174static void intel_hdmi_destroy(struct drm_connector *connector) 176static void intel_hdmi_destroy(struct drm_connector *connector)
@@ -199,18 +201,8 @@ static const struct drm_connector_helper_funcs intel_hdmi_connector_helper_funcs
199 .best_encoder = intel_attached_encoder, 201 .best_encoder = intel_attached_encoder,
200}; 202};
201 203
202static void intel_hdmi_enc_destroy(struct drm_encoder *encoder)
203{
204 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
205
206 if (intel_encoder->i2c_bus)
207 intel_i2c_destroy(intel_encoder->i2c_bus);
208 drm_encoder_cleanup(encoder);
209 kfree(intel_encoder);
210}
211
212static const struct drm_encoder_funcs intel_hdmi_enc_funcs = { 204static const struct drm_encoder_funcs intel_hdmi_enc_funcs = {
213 .destroy = intel_hdmi_enc_destroy, 205 .destroy = intel_encoder_destroy,
214}; 206};
215 207
216void intel_hdmi_init(struct drm_device *dev, int sdvox_reg) 208void intel_hdmi_init(struct drm_device *dev, int sdvox_reg)
@@ -219,21 +211,19 @@ void intel_hdmi_init(struct drm_device *dev, int sdvox_reg)
219 struct drm_connector *connector; 211 struct drm_connector *connector;
220 struct intel_encoder *intel_encoder; 212 struct intel_encoder *intel_encoder;
221 struct intel_connector *intel_connector; 213 struct intel_connector *intel_connector;
222 struct intel_hdmi_priv *hdmi_priv; 214 struct intel_hdmi *intel_hdmi;
223 215
224 intel_encoder = kcalloc(sizeof(struct intel_encoder) + 216 intel_hdmi = kzalloc(sizeof(struct intel_hdmi), GFP_KERNEL);
225 sizeof(struct intel_hdmi_priv), 1, GFP_KERNEL); 217 if (!intel_hdmi)
226 if (!intel_encoder)
227 return; 218 return;
228 219
229 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); 220 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
230 if (!intel_connector) { 221 if (!intel_connector) {
231 kfree(intel_encoder); 222 kfree(intel_hdmi);
232 return; 223 return;
233 } 224 }
234 225
235 hdmi_priv = (struct intel_hdmi_priv *)(intel_encoder + 1); 226 intel_encoder = &intel_hdmi->base;
236
237 connector = &intel_connector->base; 227 connector = &intel_connector->base;
238 drm_connector_init(dev, connector, &intel_hdmi_connector_funcs, 228 drm_connector_init(dev, connector, &intel_hdmi_connector_funcs,
239 DRM_MODE_CONNECTOR_HDMIA); 229 DRM_MODE_CONNECTOR_HDMIA);
@@ -274,8 +264,7 @@ void intel_hdmi_init(struct drm_device *dev, int sdvox_reg)
274 if (!intel_encoder->ddc_bus) 264 if (!intel_encoder->ddc_bus)
275 goto err_connector; 265 goto err_connector;
276 266
277 hdmi_priv->sdvox_reg = sdvox_reg; 267 intel_hdmi->sdvox_reg = sdvox_reg;
278 intel_encoder->dev_priv = hdmi_priv;
279 268
280 drm_encoder_init(dev, &intel_encoder->enc, &intel_hdmi_enc_funcs, 269 drm_encoder_init(dev, &intel_encoder->enc, &intel_hdmi_enc_funcs,
281 DRM_MODE_ENCODER_TMDS); 270 DRM_MODE_ENCODER_TMDS);
@@ -298,7 +287,7 @@ void intel_hdmi_init(struct drm_device *dev, int sdvox_reg)
298 287
299err_connector: 288err_connector:
300 drm_connector_cleanup(connector); 289 drm_connector_cleanup(connector);
301 kfree(intel_encoder); 290 kfree(intel_hdmi);
302 kfree(intel_connector); 291 kfree(intel_connector);
303 292
304 return; 293 return;
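The HDMI conversion makes the allocation change easy to see: the old co-allocation of an intel_encoder with a trailing intel_hdmi_priv, addressed by pointer arithmetic, becomes a single kzalloc of the combined struct. Side by side, taken straight from the hunks above (error handling elided):

        /* Before: private data lives just past the encoder allocation. */
        intel_encoder = kcalloc(sizeof(struct intel_encoder) +
                                sizeof(struct intel_hdmi_priv), 1, GFP_KERNEL);
        hdmi_priv = (struct intel_hdmi_priv *)(intel_encoder + 1);

        /* After: one struct, one allocation, no pointer arithmetic. */
        intel_hdmi = kzalloc(sizeof(struct intel_hdmi), GFP_KERNEL);
        intel_encoder = &intel_hdmi->base;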
diff --git a/drivers/gpu/drm/i915/intel_lvds.c b/drivers/gpu/drm/i915/intel_lvds.c
index 0a2e60059fb3..b819c1081147 100644
--- a/drivers/gpu/drm/i915/intel_lvds.c
+++ b/drivers/gpu/drm/i915/intel_lvds.c
@@ -41,12 +41,18 @@
41#include <linux/acpi.h> 41#include <linux/acpi.h>
42 42
43/* Private structure for the integrated LVDS support */ 43/* Private structure for the integrated LVDS support */
44struct intel_lvds_priv { 44struct intel_lvds {
45 struct intel_encoder base;
45 int fitting_mode; 46 int fitting_mode;
46 u32 pfit_control; 47 u32 pfit_control;
47 u32 pfit_pgm_ratios; 48 u32 pfit_pgm_ratios;
48}; 49};
49 50
51static struct intel_lvds *enc_to_intel_lvds(struct drm_encoder *encoder)
52{
53 return container_of(enc_to_intel_encoder(encoder), struct intel_lvds, base);
54}
55
50/** 56/**
51 * Sets the backlight level. 57 * Sets the backlight level.
52 * 58 *
@@ -90,7 +96,7 @@ static u32 intel_lvds_get_max_backlight(struct drm_device *dev)
90static void intel_lvds_set_power(struct drm_device *dev, bool on) 96static void intel_lvds_set_power(struct drm_device *dev, bool on)
91{ 97{
92 struct drm_i915_private *dev_priv = dev->dev_private; 98 struct drm_i915_private *dev_priv = dev->dev_private;
93 u32 pp_status, ctl_reg, status_reg, lvds_reg; 99 u32 ctl_reg, status_reg, lvds_reg;
94 100
95 if (HAS_PCH_SPLIT(dev)) { 101 if (HAS_PCH_SPLIT(dev)) {
96 ctl_reg = PCH_PP_CONTROL; 102 ctl_reg = PCH_PP_CONTROL;
@@ -108,9 +114,8 @@ static void intel_lvds_set_power(struct drm_device *dev, bool on)
108 114
109 I915_WRITE(ctl_reg, I915_READ(ctl_reg) | 115 I915_WRITE(ctl_reg, I915_READ(ctl_reg) |
110 POWER_TARGET_ON); 116 POWER_TARGET_ON);
111 do { 117 if (wait_for(I915_READ(status_reg) & PP_ON, 1000, 0))
112 pp_status = I915_READ(status_reg); 118 DRM_ERROR("timed out waiting to enable LVDS pipe");
113 } while ((pp_status & PP_ON) == 0);
114 119
115 intel_lvds_set_backlight(dev, dev_priv->backlight_duty_cycle); 120 intel_lvds_set_backlight(dev, dev_priv->backlight_duty_cycle);
116 } else { 121 } else {
@@ -118,9 +123,8 @@ static void intel_lvds_set_power(struct drm_device *dev, bool on)
118 123
119 I915_WRITE(ctl_reg, I915_READ(ctl_reg) & 124 I915_WRITE(ctl_reg, I915_READ(ctl_reg) &
120 ~POWER_TARGET_ON); 125 ~POWER_TARGET_ON);
121 do { 126 if (wait_for((I915_READ(status_reg) & PP_ON) == 0, 1000, 0))
122 pp_status = I915_READ(status_reg); 127 DRM_ERROR("timed out waiting for LVDS pipe to turn off");
123 } while (pp_status & PP_ON);
124 128
125 I915_WRITE(lvds_reg, I915_READ(lvds_reg) & ~LVDS_PORT_EN); 129 I915_WRITE(lvds_reg, I915_READ(lvds_reg) & ~LVDS_PORT_EN);
126 POSTING_READ(lvds_reg); 130 POSTING_READ(lvds_reg);
@@ -219,9 +223,8 @@ static bool intel_lvds_mode_fixup(struct drm_encoder *encoder,
219 struct drm_device *dev = encoder->dev; 223 struct drm_device *dev = encoder->dev;
220 struct drm_i915_private *dev_priv = dev->dev_private; 224 struct drm_i915_private *dev_priv = dev->dev_private;
221 struct intel_crtc *intel_crtc = to_intel_crtc(encoder->crtc); 225 struct intel_crtc *intel_crtc = to_intel_crtc(encoder->crtc);
226 struct intel_lvds *intel_lvds = enc_to_intel_lvds(encoder);
222 struct drm_encoder *tmp_encoder; 227 struct drm_encoder *tmp_encoder;
223 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
224 struct intel_lvds_priv *lvds_priv = intel_encoder->dev_priv;
225 u32 pfit_control = 0, pfit_pgm_ratios = 0, border = 0; 228 u32 pfit_control = 0, pfit_pgm_ratios = 0, border = 0;
226 229
227 /* Should never happen!! */ 230 /* Should never happen!! */
@@ -241,26 +244,20 @@ static bool intel_lvds_mode_fixup(struct drm_encoder *encoder,
241 /* If we don't have a panel mode, there is nothing we can do */ 244 /* If we don't have a panel mode, there is nothing we can do */
242 if (dev_priv->panel_fixed_mode == NULL) 245 if (dev_priv->panel_fixed_mode == NULL)
243 return true; 246 return true;
247
244 /* 248 /*
245 * We have timings from the BIOS for the panel, put them in 249 * We have timings from the BIOS for the panel, put them in
246 * to the adjusted mode. The CRTC will be set up for this mode, 250 * to the adjusted mode. The CRTC will be set up for this mode,
247 * with the panel scaling set up to source from the H/VDisplay 251 * with the panel scaling set up to source from the H/VDisplay
248 * of the original mode. 252 * of the original mode.
249 */ 253 */
250 adjusted_mode->hdisplay = dev_priv->panel_fixed_mode->hdisplay; 254 intel_fixed_panel_mode(dev_priv->panel_fixed_mode, adjusted_mode);
251 adjusted_mode->hsync_start = 255
252 dev_priv->panel_fixed_mode->hsync_start; 256 if (HAS_PCH_SPLIT(dev)) {
253 adjusted_mode->hsync_end = 257 intel_pch_panel_fitting(dev, intel_lvds->fitting_mode,
254 dev_priv->panel_fixed_mode->hsync_end; 258 mode, adjusted_mode);
255 adjusted_mode->htotal = dev_priv->panel_fixed_mode->htotal; 259 return true;
256 adjusted_mode->vdisplay = dev_priv->panel_fixed_mode->vdisplay; 260 }
257 adjusted_mode->vsync_start =
258 dev_priv->panel_fixed_mode->vsync_start;
259 adjusted_mode->vsync_end =
260 dev_priv->panel_fixed_mode->vsync_end;
261 adjusted_mode->vtotal = dev_priv->panel_fixed_mode->vtotal;
262 adjusted_mode->clock = dev_priv->panel_fixed_mode->clock;
263 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
264 261
265 /* Make sure pre-965s set dither correctly */ 262 /* Make sure pre-965s set dither correctly */
266 if (!IS_I965G(dev)) { 263 if (!IS_I965G(dev)) {
@@ -273,10 +270,6 @@ static bool intel_lvds_mode_fixup(struct drm_encoder *encoder,
273 adjusted_mode->vdisplay == mode->vdisplay) 270 adjusted_mode->vdisplay == mode->vdisplay)
274 goto out; 271 goto out;
275 272
276 /* full screen scale for now */
277 if (HAS_PCH_SPLIT(dev))
278 goto out;
279
280 /* 965+ wants fuzzy fitting */ 273 /* 965+ wants fuzzy fitting */
281 if (IS_I965G(dev)) 274 if (IS_I965G(dev))
282 pfit_control |= ((intel_crtc->pipe << PFIT_PIPE_SHIFT) | 275 pfit_control |= ((intel_crtc->pipe << PFIT_PIPE_SHIFT) |
@@ -288,12 +281,10 @@ static bool intel_lvds_mode_fixup(struct drm_encoder *encoder,
288 * to register description and PRM. 281 * to register description and PRM.
289 * Change the value here to see the borders for debugging 282 * Change the value here to see the borders for debugging
290 */ 283 */
291 if (!HAS_PCH_SPLIT(dev)) { 284 I915_WRITE(BCLRPAT_A, 0);
292 I915_WRITE(BCLRPAT_A, 0); 285 I915_WRITE(BCLRPAT_B, 0);
293 I915_WRITE(BCLRPAT_B, 0);
294 }
295 286
296 switch (lvds_priv->fitting_mode) { 287 switch (intel_lvds->fitting_mode) {
297 case DRM_MODE_SCALE_CENTER: 288 case DRM_MODE_SCALE_CENTER:
298 /* 289 /*
299 * For centered modes, we have to calculate border widths & 290 * For centered modes, we have to calculate border widths &
@@ -378,8 +369,8 @@ static bool intel_lvds_mode_fixup(struct drm_encoder *encoder,
378 } 369 }
379 370
380out: 371out:
381 lvds_priv->pfit_control = pfit_control; 372 intel_lvds->pfit_control = pfit_control;
382 lvds_priv->pfit_pgm_ratios = pfit_pgm_ratios; 373 intel_lvds->pfit_pgm_ratios = pfit_pgm_ratios;
383 dev_priv->lvds_border_bits = border; 374 dev_priv->lvds_border_bits = border;
384 375
385 /* 376 /*
@@ -427,8 +418,7 @@ static void intel_lvds_mode_set(struct drm_encoder *encoder,
427{ 418{
428 struct drm_device *dev = encoder->dev; 419 struct drm_device *dev = encoder->dev;
429 struct drm_i915_private *dev_priv = dev->dev_private; 420 struct drm_i915_private *dev_priv = dev->dev_private;
430 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 421 struct intel_lvds *intel_lvds = enc_to_intel_lvds(encoder);
431 struct intel_lvds_priv *lvds_priv = intel_encoder->dev_priv;
432 422
433 /* 423 /*
434 * The LVDS pin pair will already have been turned on in the 424 * The LVDS pin pair will already have been turned on in the
@@ -444,8 +434,8 @@ static void intel_lvds_mode_set(struct drm_encoder *encoder,
444 * screen. Should be enabled before the pipe is enabled, according to 434 * screen. Should be enabled before the pipe is enabled, according to
445 * register description and PRM. 435 * register description and PRM.
446 */ 436 */
447 I915_WRITE(PFIT_PGM_RATIOS, lvds_priv->pfit_pgm_ratios); 437 I915_WRITE(PFIT_PGM_RATIOS, intel_lvds->pfit_pgm_ratios);
448 I915_WRITE(PFIT_CONTROL, lvds_priv->pfit_control); 438 I915_WRITE(PFIT_CONTROL, intel_lvds->pfit_control);
449} 439}
450 440
451/** 441/**
@@ -600,18 +590,17 @@ static int intel_lvds_set_property(struct drm_connector *connector,
600 connector->encoder) { 590 connector->encoder) {
601 struct drm_crtc *crtc = connector->encoder->crtc; 591 struct drm_crtc *crtc = connector->encoder->crtc;
602 struct drm_encoder *encoder = connector->encoder; 592 struct drm_encoder *encoder = connector->encoder;
603 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 593 struct intel_lvds *intel_lvds = enc_to_intel_lvds(encoder);
604 struct intel_lvds_priv *lvds_priv = intel_encoder->dev_priv;
605 594
606 if (value == DRM_MODE_SCALE_NONE) { 595 if (value == DRM_MODE_SCALE_NONE) {
607 DRM_DEBUG_KMS("no scaling not supported\n"); 596 DRM_DEBUG_KMS("no scaling not supported\n");
608 return 0; 597 return 0;
609 } 598 }
610 if (lvds_priv->fitting_mode == value) { 599 if (intel_lvds->fitting_mode == value) {
611 /* the LVDS scaling property is not changed */ 600 /* the LVDS scaling property is not changed */
612 return 0; 601 return 0;
613 } 602 }
614 lvds_priv->fitting_mode = value; 603 intel_lvds->fitting_mode = value;
615 if (crtc && crtc->enabled) { 604 if (crtc && crtc->enabled) {
616 /* 605 /*
617 * If the CRTC is enabled, the display will be changed 606 * If the CRTC is enabled, the display will be changed
@@ -647,19 +636,8 @@ static const struct drm_connector_funcs intel_lvds_connector_funcs = {
647 .destroy = intel_lvds_destroy, 636 .destroy = intel_lvds_destroy,
648}; 637};
649 638
650
651static void intel_lvds_enc_destroy(struct drm_encoder *encoder)
652{
653 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
654
655 if (intel_encoder->ddc_bus)
656 intel_i2c_destroy(intel_encoder->ddc_bus);
657 drm_encoder_cleanup(encoder);
658 kfree(intel_encoder);
659}
660
661static const struct drm_encoder_funcs intel_lvds_enc_funcs = { 639static const struct drm_encoder_funcs intel_lvds_enc_funcs = {
662 .destroy = intel_lvds_enc_destroy, 640 .destroy = intel_encoder_destroy,
663}; 641};
664 642
665static int __init intel_no_lvds_dmi_callback(const struct dmi_system_id *id) 643static int __init intel_no_lvds_dmi_callback(const struct dmi_system_id *id)
@@ -843,13 +821,13 @@ static int lvds_is_present_in_vbt(struct drm_device *dev)
843void intel_lvds_init(struct drm_device *dev) 821void intel_lvds_init(struct drm_device *dev)
844{ 822{
845 struct drm_i915_private *dev_priv = dev->dev_private; 823 struct drm_i915_private *dev_priv = dev->dev_private;
824 struct intel_lvds *intel_lvds;
846 struct intel_encoder *intel_encoder; 825 struct intel_encoder *intel_encoder;
847 struct intel_connector *intel_connector; 826 struct intel_connector *intel_connector;
848 struct drm_connector *connector; 827 struct drm_connector *connector;
849 struct drm_encoder *encoder; 828 struct drm_encoder *encoder;
850 struct drm_display_mode *scan; /* *modes, *bios_mode; */ 829 struct drm_display_mode *scan; /* *modes, *bios_mode; */
851 struct drm_crtc *crtc; 830 struct drm_crtc *crtc;
852 struct intel_lvds_priv *lvds_priv;
853 u32 lvds; 831 u32 lvds;
854 int pipe, gpio = GPIOC; 832 int pipe, gpio = GPIOC;
855 833
@@ -872,20 +850,20 @@ void intel_lvds_init(struct drm_device *dev)
872 gpio = PCH_GPIOC; 850 gpio = PCH_GPIOC;
873 } 851 }
874 852
875 intel_encoder = kzalloc(sizeof(struct intel_encoder) + 853 intel_lvds = kzalloc(sizeof(struct intel_lvds), GFP_KERNEL);
876 sizeof(struct intel_lvds_priv), GFP_KERNEL); 854 if (!intel_lvds) {
877 if (!intel_encoder) {
878 return; 855 return;
879 } 856 }
880 857
881 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); 858 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
882 if (!intel_connector) { 859 if (!intel_connector) {
883 kfree(intel_encoder); 860 kfree(intel_lvds);
884 return; 861 return;
885 } 862 }
886 863
887 connector = &intel_connector->base; 864 intel_encoder = &intel_lvds->base;
888 encoder = &intel_encoder->enc; 865 encoder = &intel_encoder->enc;
866 connector = &intel_connector->base;
889 drm_connector_init(dev, &intel_connector->base, &intel_lvds_connector_funcs, 867 drm_connector_init(dev, &intel_connector->base, &intel_lvds_connector_funcs,
890 DRM_MODE_CONNECTOR_LVDS); 868 DRM_MODE_CONNECTOR_LVDS);
891 869
@@ -905,8 +883,6 @@ void intel_lvds_init(struct drm_device *dev)
905 connector->interlace_allowed = false; 883 connector->interlace_allowed = false;
906 connector->doublescan_allowed = false; 884 connector->doublescan_allowed = false;
907 885
908 lvds_priv = (struct intel_lvds_priv *)(intel_encoder + 1);
909 intel_encoder->dev_priv = lvds_priv;
910 /* create the scaling mode property */ 886 /* create the scaling mode property */
911 drm_mode_create_scaling_mode_property(dev); 887 drm_mode_create_scaling_mode_property(dev);
912 /* 888 /*
@@ -916,7 +892,7 @@ void intel_lvds_init(struct drm_device *dev)
916 drm_connector_attach_property(&intel_connector->base, 892 drm_connector_attach_property(&intel_connector->base,
917 dev->mode_config.scaling_mode_property, 893 dev->mode_config.scaling_mode_property,
918 DRM_MODE_SCALE_ASPECT); 894 DRM_MODE_SCALE_ASPECT);
919 lvds_priv->fitting_mode = DRM_MODE_SCALE_ASPECT; 895 intel_lvds->fitting_mode = DRM_MODE_SCALE_ASPECT;
920 /* 896 /*
921 * LVDS discovery: 897 * LVDS discovery:
922 * 1) check for EDID on DDC 898 * 1) check for EDID on DDC
@@ -1024,6 +1000,6 @@ failed:
1024 intel_i2c_destroy(intel_encoder->ddc_bus); 1000 intel_i2c_destroy(intel_encoder->ddc_bus);
1025 drm_connector_cleanup(connector); 1001 drm_connector_cleanup(connector);
1026 drm_encoder_cleanup(encoder); 1002 drm_encoder_cleanup(encoder);
1027 kfree(intel_encoder); 1003 kfree(intel_lvds);
1028 kfree(intel_connector); 1004 kfree(intel_connector);
1029} 1005}
diff --git a/drivers/gpu/drm/i915/intel_overlay.c b/drivers/gpu/drm/i915/intel_overlay.c
index d39aea24eabe..4f00390d7c61 100644
--- a/drivers/gpu/drm/i915/intel_overlay.c
+++ b/drivers/gpu/drm/i915/intel_overlay.c
@@ -1367,7 +1367,8 @@ void intel_setup_overlay(struct drm_device *dev)
1367 overlay->flip_addr = overlay->reg_bo->gtt_offset; 1367 overlay->flip_addr = overlay->reg_bo->gtt_offset;
1368 } else { 1368 } else {
1369 ret = i915_gem_attach_phys_object(dev, reg_bo, 1369 ret = i915_gem_attach_phys_object(dev, reg_bo,
1370 I915_GEM_PHYS_OVERLAY_REGS); 1370 I915_GEM_PHYS_OVERLAY_REGS,
1371 0);
1371 if (ret) { 1372 if (ret) {
1372 DRM_ERROR("failed to attach phys overlay regs\n"); 1373 DRM_ERROR("failed to attach phys overlay regs\n");
1373 goto out_free_bo; 1374 goto out_free_bo;
@@ -1416,3 +1417,99 @@ void intel_cleanup_overlay(struct drm_device *dev)
1416 kfree(dev_priv->overlay); 1417 kfree(dev_priv->overlay);
1417 } 1418 }
1418} 1419}
1420
1421struct intel_overlay_error_state {
1422 struct overlay_registers regs;
1423 unsigned long base;
1424 u32 dovsta;
1425 u32 isr;
1426};
1427
1428struct intel_overlay_error_state *
1429intel_overlay_capture_error_state(struct drm_device *dev)
1430{
1431 drm_i915_private_t *dev_priv = dev->dev_private;
1432 struct intel_overlay *overlay = dev_priv->overlay;
1433 struct intel_overlay_error_state *error;
1434 struct overlay_registers __iomem *regs;
1435
1436 if (!overlay || !overlay->active)
1437 return NULL;
1438
1439 error = kmalloc(sizeof(*error), GFP_ATOMIC);
1440 if (error == NULL)
1441 return NULL;
1442
1443 error->dovsta = I915_READ(DOVSTA);
1444 error->isr = I915_READ(ISR);
1445 if (OVERLAY_NONPHYSICAL(overlay->dev))
1446 error->base = (long) overlay->reg_bo->gtt_offset;
1447 else
1448 error->base = (long) overlay->reg_bo->phys_obj->handle->vaddr;
1449
1450 regs = intel_overlay_map_regs_atomic(overlay);
1451 if (!regs)
1452 goto err;
1453
1454 memcpy_fromio(&error->regs, regs, sizeof(struct overlay_registers));
1455 intel_overlay_unmap_regs_atomic(overlay);
1456
1457 return error;
1458
1459err:
1460 kfree(error);
1461 return NULL;
1462}
1463
1464void
1465intel_overlay_print_error_state(struct seq_file *m, struct intel_overlay_error_state *error)
1466{
1467 seq_printf(m, "Overlay, status: 0x%08x, interrupt: 0x%08x\n",
1468 error->dovsta, error->isr);
1469 seq_printf(m, " Register file at 0x%08lx:\n",
1470 error->base);
1471
1472#define P(x) seq_printf(m, " " #x ": 0x%08x\n", error->regs.x)
1473 P(OBUF_0Y);
1474 P(OBUF_1Y);
1475 P(OBUF_0U);
1476 P(OBUF_0V);
1477 P(OBUF_1U);
1478 P(OBUF_1V);
1479 P(OSTRIDE);
1480 P(YRGB_VPH);
1481 P(UV_VPH);
1482 P(HORZ_PH);
1483 P(INIT_PHS);
1484 P(DWINPOS);
1485 P(DWINSZ);
1486 P(SWIDTH);
1487 P(SWIDTHSW);
1488 P(SHEIGHT);
1489 P(YRGBSCALE);
1490 P(UVSCALE);
1491 P(OCLRC0);
1492 P(OCLRC1);
1493 P(DCLRKV);
1494 P(DCLRKM);
1495 P(SCLRKVH);
1496 P(SCLRKVL);
1497 P(SCLRKEN);
1498 P(OCONFIG);
1499 P(OCMD);
1500 P(OSTART_0Y);
1501 P(OSTART_1Y);
1502 P(OSTART_0U);
1503 P(OSTART_0V);
1504 P(OSTART_1U);
1505 P(OSTART_1V);
1506 P(OTILEOFF_0Y);
1507 P(OTILEOFF_1Y);
1508 P(OTILEOFF_0U);
1509 P(OTILEOFF_0V);
1510 P(OTILEOFF_1U);
1511 P(OTILEOFF_1V);
1512 P(FASTHSCALE);
1513 P(UVSCALEV);
1514#undef P
1515}
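
The two functions added above split overlay error reporting into an atomic-context capture step (kmalloc with GFP_ATOMIC, atomic register mapping) and a later pretty-print step driven by a seq_file. A hedged sketch of how the rest of the driver is expected to pair them; the function names and the place the pointer is parked are illustrative assumptions, not code from this patch:

/* Capture at hang time, in atomic context. */
static struct intel_overlay_error_state *overlay_error;

static void record_hang(struct drm_device *dev)
{
        overlay_error = intel_overlay_capture_error_state(dev);
}

/* Dump later, e.g. from a debugfs read, where sleeping is allowed. */
static int show_hang(struct seq_file *m, struct drm_device *dev)
{
        if (overlay_error)
                intel_overlay_print_error_state(m, overlay_error);
        return 0;
}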
diff --git a/drivers/gpu/drm/i915/intel_panel.c b/drivers/gpu/drm/i915/intel_panel.c
new file mode 100644
index 000000000000..e7f5299d9d57
--- /dev/null
+++ b/drivers/gpu/drm/i915/intel_panel.c
@@ -0,0 +1,111 @@
1/*
2 * Copyright © 2006-2010 Intel Corporation
3 * Copyright (c) 2006 Dave Airlie <airlied@linux.ie>
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
14 * Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 *
24 * Authors:
25 * Eric Anholt <eric@anholt.net>
26 * Dave Airlie <airlied@linux.ie>
27 * Jesse Barnes <jesse.barnes@intel.com>
28 * Chris Wilson <chris@chris-wilson.co.uk>
29 */
30
31#include "intel_drv.h"
32
33void
34intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
35 struct drm_display_mode *adjusted_mode)
36{
37 adjusted_mode->hdisplay = fixed_mode->hdisplay;
38 adjusted_mode->hsync_start = fixed_mode->hsync_start;
39 adjusted_mode->hsync_end = fixed_mode->hsync_end;
40 adjusted_mode->htotal = fixed_mode->htotal;
41
42 adjusted_mode->vdisplay = fixed_mode->vdisplay;
43 adjusted_mode->vsync_start = fixed_mode->vsync_start;
44 adjusted_mode->vsync_end = fixed_mode->vsync_end;
45 adjusted_mode->vtotal = fixed_mode->vtotal;
46
47 adjusted_mode->clock = fixed_mode->clock;
48
49 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
50}
51
52/* adjusted_mode has been preset to be the panel's fixed mode */
53void
54intel_pch_panel_fitting(struct drm_device *dev,
55 int fitting_mode,
56 struct drm_display_mode *mode,
57 struct drm_display_mode *adjusted_mode)
58{
59 struct drm_i915_private *dev_priv = dev->dev_private;
60 int x, y, width, height;
61
62 x = y = width = height = 0;
63
64 /* Native modes don't need fitting */
65 if (adjusted_mode->hdisplay == mode->hdisplay &&
66 adjusted_mode->vdisplay == mode->vdisplay)
67 goto done;
68
69 switch (fitting_mode) {
70 case DRM_MODE_SCALE_CENTER:
71 width = mode->hdisplay;
72 height = mode->vdisplay;
73 x = (adjusted_mode->hdisplay - width + 1)/2;
74 y = (adjusted_mode->vdisplay - height + 1)/2;
75 break;
76
77 case DRM_MODE_SCALE_ASPECT:
78 /* Scale but preserve the aspect ratio */
79 {
80 u32 scaled_width = adjusted_mode->hdisplay * mode->vdisplay;
81 u32 scaled_height = mode->hdisplay * adjusted_mode->vdisplay;
82 if (scaled_width > scaled_height) { /* pillar */
83 width = scaled_height / mode->vdisplay;
84 x = (adjusted_mode->hdisplay - width + 1) / 2;
85 y = 0;
86 height = adjusted_mode->vdisplay;
87 } else if (scaled_width < scaled_height) { /* letter */
88 height = scaled_width / mode->hdisplay;
89 y = (adjusted_mode->vdisplay - height + 1) / 2;
90 x = 0;
91 width = adjusted_mode->hdisplay;
92 } else {
93 x = y = 0;
94 width = adjusted_mode->hdisplay;
95 height = adjusted_mode->vdisplay;
96 }
97 }
98 break;
99
100 default:
101 case DRM_MODE_SCALE_FULLSCREEN:
102 x = y = 0;
103 width = adjusted_mode->hdisplay;
104 height = adjusted_mode->vdisplay;
105 break;
106 }
107
108done:
109 dev_priv->pch_pf_pos = (x << 16) | y;
110 dev_priv->pch_pf_size = (width << 16) | height;
111}
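
To make the DRM_MODE_SCALE_ASPECT branch above concrete, here is the same arithmetic restated as a standalone program with made-up numbers: a 1920x1200 panel (adjusted_mode) asked to show a 1024x768 mode ends up pillarboxed as a centred 1600x1200 window.

#include <stdio.h>

/*
 * Standalone restatement of the DRM_MODE_SCALE_ASPECT arithmetic from
 * intel_pch_panel_fitting() above.  The 1920x1200 panel and 1024x768 mode
 * are illustrative values, not anything taken from hardware.
 */
int main(void)
{
        unsigned int panel_w = 1920, panel_h = 1200;    /* adjusted_mode */
        unsigned int mode_w = 1024, mode_h = 768;       /* requested mode */
        unsigned int scaled_w = panel_w * mode_h;       /* 1474560 */
        unsigned int scaled_h = mode_w * panel_h;       /* 1228800 */
        unsigned int x, y, width, height;

        if (scaled_w > scaled_h) {                      /* pillarbox */
                width  = scaled_h / mode_h;             /* 1600 */
                height = panel_h;                       /* 1200 */
                x = (panel_w - width + 1) / 2;          /* 160 */
                y = 0;
        } else if (scaled_w < scaled_h) {               /* letterbox */
                height = scaled_w / mode_w;
                width  = panel_w;
                y = (panel_h - height + 1) / 2;
                x = 0;
        } else {                                        /* aspect already matches */
                x = y = 0;
                width  = panel_w;
                height = panel_h;
        }

        /* 1600x1200 preserves the 4:3 ratio of 1024x768; the panel fitter
         * window is centred with 160-pixel borders left and right. */
        printf("pos=(%u,%u) size=%ux%u\n", x, y, width, height);
        return 0;
}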
diff --git a/drivers/gpu/drm/i915/intel_ringbuffer.c b/drivers/gpu/drm/i915/intel_ringbuffer.c
index 26362f8495a8..51e9c9e718c4 100644
--- a/drivers/gpu/drm/i915/intel_ringbuffer.c
+++ b/drivers/gpu/drm/i915/intel_ringbuffer.c
@@ -33,18 +33,35 @@
33#include "i915_drm.h" 33#include "i915_drm.h"
34#include "i915_trace.h" 34#include "i915_trace.h"
35 35
36static u32 i915_gem_get_seqno(struct drm_device *dev)
37{
38 drm_i915_private_t *dev_priv = dev->dev_private;
39 u32 seqno;
40
41 seqno = dev_priv->next_seqno;
42
43 /* reserve 0 for non-seqno */
44 if (++dev_priv->next_seqno == 0)
45 dev_priv->next_seqno = 1;
46
47 return seqno;
48}
49
36static void 50static void
37render_ring_flush(struct drm_device *dev, 51render_ring_flush(struct drm_device *dev,
38 struct intel_ring_buffer *ring, 52 struct intel_ring_buffer *ring,
39 u32 invalidate_domains, 53 u32 invalidate_domains,
40 u32 flush_domains) 54 u32 flush_domains)
41{ 55{
56 drm_i915_private_t *dev_priv = dev->dev_private;
57 u32 cmd;
58
42#if WATCH_EXEC 59#if WATCH_EXEC
43 DRM_INFO("%s: invalidate %08x flush %08x\n", __func__, 60 DRM_INFO("%s: invalidate %08x flush %08x\n", __func__,
44 invalidate_domains, flush_domains); 61 invalidate_domains, flush_domains);
45#endif 62#endif
46 u32 cmd; 63
47 trace_i915_gem_request_flush(dev, ring->next_seqno, 64 trace_i915_gem_request_flush(dev, dev_priv->next_seqno,
48 invalidate_domains, flush_domains); 65 invalidate_domains, flush_domains);
49 66
50 if ((invalidate_domains | flush_domains) & I915_GEM_GPU_DOMAINS) { 67 if ((invalidate_domains | flush_domains) & I915_GEM_GPU_DOMAINS) {
@@ -233,9 +250,10 @@ render_ring_add_request(struct drm_device *dev,
233 struct drm_file *file_priv, 250 struct drm_file *file_priv,
234 u32 flush_domains) 251 u32 flush_domains)
235{ 252{
236 u32 seqno;
237 drm_i915_private_t *dev_priv = dev->dev_private; 253 drm_i915_private_t *dev_priv = dev->dev_private;
238 seqno = intel_ring_get_seqno(dev, ring); 254 u32 seqno;
255
256 seqno = i915_gem_get_seqno(dev);
239 257
240 if (IS_GEN6(dev)) { 258 if (IS_GEN6(dev)) {
241 BEGIN_LP_RING(6); 259 BEGIN_LP_RING(6);
@@ -405,7 +423,9 @@ bsd_ring_add_request(struct drm_device *dev,
405 u32 flush_domains) 423 u32 flush_domains)
406{ 424{
407 u32 seqno; 425 u32 seqno;
408 seqno = intel_ring_get_seqno(dev, ring); 426
427 seqno = i915_gem_get_seqno(dev);
428
409 intel_ring_begin(dev, ring, 4); 429 intel_ring_begin(dev, ring, 4);
410 intel_ring_emit(dev, ring, MI_STORE_DWORD_INDEX); 430 intel_ring_emit(dev, ring, MI_STORE_DWORD_INDEX);
411 intel_ring_emit(dev, ring, 431 intel_ring_emit(dev, ring,
@@ -479,7 +499,7 @@ render_ring_dispatch_gem_execbuffer(struct drm_device *dev,
479 exec_start = (uint32_t) exec_offset + exec->batch_start_offset; 499 exec_start = (uint32_t) exec_offset + exec->batch_start_offset;
480 exec_len = (uint32_t) exec->batch_len; 500 exec_len = (uint32_t) exec->batch_len;
481 501
482 trace_i915_gem_request_submit(dev, dev_priv->mm.next_gem_seqno + 1); 502 trace_i915_gem_request_submit(dev, dev_priv->next_seqno + 1);
483 503
484 count = nbox ? nbox : 1; 504 count = nbox ? nbox : 1;
485 505
@@ -515,7 +535,16 @@ render_ring_dispatch_gem_execbuffer(struct drm_device *dev,
515 intel_ring_advance(dev, ring); 535 intel_ring_advance(dev, ring);
516 } 536 }
517 537
538 if (IS_G4X(dev) || IS_IRONLAKE(dev)) {
539 intel_ring_begin(dev, ring, 2);
540 intel_ring_emit(dev, ring, MI_FLUSH |
541 MI_NO_WRITE_FLUSH |
542 MI_INVALIDATE_ISP );
543 intel_ring_emit(dev, ring, MI_NOOP);
544 intel_ring_advance(dev, ring);
545 }
518 /* XXX breadcrumb */ 546 /* XXX breadcrumb */
547
519 return 0; 548 return 0;
520} 549}
521 550
@@ -588,9 +617,10 @@ err:
588int intel_init_ring_buffer(struct drm_device *dev, 617int intel_init_ring_buffer(struct drm_device *dev,
589 struct intel_ring_buffer *ring) 618 struct intel_ring_buffer *ring)
590{ 619{
591 int ret;
592 struct drm_i915_gem_object *obj_priv; 620 struct drm_i915_gem_object *obj_priv;
593 struct drm_gem_object *obj; 621 struct drm_gem_object *obj;
622 int ret;
623
594 ring->dev = dev; 624 ring->dev = dev;
595 625
596 if (I915_NEED_GFX_HWS(dev)) { 626 if (I915_NEED_GFX_HWS(dev)) {
@@ -603,16 +633,14 @@ int intel_init_ring_buffer(struct drm_device *dev,
603 if (obj == NULL) { 633 if (obj == NULL) {
604 DRM_ERROR("Failed to allocate ringbuffer\n"); 634 DRM_ERROR("Failed to allocate ringbuffer\n");
605 ret = -ENOMEM; 635 ret = -ENOMEM;
606 goto cleanup; 636 goto err_hws;
607 } 637 }
608 638
609 ring->gem_object = obj; 639 ring->gem_object = obj;
610 640
611 ret = i915_gem_object_pin(obj, ring->alignment); 641 ret = i915_gem_object_pin(obj, ring->alignment);
612 if (ret != 0) { 642 if (ret)
613 drm_gem_object_unreference(obj); 643 goto err_unref;
614 goto cleanup;
615 }
616 644
617 obj_priv = to_intel_bo(obj); 645 obj_priv = to_intel_bo(obj);
618 ring->map.size = ring->size; 646 ring->map.size = ring->size;
@@ -624,18 +652,14 @@ int intel_init_ring_buffer(struct drm_device *dev,
624 drm_core_ioremap_wc(&ring->map, dev); 652 drm_core_ioremap_wc(&ring->map, dev);
625 if (ring->map.handle == NULL) { 653 if (ring->map.handle == NULL) {
626 DRM_ERROR("Failed to map ringbuffer.\n"); 654 DRM_ERROR("Failed to map ringbuffer.\n");
627 i915_gem_object_unpin(obj);
628 drm_gem_object_unreference(obj);
629 ret = -EINVAL; 655 ret = -EINVAL;
630 goto cleanup; 656 goto err_unpin;
631 } 657 }
632 658
633 ring->virtual_start = ring->map.handle; 659 ring->virtual_start = ring->map.handle;
634 ret = ring->init(dev, ring); 660 ret = ring->init(dev, ring);
635 if (ret != 0) { 661 if (ret)
636 intel_cleanup_ring_buffer(dev, ring); 662 goto err_unmap;
637 return ret;
638 }
639 663
640 if (!drm_core_check_feature(dev, DRIVER_MODESET)) 664 if (!drm_core_check_feature(dev, DRIVER_MODESET))
641 i915_kernel_lost_context(dev); 665 i915_kernel_lost_context(dev);
@@ -649,7 +673,15 @@ int intel_init_ring_buffer(struct drm_device *dev,
649 INIT_LIST_HEAD(&ring->active_list); 673 INIT_LIST_HEAD(&ring->active_list);
650 INIT_LIST_HEAD(&ring->request_list); 674 INIT_LIST_HEAD(&ring->request_list);
651 return ret; 675 return ret;
652cleanup: 676
677err_unmap:
678 drm_core_ioremapfree(&ring->map, dev);
679err_unpin:
680 i915_gem_object_unpin(obj);
681err_unref:
682 drm_gem_object_unreference(obj);
683 ring->gem_object = NULL;
684err_hws:
653 cleanup_status_page(dev, ring); 685 cleanup_status_page(dev, ring);
654 return ret; 686 return ret;
655} 687}
@@ -682,9 +714,11 @@ int intel_wrap_ring_buffer(struct drm_device *dev,
682 } 714 }
683 715
684 virt = (unsigned int *)(ring->virtual_start + ring->tail); 716 virt = (unsigned int *)(ring->virtual_start + ring->tail);
685 rem /= 4; 717 rem /= 8;
686 while (rem--) 718 while (rem--) {
719 *virt++ = MI_NOOP;
687 *virt++ = MI_NOOP; 720 *virt++ = MI_NOOP;
721 }
688 722
689 ring->tail = 0; 723 ring->tail = 0;
690 ring->space = ring->head - 8; 724 ring->space = ring->head - 8;
@@ -729,21 +763,14 @@ void intel_ring_begin(struct drm_device *dev,
729 intel_wrap_ring_buffer(dev, ring); 763 intel_wrap_ring_buffer(dev, ring);
730 if (unlikely(ring->space < n)) 764 if (unlikely(ring->space < n))
731 intel_wait_ring_buffer(dev, ring, n); 765 intel_wait_ring_buffer(dev, ring, n);
732}
733 766
734void intel_ring_emit(struct drm_device *dev, 767 ring->space -= n;
735 struct intel_ring_buffer *ring, unsigned int data)
736{
737 unsigned int *virt = ring->virtual_start + ring->tail;
738 *virt = data;
739 ring->tail += 4;
740 ring->tail &= ring->size - 1;
741 ring->space -= 4;
742} 768}
743 769
744void intel_ring_advance(struct drm_device *dev, 770void intel_ring_advance(struct drm_device *dev,
745 struct intel_ring_buffer *ring) 771 struct intel_ring_buffer *ring)
746{ 772{
773 ring->tail &= ring->size - 1;
747 ring->advance_ring(dev, ring); 774 ring->advance_ring(dev, ring);
748} 775}
749 776
@@ -762,18 +789,6 @@ void intel_fill_struct(struct drm_device *dev,
762 intel_ring_advance(dev, ring); 789 intel_ring_advance(dev, ring);
763} 790}
764 791
765u32 intel_ring_get_seqno(struct drm_device *dev,
766 struct intel_ring_buffer *ring)
767{
768 u32 seqno;
769 seqno = ring->next_seqno;
770
771 /* reserve 0 for non-seqno */
772 if (++ring->next_seqno == 0)
773 ring->next_seqno = 1;
774 return seqno;
775}
776
777struct intel_ring_buffer render_ring = { 792struct intel_ring_buffer render_ring = {
778 .name = "render ring", 793 .name = "render ring",
779 .regs = { 794 .regs = {
@@ -791,7 +806,6 @@ struct intel_ring_buffer render_ring = {
791 .head = 0, 806 .head = 0,
792 .tail = 0, 807 .tail = 0,
793 .space = 0, 808 .space = 0,
794 .next_seqno = 1,
795 .user_irq_refcount = 0, 809 .user_irq_refcount = 0,
796 .irq_gem_seqno = 0, 810 .irq_gem_seqno = 0,
797 .waiting_gem_seqno = 0, 811 .waiting_gem_seqno = 0,
@@ -830,7 +844,6 @@ struct intel_ring_buffer bsd_ring = {
830 .head = 0, 844 .head = 0,
831 .tail = 0, 845 .tail = 0,
832 .space = 0, 846 .space = 0,
833 .next_seqno = 1,
834 .user_irq_refcount = 0, 847 .user_irq_refcount = 0,
835 .irq_gem_seqno = 0, 848 .irq_gem_seqno = 0,
836 .waiting_gem_seqno = 0, 849 .waiting_gem_seqno = 0,
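
The new i915_gem_get_seqno() at the top of this file's diff moves sequence-number allocation from per-ring state into dev_priv->next_seqno and skips 0 on wrap, since 0 is reserved to mean "no seqno". A toy standalone illustration of that wrap behaviour (the starting value and names are ours; only the skip-zero rule comes from the patch):

#include <stdint.h>
#include <stdio.h>

/* Toy model of the "reserve 0 for non-seqno" rule in i915_gem_get_seqno(). */
static uint32_t next_seqno = 0xfffffffeu;       /* near the 32-bit wrap */

static uint32_t get_seqno(void)
{
        uint32_t seqno = next_seqno;

        if (++next_seqno == 0)                  /* wrapped past 0xffffffff */
                next_seqno = 1;                 /* 0 is never handed out */
        return seqno;
}

int main(void)
{
        uint32_t a = get_seqno();
        uint32_t b = get_seqno();
        uint32_t c = get_seqno();

        /* Prints: fffffffe ffffffff 1 */
        printf("%x %x %x\n", a, b, c);
        return 0;
}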
diff --git a/drivers/gpu/drm/i915/intel_ringbuffer.h b/drivers/gpu/drm/i915/intel_ringbuffer.h
index d5568d3766de..525e7d3edda8 100644
--- a/drivers/gpu/drm/i915/intel_ringbuffer.h
+++ b/drivers/gpu/drm/i915/intel_ringbuffer.h
@@ -26,7 +26,6 @@ struct intel_ring_buffer {
26 unsigned int head; 26 unsigned int head;
27 unsigned int tail; 27 unsigned int tail;
28 unsigned int space; 28 unsigned int space;
29 u32 next_seqno;
30 struct intel_hw_status_page status_page; 29 struct intel_hw_status_page status_page;
31 30
32 u32 irq_gem_seqno; /* last seq seen at irq time */ 31
@@ -106,8 +105,16 @@ int intel_wrap_ring_buffer(struct drm_device *dev,
106 struct intel_ring_buffer *ring); 105 struct intel_ring_buffer *ring);
107void intel_ring_begin(struct drm_device *dev, 106void intel_ring_begin(struct drm_device *dev,
108 struct intel_ring_buffer *ring, int n); 107 struct intel_ring_buffer *ring, int n);
109void intel_ring_emit(struct drm_device *dev, 108
110 struct intel_ring_buffer *ring, u32 data); 109static inline void intel_ring_emit(struct drm_device *dev,
110 struct intel_ring_buffer *ring,
111 unsigned int data)
112{
113 unsigned int *virt = ring->virtual_start + ring->tail;
114 *virt = data;
115 ring->tail += 4;
116}
117
111void intel_fill_struct(struct drm_device *dev, 118void intel_fill_struct(struct drm_device *dev,
112 struct intel_ring_buffer *ring, 119 struct intel_ring_buffer *ring,
113 void *data, 120 void *data,
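
With intel_ring_emit() reduced to the inline above (store a dword, advance the tail by 4, no masking), correctness relies on intel_ring_begin() having already wrapped the buffer via intel_wrap_ring_buffer() (visible in the .c hunk above) before a reservation would cross the end, so the single mask in intel_ring_advance() only has to fold the tail == size boundary case back to 0. A tiny standalone check of that, assuming the ring size is a power of two as the mask requires:

#include <assert.h>

int main(void)
{
        unsigned int size = 64 * 1024;  /* assumed power-of-two ring size */
        unsigned int tail = size;       /* worst case left by the emits */

        tail &= size - 1;               /* the mask in intel_ring_advance() */
        assert(tail == 0);              /* folded back to the start */
        return 0;
}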
diff --git a/drivers/gpu/drm/i915/intel_sdvo.c b/drivers/gpu/drm/i915/intel_sdvo.c
index d9d4d51aa89e..093e914e8a41 100644
--- a/drivers/gpu/drm/i915/intel_sdvo.c
+++ b/drivers/gpu/drm/i915/intel_sdvo.c
@@ -31,8 +31,8 @@
31#include "drmP.h" 31#include "drmP.h"
32#include "drm.h" 32#include "drm.h"
33#include "drm_crtc.h" 33#include "drm_crtc.h"
34#include "intel_drv.h"
35#include "drm_edid.h" 34#include "drm_edid.h"
35#include "intel_drv.h"
36#include "i915_drm.h" 36#include "i915_drm.h"
37#include "i915_drv.h" 37#include "i915_drv.h"
38#include "intel_sdvo_regs.h" 38#include "intel_sdvo_regs.h"
@@ -47,9 +47,10 @@
47 47
48#define IS_TV(c) (c->output_flag & SDVO_TV_MASK) 48#define IS_TV(c) (c->output_flag & SDVO_TV_MASK)
49#define IS_LVDS(c) (c->output_flag & SDVO_LVDS_MASK) 49#define IS_LVDS(c) (c->output_flag & SDVO_LVDS_MASK)
50#define IS_TV_OR_LVDS(c) (c->output_flag & (SDVO_TV_MASK | SDVO_LVDS_MASK))
50 51
51 52
52static char *tv_format_names[] = { 53static const char *tv_format_names[] = {
53 "NTSC_M" , "NTSC_J" , "NTSC_443", 54 "NTSC_M" , "NTSC_J" , "NTSC_443",
54 "PAL_B" , "PAL_D" , "PAL_G" , 55 "PAL_B" , "PAL_D" , "PAL_G" ,
55 "PAL_H" , "PAL_I" , "PAL_M" , 56 "PAL_H" , "PAL_I" , "PAL_M" ,
@@ -61,7 +62,9 @@ static char *tv_format_names[] = {
61 62
62#define TV_FORMAT_NUM (sizeof(tv_format_names) / sizeof(*tv_format_names)) 63#define TV_FORMAT_NUM (sizeof(tv_format_names) / sizeof(*tv_format_names))
63 64
64struct intel_sdvo_priv { 65struct intel_sdvo {
66 struct intel_encoder base;
67
65 u8 slave_addr; 68 u8 slave_addr;
66 69
67 /* Register for the SDVO device: SDVOB or SDVOC */ 70 /* Register for the SDVO device: SDVOB or SDVOC */
@@ -95,7 +98,7 @@ struct intel_sdvo_priv {
95 bool is_tv; 98 bool is_tv;
96 99
97 /* This is for current tv format name */ 100 /* This is for current tv format name */
98 char *tv_format_name; 101 int tv_format_index;
99 102
100 /** 103 /**
101 * This is set if we treat the device as HDMI, instead of DVI. 104 * This is set if we treat the device as HDMI, instead of DVI.
@@ -132,37 +135,40 @@ struct intel_sdvo_priv {
132}; 135};
133 136
134struct intel_sdvo_connector { 137struct intel_sdvo_connector {
138 struct intel_connector base;
139
135 /* Mark the type of connector */ 140 /* Mark the type of connector */
136 uint16_t output_flag; 141 uint16_t output_flag;
137 142
138 /* This contains all current supported TV format */ 143 /* This contains all current supported TV format */
139 char *tv_format_supported[TV_FORMAT_NUM]; 144 u8 tv_format_supported[TV_FORMAT_NUM];
140 int format_supported_num; 145 int format_supported_num;
141 struct drm_property *tv_format_property; 146 struct drm_property *tv_format;
142 struct drm_property *tv_format_name_property[TV_FORMAT_NUM];
143
144 /**
145 * Returned SDTV resolutions allowed for the current format, if the
146 * device reported it.
147 */
148 struct intel_sdvo_sdtv_resolution_reply sdtv_resolutions;
149 147
150 /* add the property for the SDVO-TV */ 148 /* add the property for the SDVO-TV */
151 struct drm_property *left_property; 149 struct drm_property *left;
152 struct drm_property *right_property; 150 struct drm_property *right;
153 struct drm_property *top_property; 151 struct drm_property *top;
154 struct drm_property *bottom_property; 152 struct drm_property *bottom;
155 struct drm_property *hpos_property; 153 struct drm_property *hpos;
156 struct drm_property *vpos_property; 154 struct drm_property *vpos;
155 struct drm_property *contrast;
156 struct drm_property *saturation;
157 struct drm_property *hue;
158 struct drm_property *sharpness;
159 struct drm_property *flicker_filter;
160 struct drm_property *flicker_filter_adaptive;
161 struct drm_property *flicker_filter_2d;
162 struct drm_property *tv_chroma_filter;
163 struct drm_property *tv_luma_filter;
164 struct drm_property *dot_crawl;
157 165
158 /* add the property for the SDVO-TV/LVDS */ 166 /* add the property for the SDVO-TV/LVDS */
159 struct drm_property *brightness_property; 167 struct drm_property *brightness;
160 struct drm_property *contrast_property;
161 struct drm_property *saturation_property;
162 struct drm_property *hue_property;
163 168
164 /* Add variable to record current setting for the above property */ 169 /* Add variable to record current setting for the above property */
165 u32 left_margin, right_margin, top_margin, bottom_margin; 170 u32 left_margin, right_margin, top_margin, bottom_margin;
171
166 /* this is to get the range of margin.*/ 172 /* this is to get the range of margin.*/
167 u32 max_hscan, max_vscan; 173 u32 max_hscan, max_vscan;
168 u32 max_hpos, cur_hpos; 174 u32 max_hpos, cur_hpos;
@@ -171,36 +177,54 @@ struct intel_sdvo_connector {
171 u32 cur_contrast, max_contrast; 177 u32 cur_contrast, max_contrast;
172 u32 cur_saturation, max_saturation; 178 u32 cur_saturation, max_saturation;
173 u32 cur_hue, max_hue; 179 u32 cur_hue, max_hue;
180 u32 cur_sharpness, max_sharpness;
181 u32 cur_flicker_filter, max_flicker_filter;
182 u32 cur_flicker_filter_adaptive, max_flicker_filter_adaptive;
183 u32 cur_flicker_filter_2d, max_flicker_filter_2d;
184 u32 cur_tv_chroma_filter, max_tv_chroma_filter;
185 u32 cur_tv_luma_filter, max_tv_luma_filter;
186 u32 cur_dot_crawl, max_dot_crawl;
174}; 187};
175 188
189static struct intel_sdvo *enc_to_intel_sdvo(struct drm_encoder *encoder)
190{
191 return container_of(enc_to_intel_encoder(encoder), struct intel_sdvo, base);
192}
193
194static struct intel_sdvo_connector *to_intel_sdvo_connector(struct drm_connector *connector)
195{
196 return container_of(to_intel_connector(connector), struct intel_sdvo_connector, base);
197}
198
176static bool 199static bool
177intel_sdvo_output_setup(struct intel_encoder *intel_encoder, 200intel_sdvo_output_setup(struct intel_sdvo *intel_sdvo, uint16_t flags);
178 uint16_t flags); 201static bool
179static void 202intel_sdvo_tv_create_property(struct intel_sdvo *intel_sdvo,
180intel_sdvo_tv_create_property(struct drm_connector *connector, int type); 203 struct intel_sdvo_connector *intel_sdvo_connector,
181static void 204 int type);
182intel_sdvo_create_enhance_property(struct drm_connector *connector); 205static bool
206intel_sdvo_create_enhance_property(struct intel_sdvo *intel_sdvo,
207 struct intel_sdvo_connector *intel_sdvo_connector);
183 208
184/** 209/**
185 * Writes the SDVOB or SDVOC with the given value, but always writes both 210 * Writes the SDVOB or SDVOC with the given value, but always writes both
186 * SDVOB and SDVOC to work around apparent hardware issues (according to 211 * SDVOB and SDVOC to work around apparent hardware issues (according to
187 * comments in the BIOS). 212 * comments in the BIOS).
188 */ 213 */
189static void intel_sdvo_write_sdvox(struct intel_encoder *intel_encoder, u32 val) 214static void intel_sdvo_write_sdvox(struct intel_sdvo *intel_sdvo, u32 val)
190{ 215{
191 struct drm_device *dev = intel_encoder->enc.dev; 216 struct drm_device *dev = intel_sdvo->base.enc.dev;
192 struct drm_i915_private *dev_priv = dev->dev_private; 217 struct drm_i915_private *dev_priv = dev->dev_private;
193 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
194 u32 bval = val, cval = val; 218 u32 bval = val, cval = val;
195 int i; 219 int i;
196 220
197 if (sdvo_priv->sdvo_reg == PCH_SDVOB) { 221 if (intel_sdvo->sdvo_reg == PCH_SDVOB) {
198 I915_WRITE(sdvo_priv->sdvo_reg, val); 222 I915_WRITE(intel_sdvo->sdvo_reg, val);
199 I915_READ(sdvo_priv->sdvo_reg); 223 I915_READ(intel_sdvo->sdvo_reg);
200 return; 224 return;
201 } 225 }
202 226
203 if (sdvo_priv->sdvo_reg == SDVOB) { 227 if (intel_sdvo->sdvo_reg == SDVOB) {
204 cval = I915_READ(SDVOC); 228 cval = I915_READ(SDVOC);
205 } else { 229 } else {
206 bval = I915_READ(SDVOB); 230 bval = I915_READ(SDVOB);
@@ -219,33 +243,27 @@ static void intel_sdvo_write_sdvox(struct intel_encoder *intel_encoder, u32 val)
219 } 243 }
220} 244}
221 245
222static bool intel_sdvo_read_byte(struct intel_encoder *intel_encoder, u8 addr, 246static bool intel_sdvo_read_byte(struct intel_sdvo *intel_sdvo, u8 addr, u8 *ch)
223 u8 *ch)
224{ 247{
225 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv; 248 u8 out_buf[2] = { addr, 0 };
226 u8 out_buf[2];
227 u8 buf[2]; 249 u8 buf[2];
228 int ret;
229
230 struct i2c_msg msgs[] = { 250 struct i2c_msg msgs[] = {
231 { 251 {
232 .addr = sdvo_priv->slave_addr >> 1, 252 .addr = intel_sdvo->slave_addr >> 1,
233 .flags = 0, 253 .flags = 0,
234 .len = 1, 254 .len = 1,
235 .buf = out_buf, 255 .buf = out_buf,
236 }, 256 },
237 { 257 {
238 .addr = sdvo_priv->slave_addr >> 1, 258 .addr = intel_sdvo->slave_addr >> 1,
239 .flags = I2C_M_RD, 259 .flags = I2C_M_RD,
240 .len = 1, 260 .len = 1,
241 .buf = buf, 261 .buf = buf,
242 } 262 }
243 }; 263 };
264 int ret;
244 265
245 out_buf[0] = addr; 266 if ((ret = i2c_transfer(intel_sdvo->base.i2c_bus, msgs, 2)) == 2)
246 out_buf[1] = 0;
247
248 if ((ret = i2c_transfer(intel_encoder->i2c_bus, msgs, 2)) == 2)
249 { 267 {
250 *ch = buf[0]; 268 *ch = buf[0];
251 return true; 269 return true;
@@ -255,35 +273,26 @@ static bool intel_sdvo_read_byte(struct intel_encoder *intel_encoder, u8 addr,
255 return false; 273 return false;
256} 274}
257 275
258static bool intel_sdvo_write_byte(struct intel_encoder *intel_encoder, int addr, 276static bool intel_sdvo_write_byte(struct intel_sdvo *intel_sdvo, int addr, u8 ch)
259 u8 ch)
260{ 277{
261 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv; 278 u8 out_buf[2] = { addr, ch };
262 u8 out_buf[2];
263 struct i2c_msg msgs[] = { 279 struct i2c_msg msgs[] = {
264 { 280 {
265 .addr = sdvo_priv->slave_addr >> 1, 281 .addr = intel_sdvo->slave_addr >> 1,
266 .flags = 0, 282 .flags = 0,
267 .len = 2, 283 .len = 2,
268 .buf = out_buf, 284 .buf = out_buf,
269 } 285 }
270 }; 286 };
271 287
272 out_buf[0] = addr; 288 return i2c_transfer(intel_sdvo->base.i2c_bus, msgs, 1) == 1;
273 out_buf[1] = ch;
274
275 if (i2c_transfer(intel_encoder->i2c_bus, msgs, 1) == 1)
276 {
277 return true;
278 }
279 return false;
280} 289}
281 290
282#define SDVO_CMD_NAME_ENTRY(cmd) {cmd, #cmd} 291#define SDVO_CMD_NAME_ENTRY(cmd) {cmd, #cmd}
283/** Mapping of command numbers to names, for debug output */ 292/** Mapping of command numbers to names, for debug output */
284static const struct _sdvo_cmd_name { 293static const struct _sdvo_cmd_name {
285 u8 cmd; 294 u8 cmd;
286 char *name; 295 const char *name;
287} sdvo_cmd_names[] = { 296} sdvo_cmd_names[] = {
288 SDVO_CMD_NAME_ENTRY(SDVO_CMD_RESET), 297 SDVO_CMD_NAME_ENTRY(SDVO_CMD_RESET),
289 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_DEVICE_CAPS), 298 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_DEVICE_CAPS),
@@ -328,13 +337,14 @@ static const struct _sdvo_cmd_name {
328 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SDTV_RESOLUTION_SUPPORT), 337 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SDTV_RESOLUTION_SUPPORT),
329 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SCALED_HDTV_RESOLUTION_SUPPORT), 338 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SCALED_HDTV_RESOLUTION_SUPPORT),
330 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SUPPORTED_ENHANCEMENTS), 339 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SUPPORTED_ENHANCEMENTS),
340
331 /* Add the op code for SDVO enhancements */ 341 /* Add the op code for SDVO enhancements */
332 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_POSITION_H), 342 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_HPOS),
333 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_POSITION_H), 343 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_HPOS),
334 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_POSITION_H), 344 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_HPOS),
335 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_POSITION_V), 345 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_VPOS),
336 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_POSITION_V), 346 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_VPOS),
337 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_POSITION_V), 347 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_VPOS),
338 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_SATURATION), 348 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_SATURATION),
339 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SATURATION), 349 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SATURATION),
340 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_SATURATION), 350 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_SATURATION),
@@ -353,6 +363,27 @@ static const struct _sdvo_cmd_name {
353 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_OVERSCAN_V), 363 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_OVERSCAN_V),
354 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_OVERSCAN_V), 364 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_OVERSCAN_V),
355 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_OVERSCAN_V), 365 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_OVERSCAN_V),
366 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_FLICKER_FILTER),
367 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_FLICKER_FILTER),
368 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_FLICKER_FILTER),
369 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_FLICKER_FILTER_ADAPTIVE),
370 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_FLICKER_FILTER_ADAPTIVE),
371 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_FLICKER_FILTER_ADAPTIVE),
372 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_FLICKER_FILTER_2D),
373 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_FLICKER_FILTER_2D),
374 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_FLICKER_FILTER_2D),
375 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_SHARPNESS),
376 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SHARPNESS),
377 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_SHARPNESS),
378 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_DOT_CRAWL),
379 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_DOT_CRAWL),
380 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_TV_CHROMA_FILTER),
381 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_TV_CHROMA_FILTER),
382 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_TV_CHROMA_FILTER),
383 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_MAX_TV_LUMA_FILTER),
384 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_TV_LUMA_FILTER),
385 SDVO_CMD_NAME_ENTRY(SDVO_CMD_SET_TV_LUMA_FILTER),
386
356 /* HDMI op code */ 387 /* HDMI op code */
357 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SUPP_ENCODE), 388 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_SUPP_ENCODE),
358 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_ENCODE), 389 SDVO_CMD_NAME_ENTRY(SDVO_CMD_GET_ENCODE),
@@ -377,17 +408,15 @@ static const struct _sdvo_cmd_name {
377}; 408};
378 409
379#define IS_SDVOB(reg) (reg == SDVOB || reg == PCH_SDVOB) 410#define IS_SDVOB(reg) (reg == SDVOB || reg == PCH_SDVOB)
380#define SDVO_NAME(dev_priv) (IS_SDVOB((dev_priv)->sdvo_reg) ? "SDVOB" : "SDVOC") 411#define SDVO_NAME(svdo) (IS_SDVOB((svdo)->sdvo_reg) ? "SDVOB" : "SDVOC")
381#define SDVO_PRIV(encoder) ((struct intel_sdvo_priv *) (encoder)->dev_priv)
382 412
383static void intel_sdvo_debug_write(struct intel_encoder *intel_encoder, u8 cmd, 413static void intel_sdvo_debug_write(struct intel_sdvo *intel_sdvo, u8 cmd,
384 void *args, int args_len) 414 const void *args, int args_len)
385{ 415{
386 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
387 int i; 416 int i;
388 417
389 DRM_DEBUG_KMS("%s: W: %02X ", 418 DRM_DEBUG_KMS("%s: W: %02X ",
390 SDVO_NAME(sdvo_priv), cmd); 419 SDVO_NAME(intel_sdvo), cmd);
391 for (i = 0; i < args_len; i++) 420 for (i = 0; i < args_len; i++)
392 DRM_LOG_KMS("%02X ", ((u8 *)args)[i]); 421 DRM_LOG_KMS("%02X ", ((u8 *)args)[i]);
393 for (; i < 8; i++) 422 for (; i < 8; i++)
@@ -403,19 +432,20 @@ static void intel_sdvo_debug_write(struct intel_encoder *intel_encoder, u8 cmd,
403 DRM_LOG_KMS("\n"); 432 DRM_LOG_KMS("\n");
404} 433}
405 434
406static void intel_sdvo_write_cmd(struct intel_encoder *intel_encoder, u8 cmd, 435static bool intel_sdvo_write_cmd(struct intel_sdvo *intel_sdvo, u8 cmd,
407 void *args, int args_len) 436 const void *args, int args_len)
408{ 437{
409 int i; 438 int i;
410 439
411 intel_sdvo_debug_write(intel_encoder, cmd, args, args_len); 440 intel_sdvo_debug_write(intel_sdvo, cmd, args, args_len);
412 441
413 for (i = 0; i < args_len; i++) { 442 for (i = 0; i < args_len; i++) {
414 intel_sdvo_write_byte(intel_encoder, SDVO_I2C_ARG_0 - i, 443 if (!intel_sdvo_write_byte(intel_sdvo, SDVO_I2C_ARG_0 - i,
415 ((u8*)args)[i]); 444 ((u8*)args)[i]))
445 return false;
416 } 446 }
417 447
418 intel_sdvo_write_byte(intel_encoder, SDVO_I2C_OPCODE, cmd); 448 return intel_sdvo_write_byte(intel_sdvo, SDVO_I2C_OPCODE, cmd);
419} 449}
420 450
421static const char *cmd_status_names[] = { 451static const char *cmd_status_names[] = {
@@ -428,14 +458,13 @@ static const char *cmd_status_names[] = {
428 "Scaling not supported" 458 "Scaling not supported"
429}; 459};
430 460
431static void intel_sdvo_debug_response(struct intel_encoder *intel_encoder, 461static void intel_sdvo_debug_response(struct intel_sdvo *intel_sdvo,
432 void *response, int response_len, 462 void *response, int response_len,
433 u8 status) 463 u8 status)
434{ 464{
435 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
436 int i; 465 int i;
437 466
438 DRM_DEBUG_KMS("%s: R: ", SDVO_NAME(sdvo_priv)); 467 DRM_DEBUG_KMS("%s: R: ", SDVO_NAME(intel_sdvo));
439 for (i = 0; i < response_len; i++) 468 for (i = 0; i < response_len; i++)
440 DRM_LOG_KMS("%02X ", ((u8 *)response)[i]); 469 DRM_LOG_KMS("%02X ", ((u8 *)response)[i]);
441 for (; i < 8; i++) 470 for (; i < 8; i++)
@@ -447,8 +476,8 @@ static void intel_sdvo_debug_response(struct intel_encoder *intel_encoder,
447 DRM_LOG_KMS("\n"); 476 DRM_LOG_KMS("\n");
448} 477}
449 478
450static u8 intel_sdvo_read_response(struct intel_encoder *intel_encoder, 479static bool intel_sdvo_read_response(struct intel_sdvo *intel_sdvo,
451 void *response, int response_len) 480 void *response, int response_len)
452{ 481{
453 int i; 482 int i;
454 u8 status; 483 u8 status;
@@ -457,24 +486,26 @@ static u8 intel_sdvo_read_response(struct intel_encoder *intel_encoder,
457 while (retry--) { 486 while (retry--) {
458 /* Read the command response */ 487 /* Read the command response */
459 for (i = 0; i < response_len; i++) { 488 for (i = 0; i < response_len; i++) {
460 intel_sdvo_read_byte(intel_encoder, 489 if (!intel_sdvo_read_byte(intel_sdvo,
461 SDVO_I2C_RETURN_0 + i, 490 SDVO_I2C_RETURN_0 + i,
462 &((u8 *)response)[i]); 491 &((u8 *)response)[i]))
492 return false;
463 } 493 }
464 494
465 /* read the return status */ 495 /* read the return status */
466 intel_sdvo_read_byte(intel_encoder, SDVO_I2C_CMD_STATUS, 496 if (!intel_sdvo_read_byte(intel_sdvo, SDVO_I2C_CMD_STATUS,
467 &status); 497 &status))
498 return false;
468 499
469 intel_sdvo_debug_response(intel_encoder, response, response_len, 500 intel_sdvo_debug_response(intel_sdvo, response, response_len,
470 status); 501 status);
471 if (status != SDVO_CMD_STATUS_PENDING) 502 if (status != SDVO_CMD_STATUS_PENDING)
472 return status; 503 break;
473 504
474 mdelay(50); 505 mdelay(50);
475 } 506 }
476 507
477 return status; 508 return status == SDVO_CMD_STATUS_SUCCESS;
478} 509}
479 510
480static int intel_sdvo_get_pixel_multiplier(struct drm_display_mode *mode) 511static int intel_sdvo_get_pixel_multiplier(struct drm_display_mode *mode)
@@ -494,37 +525,36 @@ static int intel_sdvo_get_pixel_multiplier(struct drm_display_mode *mode)
494 * another I2C transaction after issuing the DDC bus switch, it will be 525 * another I2C transaction after issuing the DDC bus switch, it will be
495 * switched to the internal SDVO register. 526 * switched to the internal SDVO register.
496 */ 527 */
497static void intel_sdvo_set_control_bus_switch(struct intel_encoder *intel_encoder, 528static void intel_sdvo_set_control_bus_switch(struct intel_sdvo *intel_sdvo,
498 u8 target) 529 u8 target)
499{ 530{
500 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
501 u8 out_buf[2], cmd_buf[2], ret_value[2], ret; 531 u8 out_buf[2], cmd_buf[2], ret_value[2], ret;
502 struct i2c_msg msgs[] = { 532 struct i2c_msg msgs[] = {
503 { 533 {
504 .addr = sdvo_priv->slave_addr >> 1, 534 .addr = intel_sdvo->slave_addr >> 1,
505 .flags = 0, 535 .flags = 0,
506 .len = 2, 536 .len = 2,
507 .buf = out_buf, 537 .buf = out_buf,
508 }, 538 },
509 /* the following two are to read the response */ 539 /* the following two are to read the response */
510 { 540 {
511 .addr = sdvo_priv->slave_addr >> 1, 541 .addr = intel_sdvo->slave_addr >> 1,
512 .flags = 0, 542 .flags = 0,
513 .len = 1, 543 .len = 1,
514 .buf = cmd_buf, 544 .buf = cmd_buf,
515 }, 545 },
516 { 546 {
517 .addr = sdvo_priv->slave_addr >> 1, 547 .addr = intel_sdvo->slave_addr >> 1,
518 .flags = I2C_M_RD, 548 .flags = I2C_M_RD,
519 .len = 1, 549 .len = 1,
520 .buf = ret_value, 550 .buf = ret_value,
521 }, 551 },
522 }; 552 };
523 553
524 intel_sdvo_debug_write(intel_encoder, SDVO_CMD_SET_CONTROL_BUS_SWITCH, 554 intel_sdvo_debug_write(intel_sdvo, SDVO_CMD_SET_CONTROL_BUS_SWITCH,
525 &target, 1); 555 &target, 1);
526 /* write the DDC switch command argument */ 556 /* write the DDC switch command argument */
527 intel_sdvo_write_byte(intel_encoder, SDVO_I2C_ARG_0, target); 557 intel_sdvo_write_byte(intel_sdvo, SDVO_I2C_ARG_0, target);
528 558
529 out_buf[0] = SDVO_I2C_OPCODE; 559 out_buf[0] = SDVO_I2C_OPCODE;
530 out_buf[1] = SDVO_CMD_SET_CONTROL_BUS_SWITCH; 560 out_buf[1] = SDVO_CMD_SET_CONTROL_BUS_SWITCH;
@@ -533,7 +563,7 @@ static void intel_sdvo_set_control_bus_switch(struct intel_encoder *intel_encode
533 ret_value[0] = 0; 563 ret_value[0] = 0;
534 ret_value[1] = 0; 564 ret_value[1] = 0;
535 565
536 ret = i2c_transfer(intel_encoder->i2c_bus, msgs, 3); 566 ret = i2c_transfer(intel_sdvo->base.i2c_bus, msgs, 3);
537 if (ret != 3) { 567 if (ret != 3) {
538 /* failure in I2C transfer */ 568 /* failure in I2C transfer */
539 DRM_DEBUG_KMS("I2c transfer returned %d\n", ret); 569 DRM_DEBUG_KMS("I2c transfer returned %d\n", ret);
@@ -547,23 +577,29 @@ static void intel_sdvo_set_control_bus_switch(struct intel_encoder *intel_encode
547 return; 577 return;
548} 578}
549 579
550static bool intel_sdvo_set_target_input(struct intel_encoder *intel_encoder, bool target_0, bool target_1) 580static bool intel_sdvo_set_value(struct intel_sdvo *intel_sdvo, u8 cmd, const void *data, int len)
551{ 581{
552 struct intel_sdvo_set_target_input_args targets = {0}; 582 if (!intel_sdvo_write_cmd(intel_sdvo, cmd, data, len))
553 u8 status; 583 return false;
554
555 if (target_0 && target_1)
556 return SDVO_CMD_STATUS_NOTSUPP;
557 584
558 if (target_1) 585 return intel_sdvo_read_response(intel_sdvo, NULL, 0);
559 targets.target_1 = 1; 586}
560 587
561 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_TARGET_INPUT, &targets, 588static bool
562 sizeof(targets)); 589intel_sdvo_get_value(struct intel_sdvo *intel_sdvo, u8 cmd, void *value, int len)
590{
591 if (!intel_sdvo_write_cmd(intel_sdvo, cmd, NULL, 0))
592 return false;
563 593
564 status = intel_sdvo_read_response(intel_encoder, NULL, 0); 594 return intel_sdvo_read_response(intel_sdvo, value, len);
595}
565 596
566 return (status == SDVO_CMD_STATUS_SUCCESS); 597static bool intel_sdvo_set_target_input(struct intel_sdvo *intel_sdvo)
598{
599 struct intel_sdvo_set_target_input_args targets = {0};
600 return intel_sdvo_set_value(intel_sdvo,
601 SDVO_CMD_SET_TARGET_INPUT,
602 &targets, sizeof(targets));
567} 603}
568 604
569/** 605/**
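
intel_sdvo_set_value() and intel_sdvo_get_value() above fold the old write_cmd / read_response / status-check sequence into single bool-returning helpers, and the rest of this diff rewrites each SDVO accessor in terms of them. A hedged sketch of the resulting shape of a converted two-part setter; the command names and argument struct are placeholders, and only the calling pattern (including the && chaining used for multi-part commands such as intel_sdvo_set_timing()) comes from the patch:

/* Placeholder names; only the calling pattern is taken from the diff. */
static bool intel_sdvo_set_example(struct intel_sdvo *intel_sdvo,
                                   const struct example_args *args)
{
        return intel_sdvo_set_value(intel_sdvo, SDVO_CMD_SET_EXAMPLE_PART1,
                                    &args->part1, sizeof(args->part1)) &&
               intel_sdvo_set_value(intel_sdvo, SDVO_CMD_SET_EXAMPLE_PART2,
                                    &args->part2, sizeof(args->part2));
}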
@@ -572,14 +608,12 @@ static bool intel_sdvo_set_target_input(struct intel_encoder *intel_encoder, boo
572 * This function is making an assumption about the layout of the response, 608 * This function is making an assumption about the layout of the response,
573 * which should be checked against the docs. 609 * which should be checked against the docs.
574 */ 610 */
575static bool intel_sdvo_get_trained_inputs(struct intel_encoder *intel_encoder, bool *input_1, bool *input_2) 611static bool intel_sdvo_get_trained_inputs(struct intel_sdvo *intel_sdvo, bool *input_1, bool *input_2)
576{ 612{
577 struct intel_sdvo_get_trained_inputs_response response; 613 struct intel_sdvo_get_trained_inputs_response response;
578 u8 status;
579 614
580 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_TRAINED_INPUTS, NULL, 0); 615 if (!intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_TRAINED_INPUTS,
581 status = intel_sdvo_read_response(intel_encoder, &response, sizeof(response)); 616 &response, sizeof(response)))
582 if (status != SDVO_CMD_STATUS_SUCCESS)
583 return false; 617 return false;
584 618
585 *input_1 = response.input0_trained; 619 *input_1 = response.input0_trained;
@@ -587,21 +621,18 @@ static bool intel_sdvo_get_trained_inputs(struct intel_encoder *intel_encoder, b
587 return true; 621 return true;
588} 622}
589 623
590static bool intel_sdvo_set_active_outputs(struct intel_encoder *intel_encoder, 624static bool intel_sdvo_set_active_outputs(struct intel_sdvo *intel_sdvo,
591 u16 outputs) 625 u16 outputs)
592{ 626{
593 u8 status; 627 return intel_sdvo_set_value(intel_sdvo,
594 628 SDVO_CMD_SET_ACTIVE_OUTPUTS,
595 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_ACTIVE_OUTPUTS, &outputs, 629 &outputs, sizeof(outputs));
596 sizeof(outputs));
597 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
598 return (status == SDVO_CMD_STATUS_SUCCESS);
599} 630}
600 631
601static bool intel_sdvo_set_encoder_power_state(struct intel_encoder *intel_encoder, 632static bool intel_sdvo_set_encoder_power_state(struct intel_sdvo *intel_sdvo,
602 int mode) 633 int mode)
603{ 634{
604 u8 status, state = SDVO_ENCODER_STATE_ON; 635 u8 state = SDVO_ENCODER_STATE_ON;
605 636
606 switch (mode) { 637 switch (mode) {
607 case DRM_MODE_DPMS_ON: 638 case DRM_MODE_DPMS_ON:
@@ -618,88 +649,63 @@ static bool intel_sdvo_set_encoder_power_state(struct intel_encoder *intel_encod
618 break; 649 break;
619 } 650 }
620 651
621 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_ENCODER_POWER_STATE, &state, 652 return intel_sdvo_set_value(intel_sdvo,
622 sizeof(state)); 653 SDVO_CMD_SET_ENCODER_POWER_STATE, &state, sizeof(state));
623 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
624
625 return (status == SDVO_CMD_STATUS_SUCCESS);
626} 654}
627 655
628static bool intel_sdvo_get_input_pixel_clock_range(struct intel_encoder *intel_encoder, 656static bool intel_sdvo_get_input_pixel_clock_range(struct intel_sdvo *intel_sdvo,
629 int *clock_min, 657 int *clock_min,
630 int *clock_max) 658 int *clock_max)
631{ 659{
632 struct intel_sdvo_pixel_clock_range clocks; 660 struct intel_sdvo_pixel_clock_range clocks;
633 u8 status;
634
635 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_INPUT_PIXEL_CLOCK_RANGE,
636 NULL, 0);
637
638 status = intel_sdvo_read_response(intel_encoder, &clocks, sizeof(clocks));
639 661
640 if (status != SDVO_CMD_STATUS_SUCCESS) 662 if (!intel_sdvo_get_value(intel_sdvo,
663 SDVO_CMD_GET_INPUT_PIXEL_CLOCK_RANGE,
664 &clocks, sizeof(clocks)))
641 return false; 665 return false;
642 666
643 /* Convert the values from units of 10 kHz to kHz. */ 667 /* Convert the values from units of 10 kHz to kHz. */
644 *clock_min = clocks.min * 10; 668 *clock_min = clocks.min * 10;
645 *clock_max = clocks.max * 10; 669 *clock_max = clocks.max * 10;
646
647 return true; 670 return true;
648} 671}
649 672
650static bool intel_sdvo_set_target_output(struct intel_encoder *intel_encoder, 673static bool intel_sdvo_set_target_output(struct intel_sdvo *intel_sdvo,
651 u16 outputs) 674 u16 outputs)
652{ 675{
653 u8 status; 676 return intel_sdvo_set_value(intel_sdvo,
654 677 SDVO_CMD_SET_TARGET_OUTPUT,
655 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_TARGET_OUTPUT, &outputs, 678 &outputs, sizeof(outputs));
656 sizeof(outputs));
657
658 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
659 return (status == SDVO_CMD_STATUS_SUCCESS);
660} 679}
661 680
662static bool intel_sdvo_set_timing(struct intel_encoder *intel_encoder, u8 cmd, 681static bool intel_sdvo_set_timing(struct intel_sdvo *intel_sdvo, u8 cmd,
663 struct intel_sdvo_dtd *dtd) 682 struct intel_sdvo_dtd *dtd)
664{ 683{
665 u8 status; 684 return intel_sdvo_set_value(intel_sdvo, cmd, &dtd->part1, sizeof(dtd->part1)) &&
666 685 intel_sdvo_set_value(intel_sdvo, cmd + 1, &dtd->part2, sizeof(dtd->part2));
667 intel_sdvo_write_cmd(intel_encoder, cmd, &dtd->part1, sizeof(dtd->part1));
668 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
669 if (status != SDVO_CMD_STATUS_SUCCESS)
670 return false;
671
672 intel_sdvo_write_cmd(intel_encoder, cmd + 1, &dtd->part2, sizeof(dtd->part2));
673 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
674 if (status != SDVO_CMD_STATUS_SUCCESS)
675 return false;
676
677 return true;
678} 686}
679 687
680static bool intel_sdvo_set_input_timing(struct intel_encoder *intel_encoder, 688static bool intel_sdvo_set_input_timing(struct intel_sdvo *intel_sdvo,
681 struct intel_sdvo_dtd *dtd) 689 struct intel_sdvo_dtd *dtd)
682{ 690{
683 return intel_sdvo_set_timing(intel_encoder, 691 return intel_sdvo_set_timing(intel_sdvo,
684 SDVO_CMD_SET_INPUT_TIMINGS_PART1, dtd); 692 SDVO_CMD_SET_INPUT_TIMINGS_PART1, dtd);
685} 693}
686 694
687static bool intel_sdvo_set_output_timing(struct intel_encoder *intel_encoder, 695static bool intel_sdvo_set_output_timing(struct intel_sdvo *intel_sdvo,
688 struct intel_sdvo_dtd *dtd) 696 struct intel_sdvo_dtd *dtd)
689{ 697{
690 return intel_sdvo_set_timing(intel_encoder, 698 return intel_sdvo_set_timing(intel_sdvo,
691 SDVO_CMD_SET_OUTPUT_TIMINGS_PART1, dtd); 699 SDVO_CMD_SET_OUTPUT_TIMINGS_PART1, dtd);
692} 700}
693 701
694static bool 702static bool
695intel_sdvo_create_preferred_input_timing(struct intel_encoder *intel_encoder, 703intel_sdvo_create_preferred_input_timing(struct intel_sdvo *intel_sdvo,
696 uint16_t clock, 704 uint16_t clock,
697 uint16_t width, 705 uint16_t width,
698 uint16_t height) 706 uint16_t height)
699{ 707{
700 struct intel_sdvo_preferred_input_timing_args args; 708 struct intel_sdvo_preferred_input_timing_args args;
701 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
702 uint8_t status;
703 709
704 memset(&args, 0, sizeof(args)); 710 memset(&args, 0, sizeof(args));
705 args.clock = clock; 711 args.clock = clock;
@@ -707,59 +713,32 @@ intel_sdvo_create_preferred_input_timing(struct intel_encoder *intel_encoder,
707 args.height = height; 713 args.height = height;
708 args.interlace = 0; 714 args.interlace = 0;
709 715
710 if (sdvo_priv->is_lvds && 716 if (intel_sdvo->is_lvds &&
711 (sdvo_priv->sdvo_lvds_fixed_mode->hdisplay != width || 717 (intel_sdvo->sdvo_lvds_fixed_mode->hdisplay != width ||
712 sdvo_priv->sdvo_lvds_fixed_mode->vdisplay != height)) 718 intel_sdvo->sdvo_lvds_fixed_mode->vdisplay != height))
713 args.scaled = 1; 719 args.scaled = 1;
714 720
715 intel_sdvo_write_cmd(intel_encoder, 721 return intel_sdvo_set_value(intel_sdvo,
716 SDVO_CMD_CREATE_PREFERRED_INPUT_TIMING, 722 SDVO_CMD_CREATE_PREFERRED_INPUT_TIMING,
717 &args, sizeof(args)); 723 &args, sizeof(args));
718 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
719 if (status != SDVO_CMD_STATUS_SUCCESS)
720 return false;
721
722 return true;
723} 724}
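
The request built here packs the desired clock (in 10 kHz units), width and height, and asks the encoder to scale only when an LVDS panel's fixed mode differs from the requested size. A small standalone sketch of that decision with an illustrative args layout (field names and widths are assumptions, not the real intel_sdvo_preferred_input_timing_args):

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	struct timing_args {		/* loosely modelled on the SDVO args block */
		uint16_t clock;		/* in 10 kHz units, i.e. mode->clock / 10 */
		uint16_t width;
		uint16_t height;
		uint8_t	 interlace;
		uint8_t	 scaled;
	};

	static void fill_timing_args(struct timing_args *args,
				     uint16_t clock, uint16_t width, uint16_t height,
				     bool is_lvds, uint16_t fixed_w, uint16_t fixed_h)
	{
		args->clock = clock;
		args->width = width;
		args->height = height;
		args->interlace = 0;

		/* Only an LVDS panel whose native size differs needs scaling. */
		args->scaled = is_lvds && (fixed_w != width || fixed_h != height);
	}

	int main(void)
	{
		struct timing_args a;

		fill_timing_args(&a, 6500, 1024, 768, true, 1280, 800);
		printf("scaled=%u\n", (unsigned)a.scaled);	/* 1: panel has to scale */
		return 0;
	}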
724 725
725static bool intel_sdvo_get_preferred_input_timing(struct intel_encoder *intel_encoder, 726static bool intel_sdvo_get_preferred_input_timing(struct intel_sdvo *intel_sdvo,
726 struct intel_sdvo_dtd *dtd) 727 struct intel_sdvo_dtd *dtd)
727{ 728{
728 bool status; 729 return intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_PREFERRED_INPUT_TIMING_PART1,
729 730 &dtd->part1, sizeof(dtd->part1)) &&
730 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_PREFERRED_INPUT_TIMING_PART1, 731 intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_PREFERRED_INPUT_TIMING_PART2,
731 NULL, 0); 732 &dtd->part2, sizeof(dtd->part2));
732
733 status = intel_sdvo_read_response(intel_encoder, &dtd->part1,
734 sizeof(dtd->part1));
735 if (status != SDVO_CMD_STATUS_SUCCESS)
736 return false;
737
738 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_PREFERRED_INPUT_TIMING_PART2,
739 NULL, 0);
740
741 status = intel_sdvo_read_response(intel_encoder, &dtd->part2,
742 sizeof(dtd->part2));
743 if (status != SDVO_CMD_STATUS_SUCCESS)
744 return false;
745
746 return false;
747} 733}
748 734
749static bool intel_sdvo_set_clock_rate_mult(struct intel_encoder *intel_encoder, u8 val) 735static bool intel_sdvo_set_clock_rate_mult(struct intel_sdvo *intel_sdvo, u8 val)
750{ 736{
751 u8 status; 737 return intel_sdvo_set_value(intel_sdvo, SDVO_CMD_SET_CLOCK_RATE_MULT, &val, 1);
752
753 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_CLOCK_RATE_MULT, &val, 1);
754 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
755 if (status != SDVO_CMD_STATUS_SUCCESS)
756 return false;
757
758 return true;
759} 738}
760 739
761static void intel_sdvo_get_dtd_from_mode(struct intel_sdvo_dtd *dtd, 740static void intel_sdvo_get_dtd_from_mode(struct intel_sdvo_dtd *dtd,
762 struct drm_display_mode *mode) 741 const struct drm_display_mode *mode)
763{ 742{
764 uint16_t width, height; 743 uint16_t width, height;
765 uint16_t h_blank_len, h_sync_len, v_blank_len, v_sync_len; 744 uint16_t h_blank_len, h_sync_len, v_blank_len, v_sync_len;
@@ -808,7 +787,7 @@ static void intel_sdvo_get_dtd_from_mode(struct intel_sdvo_dtd *dtd,
808} 787}
809 788
810static void intel_sdvo_get_mode_from_dtd(struct drm_display_mode * mode, 789static void intel_sdvo_get_mode_from_dtd(struct drm_display_mode * mode,
811 struct intel_sdvo_dtd *dtd) 790 const struct intel_sdvo_dtd *dtd)
812{ 791{
813 mode->hdisplay = dtd->part1.h_active; 792 mode->hdisplay = dtd->part1.h_active;
814 mode->hdisplay += ((dtd->part1.h_high >> 4) & 0x0f) << 8; 793 mode->hdisplay += ((dtd->part1.h_high >> 4) & 0x0f) << 8;
@@ -840,45 +819,33 @@ static void intel_sdvo_get_mode_from_dtd(struct drm_display_mode * mode,
840 mode->flags |= DRM_MODE_FLAG_PVSYNC; 819 mode->flags |= DRM_MODE_FLAG_PVSYNC;
841} 820}
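
The DTD conversion helpers around this hunk store each timing value as a low byte plus half of a shared "high" byte, which is why the unpacking above reads h_active and then ORs in ((h_high >> 4) & 0xf) << 8. A standalone sketch of that 12-bit split (struct layout is illustrative, not the exact intel_sdvo_dtd):

	#include <assert.h>
	#include <stdint.h>

	struct dtd_h {
		uint8_t h_active;	/* low 8 bits of the active width */
		uint8_t h_blank;	/* low 8 bits of the blanking length */
		uint8_t h_high;		/* bits 7:4 = active >> 8, bits 3:0 = blank >> 8 */
	};

	static void pack_h(struct dtd_h *d, uint16_t active, uint16_t blank)
	{
		d->h_active = active & 0xff;
		d->h_blank  = blank & 0xff;
		d->h_high   = (((active >> 8) & 0xf) << 4) | ((blank >> 8) & 0xf);
	}

	static uint16_t unpack_h_active(const struct dtd_h *d)
	{
		/* mirrors: mode->hdisplay = h_active + (((h_high >> 4) & 0xf) << 8) */
		return d->h_active | (((d->h_high >> 4) & 0xf) << 8);
	}

	int main(void)
	{
		struct dtd_h d;

		pack_h(&d, 1920, 280);		/* 1920 needs 11 bits */
		assert(unpack_h_active(&d) == 1920);
		return 0;
	}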
842 821
843static bool intel_sdvo_get_supp_encode(struct intel_encoder *intel_encoder, 822static bool intel_sdvo_get_supp_encode(struct intel_sdvo *intel_sdvo,
844 struct intel_sdvo_encode *encode) 823 struct intel_sdvo_encode *encode)
845{ 824{
846 uint8_t status; 825 if (intel_sdvo_get_value(intel_sdvo,
847 826 SDVO_CMD_GET_SUPP_ENCODE,
848 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_SUPP_ENCODE, NULL, 0); 827 encode, sizeof(*encode)))
849 status = intel_sdvo_read_response(intel_encoder, encode, sizeof(*encode)); 828 return true;
850 if (status != SDVO_CMD_STATUS_SUCCESS) { /* non-support means DVI */
851 memset(encode, 0, sizeof(*encode));
852 return false;
853 }
854 829
855 return true; 830 /* non-support means DVI */
831 memset(encode, 0, sizeof(*encode));
832 return false;
856} 833}
857 834
858static bool intel_sdvo_set_encode(struct intel_encoder *intel_encoder, 835static bool intel_sdvo_set_encode(struct intel_sdvo *intel_sdvo,
859 uint8_t mode) 836 uint8_t mode)
860{ 837{
861 uint8_t status; 838 return intel_sdvo_set_value(intel_sdvo, SDVO_CMD_SET_ENCODE, &mode, 1);
862
863 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_ENCODE, &mode, 1);
864 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
865
866 return (status == SDVO_CMD_STATUS_SUCCESS);
867} 839}
868 840
869static bool intel_sdvo_set_colorimetry(struct intel_encoder *intel_encoder, 841static bool intel_sdvo_set_colorimetry(struct intel_sdvo *intel_sdvo,
870 uint8_t mode) 842 uint8_t mode)
871{ 843{
872 uint8_t status; 844 return intel_sdvo_set_value(intel_sdvo, SDVO_CMD_SET_COLORIMETRY, &mode, 1);
873
874 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_COLORIMETRY, &mode, 1);
875 status = intel_sdvo_read_response(intel_encoder, NULL, 0);
876
877 return (status == SDVO_CMD_STATUS_SUCCESS);
878} 845}
879 846
880#if 0 847#if 0
881static void intel_sdvo_dump_hdmi_buf(struct intel_encoder *intel_encoder) 848static void intel_sdvo_dump_hdmi_buf(struct intel_sdvo *intel_sdvo)
882{ 849{
883 int i, j; 850 int i, j;
884 uint8_t set_buf_index[2]; 851 uint8_t set_buf_index[2];
@@ -887,8 +854,7 @@ static void intel_sdvo_dump_hdmi_buf(struct intel_encoder *intel_encoder)
887 uint8_t buf[48]; 854 uint8_t buf[48];
888 uint8_t *pos; 855 uint8_t *pos;
889 856
890 intel_sdvo_write_cmd(encoder, SDVO_CMD_GET_HBUF_AV_SPLIT, NULL, 0); 857 intel_sdvo_get_value(encoder, SDVO_CMD_GET_HBUF_AV_SPLIT, &av_split, 1);
891 intel_sdvo_read_response(encoder, &av_split, 1);
892 858
893 for (i = 0; i <= av_split; i++) { 859 for (i = 0; i <= av_split; i++) {
894 set_buf_index[0] = i; set_buf_index[1] = 0; 860 set_buf_index[0] = i; set_buf_index[1] = 0;
@@ -908,7 +874,7 @@ static void intel_sdvo_dump_hdmi_buf(struct intel_encoder *intel_encoder)
908} 874}
909#endif 875#endif
910 876
911static void intel_sdvo_set_hdmi_buf(struct intel_encoder *intel_encoder, 877static bool intel_sdvo_set_hdmi_buf(struct intel_sdvo *intel_sdvo,
912 int index, 878 int index,
913 uint8_t *data, int8_t size, uint8_t tx_rate) 879 uint8_t *data, int8_t size, uint8_t tx_rate)
914{ 880{
@@ -917,15 +883,18 @@ static void intel_sdvo_set_hdmi_buf(struct intel_encoder *intel_encoder,
917 set_buf_index[0] = index; 883 set_buf_index[0] = index;
918 set_buf_index[1] = 0; 884 set_buf_index[1] = 0;
919 885
920 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_HBUF_INDEX, 886 if (!intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_SET_HBUF_INDEX,
921 set_buf_index, 2); 887 set_buf_index, 2))
888 return false;
922 889
923 for (; size > 0; size -= 8) { 890 for (; size > 0; size -= 8) {
924 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_HBUF_DATA, data, 8); 891 if (!intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_SET_HBUF_DATA, data, 8))
892 return false;
893
925 data += 8; 894 data += 8;
926 } 895 }
927 896
928 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_HBUF_TXRATE, &tx_rate, 1); 897 return intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_SET_HBUF_TXRATE, &tx_rate, 1);
929} 898}
930 899
931static uint8_t intel_sdvo_calc_hbuf_csum(uint8_t *data, uint8_t size) 900static uint8_t intel_sdvo_calc_hbuf_csum(uint8_t *data, uint8_t size)
@@ -1000,7 +969,7 @@ struct dip_infoframe {
1000 } __attribute__ ((packed)) u; 969 } __attribute__ ((packed)) u;
1001} __attribute__((packed)); 970} __attribute__((packed));
1002 971
1003static void intel_sdvo_set_avi_infoframe(struct intel_encoder *intel_encoder, 972static bool intel_sdvo_set_avi_infoframe(struct intel_sdvo *intel_sdvo,
1004 struct drm_display_mode * mode) 973 struct drm_display_mode * mode)
1005{ 974{
1006 struct dip_infoframe avi_if = { 975 struct dip_infoframe avi_if = {
@@ -1011,133 +980,105 @@ static void intel_sdvo_set_avi_infoframe(struct intel_encoder *intel_encoder,
1011 980
1012 avi_if.checksum = intel_sdvo_calc_hbuf_csum((uint8_t *)&avi_if, 981 avi_if.checksum = intel_sdvo_calc_hbuf_csum((uint8_t *)&avi_if,
1013 4 + avi_if.len); 982 4 + avi_if.len);
1014 intel_sdvo_set_hdmi_buf(intel_encoder, 1, (uint8_t *)&avi_if, 983 return intel_sdvo_set_hdmi_buf(intel_sdvo, 1, (uint8_t *)&avi_if,
1015 4 + avi_if.len, 984 4 + avi_if.len,
1016 SDVO_HBUF_TX_VSYNC); 985 SDVO_HBUF_TX_VSYNC);
1017} 986}
1018 987
1019static void intel_sdvo_set_tv_format(struct intel_encoder *intel_encoder) 988static bool intel_sdvo_set_tv_format(struct intel_sdvo *intel_sdvo)
1020{ 989{
1021
1022 struct intel_sdvo_tv_format format; 990 struct intel_sdvo_tv_format format;
1023 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv; 991 uint32_t format_map;
1024 uint32_t format_map, i;
1025 uint8_t status;
1026 992
1027 for (i = 0; i < TV_FORMAT_NUM; i++) 993 format_map = 1 << intel_sdvo->tv_format_index;
1028 if (tv_format_names[i] == sdvo_priv->tv_format_name)
1029 break;
1030
1031 format_map = 1 << i;
1032 memset(&format, 0, sizeof(format)); 994 memset(&format, 0, sizeof(format));
1033 memcpy(&format, &format_map, sizeof(format_map) > sizeof(format) ? 995 memcpy(&format, &format_map, min(sizeof(format), sizeof(format_map)));
1034 sizeof(format) : sizeof(format_map));
1035
1036 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_TV_FORMAT, &format,
1037 sizeof(format));
1038 996
1039 status = intel_sdvo_read_response(intel_encoder, NULL, 0); 997 BUILD_BUG_ON(sizeof(format) != 6);
1040 if (status != SDVO_CMD_STATUS_SUCCESS) 998 return intel_sdvo_set_value(intel_sdvo,
1041 DRM_DEBUG_KMS("%s: Failed to set TV format\n", 999 SDVO_CMD_SET_TV_FORMAT,
1042 SDVO_NAME(sdvo_priv)); 1000 &format, sizeof(format));
1043} 1001}
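
set_tv_format() now derives the format bitmask directly from a stored index and copies only min(sizeof(format), sizeof(format_map)) bytes into the 6-byte command block, with BUILD_BUG_ON pinning that size at compile time. A standalone sketch of the same pattern (the payload struct is a placeholder, and the static assert is written portably rather than with the kernel macro):

	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	struct tv_format_cmd {		/* stand-in for intel_sdvo_tv_format */
		uint8_t bytes[6];
	};

	#define MIN(a, b)		((a) < (b) ? (a) : (b))
	#define STATIC_ASSERT(e)	extern char static_assert_[(e) ? 1 : -1]

	STATIC_ASSERT(sizeof(struct tv_format_cmd) == 6);

	static void fill_format(struct tv_format_cmd *cmd, unsigned int format_index)
	{
		uint32_t format_map = 1u << format_index;	/* one bit per format */

		memset(cmd, 0, sizeof(*cmd));
		memcpy(cmd, &format_map, MIN(sizeof(*cmd), sizeof(format_map)));
	}

	int main(void)
	{
		struct tv_format_cmd cmd;

		fill_format(&cmd, 3);		/* e.g. the 4th supported format */
		printf("first byte: 0x%02x\n", (unsigned)cmd.bytes[0]);	/* 0x08 on LE */
		return 0;
	}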
1044 1002
1045static bool intel_sdvo_mode_fixup(struct drm_encoder *encoder, 1003static bool
1046 struct drm_display_mode *mode, 1004intel_sdvo_set_output_timings_from_mode(struct intel_sdvo *intel_sdvo,
1047 struct drm_display_mode *adjusted_mode) 1005 struct drm_display_mode *mode)
1048{ 1006{
1049 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1007 struct intel_sdvo_dtd output_dtd;
1050 struct intel_sdvo_priv *dev_priv = intel_encoder->dev_priv;
1051 1008
1052 if (dev_priv->is_tv) { 1009 if (!intel_sdvo_set_target_output(intel_sdvo,
1053 struct intel_sdvo_dtd output_dtd; 1010 intel_sdvo->attached_output))
1054 bool success; 1011 return false;
1055 1012
1056 /* We need to construct preferred input timings based on our 1013 intel_sdvo_get_dtd_from_mode(&output_dtd, mode);
1057 * output timings. To do that, we have to set the output 1014 if (!intel_sdvo_set_output_timing(intel_sdvo, &output_dtd))
1058 * timings, even though this isn't really the right place in 1015 return false;
1059 * the sequence to do it. Oh well.
1060 */
1061 1016
1017 return true;
1018}
1062 1019
1063 /* Set output timings */ 1020static bool
1064 intel_sdvo_get_dtd_from_mode(&output_dtd, mode); 1021intel_sdvo_set_input_timings_for_mode(struct intel_sdvo *intel_sdvo,
1065 intel_sdvo_set_target_output(intel_encoder, 1022 struct drm_display_mode *mode,
1066 dev_priv->attached_output); 1023 struct drm_display_mode *adjusted_mode)
1067 intel_sdvo_set_output_timing(intel_encoder, &output_dtd); 1024{
1025 struct intel_sdvo_dtd input_dtd;
1068 1026
1069 /* Set the input timing to the screen. Assume always input 0. */ 1027 /* Reset the input timing to the screen. Assume always input 0. */
1070 intel_sdvo_set_target_input(intel_encoder, true, false); 1028 if (!intel_sdvo_set_target_input(intel_sdvo))
1029 return false;
1071 1030
1031 if (!intel_sdvo_create_preferred_input_timing(intel_sdvo,
1032 mode->clock / 10,
1033 mode->hdisplay,
1034 mode->vdisplay))
1035 return false;
1072 1036
1073 success = intel_sdvo_create_preferred_input_timing(intel_encoder, 1037 if (!intel_sdvo_get_preferred_input_timing(intel_sdvo,
1074 mode->clock / 10, 1038 &input_dtd))
1075 mode->hdisplay, 1039 return false;
1076 mode->vdisplay);
1077 if (success) {
1078 struct intel_sdvo_dtd input_dtd;
1079 1040
1080 intel_sdvo_get_preferred_input_timing(intel_encoder, 1041 intel_sdvo_get_mode_from_dtd(adjusted_mode, &input_dtd);
1081 &input_dtd); 1042 intel_sdvo->sdvo_flags = input_dtd.part2.sdvo_flags;
1082 intel_sdvo_get_mode_from_dtd(adjusted_mode, &input_dtd);
1083 dev_priv->sdvo_flags = input_dtd.part2.sdvo_flags;
1084 1043
1085 drm_mode_set_crtcinfo(adjusted_mode, 0); 1044 drm_mode_set_crtcinfo(adjusted_mode, 0);
1045 mode->clock = adjusted_mode->clock;
1046 return true;
1047}
1086 1048
1087 mode->clock = adjusted_mode->clock; 1049static bool intel_sdvo_mode_fixup(struct drm_encoder *encoder,
1050 struct drm_display_mode *mode,
1051 struct drm_display_mode *adjusted_mode)
1052{
1053 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1088 1054
1089 adjusted_mode->clock *= 1055 /* We need to construct preferred input timings based on our
1090 intel_sdvo_get_pixel_multiplier(mode); 1056 * output timings. To do that, we have to set the output
1091 } else { 1057 * timings, even though this isn't really the right place in
1058 * the sequence to do it. Oh well.
1059 */
1060 if (intel_sdvo->is_tv) {
1061 if (!intel_sdvo_set_output_timings_from_mode(intel_sdvo, mode))
1092 return false; 1062 return false;
1093 }
1094 } else if (dev_priv->is_lvds) {
1095 struct intel_sdvo_dtd output_dtd;
1096 bool success;
1097
1098 drm_mode_set_crtcinfo(dev_priv->sdvo_lvds_fixed_mode, 0);
1099 /* Set output timings */
1100 intel_sdvo_get_dtd_from_mode(&output_dtd,
1101 dev_priv->sdvo_lvds_fixed_mode);
1102
1103 intel_sdvo_set_target_output(intel_encoder,
1104 dev_priv->attached_output);
1105 intel_sdvo_set_output_timing(intel_encoder, &output_dtd);
1106
1107 /* Set the input timing to the screen. Assume always input 0. */
1108 intel_sdvo_set_target_input(intel_encoder, true, false);
1109
1110
1111 success = intel_sdvo_create_preferred_input_timing(
1112 intel_encoder,
1113 mode->clock / 10,
1114 mode->hdisplay,
1115 mode->vdisplay);
1116 1063
1117 if (success) { 1064 if (!intel_sdvo_set_input_timings_for_mode(intel_sdvo, mode, adjusted_mode))
1118 struct intel_sdvo_dtd input_dtd; 1065 return false;
1119 1066 } else if (intel_sdvo->is_lvds) {
1120 intel_sdvo_get_preferred_input_timing(intel_encoder, 1067 drm_mode_set_crtcinfo(intel_sdvo->sdvo_lvds_fixed_mode, 0);
1121 &input_dtd);
1122 intel_sdvo_get_mode_from_dtd(adjusted_mode, &input_dtd);
1123 dev_priv->sdvo_flags = input_dtd.part2.sdvo_flags;
1124
1125 drm_mode_set_crtcinfo(adjusted_mode, 0);
1126
1127 mode->clock = adjusted_mode->clock;
1128 1068
1129 adjusted_mode->clock *= 1069 if (!intel_sdvo_set_output_timings_from_mode(intel_sdvo,
1130 intel_sdvo_get_pixel_multiplier(mode); 1070 intel_sdvo->sdvo_lvds_fixed_mode))
1131 } else {
1132 return false; 1071 return false;
1133 }
1134 1072
1135 } else { 1073 if (!intel_sdvo_set_input_timings_for_mode(intel_sdvo, mode, adjusted_mode))
1136 /* Make the CRTC code factor in the SDVO pixel multiplier. The 1074 return false;
1137 * SDVO device will be told of the multiplier during mode_set.
1138 */
1139 adjusted_mode->clock *= intel_sdvo_get_pixel_multiplier(mode);
1140 } 1075 }
1076
1077 /* Make the CRTC code factor in the SDVO pixel multiplier. The
1078 * SDVO device will be told of the multiplier during mode_set.
1079 */
1080 adjusted_mode->clock *= intel_sdvo_get_pixel_multiplier(mode);
1081
1141 return true; 1082 return true;
1142} 1083}
1143 1084
@@ -1149,13 +1090,11 @@ static void intel_sdvo_mode_set(struct drm_encoder *encoder,
1149 struct drm_i915_private *dev_priv = dev->dev_private; 1090 struct drm_i915_private *dev_priv = dev->dev_private;
1150 struct drm_crtc *crtc = encoder->crtc; 1091 struct drm_crtc *crtc = encoder->crtc;
1151 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 1092 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
1152 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1093 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1153 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
1154 u32 sdvox = 0; 1094 u32 sdvox = 0;
1155 int sdvo_pixel_multiply; 1095 int sdvo_pixel_multiply, rate;
1156 struct intel_sdvo_in_out_map in_out; 1096 struct intel_sdvo_in_out_map in_out;
1157 struct intel_sdvo_dtd input_dtd; 1097 struct intel_sdvo_dtd input_dtd;
1158 u8 status;
1159 1098
1160 if (!mode) 1099 if (!mode)
1161 return; 1100 return;
@@ -1166,41 +1105,50 @@ static void intel_sdvo_mode_set(struct drm_encoder *encoder,
1166 * channel on the motherboard. In a two-input device, the first input 1105 * channel on the motherboard. In a two-input device, the first input
1167 * will be SDVOB and the second SDVOC. 1106 * will be SDVOB and the second SDVOC.
1168 */ 1107 */
1169 in_out.in0 = sdvo_priv->attached_output; 1108 in_out.in0 = intel_sdvo->attached_output;
1170 in_out.in1 = 0; 1109 in_out.in1 = 0;
1171 1110
1172 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_IN_OUT_MAP, 1111 if (!intel_sdvo_set_value(intel_sdvo,
1173 &in_out, sizeof(in_out)); 1112 SDVO_CMD_SET_IN_OUT_MAP,
1174 status = intel_sdvo_read_response(intel_encoder, NULL, 0); 1113 &in_out, sizeof(in_out)))
1114 return;
1115
1116 if (intel_sdvo->is_hdmi) {
1117 if (!intel_sdvo_set_avi_infoframe(intel_sdvo, mode))
1118 return;
1175 1119
1176 if (sdvo_priv->is_hdmi) {
1177 intel_sdvo_set_avi_infoframe(intel_encoder, mode);
1178 sdvox |= SDVO_AUDIO_ENABLE; 1120 sdvox |= SDVO_AUDIO_ENABLE;
1179 } 1121 }
1180 1122
1181 /* We have tried to get input timing in mode_fixup, and filled into 1123 /* We have tried to get input timing in mode_fixup, and filled into
1182 adjusted_mode */ 1124 adjusted_mode */
1183 if (sdvo_priv->is_tv || sdvo_priv->is_lvds) { 1125 if (intel_sdvo->is_tv || intel_sdvo->is_lvds) {
1184 intel_sdvo_get_dtd_from_mode(&input_dtd, adjusted_mode); 1126 intel_sdvo_get_dtd_from_mode(&input_dtd, adjusted_mode);
1185 input_dtd.part2.sdvo_flags = sdvo_priv->sdvo_flags; 1127 input_dtd.part2.sdvo_flags = intel_sdvo->sdvo_flags;
1186 } else 1128 } else
1187 intel_sdvo_get_dtd_from_mode(&input_dtd, mode); 1129 intel_sdvo_get_dtd_from_mode(&input_dtd, mode);
1188 1130
1189 /* If it's a TV, we already set the output timing in mode_fixup. 1131 /* If it's a TV, we already set the output timing in mode_fixup.
1190 * Otherwise, the output timing is equal to the input timing. 1132 * Otherwise, the output timing is equal to the input timing.
1191 */ 1133 */
1192 if (!sdvo_priv->is_tv && !sdvo_priv->is_lvds) { 1134 if (!intel_sdvo->is_tv && !intel_sdvo->is_lvds) {
1193 /* Set the output timing to the screen */ 1135 /* Set the output timing to the screen */
1194 intel_sdvo_set_target_output(intel_encoder, 1136 if (!intel_sdvo_set_target_output(intel_sdvo,
1195 sdvo_priv->attached_output); 1137 intel_sdvo->attached_output))
1196 intel_sdvo_set_output_timing(intel_encoder, &input_dtd); 1138 return;
1139
1140 if (!intel_sdvo_set_output_timing(intel_sdvo, &input_dtd))
1141 return;
1197 } 1142 }
1198 1143
1199 /* Set the input timing to the screen. Assume always input 0. */ 1144 /* Set the input timing to the screen. Assume always input 0. */
1200 intel_sdvo_set_target_input(intel_encoder, true, false); 1145 if (!intel_sdvo_set_target_input(intel_sdvo))
1146 return;
1201 1147
1202 if (sdvo_priv->is_tv) 1148 if (intel_sdvo->is_tv) {
1203 intel_sdvo_set_tv_format(intel_encoder); 1149 if (!intel_sdvo_set_tv_format(intel_sdvo))
1150 return;
1151 }
1204 1152
1205 /* We would like to use intel_sdvo_create_preferred_input_timing() to 1153 /* We would like to use intel_sdvo_create_preferred_input_timing() to
1206 * provide the device with a timing it can support, if it supports that 1154 * provide the device with a timing it can support, if it supports that
@@ -1217,23 +1165,18 @@ static void intel_sdvo_mode_set(struct drm_encoder *encoder,
1217 intel_sdvo_set_input_timing(encoder, &input_dtd); 1165 intel_sdvo_set_input_timing(encoder, &input_dtd);
1218 } 1166 }
1219#else 1167#else
1220 intel_sdvo_set_input_timing(intel_encoder, &input_dtd); 1168 if (!intel_sdvo_set_input_timing(intel_sdvo, &input_dtd))
1169 return;
1221#endif 1170#endif
1222 1171
1223 switch (intel_sdvo_get_pixel_multiplier(mode)) { 1172 sdvo_pixel_multiply = intel_sdvo_get_pixel_multiplier(mode);
1224 case 1: 1173 switch (sdvo_pixel_multiply) {
1225 intel_sdvo_set_clock_rate_mult(intel_encoder, 1174 case 1: rate = SDVO_CLOCK_RATE_MULT_1X; break;
1226 SDVO_CLOCK_RATE_MULT_1X); 1175 case 2: rate = SDVO_CLOCK_RATE_MULT_2X; break;
1227 break; 1176 case 4: rate = SDVO_CLOCK_RATE_MULT_4X; break;
1228 case 2:
1229 intel_sdvo_set_clock_rate_mult(intel_encoder,
1230 SDVO_CLOCK_RATE_MULT_2X);
1231 break;
1232 case 4:
1233 intel_sdvo_set_clock_rate_mult(intel_encoder,
1234 SDVO_CLOCK_RATE_MULT_4X);
1235 break;
1236 } 1177 }
1178 if (!intel_sdvo_set_clock_rate_mult(intel_sdvo, rate))
1179 return;
1237 1180
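
Low-clock modes are sent across the SDVO link with a 2x or 4x pixel multiplier (mode_fixup already scaled adjusted_mode->clock by it), and the switch above maps that multiplier onto the clock-rate-mult register value before the control register is programmed. A standalone sketch of both steps; the 100 MHz / 50 MHz thresholds and the register encodings are the commonly used values and should be treated as assumptions here:

	#include <stdio.h>

	enum { RATE_MULT_1X = 0x01, RATE_MULT_2X = 0x02, RATE_MULT_4X = 0x08 };

	static int pixel_multiplier(int clock_khz)
	{
		if (clock_khz >= 100000)
			return 1;
		if (clock_khz >= 50000)
			return 2;
		return 4;		/* very low clocks run at 4x on the link */
	}

	static int clock_rate_mult(int multiplier)
	{
		switch (multiplier) {
		case 1:  return RATE_MULT_1X;
		case 2:  return RATE_MULT_2X;
		case 4:  return RATE_MULT_4X;
		default: return -1;	/* callers only ever pass 1, 2 or 4 */
		}
	}

	int main(void)
	{
		int clock = 25175;			/* 640x480@60 */
		int mult = pixel_multiplier(clock);

		printf("link clock %d kHz, mult %dx, reg 0x%02x\n",
		       clock * mult, mult, clock_rate_mult(mult));
		return 0;
	}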
1238 /* Set the SDVO control regs. */ 1181 /* Set the SDVO control regs. */
1239 if (IS_I965G(dev)) { 1182 if (IS_I965G(dev)) {
@@ -1243,8 +1186,8 @@ static void intel_sdvo_mode_set(struct drm_encoder *encoder,
1243 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 1186 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
1244 sdvox |= SDVO_HSYNC_ACTIVE_HIGH; 1187 sdvox |= SDVO_HSYNC_ACTIVE_HIGH;
1245 } else { 1188 } else {
1246 sdvox |= I915_READ(sdvo_priv->sdvo_reg); 1189 sdvox |= I915_READ(intel_sdvo->sdvo_reg);
1247 switch (sdvo_priv->sdvo_reg) { 1190 switch (intel_sdvo->sdvo_reg) {
1248 case SDVOB: 1191 case SDVOB:
1249 sdvox &= SDVOB_PRESERVE_MASK; 1192 sdvox &= SDVOB_PRESERVE_MASK;
1250 break; 1193 break;
@@ -1257,7 +1200,6 @@ static void intel_sdvo_mode_set(struct drm_encoder *encoder,
1257 if (intel_crtc->pipe == 1) 1200 if (intel_crtc->pipe == 1)
1258 sdvox |= SDVO_PIPE_B_SELECT; 1201 sdvox |= SDVO_PIPE_B_SELECT;
1259 1202
1260 sdvo_pixel_multiply = intel_sdvo_get_pixel_multiplier(mode);
1261 if (IS_I965G(dev)) { 1203 if (IS_I965G(dev)) {
1262 /* done in crtc_mode_set as the dpll_md reg must be written early */ 1204 /* done in crtc_mode_set as the dpll_md reg must be written early */
1263 } else if (IS_I945G(dev) || IS_I945GM(dev) || IS_G33(dev)) { 1205 } else if (IS_I945G(dev) || IS_I945GM(dev) || IS_G33(dev)) {
@@ -1266,28 +1208,28 @@ static void intel_sdvo_mode_set(struct drm_encoder *encoder,
1266 sdvox |= (sdvo_pixel_multiply - 1) << SDVO_PORT_MULTIPLY_SHIFT; 1208 sdvox |= (sdvo_pixel_multiply - 1) << SDVO_PORT_MULTIPLY_SHIFT;
1267 } 1209 }
1268 1210
1269 if (sdvo_priv->sdvo_flags & SDVO_NEED_TO_STALL) 1211 if (intel_sdvo->sdvo_flags & SDVO_NEED_TO_STALL)
1270 sdvox |= SDVO_STALL_SELECT; 1212 sdvox |= SDVO_STALL_SELECT;
1271 intel_sdvo_write_sdvox(intel_encoder, sdvox); 1213 intel_sdvo_write_sdvox(intel_sdvo, sdvox);
1272} 1214}
1273 1215
1274static void intel_sdvo_dpms(struct drm_encoder *encoder, int mode) 1216static void intel_sdvo_dpms(struct drm_encoder *encoder, int mode)
1275{ 1217{
1276 struct drm_device *dev = encoder->dev; 1218 struct drm_device *dev = encoder->dev;
1277 struct drm_i915_private *dev_priv = dev->dev_private; 1219 struct drm_i915_private *dev_priv = dev->dev_private;
1278 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1220 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1279 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv; 1221 struct intel_crtc *intel_crtc = to_intel_crtc(encoder->crtc);
1280 u32 temp; 1222 u32 temp;
1281 1223
1282 if (mode != DRM_MODE_DPMS_ON) { 1224 if (mode != DRM_MODE_DPMS_ON) {
1283 intel_sdvo_set_active_outputs(intel_encoder, 0); 1225 intel_sdvo_set_active_outputs(intel_sdvo, 0);
1284 if (0) 1226 if (0)
1285 intel_sdvo_set_encoder_power_state(intel_encoder, mode); 1227 intel_sdvo_set_encoder_power_state(intel_sdvo, mode);
1286 1228
1287 if (mode == DRM_MODE_DPMS_OFF) { 1229 if (mode == DRM_MODE_DPMS_OFF) {
1288 temp = I915_READ(sdvo_priv->sdvo_reg); 1230 temp = I915_READ(intel_sdvo->sdvo_reg);
1289 if ((temp & SDVO_ENABLE) != 0) { 1231 if ((temp & SDVO_ENABLE) != 0) {
1290 intel_sdvo_write_sdvox(intel_encoder, temp & ~SDVO_ENABLE); 1232 intel_sdvo_write_sdvox(intel_sdvo, temp & ~SDVO_ENABLE);
1291 } 1233 }
1292 } 1234 }
1293 } else { 1235 } else {
@@ -1295,28 +1237,25 @@ static void intel_sdvo_dpms(struct drm_encoder *encoder, int mode)
1295 int i; 1237 int i;
1296 u8 status; 1238 u8 status;
1297 1239
1298 temp = I915_READ(sdvo_priv->sdvo_reg); 1240 temp = I915_READ(intel_sdvo->sdvo_reg);
1299 if ((temp & SDVO_ENABLE) == 0) 1241 if ((temp & SDVO_ENABLE) == 0)
1300 intel_sdvo_write_sdvox(intel_encoder, temp | SDVO_ENABLE); 1242 intel_sdvo_write_sdvox(intel_sdvo, temp | SDVO_ENABLE);
1301 for (i = 0; i < 2; i++) 1243 for (i = 0; i < 2; i++)
1302 intel_wait_for_vblank(dev); 1244 intel_wait_for_vblank(dev, intel_crtc->pipe);
1303
1304 status = intel_sdvo_get_trained_inputs(intel_encoder, &input1,
1305 &input2);
1306
1307 1245
1246 status = intel_sdvo_get_trained_inputs(intel_sdvo, &input1, &input2);
1308 /* Warn if the device reported failure to sync. 1247 /* Warn if the device reported failure to sync.
1309 * A lot of SDVO devices fail to notify of sync, but it's 1248 * A lot of SDVO devices fail to notify of sync, but it's
1310 * a given that if the status is a success, we succeeded. 1249 * a given that if the status is a success, we succeeded.
1311 */ 1250 */
1312 if (status == SDVO_CMD_STATUS_SUCCESS && !input1) { 1251 if (status == SDVO_CMD_STATUS_SUCCESS && !input1) {
1313 DRM_DEBUG_KMS("First %s output reported failure to " 1252 DRM_DEBUG_KMS("First %s output reported failure to "
1314 "sync\n", SDVO_NAME(sdvo_priv)); 1253 "sync\n", SDVO_NAME(intel_sdvo));
1315 } 1254 }
1316 1255
1317 if (0) 1256 if (0)
1318 intel_sdvo_set_encoder_power_state(intel_encoder, mode); 1257 intel_sdvo_set_encoder_power_state(intel_sdvo, mode);
1319 intel_sdvo_set_active_outputs(intel_encoder, sdvo_priv->attached_output); 1258 intel_sdvo_set_active_outputs(intel_sdvo, intel_sdvo->attached_output);
1320 } 1259 }
1321 return; 1260 return;
1322} 1261}
@@ -1325,42 +1264,31 @@ static int intel_sdvo_mode_valid(struct drm_connector *connector,
1325 struct drm_display_mode *mode) 1264 struct drm_display_mode *mode)
1326{ 1265{
1327 struct drm_encoder *encoder = intel_attached_encoder(connector); 1266 struct drm_encoder *encoder = intel_attached_encoder(connector);
1328 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1267 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1329 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
1330 1268
1331 if (mode->flags & DRM_MODE_FLAG_DBLSCAN) 1269 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
1332 return MODE_NO_DBLESCAN; 1270 return MODE_NO_DBLESCAN;
1333 1271
1334 if (sdvo_priv->pixel_clock_min > mode->clock) 1272 if (intel_sdvo->pixel_clock_min > mode->clock)
1335 return MODE_CLOCK_LOW; 1273 return MODE_CLOCK_LOW;
1336 1274
1337 if (sdvo_priv->pixel_clock_max < mode->clock) 1275 if (intel_sdvo->pixel_clock_max < mode->clock)
1338 return MODE_CLOCK_HIGH; 1276 return MODE_CLOCK_HIGH;
1339 1277
1340 if (sdvo_priv->is_lvds == true) { 1278 if (intel_sdvo->is_lvds) {
1341 if (sdvo_priv->sdvo_lvds_fixed_mode == NULL) 1279 if (mode->hdisplay > intel_sdvo->sdvo_lvds_fixed_mode->hdisplay)
1342 return MODE_PANEL; 1280 return MODE_PANEL;
1343 1281
1344 if (mode->hdisplay > sdvo_priv->sdvo_lvds_fixed_mode->hdisplay) 1282 if (mode->vdisplay > intel_sdvo->sdvo_lvds_fixed_mode->vdisplay)
1345 return MODE_PANEL;
1346
1347 if (mode->vdisplay > sdvo_priv->sdvo_lvds_fixed_mode->vdisplay)
1348 return MODE_PANEL; 1283 return MODE_PANEL;
1349 } 1284 }
1350 1285
1351 return MODE_OK; 1286 return MODE_OK;
1352} 1287}
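
mode_valid() drops the old NULL check on sdvo_lvds_fixed_mode because is_lvds is now only set once a fixed panel mode exists (see the detect and get_lvds_modes hunks further down); what remains is a straight clamp. A standalone sketch of that filter with simplified types:

	#include <stdbool.h>

	enum mode_status { STATUS_OK, STATUS_NO_DBLESCAN, STATUS_CLOCK_LOW,
			   STATUS_CLOCK_HIGH, STATUS_PANEL };

	struct simple_mode { int clock, hdisplay, vdisplay; bool doublescan; };

	static enum mode_status mode_valid(const struct simple_mode *m,
					   int clock_min, int clock_max,
					   const struct simple_mode *fixed)	/* NULL if not LVDS */
	{
		if (m->doublescan)
			return STATUS_NO_DBLESCAN;
		if (m->clock < clock_min)
			return STATUS_CLOCK_LOW;
		if (m->clock > clock_max)
			return STATUS_CLOCK_HIGH;
		if (fixed && (m->hdisplay > fixed->hdisplay ||
			      m->vdisplay > fixed->vdisplay))
			return STATUS_PANEL;	/* larger than the panel can show */
		return STATUS_OK;
	}

	int main(void)
	{
		struct simple_mode panel = { 68900, 1280, 800, false };
		struct simple_mode req   = { 108000, 1920, 1200, false };

		return mode_valid(&req, 25000, 165000, &panel) == STATUS_PANEL ? 0 : 1;
	}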
1353 1288
1354static bool intel_sdvo_get_capabilities(struct intel_encoder *intel_encoder, struct intel_sdvo_caps *caps) 1289static bool intel_sdvo_get_capabilities(struct intel_sdvo *intel_sdvo, struct intel_sdvo_caps *caps)
1355{ 1290{
1356 u8 status; 1291 return intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_DEVICE_CAPS, caps, sizeof(*caps));
1357
1358 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_DEVICE_CAPS, NULL, 0);
1359 status = intel_sdvo_read_response(intel_encoder, caps, sizeof(*caps));
1360 if (status != SDVO_CMD_STATUS_SUCCESS)
1361 return false;
1362
1363 return true;
1364} 1292}
1365 1293
1366/* No use! */ 1294/* No use! */
@@ -1368,12 +1296,12 @@ static bool intel_sdvo_get_capabilities(struct intel_encoder *intel_encoder, str
1368struct drm_connector* intel_sdvo_find(struct drm_device *dev, int sdvoB) 1296struct drm_connector* intel_sdvo_find(struct drm_device *dev, int sdvoB)
1369{ 1297{
1370 struct drm_connector *connector = NULL; 1298 struct drm_connector *connector = NULL;
1371 struct intel_encoder *iout = NULL; 1299 struct intel_sdvo *iout = NULL;
1372 struct intel_sdvo_priv *sdvo; 1300 struct intel_sdvo *sdvo;
1373 1301
1374 /* find the sdvo connector */ 1302 /* find the sdvo connector */
1375 list_for_each_entry(connector, &dev->mode_config.connector_list, head) { 1303 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1376 iout = to_intel_encoder(connector); 1304 iout = to_intel_sdvo(connector);
1377 1305
1378 if (iout->type != INTEL_OUTPUT_SDVO) 1306 if (iout->type != INTEL_OUTPUT_SDVO)
1379 continue; 1307 continue;
@@ -1395,75 +1323,69 @@ int intel_sdvo_supports_hotplug(struct drm_connector *connector)
1395{ 1323{
1396 u8 response[2]; 1324 u8 response[2];
1397 u8 status; 1325 u8 status;
1398 struct intel_encoder *intel_encoder; 1326 struct intel_sdvo *intel_sdvo;
1399 DRM_DEBUG_KMS("\n"); 1327 DRM_DEBUG_KMS("\n");
1400 1328
1401 if (!connector) 1329 if (!connector)
1402 return 0; 1330 return 0;
1403 1331
1404 intel_encoder = to_intel_encoder(connector); 1332 intel_sdvo = to_intel_sdvo(connector);
1405
1406 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_HOT_PLUG_SUPPORT, NULL, 0);
1407 status = intel_sdvo_read_response(intel_encoder, &response, 2);
1408
1409 if (response[0] !=0)
1410 return 1;
1411 1333
1412 return 0; 1334 return intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_HOT_PLUG_SUPPORT,
1335 &response, 2) && response[0];
1413} 1336}
1414 1337
1415void intel_sdvo_set_hotplug(struct drm_connector *connector, int on) 1338void intel_sdvo_set_hotplug(struct drm_connector *connector, int on)
1416{ 1339{
1417 u8 response[2]; 1340 u8 response[2];
1418 u8 status; 1341 u8 status;
1419 struct intel_encoder *intel_encoder = to_intel_encoder(connector); 1342 struct intel_sdvo *intel_sdvo = to_intel_sdvo(connector);
1420 1343
1421 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_ACTIVE_HOT_PLUG, NULL, 0); 1344 intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_GET_ACTIVE_HOT_PLUG, NULL, 0);
1422 intel_sdvo_read_response(intel_encoder, &response, 2); 1345 intel_sdvo_read_response(intel_sdvo, &response, 2);
1423 1346
1424 if (on) { 1347 if (on) {
1425 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_HOT_PLUG_SUPPORT, NULL, 0); 1348 intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_GET_HOT_PLUG_SUPPORT, NULL, 0);
1426 status = intel_sdvo_read_response(intel_encoder, &response, 2); 1349 status = intel_sdvo_read_response(intel_sdvo, &response, 2);
1427 1350
1428 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_ACTIVE_HOT_PLUG, &response, 2); 1351 intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_SET_ACTIVE_HOT_PLUG, &response, 2);
1429 } else { 1352 } else {
1430 response[0] = 0; 1353 response[0] = 0;
1431 response[1] = 0; 1354 response[1] = 0;
1432 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_SET_ACTIVE_HOT_PLUG, &response, 2); 1355 intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_SET_ACTIVE_HOT_PLUG, &response, 2);
1433 } 1356 }
1434 1357
1435 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_ACTIVE_HOT_PLUG, NULL, 0); 1358 intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_GET_ACTIVE_HOT_PLUG, NULL, 0);
1436 intel_sdvo_read_response(intel_encoder, &response, 2); 1359 intel_sdvo_read_response(intel_sdvo, &response, 2);
1437} 1360}
1438#endif 1361#endif
1439 1362
1440static bool 1363static bool
1441intel_sdvo_multifunc_encoder(struct intel_encoder *intel_encoder) 1364intel_sdvo_multifunc_encoder(struct intel_sdvo *intel_sdvo)
1442{ 1365{
1443 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
1444 int caps = 0; 1366 int caps = 0;
1445 1367
1446 if (sdvo_priv->caps.output_flags & 1368 if (intel_sdvo->caps.output_flags &
1447 (SDVO_OUTPUT_TMDS0 | SDVO_OUTPUT_TMDS1)) 1369 (SDVO_OUTPUT_TMDS0 | SDVO_OUTPUT_TMDS1))
1448 caps++; 1370 caps++;
1449 if (sdvo_priv->caps.output_flags & 1371 if (intel_sdvo->caps.output_flags &
1450 (SDVO_OUTPUT_RGB0 | SDVO_OUTPUT_RGB1)) 1372 (SDVO_OUTPUT_RGB0 | SDVO_OUTPUT_RGB1))
1451 caps++; 1373 caps++;
1452 if (sdvo_priv->caps.output_flags & 1374 if (intel_sdvo->caps.output_flags &
1453 (SDVO_OUTPUT_SVID0 | SDVO_OUTPUT_SVID1)) 1375 (SDVO_OUTPUT_SVID0 | SDVO_OUTPUT_SVID1))
1454 caps++; 1376 caps++;
1455 if (sdvo_priv->caps.output_flags & 1377 if (intel_sdvo->caps.output_flags &
1456 (SDVO_OUTPUT_CVBS0 | SDVO_OUTPUT_CVBS1)) 1378 (SDVO_OUTPUT_CVBS0 | SDVO_OUTPUT_CVBS1))
1457 caps++; 1379 caps++;
1458 if (sdvo_priv->caps.output_flags & 1380 if (intel_sdvo->caps.output_flags &
1459 (SDVO_OUTPUT_YPRPB0 | SDVO_OUTPUT_YPRPB1)) 1381 (SDVO_OUTPUT_YPRPB0 | SDVO_OUTPUT_YPRPB1))
1460 caps++; 1382 caps++;
1461 1383
1462 if (sdvo_priv->caps.output_flags & 1384 if (intel_sdvo->caps.output_flags &
1463 (SDVO_OUTPUT_SCART0 | SDVO_OUTPUT_SCART1)) 1385 (SDVO_OUTPUT_SCART0 | SDVO_OUTPUT_SCART1))
1464 caps++; 1386 caps++;
1465 1387
1466 if (sdvo_priv->caps.output_flags & 1388 if (intel_sdvo->caps.output_flags &
1467 (SDVO_OUTPUT_LVDS0 | SDVO_OUTPUT_LVDS1)) 1389 (SDVO_OUTPUT_LVDS0 | SDVO_OUTPUT_LVDS1))
1468 caps++; 1390 caps++;
1469 1391
@@ -1475,11 +1397,11 @@ intel_find_analog_connector(struct drm_device *dev)
1475{ 1397{
1476 struct drm_connector *connector; 1398 struct drm_connector *connector;
1477 struct drm_encoder *encoder; 1399 struct drm_encoder *encoder;
1478 struct intel_encoder *intel_encoder; 1400 struct intel_sdvo *intel_sdvo;
1479 1401
1480 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1402 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1481 intel_encoder = enc_to_intel_encoder(encoder); 1403 intel_sdvo = enc_to_intel_sdvo(encoder);
1482 if (intel_encoder->type == INTEL_OUTPUT_ANALOG) { 1404 if (intel_sdvo->base.type == INTEL_OUTPUT_ANALOG) {
1483 list_for_each_entry(connector, &dev->mode_config.connector_list, head) { 1405 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1484 if (encoder == intel_attached_encoder(connector)) 1406 if (encoder == intel_attached_encoder(connector))
1485 return connector; 1407 return connector;
@@ -1493,8 +1415,8 @@ static int
1493intel_analog_is_connected(struct drm_device *dev) 1415intel_analog_is_connected(struct drm_device *dev)
1494{ 1416{
1495 struct drm_connector *analog_connector; 1417 struct drm_connector *analog_connector;
1496 analog_connector = intel_find_analog_connector(dev);
1497 1418
1419 analog_connector = intel_find_analog_connector(dev);
1498 if (!analog_connector) 1420 if (!analog_connector)
1499 return false; 1421 return false;
1500 1422
@@ -1509,54 +1431,52 @@ enum drm_connector_status
1509intel_sdvo_hdmi_sink_detect(struct drm_connector *connector) 1431intel_sdvo_hdmi_sink_detect(struct drm_connector *connector)
1510{ 1432{
1511 struct drm_encoder *encoder = intel_attached_encoder(connector); 1433 struct drm_encoder *encoder = intel_attached_encoder(connector);
1512 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1434 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1513 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv; 1435 struct intel_sdvo_connector *intel_sdvo_connector = to_intel_sdvo_connector(connector);
1514 struct intel_connector *intel_connector = to_intel_connector(connector);
1515 struct intel_sdvo_connector *sdvo_connector = intel_connector->dev_priv;
1516 enum drm_connector_status status = connector_status_connected; 1436 enum drm_connector_status status = connector_status_connected;
1517 struct edid *edid = NULL; 1437 struct edid *edid = NULL;
1518 1438
1519 edid = drm_get_edid(connector, intel_encoder->ddc_bus); 1439 edid = drm_get_edid(connector, intel_sdvo->base.ddc_bus);
1520 1440
1521 /* This is only applied to SDVO cards with multiple outputs */ 1441 /* This is only applied to SDVO cards with multiple outputs */
1522 if (edid == NULL && intel_sdvo_multifunc_encoder(intel_encoder)) { 1442 if (edid == NULL && intel_sdvo_multifunc_encoder(intel_sdvo)) {
1523 uint8_t saved_ddc, temp_ddc; 1443 uint8_t saved_ddc, temp_ddc;
1524 saved_ddc = sdvo_priv->ddc_bus; 1444 saved_ddc = intel_sdvo->ddc_bus;
1525 temp_ddc = sdvo_priv->ddc_bus >> 1; 1445 temp_ddc = intel_sdvo->ddc_bus >> 1;
1526 /* 1446 /*
1527 * Don't use the 1 as the argument of DDC bus switch to get 1447 * Don't use the 1 as the argument of DDC bus switch to get
1528 * the EDID. It is used for SDVO SPD ROM. 1448 * the EDID. It is used for SDVO SPD ROM.
1529 */ 1449 */
1530 while(temp_ddc > 1) { 1450 while(temp_ddc > 1) {
1531 sdvo_priv->ddc_bus = temp_ddc; 1451 intel_sdvo->ddc_bus = temp_ddc;
1532 edid = drm_get_edid(connector, intel_encoder->ddc_bus); 1452 edid = drm_get_edid(connector, intel_sdvo->base.ddc_bus);
1533 if (edid) { 1453 if (edid) {
1534 /* 1454 /*
1535 * When we can get the EDID, maybe it is the 1455 * When we can get the EDID, maybe it is the
1536 * correct DDC bus. Update it. 1456 * correct DDC bus. Update it.
1537 */ 1457 */
1538 sdvo_priv->ddc_bus = temp_ddc; 1458 intel_sdvo->ddc_bus = temp_ddc;
1539 break; 1459 break;
1540 } 1460 }
1541 temp_ddc >>= 1; 1461 temp_ddc >>= 1;
1542 } 1462 }
1543 if (edid == NULL) 1463 if (edid == NULL)
1544 sdvo_priv->ddc_bus = saved_ddc; 1464 intel_sdvo->ddc_bus = saved_ddc;
1545 } 1465 }
1546 /* when there is no edid and no monitor is connected with VGA 1466 /* when there is no edid and no monitor is connected with VGA
1547 * port, try to use the CRT ddc to read the EDID for DVI-connector 1467 * port, try to use the CRT ddc to read the EDID for DVI-connector
1548 */ 1468 */
1549 if (edid == NULL && sdvo_priv->analog_ddc_bus && 1469 if (edid == NULL && intel_sdvo->analog_ddc_bus &&
1550 !intel_analog_is_connected(connector->dev)) 1470 !intel_analog_is_connected(connector->dev))
1551 edid = drm_get_edid(connector, sdvo_priv->analog_ddc_bus); 1471 edid = drm_get_edid(connector, intel_sdvo->analog_ddc_bus);
1552 1472
1553 if (edid != NULL) { 1473 if (edid != NULL) {
1554 bool is_digital = !!(edid->input & DRM_EDID_INPUT_DIGITAL); 1474 bool is_digital = !!(edid->input & DRM_EDID_INPUT_DIGITAL);
1555 bool need_digital = !!(sdvo_connector->output_flag & SDVO_TMDS_MASK); 1475 bool need_digital = !!(intel_sdvo_connector->output_flag & SDVO_TMDS_MASK);
1556 1476
1557 /* DDC bus is shared, match EDID to connector type */ 1477 /* DDC bus is shared, match EDID to connector type */
1558 if (is_digital && need_digital) 1478 if (is_digital && need_digital)
1559 sdvo_priv->is_hdmi = drm_detect_hdmi_monitor(edid); 1479 intel_sdvo->is_hdmi = drm_detect_hdmi_monitor(edid);
1560 else if (is_digital != need_digital) 1480 else if (is_digital != need_digital)
1561 status = connector_status_disconnected; 1481 status = connector_status_disconnected;
1562 1482
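
The multifunction-encoder fallback walks the DDC bus candidates by repeatedly halving the bus bit, skipping bus 1 because it is reserved for the SPD ROM, and keeps the first bus that returns an EDID (restoring the original on failure). A standalone sketch of that loop, with probe_edid() standing in for drm_get_edid() on the selected bus:

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	static bool probe_edid(uint8_t ddc_bus)
	{
		return ddc_bus == 0x02;		/* pretend the panel answers on bus 2 */
	}

	static uint8_t find_ddc_bus(uint8_t configured)
	{
		uint8_t saved = configured;
		uint8_t candidate = configured >> 1;

		while (candidate > 1) {		/* never use bus 1: SPD ROM */
			if (probe_edid(candidate))
				return candidate;	/* found a bus that answers, keep it */
			candidate >>= 1;
		}
		return saved;			/* nothing answered, restore the original */
	}

	int main(void)
	{
		printf("ddc bus: 0x%02x\n", (unsigned)find_ddc_bus(0x10));
		return 0;
	}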
@@ -1572,33 +1492,29 @@ intel_sdvo_hdmi_sink_detect(struct drm_connector *connector)
1572static enum drm_connector_status intel_sdvo_detect(struct drm_connector *connector) 1492static enum drm_connector_status intel_sdvo_detect(struct drm_connector *connector)
1573{ 1493{
1574 uint16_t response; 1494 uint16_t response;
1575 u8 status;
1576 struct drm_encoder *encoder = intel_attached_encoder(connector); 1495 struct drm_encoder *encoder = intel_attached_encoder(connector);
1577 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1496 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1578 struct intel_connector *intel_connector = to_intel_connector(connector); 1497 struct intel_sdvo_connector *intel_sdvo_connector = to_intel_sdvo_connector(connector);
1579 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
1580 struct intel_sdvo_connector *sdvo_connector = intel_connector->dev_priv;
1581 enum drm_connector_status ret; 1498 enum drm_connector_status ret;
1582 1499
1583 intel_sdvo_write_cmd(intel_encoder, 1500 if (!intel_sdvo_write_cmd(intel_sdvo,
1584 SDVO_CMD_GET_ATTACHED_DISPLAYS, NULL, 0); 1501 SDVO_CMD_GET_ATTACHED_DISPLAYS, NULL, 0))
1585 if (sdvo_priv->is_tv) { 1502 return connector_status_unknown;
1503 if (intel_sdvo->is_tv) {
1586 /* add 30ms delay when the output type is SDVO-TV */ 1504 /* add 30ms delay when the output type is SDVO-TV */
1587 mdelay(30); 1505 mdelay(30);
1588 } 1506 }
1589 status = intel_sdvo_read_response(intel_encoder, &response, 2); 1507 if (!intel_sdvo_read_response(intel_sdvo, &response, 2))
1508 return connector_status_unknown;
1590 1509
1591 DRM_DEBUG_KMS("SDVO response %d %d\n", response & 0xff, response >> 8); 1510 DRM_DEBUG_KMS("SDVO response %d %d\n", response & 0xff, response >> 8);
1592 1511
1593 if (status != SDVO_CMD_STATUS_SUCCESS)
1594 return connector_status_unknown;
1595
1596 if (response == 0) 1512 if (response == 0)
1597 return connector_status_disconnected; 1513 return connector_status_disconnected;
1598 1514
1599 sdvo_priv->attached_output = response; 1515 intel_sdvo->attached_output = response;
1600 1516
1601 if ((sdvo_connector->output_flag & response) == 0) 1517 if ((intel_sdvo_connector->output_flag & response) == 0)
1602 ret = connector_status_disconnected; 1518 ret = connector_status_disconnected;
1603 else if (response & SDVO_TMDS_MASK) 1519 else if (response & SDVO_TMDS_MASK)
1604 ret = intel_sdvo_hdmi_sink_detect(connector); 1520 ret = intel_sdvo_hdmi_sink_detect(connector);
@@ -1607,16 +1523,16 @@ static enum drm_connector_status intel_sdvo_detect(struct drm_connector *connect
1607 1523
1608 /* May update encoder flag for like clock for SDVO TV, etc.*/ 1524 /* May update encoder flag for like clock for SDVO TV, etc.*/
1609 if (ret == connector_status_connected) { 1525 if (ret == connector_status_connected) {
1610 sdvo_priv->is_tv = false; 1526 intel_sdvo->is_tv = false;
1611 sdvo_priv->is_lvds = false; 1527 intel_sdvo->is_lvds = false;
1612 intel_encoder->needs_tv_clock = false; 1528 intel_sdvo->base.needs_tv_clock = false;
1613 1529
1614 if (response & SDVO_TV_MASK) { 1530 if (response & SDVO_TV_MASK) {
1615 sdvo_priv->is_tv = true; 1531 intel_sdvo->is_tv = true;
1616 intel_encoder->needs_tv_clock = true; 1532 intel_sdvo->base.needs_tv_clock = true;
1617 } 1533 }
1618 if (response & SDVO_LVDS_MASK) 1534 if (response & SDVO_LVDS_MASK)
1619 sdvo_priv->is_lvds = true; 1535 intel_sdvo->is_lvds = intel_sdvo->sdvo_lvds_fixed_mode != NULL;
1620 } 1536 }
1621 1537
1622 return ret; 1538 return ret;
@@ -1625,12 +1541,11 @@ static enum drm_connector_status intel_sdvo_detect(struct drm_connector *connect
1625static void intel_sdvo_get_ddc_modes(struct drm_connector *connector) 1541static void intel_sdvo_get_ddc_modes(struct drm_connector *connector)
1626{ 1542{
1627 struct drm_encoder *encoder = intel_attached_encoder(connector); 1543 struct drm_encoder *encoder = intel_attached_encoder(connector);
1628 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1544 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1629 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
1630 int num_modes; 1545 int num_modes;
1631 1546
1632 /* set the bus switch and get the modes */ 1547 /* set the bus switch and get the modes */
1633 num_modes = intel_ddc_get_modes(connector, intel_encoder->ddc_bus); 1548 num_modes = intel_ddc_get_modes(connector, intel_sdvo->base.ddc_bus);
1634 1549
1635 /* 1550 /*
1636 * Mac mini hack. On this device, the DVI-I connector shares one DDC 1551 * Mac mini hack. On this device, the DVI-I connector shares one DDC
@@ -1639,11 +1554,11 @@ static void intel_sdvo_get_ddc_modes(struct drm_connector *connector)
1639 * which case we'll look there for the digital DDC data. 1554 * which case we'll look there for the digital DDC data.
1640 */ 1555 */
1641 if (num_modes == 0 && 1556 if (num_modes == 0 &&
1642 sdvo_priv->analog_ddc_bus && 1557 intel_sdvo->analog_ddc_bus &&
1643 !intel_analog_is_connected(connector->dev)) { 1558 !intel_analog_is_connected(connector->dev)) {
1644 /* Switch to the analog ddc bus and try that 1559 /* Switch to the analog ddc bus and try that
1645 */ 1560 */
1646 (void) intel_ddc_get_modes(connector, sdvo_priv->analog_ddc_bus); 1561 (void) intel_ddc_get_modes(connector, intel_sdvo->analog_ddc_bus);
1647 } 1562 }
1648} 1563}
1649 1564
@@ -1715,52 +1630,43 @@ struct drm_display_mode sdvo_tv_modes[] = {
1715static void intel_sdvo_get_tv_modes(struct drm_connector *connector) 1630static void intel_sdvo_get_tv_modes(struct drm_connector *connector)
1716{ 1631{
1717 struct drm_encoder *encoder = intel_attached_encoder(connector); 1632 struct drm_encoder *encoder = intel_attached_encoder(connector);
1718 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1633 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1719 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
1720 struct intel_sdvo_sdtv_resolution_request tv_res; 1634 struct intel_sdvo_sdtv_resolution_request tv_res;
1721 uint32_t reply = 0, format_map = 0; 1635 uint32_t reply = 0, format_map = 0;
1722 int i; 1636 int i;
1723 uint8_t status;
1724
1725 1637
1726 /* Read the list of supported input resolutions for the selected TV 1638 /* Read the list of supported input resolutions for the selected TV
1727 * format. 1639 * format.
1728 */ 1640 */
1729 for (i = 0; i < TV_FORMAT_NUM; i++) 1641 format_map = 1 << intel_sdvo->tv_format_index;
1730 if (tv_format_names[i] == sdvo_priv->tv_format_name)
1731 break;
1732
1733 format_map = (1 << i);
1734 memcpy(&tv_res, &format_map, 1642 memcpy(&tv_res, &format_map,
1735 sizeof(struct intel_sdvo_sdtv_resolution_request) > 1643 min(sizeof(format_map), sizeof(struct intel_sdvo_sdtv_resolution_request)));
1736 sizeof(format_map) ? sizeof(format_map) :
1737 sizeof(struct intel_sdvo_sdtv_resolution_request));
1738 1644
1739 intel_sdvo_set_target_output(intel_encoder, sdvo_priv->attached_output); 1645 if (!intel_sdvo_set_target_output(intel_sdvo, intel_sdvo->attached_output))
1646 return;
1740 1647
1741 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_SDTV_RESOLUTION_SUPPORT, 1648 BUILD_BUG_ON(sizeof(tv_res) != 3);
1742 &tv_res, sizeof(tv_res)); 1649 if (!intel_sdvo_write_cmd(intel_sdvo, SDVO_CMD_GET_SDTV_RESOLUTION_SUPPORT,
1743 status = intel_sdvo_read_response(intel_encoder, &reply, 3); 1650 &tv_res, sizeof(tv_res)))
1744 if (status != SDVO_CMD_STATUS_SUCCESS) 1651 return;
1652 if (!intel_sdvo_read_response(intel_sdvo, &reply, 3))
1745 return; 1653 return;
1746 1654
1747 for (i = 0; i < ARRAY_SIZE(sdvo_tv_modes); i++) 1655 for (i = 0; i < ARRAY_SIZE(sdvo_tv_modes); i++)
1748 if (reply & (1 << i)) { 1656 if (reply & (1 << i)) {
1749 struct drm_display_mode *nmode; 1657 struct drm_display_mode *nmode;
1750 nmode = drm_mode_duplicate(connector->dev, 1658 nmode = drm_mode_duplicate(connector->dev,
1751 &sdvo_tv_modes[i]); 1659 &sdvo_tv_modes[i]);
1752 if (nmode) 1660 if (nmode)
1753 drm_mode_probed_add(connector, nmode); 1661 drm_mode_probed_add(connector, nmode);
1754 } 1662 }
1755
1756} 1663}
1757 1664
1758static void intel_sdvo_get_lvds_modes(struct drm_connector *connector) 1665static void intel_sdvo_get_lvds_modes(struct drm_connector *connector)
1759{ 1666{
1760 struct drm_encoder *encoder = intel_attached_encoder(connector); 1667 struct drm_encoder *encoder = intel_attached_encoder(connector);
1761 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1668 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1762 struct drm_i915_private *dev_priv = connector->dev->dev_private; 1669 struct drm_i915_private *dev_priv = connector->dev->dev_private;
1763 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
1764 struct drm_display_mode *newmode; 1670 struct drm_display_mode *newmode;
1765 1671
1766 /* 1672 /*
@@ -1768,7 +1674,7 @@ static void intel_sdvo_get_lvds_modes(struct drm_connector *connector)
1768 * Assume that the preferred modes are 1674 * Assume that the preferred modes are
1769 * arranged in priority order. 1675 * arranged in priority order.
1770 */ 1676 */
1771 intel_ddc_get_modes(connector, intel_encoder->ddc_bus); 1677 intel_ddc_get_modes(connector, intel_sdvo->base.ddc_bus);
1772 if (list_empty(&connector->probed_modes) == false) 1678 if (list_empty(&connector->probed_modes) == false)
1773 goto end; 1679 goto end;
1774 1680
@@ -1787,8 +1693,9 @@ static void intel_sdvo_get_lvds_modes(struct drm_connector *connector)
1787end: 1693end:
1788 list_for_each_entry(newmode, &connector->probed_modes, head) { 1694 list_for_each_entry(newmode, &connector->probed_modes, head) {
1789 if (newmode->type & DRM_MODE_TYPE_PREFERRED) { 1695 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1790 sdvo_priv->sdvo_lvds_fixed_mode = 1696 intel_sdvo->sdvo_lvds_fixed_mode =
1791 drm_mode_duplicate(connector->dev, newmode); 1697 drm_mode_duplicate(connector->dev, newmode);
1698 intel_sdvo->is_lvds = true;
1792 break; 1699 break;
1793 } 1700 }
1794 } 1701 }
@@ -1797,66 +1704,67 @@ end:
1797 1704
1798static int intel_sdvo_get_modes(struct drm_connector *connector) 1705static int intel_sdvo_get_modes(struct drm_connector *connector)
1799{ 1706{
1800 struct intel_connector *intel_connector = to_intel_connector(connector); 1707 struct intel_sdvo_connector *intel_sdvo_connector = to_intel_sdvo_connector(connector);
1801 struct intel_sdvo_connector *sdvo_connector = intel_connector->dev_priv;
1802 1708
1803 if (IS_TV(sdvo_connector)) 1709 if (IS_TV(intel_sdvo_connector))
1804 intel_sdvo_get_tv_modes(connector); 1710 intel_sdvo_get_tv_modes(connector);
1805 else if (IS_LVDS(sdvo_connector)) 1711 else if (IS_LVDS(intel_sdvo_connector))
1806 intel_sdvo_get_lvds_modes(connector); 1712 intel_sdvo_get_lvds_modes(connector);
1807 else 1713 else
1808 intel_sdvo_get_ddc_modes(connector); 1714 intel_sdvo_get_ddc_modes(connector);
1809 1715
1810 if (list_empty(&connector->probed_modes)) 1716 return !list_empty(&connector->probed_modes);
1811 return 0;
1812 return 1;
1813} 1717}
1814 1718
1815static 1719static void
1816void intel_sdvo_destroy_enhance_property(struct drm_connector *connector) 1720intel_sdvo_destroy_enhance_property(struct drm_connector *connector)
1817{ 1721{
1818 struct intel_connector *intel_connector = to_intel_connector(connector); 1722 struct intel_sdvo_connector *intel_sdvo_connector = to_intel_sdvo_connector(connector);
1819 struct intel_sdvo_connector *sdvo_priv = intel_connector->dev_priv;
1820 struct drm_device *dev = connector->dev; 1723 struct drm_device *dev = connector->dev;
1821 1724
1822 if (IS_TV(sdvo_priv)) { 1725 if (intel_sdvo_connector->left)
1823 if (sdvo_priv->left_property) 1726 drm_property_destroy(dev, intel_sdvo_connector->left);
1824 drm_property_destroy(dev, sdvo_priv->left_property); 1727 if (intel_sdvo_connector->right)
1825 if (sdvo_priv->right_property) 1728 drm_property_destroy(dev, intel_sdvo_connector->right);
1826 drm_property_destroy(dev, sdvo_priv->right_property); 1729 if (intel_sdvo_connector->top)
1827 if (sdvo_priv->top_property) 1730 drm_property_destroy(dev, intel_sdvo_connector->top);
1828 drm_property_destroy(dev, sdvo_priv->top_property); 1731 if (intel_sdvo_connector->bottom)
1829 if (sdvo_priv->bottom_property) 1732 drm_property_destroy(dev, intel_sdvo_connector->bottom);
1830 drm_property_destroy(dev, sdvo_priv->bottom_property); 1733 if (intel_sdvo_connector->hpos)
1831 if (sdvo_priv->hpos_property) 1734 drm_property_destroy(dev, intel_sdvo_connector->hpos);
1832 drm_property_destroy(dev, sdvo_priv->hpos_property); 1735 if (intel_sdvo_connector->vpos)
1833 if (sdvo_priv->vpos_property) 1736 drm_property_destroy(dev, intel_sdvo_connector->vpos);
1834 drm_property_destroy(dev, sdvo_priv->vpos_property); 1737 if (intel_sdvo_connector->saturation)
1835 if (sdvo_priv->saturation_property) 1738 drm_property_destroy(dev, intel_sdvo_connector->saturation);
1836 drm_property_destroy(dev, 1739 if (intel_sdvo_connector->contrast)
1837 sdvo_priv->saturation_property); 1740 drm_property_destroy(dev, intel_sdvo_connector->contrast);
1838 if (sdvo_priv->contrast_property) 1741 if (intel_sdvo_connector->hue)
1839 drm_property_destroy(dev, 1742 drm_property_destroy(dev, intel_sdvo_connector->hue);
1840 sdvo_priv->contrast_property); 1743 if (intel_sdvo_connector->sharpness)
1841 if (sdvo_priv->hue_property) 1744 drm_property_destroy(dev, intel_sdvo_connector->sharpness);
1842 drm_property_destroy(dev, sdvo_priv->hue_property); 1745 if (intel_sdvo_connector->flicker_filter)
1843 } 1746 drm_property_destroy(dev, intel_sdvo_connector->flicker_filter);
1844 if (IS_TV(sdvo_priv) || IS_LVDS(sdvo_priv)) { 1747 if (intel_sdvo_connector->flicker_filter_2d)
1845 if (sdvo_priv->brightness_property) 1748 drm_property_destroy(dev, intel_sdvo_connector->flicker_filter_2d);
1846 drm_property_destroy(dev, 1749 if (intel_sdvo_connector->flicker_filter_adaptive)
1847 sdvo_priv->brightness_property); 1750 drm_property_destroy(dev, intel_sdvo_connector->flicker_filter_adaptive);
1848 } 1751 if (intel_sdvo_connector->tv_luma_filter)
1849 return; 1752 drm_property_destroy(dev, intel_sdvo_connector->tv_luma_filter);
1753 if (intel_sdvo_connector->tv_chroma_filter)
1754 drm_property_destroy(dev, intel_sdvo_connector->tv_chroma_filter);
1755 if (intel_sdvo_connector->dot_crawl)
1756 drm_property_destroy(dev, intel_sdvo_connector->dot_crawl);
1757 if (intel_sdvo_connector->brightness)
1758 drm_property_destroy(dev, intel_sdvo_connector->brightness);
1850} 1759}
1851 1760
1852static void intel_sdvo_destroy(struct drm_connector *connector) 1761static void intel_sdvo_destroy(struct drm_connector *connector)
1853{ 1762{
1854 struct intel_connector *intel_connector = to_intel_connector(connector); 1763 struct intel_sdvo_connector *intel_sdvo_connector = to_intel_sdvo_connector(connector);
1855 struct intel_sdvo_connector *sdvo_connector = intel_connector->dev_priv;
1856 1764
1857 if (sdvo_connector->tv_format_property) 1765 if (intel_sdvo_connector->tv_format)
1858 drm_property_destroy(connector->dev, 1766 drm_property_destroy(connector->dev,
1859 sdvo_connector->tv_format_property); 1767 intel_sdvo_connector->tv_format);
1860 1768
1861 intel_sdvo_destroy_enhance_property(connector); 1769 intel_sdvo_destroy_enhance_property(connector);
1862 drm_sysfs_connector_remove(connector); 1770 drm_sysfs_connector_remove(connector);
@@ -1870,132 +1778,118 @@ intel_sdvo_set_property(struct drm_connector *connector,
1870 uint64_t val) 1778 uint64_t val)
1871{ 1779{
1872 struct drm_encoder *encoder = intel_attached_encoder(connector); 1780 struct drm_encoder *encoder = intel_attached_encoder(connector);
1873 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1781 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
1874 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv; 1782 struct intel_sdvo_connector *intel_sdvo_connector = to_intel_sdvo_connector(connector);
1875 struct intel_connector *intel_connector = to_intel_connector(connector);
1876 struct intel_sdvo_connector *sdvo_connector = intel_connector->dev_priv;
1877 struct drm_crtc *crtc = encoder->crtc;
1878 int ret = 0;
1879 bool changed = false;
1880 uint8_t cmd, status;
1881 uint16_t temp_value; 1783 uint16_t temp_value;
1784 uint8_t cmd;
1785 int ret;
1882 1786
1883 ret = drm_connector_property_set_value(connector, property, val); 1787 ret = drm_connector_property_set_value(connector, property, val);
1884 if (ret < 0) 1788 if (ret)
1885 goto out; 1789 return ret;
1790
1791#define CHECK_PROPERTY(name, NAME) \
1792 if (intel_sdvo_connector->name == property) { \
1793 if (intel_sdvo_connector->cur_##name == temp_value) return 0; \
1794 if (intel_sdvo_connector->max_##name < temp_value) return -EINVAL; \
1795 cmd = SDVO_CMD_SET_##NAME; \
1796 intel_sdvo_connector->cur_##name = temp_value; \
1797 goto set_value; \
1798 }
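
The rewritten set_property() funnels every tunable through the CHECK_PROPERTY(name, NAME) macro above: if the value is unchanged return early, if it exceeds the stored maximum return -EINVAL, otherwise cache it and jump to the code that issues SDVO_CMD_SET_<NAME>. A standalone sketch of the same token-pasting pattern (command ids and the return convention here are invented for the demo):

	#include <stdint.h>
	#include <stdio.h>

	enum { CMD_SET_BRIGHTNESS = 0x54, CMD_SET_CONTRAST = 0x55 };	/* assumed ids */

	struct tunables {
		uint16_t cur_brightness, max_brightness;
		uint16_t cur_contrast,   max_contrast;
	};

	/* Returns the command to send, 0 for "nothing to do", -1 for bad input. */
	#define CHECK_PROPERTY(state, name, NAME, value)		\
		do {							\
			if ((state)->cur_##name == (value))		\
				return 0;				\
			if ((state)->max_##name < (value))		\
				return -1;				\
			(state)->cur_##name = (value);			\
			return CMD_SET_##NAME;				\
		} while (0)

	static int set_brightness(struct tunables *t, uint16_t value)
	{
		CHECK_PROPERTY(t, brightness, BRIGHTNESS, value);
	}

	int main(void)
	{
		struct tunables t = { .max_brightness = 255, .max_contrast = 255 };

		printf("cmd 0x%02x, cached %u\n", set_brightness(&t, 128),
		       (unsigned)t.cur_brightness);
		return 0;
	}

One macro body services every tunable by pasting the field name into the cached value, the limit, and the command id, which is what lets the patch delete the long per-property if/else ladders.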
1886 1799
1887 if (property == sdvo_connector->tv_format_property) { 1800 if (property == intel_sdvo_connector->tv_format) {
1888 if (val >= TV_FORMAT_NUM) { 1801 if (val >= TV_FORMAT_NUM)
1889 ret = -EINVAL; 1802 return -EINVAL;
1890 goto out;
1891 }
1892 if (sdvo_priv->tv_format_name ==
1893 sdvo_connector->tv_format_supported[val])
1894 goto out;
1895 1803
1896 sdvo_priv->tv_format_name = sdvo_connector->tv_format_supported[val]; 1804 if (intel_sdvo->tv_format_index ==
1897 changed = true; 1805 intel_sdvo_connector->tv_format_supported[val])
1898 } 1806 return 0;
1899 1807
1900 if (IS_TV(sdvo_connector) || IS_LVDS(sdvo_connector)) { 1808 intel_sdvo->tv_format_index = intel_sdvo_connector->tv_format_supported[val];
1901 cmd = 0; 1809 goto done;
1810 } else if (IS_TV_OR_LVDS(intel_sdvo_connector)) {
1902 temp_value = val; 1811 temp_value = val;
1903 if (sdvo_connector->left_property == property) { 1812 if (intel_sdvo_connector->left == property) {
1904 drm_connector_property_set_value(connector, 1813 drm_connector_property_set_value(connector,
1905 sdvo_connector->right_property, val); 1814 intel_sdvo_connector->right, val);
1906 if (sdvo_connector->left_margin == temp_value) 1815 if (intel_sdvo_connector->left_margin == temp_value)
1907 goto out; 1816 return 0;
1908 1817
1909 sdvo_connector->left_margin = temp_value; 1818 intel_sdvo_connector->left_margin = temp_value;
1910 sdvo_connector->right_margin = temp_value; 1819 intel_sdvo_connector->right_margin = temp_value;
1911 temp_value = sdvo_connector->max_hscan - 1820 temp_value = intel_sdvo_connector->max_hscan -
1912 sdvo_connector->left_margin; 1821 intel_sdvo_connector->left_margin;
1913 cmd = SDVO_CMD_SET_OVERSCAN_H; 1822 cmd = SDVO_CMD_SET_OVERSCAN_H;
1914 } else if (sdvo_connector->right_property == property) { 1823 goto set_value;
1824 } else if (intel_sdvo_connector->right == property) {
1915 drm_connector_property_set_value(connector, 1825 drm_connector_property_set_value(connector,
1916 sdvo_connector->left_property, val); 1826 intel_sdvo_connector->left, val);
1917 if (sdvo_connector->right_margin == temp_value) 1827 if (intel_sdvo_connector->right_margin == temp_value)
1918 goto out; 1828 return 0;
1919 1829
1920 sdvo_connector->left_margin = temp_value; 1830 intel_sdvo_connector->left_margin = temp_value;
1921 sdvo_connector->right_margin = temp_value; 1831 intel_sdvo_connector->right_margin = temp_value;
1922 temp_value = sdvo_connector->max_hscan - 1832 temp_value = intel_sdvo_connector->max_hscan -
1923 sdvo_connector->left_margin; 1833 intel_sdvo_connector->left_margin;
1924 cmd = SDVO_CMD_SET_OVERSCAN_H; 1834 cmd = SDVO_CMD_SET_OVERSCAN_H;
1925 } else if (sdvo_connector->top_property == property) { 1835 goto set_value;
1836 } else if (intel_sdvo_connector->top == property) {
1926 drm_connector_property_set_value(connector, 1837 drm_connector_property_set_value(connector,
1927 sdvo_connector->bottom_property, val); 1838 intel_sdvo_connector->bottom, val);
1928 if (sdvo_connector->top_margin == temp_value) 1839 if (intel_sdvo_connector->top_margin == temp_value)
1929 goto out; 1840 return 0;
1930 1841
1931 sdvo_connector->top_margin = temp_value; 1842 intel_sdvo_connector->top_margin = temp_value;
1932 sdvo_connector->bottom_margin = temp_value; 1843 intel_sdvo_connector->bottom_margin = temp_value;
1933 temp_value = sdvo_connector->max_vscan - 1844 temp_value = intel_sdvo_connector->max_vscan -
1934 sdvo_connector->top_margin; 1845 intel_sdvo_connector->top_margin;
1935 cmd = SDVO_CMD_SET_OVERSCAN_V; 1846 cmd = SDVO_CMD_SET_OVERSCAN_V;
1936 } else if (sdvo_connector->bottom_property == property) { 1847 goto set_value;
1848 } else if (intel_sdvo_connector->bottom == property) {
1937 drm_connector_property_set_value(connector, 1849 drm_connector_property_set_value(connector,
1938 sdvo_connector->top_property, val); 1850 intel_sdvo_connector->top, val);
1939 if (sdvo_connector->bottom_margin == temp_value) 1851 if (intel_sdvo_connector->bottom_margin == temp_value)
1940 goto out; 1852 return 0;
1941 sdvo_connector->top_margin = temp_value; 1853
1942 sdvo_connector->bottom_margin = temp_value; 1854 intel_sdvo_connector->top_margin = temp_value;
1943 temp_value = sdvo_connector->max_vscan - 1855 intel_sdvo_connector->bottom_margin = temp_value;
1944 sdvo_connector->top_margin; 1856 temp_value = intel_sdvo_connector->max_vscan -
1857 intel_sdvo_connector->top_margin;
1945 cmd = SDVO_CMD_SET_OVERSCAN_V; 1858 cmd = SDVO_CMD_SET_OVERSCAN_V;
1946 } else if (sdvo_connector->hpos_property == property) { 1859 goto set_value;
1947 if (sdvo_connector->cur_hpos == temp_value)
1948 goto out;
1949
1950 cmd = SDVO_CMD_SET_POSITION_H;
1951 sdvo_connector->cur_hpos = temp_value;
1952 } else if (sdvo_connector->vpos_property == property) {
1953 if (sdvo_connector->cur_vpos == temp_value)
1954 goto out;
1955
1956 cmd = SDVO_CMD_SET_POSITION_V;
1957 sdvo_connector->cur_vpos = temp_value;
1958 } else if (sdvo_connector->saturation_property == property) {
1959 if (sdvo_connector->cur_saturation == temp_value)
1960 goto out;
1961
1962 cmd = SDVO_CMD_SET_SATURATION;
1963 sdvo_connector->cur_saturation = temp_value;
1964 } else if (sdvo_connector->contrast_property == property) {
1965 if (sdvo_connector->cur_contrast == temp_value)
1966 goto out;
1967
1968 cmd = SDVO_CMD_SET_CONTRAST;
1969 sdvo_connector->cur_contrast = temp_value;
1970 } else if (sdvo_connector->hue_property == property) {
1971 if (sdvo_connector->cur_hue == temp_value)
1972 goto out;
1973
1974 cmd = SDVO_CMD_SET_HUE;
1975 sdvo_connector->cur_hue = temp_value;
1976 } else if (sdvo_connector->brightness_property == property) {
1977 if (sdvo_connector->cur_brightness == temp_value)
1978 goto out;
1979
1980 cmd = SDVO_CMD_SET_BRIGHTNESS;
1981 sdvo_connector->cur_brightness = temp_value;
1982 }
1983 if (cmd) {
1984 intel_sdvo_write_cmd(intel_encoder, cmd, &temp_value, 2);
1985 status = intel_sdvo_read_response(intel_encoder,
1986 NULL, 0);
1987 if (status != SDVO_CMD_STATUS_SUCCESS) {
1988 DRM_DEBUG_KMS("Incorrect SDVO command \n");
1989 return -EINVAL;
1990 }
1991 changed = true;
1992 } 1860 }
1861 CHECK_PROPERTY(hpos, HPOS)
1862 CHECK_PROPERTY(vpos, VPOS)
1863 CHECK_PROPERTY(saturation, SATURATION)
1864 CHECK_PROPERTY(contrast, CONTRAST)
1865 CHECK_PROPERTY(hue, HUE)
1866 CHECK_PROPERTY(brightness, BRIGHTNESS)
1867 CHECK_PROPERTY(sharpness, SHARPNESS)
1868 CHECK_PROPERTY(flicker_filter, FLICKER_FILTER)
1869 CHECK_PROPERTY(flicker_filter_2d, FLICKER_FILTER_2D)
1870 CHECK_PROPERTY(flicker_filter_adaptive, FLICKER_FILTER_ADAPTIVE)
1871 CHECK_PROPERTY(tv_chroma_filter, TV_CHROMA_FILTER)
1872 CHECK_PROPERTY(tv_luma_filter, TV_LUMA_FILTER)
1873 CHECK_PROPERTY(dot_crawl, DOT_CRAWL)
1993 } 1874 }
1994 if (changed && crtc) 1875
1876 return -EINVAL; /* unknown property */
1877
1878set_value:
1879 if (!intel_sdvo_set_value(intel_sdvo, cmd, &temp_value, 2))
1880 return -EIO;
1881
1882
1883done:
1884 if (encoder->crtc) {
1885 struct drm_crtc *crtc = encoder->crtc;
1886
1995 drm_crtc_helper_set_mode(crtc, &crtc->mode, crtc->x, 1887 drm_crtc_helper_set_mode(crtc, &crtc->mode, crtc->x,
1996 crtc->y, crtc->fb); 1888 crtc->y, crtc->fb);
1997out: 1889 }
1998 return ret; 1890
1891 return 0;
1892#undef CHECK_PROPERTY
1999} 1893}
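
The rewritten property handler above replaces a long chain of near-identical if/else blocks with the CHECK_PROPERTY() macro, which pastes the lowercase field name and the uppercase command suffix together. A minimal, standalone sketch of that idiom — property names, struct layout, and command values below are hypothetical, for illustration only:

    #include <stdint.h>
    #include <stdio.h>

    #define CMD_SET_HUE      0x5a
    #define CMD_SET_CONTRAST 0x60

    struct props {
        int hue, contrast;                    /* stand-ins for drm_property handles */
        uint16_t max_hue, cur_hue;
        uint16_t max_contrast, cur_contrast;
    };

    /* One comparison block per property; ##NAME builds the matching SET command. */
    #define CHECK_PROPERTY(name, NAME) \
        if (prop == &p->name) { \
            if (p->cur_##name == val) return 0;   /* unchanged */ \
            if (p->max_##name < val) return -1;   /* out of range */ \
            p->cur_##name = val; \
            cmd = CMD_SET_##NAME; \
            goto set_value; \
        }

    static int set_prop(struct props *p, int *prop, uint16_t val)
    {
        uint8_t cmd = 0;

        CHECK_PROPERTY(hue, HUE)
        CHECK_PROPERTY(contrast, CONTRAST)
        return -1;                            /* unknown property */
    set_value:
        printf("send command 0x%02x, value %u\n", cmd, val);
        return 0;
    }

    int main(void)
    {
        struct props p = { .max_hue = 63, .max_contrast = 255 };
        return set_prop(&p, &p.hue, 42);
    }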
2000 1894
2001static const struct drm_encoder_helper_funcs intel_sdvo_helper_funcs = { 1895static const struct drm_encoder_helper_funcs intel_sdvo_helper_funcs = {
@@ -2022,22 +1916,16 @@ static const struct drm_connector_helper_funcs intel_sdvo_connector_helper_funcs
2022 1916
2023static void intel_sdvo_enc_destroy(struct drm_encoder *encoder) 1917static void intel_sdvo_enc_destroy(struct drm_encoder *encoder)
2024{ 1918{
2025 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1919 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
2026 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
2027 1920
2028 if (intel_encoder->i2c_bus) 1921 if (intel_sdvo->analog_ddc_bus)
2029 intel_i2c_destroy(intel_encoder->i2c_bus); 1922 intel_i2c_destroy(intel_sdvo->analog_ddc_bus);
2030 if (intel_encoder->ddc_bus)
2031 intel_i2c_destroy(intel_encoder->ddc_bus);
2032 if (sdvo_priv->analog_ddc_bus)
2033 intel_i2c_destroy(sdvo_priv->analog_ddc_bus);
2034 1923
2035 if (sdvo_priv->sdvo_lvds_fixed_mode != NULL) 1924 if (intel_sdvo->sdvo_lvds_fixed_mode != NULL)
2036 drm_mode_destroy(encoder->dev, 1925 drm_mode_destroy(encoder->dev,
2037 sdvo_priv->sdvo_lvds_fixed_mode); 1926 intel_sdvo->sdvo_lvds_fixed_mode);
2038 1927
2039 drm_encoder_cleanup(encoder); 1928 intel_encoder_destroy(encoder);
2040 kfree(intel_encoder);
2041} 1929}
2042 1930
2043static const struct drm_encoder_funcs intel_sdvo_enc_funcs = { 1931static const struct drm_encoder_funcs intel_sdvo_enc_funcs = {
@@ -2054,7 +1942,7 @@ static const struct drm_encoder_funcs intel_sdvo_enc_funcs = {
2054 */ 1942 */
2055static void 1943static void
2056intel_sdvo_select_ddc_bus(struct drm_i915_private *dev_priv, 1944intel_sdvo_select_ddc_bus(struct drm_i915_private *dev_priv,
2057 struct intel_sdvo_priv *sdvo, u32 reg) 1945 struct intel_sdvo *sdvo, u32 reg)
2058{ 1946{
2059 struct sdvo_device_mapping *mapping; 1947 struct sdvo_device_mapping *mapping;
2060 1948
@@ -2067,57 +1955,46 @@ intel_sdvo_select_ddc_bus(struct drm_i915_private *dev_priv,
2067} 1955}
2068 1956
2069static bool 1957static bool
2070intel_sdvo_get_digital_encoding_mode(struct intel_encoder *output, int device) 1958intel_sdvo_get_digital_encoding_mode(struct intel_sdvo *intel_sdvo, int device)
2071{ 1959{
2072 struct intel_sdvo_priv *sdvo_priv = output->dev_priv; 1960 return intel_sdvo_set_target_output(intel_sdvo,
2073 uint8_t status; 1961 device == 0 ? SDVO_OUTPUT_TMDS0 : SDVO_OUTPUT_TMDS1) &&
2074 1962 intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_ENCODE,
2075 if (device == 0) 1963 &intel_sdvo->is_hdmi, 1);
2076 intel_sdvo_set_target_output(output, SDVO_OUTPUT_TMDS0);
2077 else
2078 intel_sdvo_set_target_output(output, SDVO_OUTPUT_TMDS1);
2079
2080 intel_sdvo_write_cmd(output, SDVO_CMD_GET_ENCODE, NULL, 0);
2081 status = intel_sdvo_read_response(output, &sdvo_priv->is_hdmi, 1);
2082 if (status != SDVO_CMD_STATUS_SUCCESS)
2083 return false;
2084 return true;
2085} 1964}
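
In the hunk above, the old write-command/read-response pair collapses into a single boolean chain. A small sketch of that short-circuit style, using hypothetical helpers and an assumed opcode:

    #include <stdbool.h>
    #include <stdio.h>

    static bool set_target(int output)        { return output >= 0; }
    static bool get_value(int cmd, int *out)  { *out = cmd; return true; }

    /* The query only runs if selecting the target output succeeded, and the
     * caller sees one success/failure result instead of two status codes. */
    static bool get_encode_mode(int device, int *mode)
    {
        return set_target(device == 0 ? 0 : 1) &&
               get_value(0x9d /* hypothetical GET_ENCODE opcode */, mode);
    }

    int main(void)
    {
        int mode;
        printf("%s\n", get_encode_mode(0, &mode) ? "ok" : "failed");
        return 0;
    }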
2086 1965
2087static struct intel_encoder * 1966static struct intel_sdvo *
2088intel_sdvo_chan_to_intel_encoder(struct intel_i2c_chan *chan) 1967intel_sdvo_chan_to_intel_sdvo(struct intel_i2c_chan *chan)
2089{ 1968{
2090 struct drm_device *dev = chan->drm_dev; 1969 struct drm_device *dev = chan->drm_dev;
2091 struct drm_encoder *encoder; 1970 struct drm_encoder *encoder;
2092 struct intel_encoder *intel_encoder = NULL;
2093 1971
2094 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1972 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2095 intel_encoder = enc_to_intel_encoder(encoder); 1973 struct intel_sdvo *intel_sdvo = enc_to_intel_sdvo(encoder);
2096 if (intel_encoder->ddc_bus == &chan->adapter) 1974 if (intel_sdvo->base.ddc_bus == &chan->adapter)
2097 break; 1975 return intel_sdvo;
2098 } 1976 }
2099 return intel_encoder; 1977
1978 return NULL;
2100} 1979}
2101 1980
2102static int intel_sdvo_master_xfer(struct i2c_adapter *i2c_adap, 1981static int intel_sdvo_master_xfer(struct i2c_adapter *i2c_adap,
2103 struct i2c_msg msgs[], int num) 1982 struct i2c_msg msgs[], int num)
2104{ 1983{
2105 struct intel_encoder *intel_encoder; 1984 struct intel_sdvo *intel_sdvo;
2106 struct intel_sdvo_priv *sdvo_priv;
2107 struct i2c_algo_bit_data *algo_data; 1985 struct i2c_algo_bit_data *algo_data;
2108 const struct i2c_algorithm *algo; 1986 const struct i2c_algorithm *algo;
2109 1987
2110 algo_data = (struct i2c_algo_bit_data *)i2c_adap->algo_data; 1988 algo_data = (struct i2c_algo_bit_data *)i2c_adap->algo_data;
2111 intel_encoder = 1989 intel_sdvo =
2112 intel_sdvo_chan_to_intel_encoder( 1990 intel_sdvo_chan_to_intel_sdvo((struct intel_i2c_chan *)
2113 (struct intel_i2c_chan *)(algo_data->data)); 1991 (algo_data->data));
2114 if (intel_encoder == NULL) 1992 if (intel_sdvo == NULL)
2115 return -EINVAL; 1993 return -EINVAL;
2116 1994
2117 sdvo_priv = intel_encoder->dev_priv; 1995 algo = intel_sdvo->base.i2c_bus->algo;
2118 algo = intel_encoder->i2c_bus->algo;
2119 1996
2120 intel_sdvo_set_control_bus_switch(intel_encoder, sdvo_priv->ddc_bus); 1997 intel_sdvo_set_control_bus_switch(intel_sdvo, intel_sdvo->ddc_bus);
2121 return algo->master_xfer(i2c_adap, msgs, num); 1998 return algo->master_xfer(i2c_adap, msgs, num);
2122} 1999}
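
The master_xfer wrapper above keeps the original bit-banging algorithm aside, switches the SDVO control bus over to the DDC pins, and then delegates. A reduced sketch of that delegate-through-a-saved-function-pointer pattern (all names hypothetical):

    #include <stdio.h>

    struct adapter;
    typedef int (*xfer_fn)(struct adapter *adap, const char *msg);
    struct adapter { xfer_fn xfer; };

    static int raw_xfer(struct adapter *adap, const char *msg)
    {
        printf("transfer: %s\n", msg);
        return 0;
    }

    static xfer_fn saved_xfer = raw_xfer;   /* the original algorithm, kept aside */

    /* Wrapper: flip the mux to the DDC pins, then hand off to the saved routine. */
    static int ddc_xfer(struct adapter *adap, const char *msg)
    {
        printf("switch control bus to DDC\n");
        return saved_xfer(adap, msg);
    }

    int main(void)
    {
        struct adapter adap = { .xfer = ddc_xfer };
        return adap.xfer(&adap, "EDID read");
    }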
2123 2000
@@ -2162,27 +2039,9 @@ intel_sdvo_get_slave_addr(struct drm_device *dev, int sdvo_reg)
2162 return 0x72; 2039 return 0x72;
2163} 2040}
2164 2041
2165static bool
2166intel_sdvo_connector_alloc (struct intel_connector **ret)
2167{
2168 struct intel_connector *intel_connector;
2169 struct intel_sdvo_connector *sdvo_connector;
2170
2171 *ret = kzalloc(sizeof(*intel_connector) +
2172 sizeof(*sdvo_connector), GFP_KERNEL);
2173 if (!*ret)
2174 return false;
2175
2176 intel_connector = *ret;
2177 sdvo_connector = (struct intel_sdvo_connector *)(intel_connector + 1);
2178 intel_connector->dev_priv = sdvo_connector;
2179
2180 return true;
2181}
2182
2183static void 2042static void
2184intel_sdvo_connector_create (struct drm_encoder *encoder, 2043intel_sdvo_connector_init(struct drm_encoder *encoder,
2185 struct drm_connector *connector) 2044 struct drm_connector *connector)
2186{ 2045{
2187 drm_connector_init(encoder->dev, connector, &intel_sdvo_connector_funcs, 2046 drm_connector_init(encoder->dev, connector, &intel_sdvo_connector_funcs,
2188 connector->connector_type); 2047 connector->connector_type);
@@ -2198,582 +2057,470 @@ intel_sdvo_connector_create (struct drm_encoder *encoder,
2198} 2057}
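
With intel_sdvo_connector_alloc() removed, each *_init() below simply kzalloc()s a struct intel_sdvo_connector that embeds its struct intel_connector as "base", instead of stitching two allocations together through a dev_priv pointer. A standalone sketch of that embedded-base pattern, with hypothetical types:

    #include <stddef.h>
    #include <stdlib.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct base_connector { int id; };

    struct sdvo_connector {
        struct base_connector base;   /* embedded, not a separate allocation */
        unsigned int output_flag;
    };

    static struct sdvo_connector *to_sdvo(struct base_connector *c)
    {
        return container_of(c, struct sdvo_connector, base);
    }

    int main(void)
    {
        struct sdvo_connector *s = calloc(1, sizeof(*s));
        struct base_connector *b;

        if (!s)
            return 1;
        b = &s->base;                   /* what generic code holds */
        to_sdvo(b)->output_flag = 0x1;  /* driver code recovers the outer struct */
        free(s);
        return 0;
    }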
2199 2058
2200static bool 2059static bool
2201intel_sdvo_dvi_init(struct intel_encoder *intel_encoder, int device) 2060intel_sdvo_dvi_init(struct intel_sdvo *intel_sdvo, int device)
2202{ 2061{
2203 struct drm_encoder *encoder = &intel_encoder->enc; 2062 struct drm_encoder *encoder = &intel_sdvo->base.enc;
2204 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
2205 struct drm_connector *connector; 2063 struct drm_connector *connector;
2206 struct intel_connector *intel_connector; 2064 struct intel_connector *intel_connector;
2207 struct intel_sdvo_connector *sdvo_connector; 2065 struct intel_sdvo_connector *intel_sdvo_connector;
2208 2066
2209 if (!intel_sdvo_connector_alloc(&intel_connector)) 2067 intel_sdvo_connector = kzalloc(sizeof(struct intel_sdvo_connector), GFP_KERNEL);
2068 if (!intel_sdvo_connector)
2210 return false; 2069 return false;
2211 2070
2212 sdvo_connector = intel_connector->dev_priv;
2213
2214 if (device == 0) { 2071 if (device == 0) {
2215 sdvo_priv->controlled_output |= SDVO_OUTPUT_TMDS0; 2072 intel_sdvo->controlled_output |= SDVO_OUTPUT_TMDS0;
2216 sdvo_connector->output_flag = SDVO_OUTPUT_TMDS0; 2073 intel_sdvo_connector->output_flag = SDVO_OUTPUT_TMDS0;
2217 } else if (device == 1) { 2074 } else if (device == 1) {
2218 sdvo_priv->controlled_output |= SDVO_OUTPUT_TMDS1; 2075 intel_sdvo->controlled_output |= SDVO_OUTPUT_TMDS1;
2219 sdvo_connector->output_flag = SDVO_OUTPUT_TMDS1; 2076 intel_sdvo_connector->output_flag = SDVO_OUTPUT_TMDS1;
2220 } 2077 }
2221 2078
2079 intel_connector = &intel_sdvo_connector->base;
2222 connector = &intel_connector->base; 2080 connector = &intel_connector->base;
2223 connector->polled = DRM_CONNECTOR_POLL_CONNECT | DRM_CONNECTOR_POLL_DISCONNECT; 2081 connector->polled = DRM_CONNECTOR_POLL_CONNECT | DRM_CONNECTOR_POLL_DISCONNECT;
2224 encoder->encoder_type = DRM_MODE_ENCODER_TMDS; 2082 encoder->encoder_type = DRM_MODE_ENCODER_TMDS;
2225 connector->connector_type = DRM_MODE_CONNECTOR_DVID; 2083 connector->connector_type = DRM_MODE_CONNECTOR_DVID;
2226 2084
2227 if (intel_sdvo_get_supp_encode(intel_encoder, &sdvo_priv->encode) 2085 if (intel_sdvo_get_supp_encode(intel_sdvo, &intel_sdvo->encode)
2228 && intel_sdvo_get_digital_encoding_mode(intel_encoder, device) 2086 && intel_sdvo_get_digital_encoding_mode(intel_sdvo, device)
2229 && sdvo_priv->is_hdmi) { 2087 && intel_sdvo->is_hdmi) {
2230 /* enable hdmi encoding mode if supported */ 2088 /* enable hdmi encoding mode if supported */
2231 intel_sdvo_set_encode(intel_encoder, SDVO_ENCODE_HDMI); 2089 intel_sdvo_set_encode(intel_sdvo, SDVO_ENCODE_HDMI);
2232 intel_sdvo_set_colorimetry(intel_encoder, 2090 intel_sdvo_set_colorimetry(intel_sdvo,
2233 SDVO_COLORIMETRY_RGB256); 2091 SDVO_COLORIMETRY_RGB256);
2234 connector->connector_type = DRM_MODE_CONNECTOR_HDMIA; 2092 connector->connector_type = DRM_MODE_CONNECTOR_HDMIA;
2235 } 2093 }
2236 intel_encoder->clone_mask = (1 << INTEL_SDVO_NON_TV_CLONE_BIT) | 2094 intel_sdvo->base.clone_mask = ((1 << INTEL_SDVO_NON_TV_CLONE_BIT) |
2237 (1 << INTEL_ANALOG_CLONE_BIT); 2095 (1 << INTEL_ANALOG_CLONE_BIT));
2238 2096
2239 intel_sdvo_connector_create(encoder, connector); 2097 intel_sdvo_connector_init(encoder, connector);
2240 2098
2241 return true; 2099 return true;
2242} 2100}
2243 2101
2244static bool 2102static bool
2245intel_sdvo_tv_init(struct intel_encoder *intel_encoder, int type) 2103intel_sdvo_tv_init(struct intel_sdvo *intel_sdvo, int type)
2246{ 2104{
2247 struct drm_encoder *encoder = &intel_encoder->enc; 2105 struct drm_encoder *encoder = &intel_sdvo->base.enc;
2248 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
2249 struct drm_connector *connector; 2106 struct drm_connector *connector;
2250 struct intel_connector *intel_connector; 2107 struct intel_connector *intel_connector;
2251 struct intel_sdvo_connector *sdvo_connector; 2108 struct intel_sdvo_connector *intel_sdvo_connector;
2252 2109
2253 if (!intel_sdvo_connector_alloc(&intel_connector)) 2110 intel_sdvo_connector = kzalloc(sizeof(struct intel_sdvo_connector), GFP_KERNEL);
2254 return false; 2111 if (!intel_sdvo_connector)
2112 return false;
2255 2113
2114 intel_connector = &intel_sdvo_connector->base;
2256 connector = &intel_connector->base; 2115 connector = &intel_connector->base;
2257 encoder->encoder_type = DRM_MODE_ENCODER_TVDAC; 2116 encoder->encoder_type = DRM_MODE_ENCODER_TVDAC;
2258 connector->connector_type = DRM_MODE_CONNECTOR_SVIDEO; 2117 connector->connector_type = DRM_MODE_CONNECTOR_SVIDEO;
2259 sdvo_connector = intel_connector->dev_priv;
2260 2118
2261 sdvo_priv->controlled_output |= type; 2119 intel_sdvo->controlled_output |= type;
2262 sdvo_connector->output_flag = type; 2120 intel_sdvo_connector->output_flag = type;
2263 2121
2264 sdvo_priv->is_tv = true; 2122 intel_sdvo->is_tv = true;
2265 intel_encoder->needs_tv_clock = true; 2123 intel_sdvo->base.needs_tv_clock = true;
2266 intel_encoder->clone_mask = 1 << INTEL_SDVO_TV_CLONE_BIT; 2124 intel_sdvo->base.clone_mask = 1 << INTEL_SDVO_TV_CLONE_BIT;
2267 2125
2268 intel_sdvo_connector_create(encoder, connector); 2126 intel_sdvo_connector_init(encoder, connector);
2269 2127
2270 intel_sdvo_tv_create_property(connector, type); 2128 if (!intel_sdvo_tv_create_property(intel_sdvo, intel_sdvo_connector, type))
2129 goto err;
2271 2130
2272 intel_sdvo_create_enhance_property(connector); 2131 if (!intel_sdvo_create_enhance_property(intel_sdvo, intel_sdvo_connector))
2132 goto err;
2273 2133
2274 return true; 2134 return true;
2135
2136err:
2137 intel_sdvo_destroy_enhance_property(connector);
2138 kfree(intel_sdvo_connector);
2139 return false;
2275} 2140}
2276 2141
2277static bool 2142static bool
2278intel_sdvo_analog_init(struct intel_encoder *intel_encoder, int device) 2143intel_sdvo_analog_init(struct intel_sdvo *intel_sdvo, int device)
2279{ 2144{
2280 struct drm_encoder *encoder = &intel_encoder->enc; 2145 struct drm_encoder *encoder = &intel_sdvo->base.enc;
2281 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
2282 struct drm_connector *connector; 2146 struct drm_connector *connector;
2283 struct intel_connector *intel_connector; 2147 struct intel_connector *intel_connector;
2284 struct intel_sdvo_connector *sdvo_connector; 2148 struct intel_sdvo_connector *intel_sdvo_connector;
2285 2149
2286 if (!intel_sdvo_connector_alloc(&intel_connector)) 2150 intel_sdvo_connector = kzalloc(sizeof(struct intel_sdvo_connector), GFP_KERNEL);
2287 return false; 2151 if (!intel_sdvo_connector)
2152 return false;
2288 2153
2154 intel_connector = &intel_sdvo_connector->base;
2289 connector = &intel_connector->base; 2155 connector = &intel_connector->base;
2290 connector->polled = DRM_CONNECTOR_POLL_CONNECT; 2156 connector->polled = DRM_CONNECTOR_POLL_CONNECT;
2291 encoder->encoder_type = DRM_MODE_ENCODER_DAC; 2157 encoder->encoder_type = DRM_MODE_ENCODER_DAC;
2292 connector->connector_type = DRM_MODE_CONNECTOR_VGA; 2158 connector->connector_type = DRM_MODE_CONNECTOR_VGA;
2293 sdvo_connector = intel_connector->dev_priv;
2294 2159
2295 if (device == 0) { 2160 if (device == 0) {
2296 sdvo_priv->controlled_output |= SDVO_OUTPUT_RGB0; 2161 intel_sdvo->controlled_output |= SDVO_OUTPUT_RGB0;
2297 sdvo_connector->output_flag = SDVO_OUTPUT_RGB0; 2162 intel_sdvo_connector->output_flag = SDVO_OUTPUT_RGB0;
2298 } else if (device == 1) { 2163 } else if (device == 1) {
2299 sdvo_priv->controlled_output |= SDVO_OUTPUT_RGB1; 2164 intel_sdvo->controlled_output |= SDVO_OUTPUT_RGB1;
2300 sdvo_connector->output_flag = SDVO_OUTPUT_RGB1; 2165 intel_sdvo_connector->output_flag = SDVO_OUTPUT_RGB1;
2301 } 2166 }
2302 2167
2303 intel_encoder->clone_mask = (1 << INTEL_SDVO_NON_TV_CLONE_BIT) | 2168 intel_sdvo->base.clone_mask = ((1 << INTEL_SDVO_NON_TV_CLONE_BIT) |
2304 (1 << INTEL_ANALOG_CLONE_BIT); 2169 (1 << INTEL_ANALOG_CLONE_BIT));
2305 2170
2306 intel_sdvo_connector_create(encoder, connector); 2171 intel_sdvo_connector_init(encoder, connector);
2307 return true; 2172 return true;
2308} 2173}
2309 2174
2310static bool 2175static bool
2311intel_sdvo_lvds_init(struct intel_encoder *intel_encoder, int device) 2176intel_sdvo_lvds_init(struct intel_sdvo *intel_sdvo, int device)
2312{ 2177{
2313 struct drm_encoder *encoder = &intel_encoder->enc; 2178 struct drm_encoder *encoder = &intel_sdvo->base.enc;
2314 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
2315 struct drm_connector *connector; 2179 struct drm_connector *connector;
2316 struct intel_connector *intel_connector; 2180 struct intel_connector *intel_connector;
2317 struct intel_sdvo_connector *sdvo_connector; 2181 struct intel_sdvo_connector *intel_sdvo_connector;
2318 2182
2319 if (!intel_sdvo_connector_alloc(&intel_connector)) 2183 intel_sdvo_connector = kzalloc(sizeof(struct intel_sdvo_connector), GFP_KERNEL);
2320 return false; 2184 if (!intel_sdvo_connector)
2185 return false;
2321 2186
2322 connector = &intel_connector->base; 2187 intel_connector = &intel_sdvo_connector->base;
2188 connector = &intel_connector->base;
2323 encoder->encoder_type = DRM_MODE_ENCODER_LVDS; 2189 encoder->encoder_type = DRM_MODE_ENCODER_LVDS;
2324 connector->connector_type = DRM_MODE_CONNECTOR_LVDS; 2190 connector->connector_type = DRM_MODE_CONNECTOR_LVDS;
2325 sdvo_connector = intel_connector->dev_priv;
2326
2327 sdvo_priv->is_lvds = true;
2328 2191
2329 if (device == 0) { 2192 if (device == 0) {
2330 sdvo_priv->controlled_output |= SDVO_OUTPUT_LVDS0; 2193 intel_sdvo->controlled_output |= SDVO_OUTPUT_LVDS0;
2331 sdvo_connector->output_flag = SDVO_OUTPUT_LVDS0; 2194 intel_sdvo_connector->output_flag = SDVO_OUTPUT_LVDS0;
2332 } else if (device == 1) { 2195 } else if (device == 1) {
2333 sdvo_priv->controlled_output |= SDVO_OUTPUT_LVDS1; 2196 intel_sdvo->controlled_output |= SDVO_OUTPUT_LVDS1;
2334 sdvo_connector->output_flag = SDVO_OUTPUT_LVDS1; 2197 intel_sdvo_connector->output_flag = SDVO_OUTPUT_LVDS1;
2335 } 2198 }
2336 2199
2337 intel_encoder->clone_mask = (1 << INTEL_ANALOG_CLONE_BIT) | 2200 intel_sdvo->base.clone_mask = ((1 << INTEL_ANALOG_CLONE_BIT) |
2338 (1 << INTEL_SDVO_LVDS_CLONE_BIT); 2201 (1 << INTEL_SDVO_LVDS_CLONE_BIT));
2339 2202
2340 intel_sdvo_connector_create(encoder, connector); 2203 intel_sdvo_connector_init(encoder, connector);
2341 intel_sdvo_create_enhance_property(connector); 2204 if (!intel_sdvo_create_enhance_property(intel_sdvo, intel_sdvo_connector))
2342 return true; 2205 goto err;
2206
2207 return true;
2208
2209err:
2210 intel_sdvo_destroy_enhance_property(connector);
2211 kfree(intel_sdvo_connector);
2212 return false;
2343} 2213}
2344 2214
2345static bool 2215static bool
2346intel_sdvo_output_setup(struct intel_encoder *intel_encoder, uint16_t flags) 2216intel_sdvo_output_setup(struct intel_sdvo *intel_sdvo, uint16_t flags)
2347{ 2217{
2348 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv; 2218 intel_sdvo->is_tv = false;
2349 2219 intel_sdvo->base.needs_tv_clock = false;
2350 sdvo_priv->is_tv = false; 2220 intel_sdvo->is_lvds = false;
2351 intel_encoder->needs_tv_clock = false;
2352 sdvo_priv->is_lvds = false;
2353 2221
2354 /* SDVO requires XXX1 function may not exist unless it has XXX0 function.*/ 2222 /* SDVO requires XXX1 function may not exist unless it has XXX0 function.*/
2355 2223
2356 if (flags & SDVO_OUTPUT_TMDS0) 2224 if (flags & SDVO_OUTPUT_TMDS0)
2357 if (!intel_sdvo_dvi_init(intel_encoder, 0)) 2225 if (!intel_sdvo_dvi_init(intel_sdvo, 0))
2358 return false; 2226 return false;
2359 2227
2360 if ((flags & SDVO_TMDS_MASK) == SDVO_TMDS_MASK) 2228 if ((flags & SDVO_TMDS_MASK) == SDVO_TMDS_MASK)
2361 if (!intel_sdvo_dvi_init(intel_encoder, 1)) 2229 if (!intel_sdvo_dvi_init(intel_sdvo, 1))
2362 return false; 2230 return false;
2363 2231
2364 /* TV has no XXX1 function block */ 2232 /* TV has no XXX1 function block */
2365 if (flags & SDVO_OUTPUT_SVID0) 2233 if (flags & SDVO_OUTPUT_SVID0)
2366 if (!intel_sdvo_tv_init(intel_encoder, SDVO_OUTPUT_SVID0)) 2234 if (!intel_sdvo_tv_init(intel_sdvo, SDVO_OUTPUT_SVID0))
2367 return false; 2235 return false;
2368 2236
2369 if (flags & SDVO_OUTPUT_CVBS0) 2237 if (flags & SDVO_OUTPUT_CVBS0)
2370 if (!intel_sdvo_tv_init(intel_encoder, SDVO_OUTPUT_CVBS0)) 2238 if (!intel_sdvo_tv_init(intel_sdvo, SDVO_OUTPUT_CVBS0))
2371 return false; 2239 return false;
2372 2240
2373 if (flags & SDVO_OUTPUT_RGB0) 2241 if (flags & SDVO_OUTPUT_RGB0)
2374 if (!intel_sdvo_analog_init(intel_encoder, 0)) 2242 if (!intel_sdvo_analog_init(intel_sdvo, 0))
2375 return false; 2243 return false;
2376 2244
2377 if ((flags & SDVO_RGB_MASK) == SDVO_RGB_MASK) 2245 if ((flags & SDVO_RGB_MASK) == SDVO_RGB_MASK)
2378 if (!intel_sdvo_analog_init(intel_encoder, 1)) 2246 if (!intel_sdvo_analog_init(intel_sdvo, 1))
2379 return false; 2247 return false;
2380 2248
2381 if (flags & SDVO_OUTPUT_LVDS0) 2249 if (flags & SDVO_OUTPUT_LVDS0)
2382 if (!intel_sdvo_lvds_init(intel_encoder, 0)) 2250 if (!intel_sdvo_lvds_init(intel_sdvo, 0))
2383 return false; 2251 return false;
2384 2252
2385 if ((flags & SDVO_LVDS_MASK) == SDVO_LVDS_MASK) 2253 if ((flags & SDVO_LVDS_MASK) == SDVO_LVDS_MASK)
2386 if (!intel_sdvo_lvds_init(intel_encoder, 1)) 2254 if (!intel_sdvo_lvds_init(intel_sdvo, 1))
2387 return false; 2255 return false;
2388 2256
2389 if ((flags & SDVO_OUTPUT_MASK) == 0) { 2257 if ((flags & SDVO_OUTPUT_MASK) == 0) {
2390 unsigned char bytes[2]; 2258 unsigned char bytes[2];
2391 2259
2392 sdvo_priv->controlled_output = 0; 2260 intel_sdvo->controlled_output = 0;
2393 memcpy(bytes, &sdvo_priv->caps.output_flags, 2); 2261 memcpy(bytes, &intel_sdvo->caps.output_flags, 2);
2394 DRM_DEBUG_KMS("%s: Unknown SDVO output type (0x%02x%02x)\n", 2262 DRM_DEBUG_KMS("%s: Unknown SDVO output type (0x%02x%02x)\n",
2395 SDVO_NAME(sdvo_priv), 2263 SDVO_NAME(intel_sdvo),
2396 bytes[0], bytes[1]); 2264 bytes[0], bytes[1]);
2397 return false; 2265 return false;
2398 } 2266 }
2399 intel_encoder->crtc_mask = (1 << 0) | (1 << 1); 2267 intel_sdvo->base.crtc_mask = (1 << 0) | (1 << 1);
2400 2268
2401 return true; 2269 return true;
2402} 2270}
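
In the output setup above, the second TMDS/RGB/LVDS block is only initialised when the whole two-bit mask is present, i.e. (flags & MASK) == MASK rather than a plain non-zero test. A tiny illustration of the difference, with made-up bit values:

    #include <assert.h>

    #define OUT_TMDS0 0x01
    #define OUT_TMDS1 0x02
    #define TMDS_MASK (OUT_TMDS0 | OUT_TMDS1)

    int main(void)
    {
        unsigned int flags = OUT_TMDS0;             /* only the first TMDS output */

        assert(flags & TMDS_MASK);                  /* true: at least one bit set */
        assert((flags & TMDS_MASK) != TMDS_MASK);   /* but both bits are not set */

        flags |= OUT_TMDS1;
        assert((flags & TMDS_MASK) == TMDS_MASK);   /* now both outputs exist */
        return 0;
    }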
2403 2271
2404static void intel_sdvo_tv_create_property(struct drm_connector *connector, int type) 2272static bool intel_sdvo_tv_create_property(struct intel_sdvo *intel_sdvo,
2273 struct intel_sdvo_connector *intel_sdvo_connector,
2274 int type)
2405{ 2275{
2406 struct drm_encoder *encoder = intel_attached_encoder(connector); 2276 struct drm_device *dev = intel_sdvo->base.enc.dev;
2407 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
2408 struct intel_sdvo_priv *sdvo_priv = intel_encoder->dev_priv;
2409 struct intel_connector *intel_connector = to_intel_connector(connector);
2410 struct intel_sdvo_connector *sdvo_connector = intel_connector->dev_priv;
2411 struct intel_sdvo_tv_format format; 2277 struct intel_sdvo_tv_format format;
2412 uint32_t format_map, i; 2278 uint32_t format_map, i;
2413 uint8_t status;
2414 2279
2415 intel_sdvo_set_target_output(intel_encoder, type); 2280 if (!intel_sdvo_set_target_output(intel_sdvo, type))
2281 return false;
2416 2282
2417 intel_sdvo_write_cmd(intel_encoder, 2283 if (!intel_sdvo_get_value(intel_sdvo,
2418 SDVO_CMD_GET_SUPPORTED_TV_FORMATS, NULL, 0); 2284 SDVO_CMD_GET_SUPPORTED_TV_FORMATS,
2419 status = intel_sdvo_read_response(intel_encoder, 2285 &format, sizeof(format)))
2420 &format, sizeof(format)); 2286 return false;
2421 if (status != SDVO_CMD_STATUS_SUCCESS)
2422 return;
2423 2287
2424 memcpy(&format_map, &format, sizeof(format) > sizeof(format_map) ? 2288 memcpy(&format_map, &format, min(sizeof(format_map), sizeof(format)));
2425 sizeof(format_map) : sizeof(format));
2426 2289
2427 if (format_map == 0) 2290 if (format_map == 0)
2428 return; 2291 return false;
2429 2292
2430 sdvo_connector->format_supported_num = 0; 2293 intel_sdvo_connector->format_supported_num = 0;
2431 for (i = 0 ; i < TV_FORMAT_NUM; i++) 2294 for (i = 0 ; i < TV_FORMAT_NUM; i++)
2432 if (format_map & (1 << i)) { 2295 if (format_map & (1 << i))
2433 sdvo_connector->tv_format_supported 2296 intel_sdvo_connector->tv_format_supported[intel_sdvo_connector->format_supported_num++] = i;
2434 [sdvo_connector->format_supported_num++] =
2435 tv_format_names[i];
2436 }
2437 2297
2438 2298
2439 sdvo_connector->tv_format_property = 2299 intel_sdvo_connector->tv_format =
2440 drm_property_create( 2300 drm_property_create(dev, DRM_MODE_PROP_ENUM,
2441 connector->dev, DRM_MODE_PROP_ENUM, 2301 "mode", intel_sdvo_connector->format_supported_num);
2442 "mode", sdvo_connector->format_supported_num); 2302 if (!intel_sdvo_connector->tv_format)
2303 return false;
2443 2304
2444 for (i = 0; i < sdvo_connector->format_supported_num; i++) 2305 for (i = 0; i < intel_sdvo_connector->format_supported_num; i++)
2445 drm_property_add_enum( 2306 drm_property_add_enum(
2446 sdvo_connector->tv_format_property, i, 2307 intel_sdvo_connector->tv_format, i,
2447 i, sdvo_connector->tv_format_supported[i]); 2308 i, tv_format_names[intel_sdvo_connector->tv_format_supported[i]]);
2448 2309
2449 sdvo_priv->tv_format_name = sdvo_connector->tv_format_supported[0]; 2310 intel_sdvo->tv_format_index = intel_sdvo_connector->tv_format_supported[0];
2450 drm_connector_attach_property( 2311 drm_connector_attach_property(&intel_sdvo_connector->base.base,
2451 connector, sdvo_connector->tv_format_property, 0); 2312 intel_sdvo_connector->tv_format, 0);
2313 return true;
2452 2314
2453} 2315}
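
The TV-format helper just above mirrors the packed capability reply into a 32-bit bitmask, bounding the memcpy with min() so neither side can be overrun. A self-contained sketch of that pattern (reply layout hypothetical):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    struct tv_format_reply { uint8_t bytes[6]; };   /* 6 bytes of format flag bits */

    static uint32_t reply_to_bitmask(const struct tv_format_reply *r)
    {
        uint32_t map = 0;
        size_t n = sizeof(map) < sizeof(*r) ? sizeof(map) : sizeof(*r);

        memcpy(&map, r, n);   /* copies at most sizeof(map) bytes */
        return map;
    }

    int main(void)
    {
        struct tv_format_reply r = { .bytes = { 0x03 } };   /* two formats flagged */
        printf("format map: 0x%08x\n", reply_to_bitmask(&r));
        return 0;
    }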
2454 2316
2455static void intel_sdvo_create_enhance_property(struct drm_connector *connector) 2317#define ENHANCEMENT(name, NAME) do { \
2318 if (enhancements.name) { \
2319 if (!intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_MAX_##NAME, &data_value, 4) || \
2320 !intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_##NAME, &response, 2)) \
2321 return false; \
2322 intel_sdvo_connector->max_##name = data_value[0]; \
2323 intel_sdvo_connector->cur_##name = response; \
2324 intel_sdvo_connector->name = \
2325 drm_property_create(dev, DRM_MODE_PROP_RANGE, #name, 2); \
2326 if (!intel_sdvo_connector->name) return false; \
2327 intel_sdvo_connector->name->values[0] = 0; \
2328 intel_sdvo_connector->name->values[1] = data_value[0]; \
2329 drm_connector_attach_property(connector, \
2330 intel_sdvo_connector->name, \
2331 intel_sdvo_connector->cur_##name); \
2332 DRM_DEBUG_KMS(#name ": max %d, default %d, current %d\n", \
2333 data_value[0], data_value[1], response); \
2334 } \
2335} while(0)
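
The ENHANCEMENT() macro above wraps its multi-statement body in do { ... } while (0) so each expansion behaves as a single statement and stays safe under an unbraced if/else. A minimal demonstration of why that wrapper matters (names hypothetical):

    #include <stdio.h>

    /* Without the do/while wrapper, only the first printf would be guarded by the if,
     * and the trailing semicolon would break the else branch. */
    #define REPORT(label, value) do {      \
            printf("%s: ", label);         \
            printf("%d\n", value);         \
        } while (0)

    int main(void)
    {
        int supported = 0;

        if (supported)
            REPORT("contrast", 42);   /* expands to one statement; else-safe */
        else
            printf("not supported\n");
        return 0;
    }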
2336
2337static bool
2338intel_sdvo_create_enhance_property_tv(struct intel_sdvo *intel_sdvo,
2339 struct intel_sdvo_connector *intel_sdvo_connector,
2340 struct intel_sdvo_enhancements_reply enhancements)
2456{ 2341{
2457 struct drm_encoder *encoder = intel_attached_encoder(connector); 2342 struct drm_device *dev = intel_sdvo->base.enc.dev;
2458 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 2343 struct drm_connector *connector = &intel_sdvo_connector->base.base;
2459 struct intel_connector *intel_connector = to_intel_connector(connector);
2460 struct intel_sdvo_connector *sdvo_priv = intel_connector->dev_priv;
2461 struct intel_sdvo_enhancements_reply sdvo_data;
2462 struct drm_device *dev = connector->dev;
2463 uint8_t status;
2464 uint16_t response, data_value[2]; 2344 uint16_t response, data_value[2];
2465 2345
2466 intel_sdvo_write_cmd(intel_encoder, SDVO_CMD_GET_SUPPORTED_ENHANCEMENTS, 2346 /* when horizontal overscan is supported, add the left/right property */
2467 NULL, 0); 2347 if (enhancements.overscan_h) {
2468 status = intel_sdvo_read_response(intel_encoder, &sdvo_data, 2348 if (!intel_sdvo_get_value(intel_sdvo,
2469 sizeof(sdvo_data)); 2349 SDVO_CMD_GET_MAX_OVERSCAN_H,
2470 if (status != SDVO_CMD_STATUS_SUCCESS) { 2350 &data_value, 4))
2471 DRM_DEBUG_KMS(" incorrect response is returned\n"); 2351 return false;
2472 return; 2352
2353 if (!intel_sdvo_get_value(intel_sdvo,
2354 SDVO_CMD_GET_OVERSCAN_H,
2355 &response, 2))
2356 return false;
2357
2358 intel_sdvo_connector->max_hscan = data_value[0];
2359 intel_sdvo_connector->left_margin = data_value[0] - response;
2360 intel_sdvo_connector->right_margin = intel_sdvo_connector->left_margin;
2361 intel_sdvo_connector->left =
2362 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2363 "left_margin", 2);
2364 if (!intel_sdvo_connector->left)
2365 return false;
2366
2367 intel_sdvo_connector->left->values[0] = 0;
2368 intel_sdvo_connector->left->values[1] = data_value[0];
2369 drm_connector_attach_property(connector,
2370 intel_sdvo_connector->left,
2371 intel_sdvo_connector->left_margin);
2372
2373 intel_sdvo_connector->right =
2374 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2375 "right_margin", 2);
2376 if (!intel_sdvo_connector->right)
2377 return false;
2378
2379 intel_sdvo_connector->right->values[0] = 0;
2380 intel_sdvo_connector->right->values[1] = data_value[0];
2381 drm_connector_attach_property(connector,
2382 intel_sdvo_connector->right,
2383 intel_sdvo_connector->right_margin);
2384 DRM_DEBUG_KMS("h_overscan: max %d, "
2385 "default %d, current %d\n",
2386 data_value[0], data_value[1], response);
2473 } 2387 }
2474 response = *((uint16_t *)&sdvo_data); 2388
2475 if (!response) { 2389 if (enhancements.overscan_v) {
2476 DRM_DEBUG_KMS("No enhancement is supported\n"); 2390 if (!intel_sdvo_get_value(intel_sdvo,
2477 return; 2391 SDVO_CMD_GET_MAX_OVERSCAN_V,
2392 &data_value, 4))
2393 return false;
2394
2395 if (!intel_sdvo_get_value(intel_sdvo,
2396 SDVO_CMD_GET_OVERSCAN_V,
2397 &response, 2))
2398 return false;
2399
2400 intel_sdvo_connector->max_vscan = data_value[0];
2401 intel_sdvo_connector->top_margin = data_value[0] - response;
2402 intel_sdvo_connector->bottom_margin = intel_sdvo_connector->top_margin;
2403 intel_sdvo_connector->top =
2404 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2405 "top_margin", 2);
2406 if (!intel_sdvo_connector->top)
2407 return false;
2408
2409 intel_sdvo_connector->top->values[0] = 0;
2410 intel_sdvo_connector->top->values[1] = data_value[0];
2411 drm_connector_attach_property(connector,
2412 intel_sdvo_connector->top,
2413 intel_sdvo_connector->top_margin);
2414
2415 intel_sdvo_connector->bottom =
2416 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2417 "bottom_margin", 2);
2418 if (!intel_sdvo_connector->bottom)
2419 return false;
2420
2421 intel_sdvo_connector->bottom->values[0] = 0;
2422 intel_sdvo_connector->bottom->values[1] = data_value[0];
2423 drm_connector_attach_property(connector,
2424 intel_sdvo_connector->bottom,
2425 intel_sdvo_connector->bottom_margin);
2426 DRM_DEBUG_KMS("v_overscan: max %d, "
2427 "default %d, current %d\n",
2428 data_value[0], data_value[1], response);
2478 } 2429 }
2479 if (IS_TV(sdvo_priv)) { 2430
2480 /* when horizontal overscan is supported, Add the left/right 2431 ENHANCEMENT(hpos, HPOS);
2481 * property 2432 ENHANCEMENT(vpos, VPOS);
2482 */ 2433 ENHANCEMENT(saturation, SATURATION);
2483 if (sdvo_data.overscan_h) { 2434 ENHANCEMENT(contrast, CONTRAST);
2484 intel_sdvo_write_cmd(intel_encoder, 2435 ENHANCEMENT(hue, HUE);
2485 SDVO_CMD_GET_MAX_OVERSCAN_H, NULL, 0); 2436 ENHANCEMENT(sharpness, SHARPNESS);
2486 status = intel_sdvo_read_response(intel_encoder, 2437 ENHANCEMENT(brightness, BRIGHTNESS);
2487 &data_value, 4); 2438 ENHANCEMENT(flicker_filter, FLICKER_FILTER);
2488 if (status != SDVO_CMD_STATUS_SUCCESS) { 2439 ENHANCEMENT(flicker_filter_adaptive, FLICKER_FILTER_ADAPTIVE);
2489 DRM_DEBUG_KMS("Incorrect SDVO max " 2440 ENHANCEMENT(flicker_filter_2d, FLICKER_FILTER_2D);
2490 "h_overscan\n"); 2441 ENHANCEMENT(tv_chroma_filter, TV_CHROMA_FILTER);
2491 return; 2442 ENHANCEMENT(tv_luma_filter, TV_LUMA_FILTER);
2492 } 2443
2493 intel_sdvo_write_cmd(intel_encoder, 2444 if (enhancements.dot_crawl) {
2494 SDVO_CMD_GET_OVERSCAN_H, NULL, 0); 2445 if (!intel_sdvo_get_value(intel_sdvo, SDVO_CMD_GET_DOT_CRAWL, &response, 2))
2495 status = intel_sdvo_read_response(intel_encoder, 2446 return false;
2496 &response, 2); 2447
2497 if (status != SDVO_CMD_STATUS_SUCCESS) { 2448 intel_sdvo_connector->max_dot_crawl = 1;
2498 DRM_DEBUG_KMS("Incorrect SDVO h_overscan\n"); 2449 intel_sdvo_connector->cur_dot_crawl = response & 0x1;
2499 return; 2450 intel_sdvo_connector->dot_crawl =
2500 } 2451 drm_property_create(dev, DRM_MODE_PROP_RANGE, "dot_crawl", 2);
2501 sdvo_priv->max_hscan = data_value[0]; 2452 if (!intel_sdvo_connector->dot_crawl)
2502 sdvo_priv->left_margin = data_value[0] - response; 2453 return false;
2503 sdvo_priv->right_margin = sdvo_priv->left_margin; 2454
2504 sdvo_priv->left_property = 2455 intel_sdvo_connector->dot_crawl->values[0] = 0;
2505 drm_property_create(dev, DRM_MODE_PROP_RANGE, 2456 intel_sdvo_connector->dot_crawl->values[1] = 1;
2506 "left_margin", 2); 2457 drm_connector_attach_property(connector,
2507 sdvo_priv->left_property->values[0] = 0; 2458 intel_sdvo_connector->dot_crawl,
2508 sdvo_priv->left_property->values[1] = data_value[0]; 2459 intel_sdvo_connector->cur_dot_crawl);
2509 drm_connector_attach_property(connector, 2460 DRM_DEBUG_KMS("dot crawl: current %d\n", response);
2510 sdvo_priv->left_property,
2511 sdvo_priv->left_margin);
2512 sdvo_priv->right_property =
2513 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2514 "right_margin", 2);
2515 sdvo_priv->right_property->values[0] = 0;
2516 sdvo_priv->right_property->values[1] = data_value[0];
2517 drm_connector_attach_property(connector,
2518 sdvo_priv->right_property,
2519 sdvo_priv->right_margin);
2520 DRM_DEBUG_KMS("h_overscan: max %d, "
2521 "default %d, current %d\n",
2522 data_value[0], data_value[1], response);
2523 }
2524 if (sdvo_data.overscan_v) {
2525 intel_sdvo_write_cmd(intel_encoder,
2526 SDVO_CMD_GET_MAX_OVERSCAN_V, NULL, 0);
2527 status = intel_sdvo_read_response(intel_encoder,
2528 &data_value, 4);
2529 if (status != SDVO_CMD_STATUS_SUCCESS) {
2530 DRM_DEBUG_KMS("Incorrect SDVO max "
2531 "v_overscan\n");
2532 return;
2533 }
2534 intel_sdvo_write_cmd(intel_encoder,
2535 SDVO_CMD_GET_OVERSCAN_V, NULL, 0);
2536 status = intel_sdvo_read_response(intel_encoder,
2537 &response, 2);
2538 if (status != SDVO_CMD_STATUS_SUCCESS) {
2539 DRM_DEBUG_KMS("Incorrect SDVO v_overscan\n");
2540 return;
2541 }
2542 sdvo_priv->max_vscan = data_value[0];
2543 sdvo_priv->top_margin = data_value[0] - response;
2544 sdvo_priv->bottom_margin = sdvo_priv->top_margin;
2545 sdvo_priv->top_property =
2546 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2547 "top_margin", 2);
2548 sdvo_priv->top_property->values[0] = 0;
2549 sdvo_priv->top_property->values[1] = data_value[0];
2550 drm_connector_attach_property(connector,
2551 sdvo_priv->top_property,
2552 sdvo_priv->top_margin);
2553 sdvo_priv->bottom_property =
2554 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2555 "bottom_margin", 2);
2556 sdvo_priv->bottom_property->values[0] = 0;
2557 sdvo_priv->bottom_property->values[1] = data_value[0];
2558 drm_connector_attach_property(connector,
2559 sdvo_priv->bottom_property,
2560 sdvo_priv->bottom_margin);
2561 DRM_DEBUG_KMS("v_overscan: max %d, "
2562 "default %d, current %d\n",
2563 data_value[0], data_value[1], response);
2564 }
2565 if (sdvo_data.position_h) {
2566 intel_sdvo_write_cmd(intel_encoder,
2567 SDVO_CMD_GET_MAX_POSITION_H, NULL, 0);
2568 status = intel_sdvo_read_response(intel_encoder,
2569 &data_value, 4);
2570 if (status != SDVO_CMD_STATUS_SUCCESS) {
2571 DRM_DEBUG_KMS("Incorrect SDVO Max h_pos\n");
2572 return;
2573 }
2574 intel_sdvo_write_cmd(intel_encoder,
2575 SDVO_CMD_GET_POSITION_H, NULL, 0);
2576 status = intel_sdvo_read_response(intel_encoder,
2577 &response, 2);
2578 if (status != SDVO_CMD_STATUS_SUCCESS) {
2579 DRM_DEBUG_KMS("Incorrect SDVO get h_postion\n");
2580 return;
2581 }
2582 sdvo_priv->max_hpos = data_value[0];
2583 sdvo_priv->cur_hpos = response;
2584 sdvo_priv->hpos_property =
2585 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2586 "hpos", 2);
2587 sdvo_priv->hpos_property->values[0] = 0;
2588 sdvo_priv->hpos_property->values[1] = data_value[0];
2589 drm_connector_attach_property(connector,
2590 sdvo_priv->hpos_property,
2591 sdvo_priv->cur_hpos);
2592 DRM_DEBUG_KMS("h_position: max %d, "
2593 "default %d, current %d\n",
2594 data_value[0], data_value[1], response);
2595 }
2596 if (sdvo_data.position_v) {
2597 intel_sdvo_write_cmd(intel_encoder,
2598 SDVO_CMD_GET_MAX_POSITION_V, NULL, 0);
2599 status = intel_sdvo_read_response(intel_encoder,
2600 &data_value, 4);
2601 if (status != SDVO_CMD_STATUS_SUCCESS) {
2602 DRM_DEBUG_KMS("Incorrect SDVO Max v_pos\n");
2603 return;
2604 }
2605 intel_sdvo_write_cmd(intel_encoder,
2606 SDVO_CMD_GET_POSITION_V, NULL, 0);
2607 status = intel_sdvo_read_response(intel_encoder,
2608 &response, 2);
2609 if (status != SDVO_CMD_STATUS_SUCCESS) {
2610 DRM_DEBUG_KMS("Incorrect SDVO get v_postion\n");
2611 return;
2612 }
2613 sdvo_priv->max_vpos = data_value[0];
2614 sdvo_priv->cur_vpos = response;
2615 sdvo_priv->vpos_property =
2616 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2617 "vpos", 2);
2618 sdvo_priv->vpos_property->values[0] = 0;
2619 sdvo_priv->vpos_property->values[1] = data_value[0];
2620 drm_connector_attach_property(connector,
2621 sdvo_priv->vpos_property,
2622 sdvo_priv->cur_vpos);
2623 DRM_DEBUG_KMS("v_position: max %d, "
2624 "default %d, current %d\n",
2625 data_value[0], data_value[1], response);
2626 }
2627 if (sdvo_data.saturation) {
2628 intel_sdvo_write_cmd(intel_encoder,
2629 SDVO_CMD_GET_MAX_SATURATION, NULL, 0);
2630 status = intel_sdvo_read_response(intel_encoder,
2631 &data_value, 4);
2632 if (status != SDVO_CMD_STATUS_SUCCESS) {
2633 DRM_DEBUG_KMS("Incorrect SDVO Max sat\n");
2634 return;
2635 }
2636 intel_sdvo_write_cmd(intel_encoder,
2637 SDVO_CMD_GET_SATURATION, NULL, 0);
2638 status = intel_sdvo_read_response(intel_encoder,
2639 &response, 2);
2640 if (status != SDVO_CMD_STATUS_SUCCESS) {
2641 DRM_DEBUG_KMS("Incorrect SDVO get sat\n");
2642 return;
2643 }
2644 sdvo_priv->max_saturation = data_value[0];
2645 sdvo_priv->cur_saturation = response;
2646 sdvo_priv->saturation_property =
2647 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2648 "saturation", 2);
2649 sdvo_priv->saturation_property->values[0] = 0;
2650 sdvo_priv->saturation_property->values[1] =
2651 data_value[0];
2652 drm_connector_attach_property(connector,
2653 sdvo_priv->saturation_property,
2654 sdvo_priv->cur_saturation);
2655 DRM_DEBUG_KMS("saturation: max %d, "
2656 "default %d, current %d\n",
2657 data_value[0], data_value[1], response);
2658 }
2659 if (sdvo_data.contrast) {
2660 intel_sdvo_write_cmd(intel_encoder,
2661 SDVO_CMD_GET_MAX_CONTRAST, NULL, 0);
2662 status = intel_sdvo_read_response(intel_encoder,
2663 &data_value, 4);
2664 if (status != SDVO_CMD_STATUS_SUCCESS) {
2665 DRM_DEBUG_KMS("Incorrect SDVO Max contrast\n");
2666 return;
2667 }
2668 intel_sdvo_write_cmd(intel_encoder,
2669 SDVO_CMD_GET_CONTRAST, NULL, 0);
2670 status = intel_sdvo_read_response(intel_encoder,
2671 &response, 2);
2672 if (status != SDVO_CMD_STATUS_SUCCESS) {
2673 DRM_DEBUG_KMS("Incorrect SDVO get contrast\n");
2674 return;
2675 }
2676 sdvo_priv->max_contrast = data_value[0];
2677 sdvo_priv->cur_contrast = response;
2678 sdvo_priv->contrast_property =
2679 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2680 "contrast", 2);
2681 sdvo_priv->contrast_property->values[0] = 0;
2682 sdvo_priv->contrast_property->values[1] = data_value[0];
2683 drm_connector_attach_property(connector,
2684 sdvo_priv->contrast_property,
2685 sdvo_priv->cur_contrast);
2686 DRM_DEBUG_KMS("contrast: max %d, "
2687 "default %d, current %d\n",
2688 data_value[0], data_value[1], response);
2689 }
2690 if (sdvo_data.hue) {
2691 intel_sdvo_write_cmd(intel_encoder,
2692 SDVO_CMD_GET_MAX_HUE, NULL, 0);
2693 status = intel_sdvo_read_response(intel_encoder,
2694 &data_value, 4);
2695 if (status != SDVO_CMD_STATUS_SUCCESS) {
2696 DRM_DEBUG_KMS("Incorrect SDVO Max hue\n");
2697 return;
2698 }
2699 intel_sdvo_write_cmd(intel_encoder,
2700 SDVO_CMD_GET_HUE, NULL, 0);
2701 status = intel_sdvo_read_response(intel_encoder,
2702 &response, 2);
2703 if (status != SDVO_CMD_STATUS_SUCCESS) {
2704 DRM_DEBUG_KMS("Incorrect SDVO get hue\n");
2705 return;
2706 }
2707 sdvo_priv->max_hue = data_value[0];
2708 sdvo_priv->cur_hue = response;
2709 sdvo_priv->hue_property =
2710 drm_property_create(dev, DRM_MODE_PROP_RANGE,
2711 "hue", 2);
2712 sdvo_priv->hue_property->values[0] = 0;
2713 sdvo_priv->hue_property->values[1] =
2714 data_value[0];
2715 drm_connector_attach_property(connector,
2716 sdvo_priv->hue_property,
2717 sdvo_priv->cur_hue);
2718 DRM_DEBUG_KMS("hue: max %d, default %d, current %d\n",
2719 data_value[0], data_value[1], response);
2720 }
2721 } 2461 }
2722 if (IS_TV(sdvo_priv) || IS_LVDS(sdvo_priv)) { 2462
2723 if (sdvo_data.brightness) { 2463 return true;
2724 intel_sdvo_write_cmd(intel_encoder, 2464}
2725 SDVO_CMD_GET_MAX_BRIGHTNESS, NULL, 0); 2465
2726 status = intel_sdvo_read_response(intel_encoder, 2466static bool
2727 &data_value, 4); 2467intel_sdvo_create_enhance_property_lvds(struct intel_sdvo *intel_sdvo,
2728 if (status != SDVO_CMD_STATUS_SUCCESS) { 2468 struct intel_sdvo_connector *intel_sdvo_connector,
2729 DRM_DEBUG_KMS("Incorrect SDVO Max bright\n"); 2469 struct intel_sdvo_enhancements_reply enhancements)
2730 return; 2470{
2731 } 2471 struct drm_device *dev = intel_sdvo->base.enc.dev;
2732 intel_sdvo_write_cmd(intel_encoder, 2472 struct drm_connector *connector = &intel_sdvo_connector->base.base;
2733 SDVO_CMD_GET_BRIGHTNESS, NULL, 0); 2473 uint16_t response, data_value[2];
2734 status = intel_sdvo_read_response(intel_encoder, 2474
2735 &response, 2); 2475 ENHANCEMENT(brightness, BRIGHTNESS);
2736 if (status != SDVO_CMD_STATUS_SUCCESS) { 2476
2737 DRM_DEBUG_KMS("Incorrect SDVO get brigh\n"); 2477 return true;
2738 return; 2478}
2739 } 2479#undef ENHANCEMENT
2740 sdvo_priv->max_brightness = data_value[0]; 2480
2741 sdvo_priv->cur_brightness = response; 2481static bool intel_sdvo_create_enhance_property(struct intel_sdvo *intel_sdvo,
2742 sdvo_priv->brightness_property = 2482 struct intel_sdvo_connector *intel_sdvo_connector)
2743 drm_property_create(dev, DRM_MODE_PROP_RANGE, 2483{
2744 "brightness", 2); 2484 union {
2745 sdvo_priv->brightness_property->values[0] = 0; 2485 struct intel_sdvo_enhancements_reply reply;
2746 sdvo_priv->brightness_property->values[1] = 2486 uint16_t response;
2747 data_value[0]; 2487 } enhancements;
2748 drm_connector_attach_property(connector, 2488
2749 sdvo_priv->brightness_property, 2489 if (!intel_sdvo_get_value(intel_sdvo,
2750 sdvo_priv->cur_brightness); 2490 SDVO_CMD_GET_SUPPORTED_ENHANCEMENTS,
2751 DRM_DEBUG_KMS("brightness: max %d, " 2491 &enhancements, sizeof(enhancements)))
2752 "default %d, current %d\n", 2492 return false;
2753 data_value[0], data_value[1], response); 2493
2754 } 2494 if (enhancements.response == 0) {
2495 DRM_DEBUG_KMS("No enhancement is supported\n");
2496 return true;
2755 } 2497 }
2756 return; 2498
2499 if (IS_TV(intel_sdvo_connector))
2500 return intel_sdvo_create_enhance_property_tv(intel_sdvo, intel_sdvo_connector, enhancements.reply);
2501 else if(IS_LVDS(intel_sdvo_connector))
2502 return intel_sdvo_create_enhance_property_lvds(intel_sdvo, intel_sdvo_connector, enhancements.reply);
2503 else
2504 return true;
2505
2757} 2506}
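
intel_sdvo_create_enhance_property() above reads the capability reply into a union so that "is any enhancement bit set?" becomes a plain integer compare, instead of the old cast through a uint16_t pointer. A standalone sketch of that union view (field names hypothetical):

    #include <stdint.h>
    #include <stdio.h>

    struct enh_reply {                  /* packed capability bits from the device */
        unsigned int overscan_h:1;
        unsigned int overscan_v:1;
        unsigned int brightness:1;
        unsigned int reserved:13;
    } __attribute__((packed));

    int main(void)
    {
        union {
            struct enh_reply reply;
            uint16_t response;          /* the same two bytes viewed as one integer */
        } e = { .reply = { .brightness = 1 } };

        if (e.response == 0)
            printf("no enhancement supported\n");
        else
            printf("at least one enhancement bit is set\n");
        return 0;
    }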
2758 2507
2759bool intel_sdvo_init(struct drm_device *dev, int sdvo_reg) 2508bool intel_sdvo_init(struct drm_device *dev, int sdvo_reg)
2760{ 2509{
2761 struct drm_i915_private *dev_priv = dev->dev_private; 2510 struct drm_i915_private *dev_priv = dev->dev_private;
2762 struct intel_encoder *intel_encoder; 2511 struct intel_encoder *intel_encoder;
2763 struct intel_sdvo_priv *sdvo_priv; 2512 struct intel_sdvo *intel_sdvo;
2764 u8 ch[0x40]; 2513 u8 ch[0x40];
2765 int i; 2514 int i;
2766 u32 i2c_reg, ddc_reg, analog_ddc_reg; 2515 u32 i2c_reg, ddc_reg, analog_ddc_reg;
2767 2516
2768 intel_encoder = kcalloc(sizeof(struct intel_encoder)+sizeof(struct intel_sdvo_priv), 1, GFP_KERNEL); 2517 intel_sdvo = kzalloc(sizeof(struct intel_sdvo), GFP_KERNEL);
2769 if (!intel_encoder) { 2518 if (!intel_sdvo)
2770 return false; 2519 return false;
2771 }
2772 2520
2773 sdvo_priv = (struct intel_sdvo_priv *)(intel_encoder + 1); 2521 intel_sdvo->sdvo_reg = sdvo_reg;
2774 sdvo_priv->sdvo_reg = sdvo_reg;
2775 2522
2776 intel_encoder->dev_priv = sdvo_priv; 2523 intel_encoder = &intel_sdvo->base;
2777 intel_encoder->type = INTEL_OUTPUT_SDVO; 2524 intel_encoder->type = INTEL_OUTPUT_SDVO;
2778 2525
2779 if (HAS_PCH_SPLIT(dev)) { 2526 if (HAS_PCH_SPLIT(dev)) {
@@ -2795,14 +2542,14 @@ bool intel_sdvo_init(struct drm_device *dev, int sdvo_reg)
2795 if (!intel_encoder->i2c_bus) 2542 if (!intel_encoder->i2c_bus)
2796 goto err_inteloutput; 2543 goto err_inteloutput;
2797 2544
2798 sdvo_priv->slave_addr = intel_sdvo_get_slave_addr(dev, sdvo_reg); 2545 intel_sdvo->slave_addr = intel_sdvo_get_slave_addr(dev, sdvo_reg);
2799 2546
2800 /* Save the bit-banging i2c functionality for use by the DDC wrapper */ 2547 /* Save the bit-banging i2c functionality for use by the DDC wrapper */
2801 intel_sdvo_i2c_bit_algo.functionality = intel_encoder->i2c_bus->algo->functionality; 2548 intel_sdvo_i2c_bit_algo.functionality = intel_encoder->i2c_bus->algo->functionality;
2802 2549
2803 /* Read the regs to test if we can talk to the device */ 2550 /* Read the regs to test if we can talk to the device */
2804 for (i = 0; i < 0x40; i++) { 2551 for (i = 0; i < 0x40; i++) {
2805 if (!intel_sdvo_read_byte(intel_encoder, i, &ch[i])) { 2552 if (!intel_sdvo_read_byte(intel_sdvo, i, &ch[i])) {
2806 DRM_DEBUG_KMS("No SDVO device found on SDVO%c\n", 2553 DRM_DEBUG_KMS("No SDVO device found on SDVO%c\n",
2807 IS_SDVOB(sdvo_reg) ? 'B' : 'C'); 2554 IS_SDVOB(sdvo_reg) ? 'B' : 'C');
2808 goto err_i2c; 2555 goto err_i2c;
@@ -2812,17 +2559,16 @@ bool intel_sdvo_init(struct drm_device *dev, int sdvo_reg)
2812 /* setup the DDC bus. */ 2559 /* setup the DDC bus. */
2813 if (IS_SDVOB(sdvo_reg)) { 2560 if (IS_SDVOB(sdvo_reg)) {
2814 intel_encoder->ddc_bus = intel_i2c_create(dev, ddc_reg, "SDVOB DDC BUS"); 2561 intel_encoder->ddc_bus = intel_i2c_create(dev, ddc_reg, "SDVOB DDC BUS");
2815 sdvo_priv->analog_ddc_bus = intel_i2c_create(dev, analog_ddc_reg, 2562 intel_sdvo->analog_ddc_bus = intel_i2c_create(dev, analog_ddc_reg,
2816 "SDVOB/VGA DDC BUS"); 2563 "SDVOB/VGA DDC BUS");
2817 dev_priv->hotplug_supported_mask |= SDVOB_HOTPLUG_INT_STATUS; 2564 dev_priv->hotplug_supported_mask |= SDVOB_HOTPLUG_INT_STATUS;
2818 } else { 2565 } else {
2819 intel_encoder->ddc_bus = intel_i2c_create(dev, ddc_reg, "SDVOC DDC BUS"); 2566 intel_encoder->ddc_bus = intel_i2c_create(dev, ddc_reg, "SDVOC DDC BUS");
2820 sdvo_priv->analog_ddc_bus = intel_i2c_create(dev, analog_ddc_reg, 2567 intel_sdvo->analog_ddc_bus = intel_i2c_create(dev, analog_ddc_reg,
2821 "SDVOC/VGA DDC BUS"); 2568 "SDVOC/VGA DDC BUS");
2822 dev_priv->hotplug_supported_mask |= SDVOC_HOTPLUG_INT_STATUS; 2569 dev_priv->hotplug_supported_mask |= SDVOC_HOTPLUG_INT_STATUS;
2823 } 2570 }
2824 2571 if (intel_encoder->ddc_bus == NULL || intel_sdvo->analog_ddc_bus == NULL)
2825 if (intel_encoder->ddc_bus == NULL)
2826 goto err_i2c; 2572 goto err_i2c;
2827 2573
2828 /* Wrap with our custom algo which switches to DDC mode */ 2574 /* Wrap with our custom algo which switches to DDC mode */
@@ -2833,53 +2579,56 @@ bool intel_sdvo_init(struct drm_device *dev, int sdvo_reg)
2833 drm_encoder_helper_add(&intel_encoder->enc, &intel_sdvo_helper_funcs); 2579 drm_encoder_helper_add(&intel_encoder->enc, &intel_sdvo_helper_funcs);
2834 2580
2835 /* In default case sdvo lvds is false */ 2581 /* In default case sdvo lvds is false */
2836 intel_sdvo_get_capabilities(intel_encoder, &sdvo_priv->caps); 2582 if (!intel_sdvo_get_capabilities(intel_sdvo, &intel_sdvo->caps))
2583 goto err_enc;
2837 2584
2838 if (intel_sdvo_output_setup(intel_encoder, 2585 if (intel_sdvo_output_setup(intel_sdvo,
2839 sdvo_priv->caps.output_flags) != true) { 2586 intel_sdvo->caps.output_flags) != true) {
2840 DRM_DEBUG_KMS("SDVO output failed to setup on SDVO%c\n", 2587 DRM_DEBUG_KMS("SDVO output failed to setup on SDVO%c\n",
2841 IS_SDVOB(sdvo_reg) ? 'B' : 'C'); 2588 IS_SDVOB(sdvo_reg) ? 'B' : 'C');
2842 goto err_i2c; 2589 goto err_enc;
2843 } 2590 }
2844 2591
2845 intel_sdvo_select_ddc_bus(dev_priv, sdvo_priv, sdvo_reg); 2592 intel_sdvo_select_ddc_bus(dev_priv, intel_sdvo, sdvo_reg);
2846 2593
2847 /* Set the input timing to the screen. Assume always input 0. */ 2594 /* Set the input timing to the screen. Assume always input 0. */
2848 intel_sdvo_set_target_input(intel_encoder, true, false); 2595 if (!intel_sdvo_set_target_input(intel_sdvo))
2849 2596 goto err_enc;
2850 intel_sdvo_get_input_pixel_clock_range(intel_encoder,
2851 &sdvo_priv->pixel_clock_min,
2852 &sdvo_priv->pixel_clock_max);
2853 2597
2598 if (!intel_sdvo_get_input_pixel_clock_range(intel_sdvo,
2599 &intel_sdvo->pixel_clock_min,
2600 &intel_sdvo->pixel_clock_max))
2601 goto err_enc;
2854 2602
2855 DRM_DEBUG_KMS("%s device VID/DID: %02X:%02X.%02X, " 2603 DRM_DEBUG_KMS("%s device VID/DID: %02X:%02X.%02X, "
2856 "clock range %dMHz - %dMHz, " 2604 "clock range %dMHz - %dMHz, "
2857 "input 1: %c, input 2: %c, " 2605 "input 1: %c, input 2: %c, "
2858 "output 1: %c, output 2: %c\n", 2606 "output 1: %c, output 2: %c\n",
2859 SDVO_NAME(sdvo_priv), 2607 SDVO_NAME(intel_sdvo),
2860 sdvo_priv->caps.vendor_id, sdvo_priv->caps.device_id, 2608 intel_sdvo->caps.vendor_id, intel_sdvo->caps.device_id,
2861 sdvo_priv->caps.device_rev_id, 2609 intel_sdvo->caps.device_rev_id,
2862 sdvo_priv->pixel_clock_min / 1000, 2610 intel_sdvo->pixel_clock_min / 1000,
2863 sdvo_priv->pixel_clock_max / 1000, 2611 intel_sdvo->pixel_clock_max / 1000,
2864 (sdvo_priv->caps.sdvo_inputs_mask & 0x1) ? 'Y' : 'N', 2612 (intel_sdvo->caps.sdvo_inputs_mask & 0x1) ? 'Y' : 'N',
2865 (sdvo_priv->caps.sdvo_inputs_mask & 0x2) ? 'Y' : 'N', 2613 (intel_sdvo->caps.sdvo_inputs_mask & 0x2) ? 'Y' : 'N',
2866 /* check currently supported outputs */ 2614 /* check currently supported outputs */
2867 sdvo_priv->caps.output_flags & 2615 intel_sdvo->caps.output_flags &
2868 (SDVO_OUTPUT_TMDS0 | SDVO_OUTPUT_RGB0) ? 'Y' : 'N', 2616 (SDVO_OUTPUT_TMDS0 | SDVO_OUTPUT_RGB0) ? 'Y' : 'N',
2869 sdvo_priv->caps.output_flags & 2617 intel_sdvo->caps.output_flags &
2870 (SDVO_OUTPUT_TMDS1 | SDVO_OUTPUT_RGB1) ? 'Y' : 'N'); 2618 (SDVO_OUTPUT_TMDS1 | SDVO_OUTPUT_RGB1) ? 'Y' : 'N');
2871
2872 return true; 2619 return true;
2873 2620
2621err_enc:
2622 drm_encoder_cleanup(&intel_encoder->enc);
2874err_i2c: 2623err_i2c:
2875 if (sdvo_priv->analog_ddc_bus != NULL) 2624 if (intel_sdvo->analog_ddc_bus != NULL)
2876 intel_i2c_destroy(sdvo_priv->analog_ddc_bus); 2625 intel_i2c_destroy(intel_sdvo->analog_ddc_bus);
2877 if (intel_encoder->ddc_bus != NULL) 2626 if (intel_encoder->ddc_bus != NULL)
2878 intel_i2c_destroy(intel_encoder->ddc_bus); 2627 intel_i2c_destroy(intel_encoder->ddc_bus);
2879 if (intel_encoder->i2c_bus != NULL) 2628 if (intel_encoder->i2c_bus != NULL)
2880 intel_i2c_destroy(intel_encoder->i2c_bus); 2629 intel_i2c_destroy(intel_encoder->i2c_bus);
2881err_inteloutput: 2630err_inteloutput:
2882 kfree(intel_encoder); 2631 kfree(intel_sdvo);
2883 2632
2884 return false; 2633 return false;
2885} 2634}
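
intel_sdvo_init() above unwinds partial initialisation through layered labels (err_enc, err_i2c, err_inteloutput), each releasing only what was already set up before falling through to the next. A compact sketch of that idiom, with hypothetical resources:

    #include <stdlib.h>

    struct dev { void *i2c, *ddc, *enc; };

    static void *acquire(void)   { return malloc(1); }
    static void release(void *p) { free(p); }

    /* Each failure jumps to the label that tears down only what already exists. */
    static int dev_init(struct dev *d)
    {
        if (!(d->i2c = acquire()))
            goto err_none;
        if (!(d->ddc = acquire()))
            goto err_i2c;
        if (!(d->enc = acquire()))
            goto err_ddc;
        return 0;

    err_ddc:
        release(d->ddc);
    err_i2c:
        release(d->i2c);
    err_none:
        return -1;
    }

    int main(void)
    {
        struct dev d = { 0 };

        if (dev_init(&d))
            return 1;
        release(d.enc);
        release(d.ddc);
        release(d.i2c);
        return 0;
    }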
diff --git a/drivers/gpu/drm/i915/intel_sdvo_regs.h b/drivers/gpu/drm/i915/intel_sdvo_regs.h
index ba5cdf8ae40b..a386b022e538 100644
--- a/drivers/gpu/drm/i915/intel_sdvo_regs.h
+++ b/drivers/gpu/drm/i915/intel_sdvo_regs.h
@@ -312,7 +312,7 @@ struct intel_sdvo_set_target_input_args {
312# define SDVO_CLOCK_RATE_MULT_4X (1 << 3) 312# define SDVO_CLOCK_RATE_MULT_4X (1 << 3)
313 313
314#define SDVO_CMD_GET_SUPPORTED_TV_FORMATS 0x27 314#define SDVO_CMD_GET_SUPPORTED_TV_FORMATS 0x27
315/** 5 bytes of bit flags for TV formats shared by all TV format functions */ 315/** 6 bytes of bit flags for TV formats shared by all TV format functions */
316struct intel_sdvo_tv_format { 316struct intel_sdvo_tv_format {
317 unsigned int ntsc_m:1; 317 unsigned int ntsc_m:1;
318 unsigned int ntsc_j:1; 318 unsigned int ntsc_j:1;
@@ -596,32 +596,32 @@ struct intel_sdvo_enhancements_reply {
596 unsigned int overscan_h:1; 596 unsigned int overscan_h:1;
597 597
598 unsigned int overscan_v:1; 598 unsigned int overscan_v:1;
599 unsigned int position_h:1; 599 unsigned int hpos:1;
600 unsigned int position_v:1; 600 unsigned int vpos:1;
601 unsigned int sharpness:1; 601 unsigned int sharpness:1;
602 unsigned int dot_crawl:1; 602 unsigned int dot_crawl:1;
603 unsigned int dither:1; 603 unsigned int dither:1;
604 unsigned int max_tv_chroma_filter:1; 604 unsigned int tv_chroma_filter:1;
605 unsigned int max_tv_luma_filter:1; 605 unsigned int tv_luma_filter:1;
606} __attribute__((packed)); 606} __attribute__((packed));
607 607
608/* Picture enhancement limits below are dependent on the current TV format, 608/* Picture enhancement limits below are dependent on the current TV format,
609 * and thus need to be queried and set after it. 609 * and thus need to be queried and set after it.
610 */ 610 */
611#define SDVO_CMD_GET_MAX_FLICKER_FITER 0x4d 611#define SDVO_CMD_GET_MAX_FLICKER_FILTER 0x4d
612#define SDVO_CMD_GET_MAX_ADAPTIVE_FLICKER_FITER 0x7b 612#define SDVO_CMD_GET_MAX_FLICKER_FILTER_ADAPTIVE 0x7b
613#define SDVO_CMD_GET_MAX_2D_FLICKER_FITER 0x52 613#define SDVO_CMD_GET_MAX_FLICKER_FILTER_2D 0x52
614#define SDVO_CMD_GET_MAX_SATURATION 0x55 614#define SDVO_CMD_GET_MAX_SATURATION 0x55
615#define SDVO_CMD_GET_MAX_HUE 0x58 615#define SDVO_CMD_GET_MAX_HUE 0x58
616#define SDVO_CMD_GET_MAX_BRIGHTNESS 0x5b 616#define SDVO_CMD_GET_MAX_BRIGHTNESS 0x5b
617#define SDVO_CMD_GET_MAX_CONTRAST 0x5e 617#define SDVO_CMD_GET_MAX_CONTRAST 0x5e
618#define SDVO_CMD_GET_MAX_OVERSCAN_H 0x61 618#define SDVO_CMD_GET_MAX_OVERSCAN_H 0x61
619#define SDVO_CMD_GET_MAX_OVERSCAN_V 0x64 619#define SDVO_CMD_GET_MAX_OVERSCAN_V 0x64
620#define SDVO_CMD_GET_MAX_POSITION_H 0x67 620#define SDVO_CMD_GET_MAX_HPOS 0x67
621#define SDVO_CMD_GET_MAX_POSITION_V 0x6a 621#define SDVO_CMD_GET_MAX_VPOS 0x6a
622#define SDVO_CMD_GET_MAX_SHARPNESS_V 0x6d 622#define SDVO_CMD_GET_MAX_SHARPNESS 0x6d
623#define SDVO_CMD_GET_MAX_TV_CHROMA 0x74 623#define SDVO_CMD_GET_MAX_TV_CHROMA_FILTER 0x74
624#define SDVO_CMD_GET_MAX_TV_LUMA 0x77 624#define SDVO_CMD_GET_MAX_TV_LUMA_FILTER 0x77
625struct intel_sdvo_enhancement_limits_reply { 625struct intel_sdvo_enhancement_limits_reply {
626 u16 max_value; 626 u16 max_value;
627 u16 default_value; 627 u16 default_value;
@@ -638,10 +638,10 @@ struct intel_sdvo_enhancement_limits_reply {
638 638
639#define SDVO_CMD_GET_FLICKER_FILTER 0x4e 639#define SDVO_CMD_GET_FLICKER_FILTER 0x4e
640#define SDVO_CMD_SET_FLICKER_FILTER 0x4f 640#define SDVO_CMD_SET_FLICKER_FILTER 0x4f
641#define SDVO_CMD_GET_ADAPTIVE_FLICKER_FITER 0x50 641#define SDVO_CMD_GET_FLICKER_FILTER_ADAPTIVE 0x50
642#define SDVO_CMD_SET_ADAPTIVE_FLICKER_FITER 0x51 642#define SDVO_CMD_SET_FLICKER_FILTER_ADAPTIVE 0x51
643#define SDVO_CMD_GET_2D_FLICKER_FITER 0x53 643#define SDVO_CMD_GET_FLICKER_FILTER_2D 0x53
644#define SDVO_CMD_SET_2D_FLICKER_FITER 0x54 644#define SDVO_CMD_SET_FLICKER_FILTER_2D 0x54
645#define SDVO_CMD_GET_SATURATION 0x56 645#define SDVO_CMD_GET_SATURATION 0x56
646#define SDVO_CMD_SET_SATURATION 0x57 646#define SDVO_CMD_SET_SATURATION 0x57
647#define SDVO_CMD_GET_HUE 0x59 647#define SDVO_CMD_GET_HUE 0x59
@@ -654,16 +654,16 @@ struct intel_sdvo_enhancement_limits_reply {
654#define SDVO_CMD_SET_OVERSCAN_H 0x63 654#define SDVO_CMD_SET_OVERSCAN_H 0x63
655#define SDVO_CMD_GET_OVERSCAN_V 0x65 655#define SDVO_CMD_GET_OVERSCAN_V 0x65
656#define SDVO_CMD_SET_OVERSCAN_V 0x66 656#define SDVO_CMD_SET_OVERSCAN_V 0x66
657#define SDVO_CMD_GET_POSITION_H 0x68 657#define SDVO_CMD_GET_HPOS 0x68
658#define SDVO_CMD_SET_POSITION_H 0x69 658#define SDVO_CMD_SET_HPOS 0x69
659#define SDVO_CMD_GET_POSITION_V 0x6b 659#define SDVO_CMD_GET_VPOS 0x6b
660#define SDVO_CMD_SET_POSITION_V 0x6c 660#define SDVO_CMD_SET_VPOS 0x6c
661#define SDVO_CMD_GET_SHARPNESS 0x6e 661#define SDVO_CMD_GET_SHARPNESS 0x6e
662#define SDVO_CMD_SET_SHARPNESS 0x6f 662#define SDVO_CMD_SET_SHARPNESS 0x6f
663#define SDVO_CMD_GET_TV_CHROMA 0x75 663#define SDVO_CMD_GET_TV_CHROMA_FILTER 0x75
664#define SDVO_CMD_SET_TV_CHROMA 0x76 664#define SDVO_CMD_SET_TV_CHROMA_FILTER 0x76
665#define SDVO_CMD_GET_TV_LUMA 0x78 665#define SDVO_CMD_GET_TV_LUMA_FILTER 0x78
666#define SDVO_CMD_SET_TV_LUMA 0x79 666#define SDVO_CMD_SET_TV_LUMA_FILTER 0x79
667struct intel_sdvo_enhancements_arg { 667struct intel_sdvo_enhancements_arg {
668 u16 value; 668 u16 value;
669}__attribute__((packed)); 669}__attribute__((packed));
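
[Editor's note: the renamed capability bits above live in __attribute__((packed)) bit-field structs that are overlaid on the raw reply bytes returned by the SDVO device. The sketch below shows that decoding pattern in isolation; it mirrors only a fragment of intel_sdvo_enhancements_reply, the reply byte is invented for the demo, and it assumes the compiler's bit-field layout matches the device's bit order, as the driver's target ABI does.]

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* One-bit fields, packed, so eight capability flags fit in one reply byte. */
struct enhancements_reply_fragment {
	unsigned int flicker_filter:1;
	unsigned int flicker_filter_adaptive:1;
	unsigned int flicker_filter_2d:1;
	unsigned int saturation:1;
	unsigned int hue:1;
	unsigned int brightness:1;
	unsigned int contrast:1;
	unsigned int overscan_h:1;
} __attribute__((packed));

int main(void)
{
	uint8_t raw = 0xA5;	/* pretend reply byte read from the device */
	struct enhancements_reply_fragment caps;

	memcpy(&caps, &raw, sizeof(caps));
	printf("reply fragment is %zu byte\n", sizeof(caps));
	printf("saturation supported: %c\n", caps.saturation ? 'Y' : 'N');
	printf("brightness supported: %c\n", caps.brightness ? 'Y' : 'N');
	return 0;
}
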
diff --git a/drivers/gpu/drm/i915/intel_tv.c b/drivers/gpu/drm/i915/intel_tv.c
index cc3726a4a1cb..d2029efee982 100644
--- a/drivers/gpu/drm/i915/intel_tv.c
+++ b/drivers/gpu/drm/i915/intel_tv.c
@@ -44,7 +44,9 @@ enum tv_margin {
44}; 44};
45 45
46/** Private structure for the integrated TV support */ 46/** Private structure for the integrated TV support */
47struct intel_tv_priv { 47struct intel_tv {
48 struct intel_encoder base;
49
48 int type; 50 int type;
49 char *tv_format; 51 char *tv_format;
50 int margin[4]; 52 int margin[4];
@@ -896,6 +898,11 @@ static const struct tv_mode tv_modes[] = {
896 }, 898 },
897}; 899};
898 900
901static struct intel_tv *enc_to_intel_tv(struct drm_encoder *encoder)
902{
903 return container_of(enc_to_intel_encoder(encoder), struct intel_tv, base);
904}
905
899static void 906static void
900intel_tv_dpms(struct drm_encoder *encoder, int mode) 907intel_tv_dpms(struct drm_encoder *encoder, int mode)
901{ 908{
@@ -929,19 +936,17 @@ intel_tv_mode_lookup (char *tv_format)
929} 936}
930 937
931static const struct tv_mode * 938static const struct tv_mode *
932intel_tv_mode_find (struct intel_encoder *intel_encoder) 939intel_tv_mode_find (struct intel_tv *intel_tv)
933{ 940{
934 struct intel_tv_priv *tv_priv = intel_encoder->dev_priv; 941 return intel_tv_mode_lookup(intel_tv->tv_format);
935
936 return intel_tv_mode_lookup(tv_priv->tv_format);
937} 942}
938 943
939static enum drm_mode_status 944static enum drm_mode_status
940intel_tv_mode_valid(struct drm_connector *connector, struct drm_display_mode *mode) 945intel_tv_mode_valid(struct drm_connector *connector, struct drm_display_mode *mode)
941{ 946{
942 struct drm_encoder *encoder = intel_attached_encoder(connector); 947 struct drm_encoder *encoder = intel_attached_encoder(connector);
943 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 948 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
944 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_encoder); 949 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_tv);
945 950
946 /* Ensure TV refresh is close to desired refresh */ 951 /* Ensure TV refresh is close to desired refresh */
947 if (tv_mode && abs(tv_mode->refresh - drm_mode_vrefresh(mode) * 1000) 952 if (tv_mode && abs(tv_mode->refresh - drm_mode_vrefresh(mode) * 1000)
@@ -957,8 +962,8 @@ intel_tv_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
957{ 962{
958 struct drm_device *dev = encoder->dev; 963 struct drm_device *dev = encoder->dev;
959 struct drm_mode_config *drm_config = &dev->mode_config; 964 struct drm_mode_config *drm_config = &dev->mode_config;
960 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 965 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
961 const struct tv_mode *tv_mode = intel_tv_mode_find (intel_encoder); 966 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_tv);
962 struct drm_encoder *other_encoder; 967 struct drm_encoder *other_encoder;
963 968
964 if (!tv_mode) 969 if (!tv_mode)
@@ -983,9 +988,8 @@ intel_tv_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
983 struct drm_i915_private *dev_priv = dev->dev_private; 988 struct drm_i915_private *dev_priv = dev->dev_private;
984 struct drm_crtc *crtc = encoder->crtc; 989 struct drm_crtc *crtc = encoder->crtc;
985 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 990 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
986 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 991 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
987 struct intel_tv_priv *tv_priv = intel_encoder->dev_priv; 992 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_tv);
988 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_encoder);
989 u32 tv_ctl; 993 u32 tv_ctl;
990 u32 hctl1, hctl2, hctl3; 994 u32 hctl1, hctl2, hctl3;
991 u32 vctl1, vctl2, vctl3, vctl4, vctl5, vctl6, vctl7; 995 u32 vctl1, vctl2, vctl3, vctl4, vctl5, vctl6, vctl7;
@@ -1001,7 +1005,7 @@ intel_tv_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1001 tv_ctl = I915_READ(TV_CTL); 1005 tv_ctl = I915_READ(TV_CTL);
1002 tv_ctl &= TV_CTL_SAVE; 1006 tv_ctl &= TV_CTL_SAVE;
1003 1007
1004 switch (tv_priv->type) { 1008 switch (intel_tv->type) {
1005 default: 1009 default:
1006 case DRM_MODE_CONNECTOR_Unknown: 1010 case DRM_MODE_CONNECTOR_Unknown:
1007 case DRM_MODE_CONNECTOR_Composite: 1011 case DRM_MODE_CONNECTOR_Composite:
@@ -1154,11 +1158,11 @@ intel_tv_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1154 1158
1155 /* Wait for vblank for the disable to take effect */ 1159 /* Wait for vblank for the disable to take effect */
1156 if (!IS_I9XX(dev)) 1160 if (!IS_I9XX(dev))
1157 intel_wait_for_vblank(dev); 1161 intel_wait_for_vblank(dev, intel_crtc->pipe);
1158 1162
1159 I915_WRITE(pipeconf_reg, pipeconf & ~PIPEACONF_ENABLE); 1163 I915_WRITE(pipeconf_reg, pipeconf & ~PIPEACONF_ENABLE);
1160 /* Wait for vblank for the disable to take effect. */ 1164 /* Wait for vblank for the disable to take effect. */
1161 intel_wait_for_vblank(dev); 1165 intel_wait_for_vblank(dev, intel_crtc->pipe);
1162 1166
1163 /* Filter ctl must be set before TV_WIN_SIZE */ 1167 /* Filter ctl must be set before TV_WIN_SIZE */
1164 I915_WRITE(TV_FILTER_CTL_1, TV_AUTO_SCALE); 1168 I915_WRITE(TV_FILTER_CTL_1, TV_AUTO_SCALE);
@@ -1168,12 +1172,12 @@ intel_tv_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1168 else 1172 else
1169 ysize = 2*tv_mode->nbr_end + 1; 1173 ysize = 2*tv_mode->nbr_end + 1;
1170 1174
1171 xpos += tv_priv->margin[TV_MARGIN_LEFT]; 1175 xpos += intel_tv->margin[TV_MARGIN_LEFT];
1172 ypos += tv_priv->margin[TV_MARGIN_TOP]; 1176 ypos += intel_tv->margin[TV_MARGIN_TOP];
1173 xsize -= (tv_priv->margin[TV_MARGIN_LEFT] + 1177 xsize -= (intel_tv->margin[TV_MARGIN_LEFT] +
1174 tv_priv->margin[TV_MARGIN_RIGHT]); 1178 intel_tv->margin[TV_MARGIN_RIGHT]);
1175 ysize -= (tv_priv->margin[TV_MARGIN_TOP] + 1179 ysize -= (intel_tv->margin[TV_MARGIN_TOP] +
1176 tv_priv->margin[TV_MARGIN_BOTTOM]); 1180 intel_tv->margin[TV_MARGIN_BOTTOM]);
1177 I915_WRITE(TV_WIN_POS, (xpos<<16)|ypos); 1181 I915_WRITE(TV_WIN_POS, (xpos<<16)|ypos);
1178 I915_WRITE(TV_WIN_SIZE, (xsize<<16)|ysize); 1182 I915_WRITE(TV_WIN_SIZE, (xsize<<16)|ysize);
1179 1183
@@ -1222,11 +1226,12 @@ static const struct drm_display_mode reported_modes[] = {
1222 * \return false if TV is disconnected. 1226 * \return false if TV is disconnected.
1223 */ 1227 */
1224static int 1228static int
1225intel_tv_detect_type (struct drm_crtc *crtc, struct intel_encoder *intel_encoder) 1229intel_tv_detect_type (struct intel_tv *intel_tv)
1226{ 1230{
1227 struct drm_encoder *encoder = &intel_encoder->enc; 1231 struct drm_encoder *encoder = &intel_tv->base.enc;
1228 struct drm_device *dev = encoder->dev; 1232 struct drm_device *dev = encoder->dev;
1229 struct drm_i915_private *dev_priv = dev->dev_private; 1233 struct drm_i915_private *dev_priv = dev->dev_private;
1234 struct intel_crtc *intel_crtc = to_intel_crtc(encoder->crtc);
1230 unsigned long irqflags; 1235 unsigned long irqflags;
1231 u32 tv_ctl, save_tv_ctl; 1236 u32 tv_ctl, save_tv_ctl;
1232 u32 tv_dac, save_tv_dac; 1237 u32 tv_dac, save_tv_dac;
@@ -1263,11 +1268,11 @@ intel_tv_detect_type (struct drm_crtc *crtc, struct intel_encoder *intel_encoder
1263 DAC_C_0_7_V); 1268 DAC_C_0_7_V);
1264 I915_WRITE(TV_CTL, tv_ctl); 1269 I915_WRITE(TV_CTL, tv_ctl);
1265 I915_WRITE(TV_DAC, tv_dac); 1270 I915_WRITE(TV_DAC, tv_dac);
1266 intel_wait_for_vblank(dev); 1271 intel_wait_for_vblank(dev, intel_crtc->pipe);
1267 tv_dac = I915_READ(TV_DAC); 1272 tv_dac = I915_READ(TV_DAC);
1268 I915_WRITE(TV_DAC, save_tv_dac); 1273 I915_WRITE(TV_DAC, save_tv_dac);
1269 I915_WRITE(TV_CTL, save_tv_ctl); 1274 I915_WRITE(TV_CTL, save_tv_ctl);
1270 intel_wait_for_vblank(dev); 1275 intel_wait_for_vblank(dev, intel_crtc->pipe);
1271 /* 1276 /*
1272 * A B C 1277 * A B C
1273 * 0 1 1 Composite 1278 * 0 1 1 Composite
@@ -1304,12 +1309,11 @@ intel_tv_detect_type (struct drm_crtc *crtc, struct intel_encoder *intel_encoder
1304static void intel_tv_find_better_format(struct drm_connector *connector) 1309static void intel_tv_find_better_format(struct drm_connector *connector)
1305{ 1310{
1306 struct drm_encoder *encoder = intel_attached_encoder(connector); 1311 struct drm_encoder *encoder = intel_attached_encoder(connector);
1307 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1312 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
1308 struct intel_tv_priv *tv_priv = intel_encoder->dev_priv; 1313 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_tv);
1309 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_encoder);
1310 int i; 1314 int i;
1311 1315
1312 if ((tv_priv->type == DRM_MODE_CONNECTOR_Component) == 1316 if ((intel_tv->type == DRM_MODE_CONNECTOR_Component) ==
1313 tv_mode->component_only) 1317 tv_mode->component_only)
1314 return; 1318 return;
1315 1319
@@ -1317,12 +1321,12 @@ static void intel_tv_find_better_format(struct drm_connector *connector)
1317 for (i = 0; i < sizeof(tv_modes) / sizeof(*tv_modes); i++) { 1321 for (i = 0; i < sizeof(tv_modes) / sizeof(*tv_modes); i++) {
1318 tv_mode = tv_modes + i; 1322 tv_mode = tv_modes + i;
1319 1323
1320 if ((tv_priv->type == DRM_MODE_CONNECTOR_Component) == 1324 if ((intel_tv->type == DRM_MODE_CONNECTOR_Component) ==
1321 tv_mode->component_only) 1325 tv_mode->component_only)
1322 break; 1326 break;
1323 } 1327 }
1324 1328
1325 tv_priv->tv_format = tv_mode->name; 1329 intel_tv->tv_format = tv_mode->name;
1326 drm_connector_property_set_value(connector, 1330 drm_connector_property_set_value(connector,
1327 connector->dev->mode_config.tv_mode_property, i); 1331 connector->dev->mode_config.tv_mode_property, i);
1328} 1332}
@@ -1336,31 +1340,31 @@ static void intel_tv_find_better_format(struct drm_connector *connector)
1336static enum drm_connector_status 1340static enum drm_connector_status
1337intel_tv_detect(struct drm_connector *connector) 1341intel_tv_detect(struct drm_connector *connector)
1338{ 1342{
1339 struct drm_crtc *crtc;
1340 struct drm_display_mode mode; 1343 struct drm_display_mode mode;
1341 struct drm_encoder *encoder = intel_attached_encoder(connector); 1344 struct drm_encoder *encoder = intel_attached_encoder(connector);
1342 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1345 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
1343 struct intel_tv_priv *tv_priv = intel_encoder->dev_priv; 1346 int type;
1344 int dpms_mode;
1345 int type = tv_priv->type;
1346 1347
1347 mode = reported_modes[0]; 1348 mode = reported_modes[0];
1348 drm_mode_set_crtcinfo(&mode, CRTC_INTERLACE_HALVE_V); 1349 drm_mode_set_crtcinfo(&mode, CRTC_INTERLACE_HALVE_V);
1349 1350
1350 if (encoder->crtc && encoder->crtc->enabled) { 1351 if (encoder->crtc && encoder->crtc->enabled) {
1351 type = intel_tv_detect_type(encoder->crtc, intel_encoder); 1352 type = intel_tv_detect_type(intel_tv);
1352 } else { 1353 } else {
1353 crtc = intel_get_load_detect_pipe(intel_encoder, connector, 1354 struct drm_crtc *crtc;
1355 int dpms_mode;
1356
1357 crtc = intel_get_load_detect_pipe(&intel_tv->base, connector,
1354 &mode, &dpms_mode); 1358 &mode, &dpms_mode);
1355 if (crtc) { 1359 if (crtc) {
1356 type = intel_tv_detect_type(crtc, intel_encoder); 1360 type = intel_tv_detect_type(intel_tv);
1357 intel_release_load_detect_pipe(intel_encoder, connector, 1361 intel_release_load_detect_pipe(&intel_tv->base, connector,
1358 dpms_mode); 1362 dpms_mode);
1359 } else 1363 } else
1360 type = -1; 1364 type = -1;
1361 } 1365 }
1362 1366
1363 tv_priv->type = type; 1367 intel_tv->type = type;
1364 1368
1365 if (type < 0) 1369 if (type < 0)
1366 return connector_status_disconnected; 1370 return connector_status_disconnected;
@@ -1391,8 +1395,8 @@ intel_tv_chose_preferred_modes(struct drm_connector *connector,
1391 struct drm_display_mode *mode_ptr) 1395 struct drm_display_mode *mode_ptr)
1392{ 1396{
1393 struct drm_encoder *encoder = intel_attached_encoder(connector); 1397 struct drm_encoder *encoder = intel_attached_encoder(connector);
1394 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1398 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
1395 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_encoder); 1399 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_tv);
1396 1400
1397 if (tv_mode->nbr_end < 480 && mode_ptr->vdisplay == 480) 1401 if (tv_mode->nbr_end < 480 && mode_ptr->vdisplay == 480)
1398 mode_ptr->type |= DRM_MODE_TYPE_PREFERRED; 1402 mode_ptr->type |= DRM_MODE_TYPE_PREFERRED;
@@ -1417,8 +1421,8 @@ intel_tv_get_modes(struct drm_connector *connector)
1417{ 1421{
1418 struct drm_display_mode *mode_ptr; 1422 struct drm_display_mode *mode_ptr;
1419 struct drm_encoder *encoder = intel_attached_encoder(connector); 1423 struct drm_encoder *encoder = intel_attached_encoder(connector);
1420 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1424 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
1421 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_encoder); 1425 const struct tv_mode *tv_mode = intel_tv_mode_find(intel_tv);
1422 int j, count = 0; 1426 int j, count = 0;
1423 u64 tmp; 1427 u64 tmp;
1424 1428
@@ -1483,8 +1487,7 @@ intel_tv_set_property(struct drm_connector *connector, struct drm_property *prop
1483{ 1487{
1484 struct drm_device *dev = connector->dev; 1488 struct drm_device *dev = connector->dev;
1485 struct drm_encoder *encoder = intel_attached_encoder(connector); 1489 struct drm_encoder *encoder = intel_attached_encoder(connector);
1486 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder); 1490 struct intel_tv *intel_tv = enc_to_intel_tv(encoder);
1487 struct intel_tv_priv *tv_priv = intel_encoder->dev_priv;
1488 struct drm_crtc *crtc = encoder->crtc; 1491 struct drm_crtc *crtc = encoder->crtc;
1489 int ret = 0; 1492 int ret = 0;
1490 bool changed = false; 1493 bool changed = false;
@@ -1494,30 +1497,30 @@ intel_tv_set_property(struct drm_connector *connector, struct drm_property *prop
1494 goto out; 1497 goto out;
1495 1498
1496 if (property == dev->mode_config.tv_left_margin_property && 1499 if (property == dev->mode_config.tv_left_margin_property &&
1497 tv_priv->margin[TV_MARGIN_LEFT] != val) { 1500 intel_tv->margin[TV_MARGIN_LEFT] != val) {
1498 tv_priv->margin[TV_MARGIN_LEFT] = val; 1501 intel_tv->margin[TV_MARGIN_LEFT] = val;
1499 changed = true; 1502 changed = true;
1500 } else if (property == dev->mode_config.tv_right_margin_property && 1503 } else if (property == dev->mode_config.tv_right_margin_property &&
1501 tv_priv->margin[TV_MARGIN_RIGHT] != val) { 1504 intel_tv->margin[TV_MARGIN_RIGHT] != val) {
1502 tv_priv->margin[TV_MARGIN_RIGHT] = val; 1505 intel_tv->margin[TV_MARGIN_RIGHT] = val;
1503 changed = true; 1506 changed = true;
1504 } else if (property == dev->mode_config.tv_top_margin_property && 1507 } else if (property == dev->mode_config.tv_top_margin_property &&
1505 tv_priv->margin[TV_MARGIN_TOP] != val) { 1508 intel_tv->margin[TV_MARGIN_TOP] != val) {
1506 tv_priv->margin[TV_MARGIN_TOP] = val; 1509 intel_tv->margin[TV_MARGIN_TOP] = val;
1507 changed = true; 1510 changed = true;
1508 } else if (property == dev->mode_config.tv_bottom_margin_property && 1511 } else if (property == dev->mode_config.tv_bottom_margin_property &&
1509 tv_priv->margin[TV_MARGIN_BOTTOM] != val) { 1512 intel_tv->margin[TV_MARGIN_BOTTOM] != val) {
1510 tv_priv->margin[TV_MARGIN_BOTTOM] = val; 1513 intel_tv->margin[TV_MARGIN_BOTTOM] = val;
1511 changed = true; 1514 changed = true;
1512 } else if (property == dev->mode_config.tv_mode_property) { 1515 } else if (property == dev->mode_config.tv_mode_property) {
1513 if (val >= ARRAY_SIZE(tv_modes)) { 1516 if (val >= ARRAY_SIZE(tv_modes)) {
1514 ret = -EINVAL; 1517 ret = -EINVAL;
1515 goto out; 1518 goto out;
1516 } 1519 }
1517 if (!strcmp(tv_priv->tv_format, tv_modes[val].name)) 1520 if (!strcmp(intel_tv->tv_format, tv_modes[val].name))
1518 goto out; 1521 goto out;
1519 1522
1520 tv_priv->tv_format = tv_modes[val].name; 1523 intel_tv->tv_format = tv_modes[val].name;
1521 changed = true; 1524 changed = true;
1522 } else { 1525 } else {
1523 ret = -EINVAL; 1526 ret = -EINVAL;
@@ -1553,16 +1556,8 @@ static const struct drm_connector_helper_funcs intel_tv_connector_helper_funcs =
1553 .best_encoder = intel_attached_encoder, 1556 .best_encoder = intel_attached_encoder,
1554}; 1557};
1555 1558
1556static void intel_tv_enc_destroy(struct drm_encoder *encoder)
1557{
1558 struct intel_encoder *intel_encoder = enc_to_intel_encoder(encoder);
1559
1560 drm_encoder_cleanup(encoder);
1561 kfree(intel_encoder);
1562}
1563
1564static const struct drm_encoder_funcs intel_tv_enc_funcs = { 1559static const struct drm_encoder_funcs intel_tv_enc_funcs = {
1565 .destroy = intel_tv_enc_destroy, 1560 .destroy = intel_encoder_destroy,
1566}; 1561};
1567 1562
1568/* 1563/*
@@ -1606,9 +1601,9 @@ intel_tv_init(struct drm_device *dev)
1606{ 1601{
1607 struct drm_i915_private *dev_priv = dev->dev_private; 1602 struct drm_i915_private *dev_priv = dev->dev_private;
1608 struct drm_connector *connector; 1603 struct drm_connector *connector;
1604 struct intel_tv *intel_tv;
1609 struct intel_encoder *intel_encoder; 1605 struct intel_encoder *intel_encoder;
1610 struct intel_connector *intel_connector; 1606 struct intel_connector *intel_connector;
1611 struct intel_tv_priv *tv_priv;
1612 u32 tv_dac_on, tv_dac_off, save_tv_dac; 1607 u32 tv_dac_on, tv_dac_off, save_tv_dac;
1613 char **tv_format_names; 1608 char **tv_format_names;
1614 int i, initial_mode = 0; 1609 int i, initial_mode = 0;
@@ -1647,18 +1642,18 @@ intel_tv_init(struct drm_device *dev)
1647 (tv_dac_off & TVDAC_STATE_CHG_EN) != 0) 1642 (tv_dac_off & TVDAC_STATE_CHG_EN) != 0)
1648 return; 1643 return;
1649 1644
1650 intel_encoder = kzalloc(sizeof(struct intel_encoder) + 1645 intel_tv = kzalloc(sizeof(struct intel_tv), GFP_KERNEL);
1651 sizeof(struct intel_tv_priv), GFP_KERNEL); 1646 if (!intel_tv) {
1652 if (!intel_encoder) {
1653 return; 1647 return;
1654 } 1648 }
1655 1649
1656 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); 1650 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
1657 if (!intel_connector) { 1651 if (!intel_connector) {
1658 kfree(intel_encoder); 1652 kfree(intel_tv);
1659 return; 1653 return;
1660 } 1654 }
1661 1655
1656 intel_encoder = &intel_tv->base;
1662 connector = &intel_connector->base; 1657 connector = &intel_connector->base;
1663 1658
1664 drm_connector_init(dev, connector, &intel_tv_connector_funcs, 1659 drm_connector_init(dev, connector, &intel_tv_connector_funcs,
@@ -1668,22 +1663,20 @@ intel_tv_init(struct drm_device *dev)
1668 DRM_MODE_ENCODER_TVDAC); 1663 DRM_MODE_ENCODER_TVDAC);
1669 1664
1670 drm_mode_connector_attach_encoder(&intel_connector->base, &intel_encoder->enc); 1665 drm_mode_connector_attach_encoder(&intel_connector->base, &intel_encoder->enc);
1671 tv_priv = (struct intel_tv_priv *)(intel_encoder + 1);
1672 intel_encoder->type = INTEL_OUTPUT_TVOUT; 1666 intel_encoder->type = INTEL_OUTPUT_TVOUT;
1673 intel_encoder->crtc_mask = (1 << 0) | (1 << 1); 1667 intel_encoder->crtc_mask = (1 << 0) | (1 << 1);
1674 intel_encoder->clone_mask = (1 << INTEL_TV_CLONE_BIT); 1668 intel_encoder->clone_mask = (1 << INTEL_TV_CLONE_BIT);
1675 intel_encoder->enc.possible_crtcs = ((1 << 0) | (1 << 1)); 1669 intel_encoder->enc.possible_crtcs = ((1 << 0) | (1 << 1));
1676 intel_encoder->enc.possible_clones = (1 << INTEL_OUTPUT_TVOUT); 1670 intel_encoder->enc.possible_clones = (1 << INTEL_OUTPUT_TVOUT);
1677 intel_encoder->dev_priv = tv_priv; 1671 intel_tv->type = DRM_MODE_CONNECTOR_Unknown;
1678 tv_priv->type = DRM_MODE_CONNECTOR_Unknown;
1679 1672
1680 /* BIOS margin values */ 1673 /* BIOS margin values */
1681 tv_priv->margin[TV_MARGIN_LEFT] = 54; 1674 intel_tv->margin[TV_MARGIN_LEFT] = 54;
1682 tv_priv->margin[TV_MARGIN_TOP] = 36; 1675 intel_tv->margin[TV_MARGIN_TOP] = 36;
1683 tv_priv->margin[TV_MARGIN_RIGHT] = 46; 1676 intel_tv->margin[TV_MARGIN_RIGHT] = 46;
1684 tv_priv->margin[TV_MARGIN_BOTTOM] = 37; 1677 intel_tv->margin[TV_MARGIN_BOTTOM] = 37;
1685 1678
1686 tv_priv->tv_format = kstrdup(tv_modes[initial_mode].name, GFP_KERNEL); 1679 intel_tv->tv_format = kstrdup(tv_modes[initial_mode].name, GFP_KERNEL);
1687 1680
1688 drm_encoder_helper_add(&intel_encoder->enc, &intel_tv_helper_funcs); 1681 drm_encoder_helper_add(&intel_encoder->enc, &intel_tv_helper_funcs);
1689 drm_connector_helper_add(connector, &intel_tv_connector_helper_funcs); 1682 drm_connector_helper_add(connector, &intel_tv_connector_helper_funcs);
@@ -1703,16 +1696,16 @@ intel_tv_init(struct drm_device *dev)
1703 initial_mode); 1696 initial_mode);
1704 drm_connector_attach_property(connector, 1697 drm_connector_attach_property(connector,
1705 dev->mode_config.tv_left_margin_property, 1698 dev->mode_config.tv_left_margin_property,
1706 tv_priv->margin[TV_MARGIN_LEFT]); 1699 intel_tv->margin[TV_MARGIN_LEFT]);
1707 drm_connector_attach_property(connector, 1700 drm_connector_attach_property(connector,
1708 dev->mode_config.tv_top_margin_property, 1701 dev->mode_config.tv_top_margin_property,
1709 tv_priv->margin[TV_MARGIN_TOP]); 1702 intel_tv->margin[TV_MARGIN_TOP]);
1710 drm_connector_attach_property(connector, 1703 drm_connector_attach_property(connector,
1711 dev->mode_config.tv_right_margin_property, 1704 dev->mode_config.tv_right_margin_property,
1712 tv_priv->margin[TV_MARGIN_RIGHT]); 1705 intel_tv->margin[TV_MARGIN_RIGHT]);
1713 drm_connector_attach_property(connector, 1706 drm_connector_attach_property(connector,
1714 dev->mode_config.tv_bottom_margin_property, 1707 dev->mode_config.tv_bottom_margin_property,
1715 tv_priv->margin[TV_MARGIN_BOTTOM]); 1708 intel_tv->margin[TV_MARGIN_BOTTOM]);
1716out: 1709out:
1717 drm_sysfs_connector_add(connector); 1710 drm_sysfs_connector_add(connector);
1718} 1711}
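
[Editor's note: the intel_tv.c hunks above replace the old intel_encoder->dev_priv side allocation with a struct intel_tv that embeds struct intel_encoder as its base, recovered via container_of() in enc_to_intel_tv(). The following is a minimal userspace sketch of that embedded-base pattern under assumed, illustrative type names; it is not the driver code.]

#include <stddef.h>
#include <stdio.h>

/* Same expansion as the kernel's container_of(): step back from the
 * embedded member to the start of the containing struct. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct encoder {
	int id;
};

struct tv_encoder {
	struct encoder base;	/* embedded base, not a separate allocation */
	int margin[4];
	const char *tv_format;
};

static struct tv_encoder *to_tv_encoder(struct encoder *enc)
{
	return container_of(enc, struct tv_encoder, base);
}

int main(void)
{
	struct tv_encoder tv = {
		.base = { .id = 42 },
		.tv_format = "NTSC-M",
	};
	struct encoder *enc = &tv.base;	/* what generic code sees */

	/* One object, one lifetime: subclass fields travel with the base,
	 * so a generic destroy (kfree of the container) frees everything. */
	printf("encoder %d uses format %s\n",
	       enc->id, to_tv_encoder(enc)->tv_format);
	return 0;
}
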