Diffstat (limited to 'drivers/gpu/drm/i915/i915_drv.h')
-rw-r--r--  drivers/gpu/drm/i915/i915_drv.h  396
1 file changed, 263 insertions(+), 133 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
index ab0f2c0a440c..b12d942ab09c 100644
--- a/drivers/gpu/drm/i915/i915_drv.h
+++ b/drivers/gpu/drm/i915/i915_drv.h
@@ -98,13 +98,25 @@ enum intel_display_power_domain {
 	POWER_DOMAIN_TRANSCODER_A,
 	POWER_DOMAIN_TRANSCODER_B,
 	POWER_DOMAIN_TRANSCODER_C,
-	POWER_DOMAIN_TRANSCODER_EDP = POWER_DOMAIN_TRANSCODER_A + 0xF,
+	POWER_DOMAIN_TRANSCODER_EDP,
+	POWER_DOMAIN_VGA,
+	POWER_DOMAIN_INIT,
+
+	POWER_DOMAIN_NUM,
 };
 
+#define POWER_DOMAIN_MASK (BIT(POWER_DOMAIN_NUM) - 1)
+
 #define POWER_DOMAIN_PIPE(pipe) ((pipe) + POWER_DOMAIN_PIPE_A)
 #define POWER_DOMAIN_PIPE_PANEL_FITTER(pipe) \
 	((pipe) + POWER_DOMAIN_PIPE_A_PANEL_FITTER)
-#define POWER_DOMAIN_TRANSCODER(tran) ((tran) + POWER_DOMAIN_TRANSCODER_A)
+#define POWER_DOMAIN_TRANSCODER(tran) \
+	((tran) == TRANSCODER_EDP ? POWER_DOMAIN_TRANSCODER_EDP : \
+	 (tran) + POWER_DOMAIN_TRANSCODER_A)
+
+#define HSW_ALWAYS_ON_POWER_DOMAINS ( \
+	BIT(POWER_DOMAIN_PIPE_A) | \
+	BIT(POWER_DOMAIN_TRANSCODER_EDP))
 
 enum hpd_pin {
 	HPD_NONE = 0,
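Aside, not part of the patch itself: POWER_DOMAIN_TRANSCODER() now needs the eDP special case because POWER_DOMAIN_TRANSCODER_EDP is an ordinary consecutive enumerator instead of sitting at the old +0xF offset (TRANSCODER_EDP itself keeps the value 0xF in enum transcoder). A self-contained sketch of the lookup and of building a BIT() domain mask, using simplified stand-in enums rather than the driver's own definitions:

#include <stdio.h>

#define BIT(n) (1UL << (n))

/* Simplified stand-ins for the driver enums; only the shape matters here. */
enum transcoder { TRANSCODER_A, TRANSCODER_B, TRANSCODER_C, TRANSCODER_EDP = 0xF };
enum power_domain {
	POWER_DOMAIN_PIPE_A,
	POWER_DOMAIN_TRANSCODER_A,
	POWER_DOMAIN_TRANSCODER_B,
	POWER_DOMAIN_TRANSCODER_C,
	POWER_DOMAIN_TRANSCODER_EDP,
	POWER_DOMAIN_NUM,
};
#define POWER_DOMAIN_MASK (BIT(POWER_DOMAIN_NUM) - 1)
#define POWER_DOMAIN_TRANSCODER(tran) \
	((tran) == TRANSCODER_EDP ? POWER_DOMAIN_TRANSCODER_EDP : \
	 (tran) + POWER_DOMAIN_TRANSCODER_A)

int main(void)
{
	/* eDP maps to its dedicated domain; A/B/C still map by offset. */
	unsigned long mask = BIT(POWER_DOMAIN_TRANSCODER(TRANSCODER_EDP)) |
			     BIT(POWER_DOMAIN_TRANSCODER(TRANSCODER_B));

	printf("mask = 0x%lx (all domains = 0x%lx)\n", mask, POWER_DOMAIN_MASK);
	return 0;
}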
@@ -225,6 +237,8 @@ struct intel_opregion {
 	struct opregion_header __iomem *header;
 	struct opregion_acpi __iomem *acpi;
 	struct opregion_swsci __iomem *swsci;
+	u32 swsci_gbda_sub_functions;
+	u32 swsci_sbcb_sub_functions;
 	struct opregion_asle __iomem *asle;
 	void __iomem *vbt;
 	u32 __iomem *lid_state;
@@ -285,6 +299,7 @@ struct drm_i915_error_state {
 	u32 cpu_ring_tail[I915_NUM_RINGS];
 	u32 error; /* gen6+ */
 	u32 err_int; /* gen7 */
+	u32 bbstate[I915_NUM_RINGS];
 	u32 instpm[I915_NUM_RINGS];
 	u32 instps[I915_NUM_RINGS];
 	u32 extra_instdone[I915_NUM_INSTDONE_REG];
@@ -321,11 +336,13 @@ struct drm_i915_error_state {
 		u32 dirty:1;
 		u32 purgeable:1;
 		s32 ring:4;
-		u32 cache_level:2;
+		u32 cache_level:3;
 	} **active_bo, **pinned_bo;
 	u32 *active_bo_count, *pinned_bo_count;
 	struct intel_overlay_error_state *overlay;
 	struct intel_display_error_state *display;
+	int hangcheck_score[I915_NUM_RINGS];
+	enum intel_ring_hangcheck_action hangcheck_action[I915_NUM_RINGS];
 };
 
 struct intel_crtc_config;
@@ -357,7 +374,7 @@ struct drm_i915_display_funcs {
 			  int target, int refclk,
 			  struct dpll *match_clock,
 			  struct dpll *best_clock);
-	void (*update_wm)(struct drm_device *dev);
+	void (*update_wm)(struct drm_crtc *crtc);
 	void (*update_sprite_wm)(struct drm_plane *plane,
 				 struct drm_crtc *crtc,
 				 uint32_t sprite_width, int pixel_size,
@@ -367,7 +384,6 @@ struct drm_i915_display_funcs {
 	 * fills out the pipe-config with the hw state. */
 	bool (*get_pipe_config)(struct intel_crtc *,
 				struct intel_crtc_config *);
-	void (*get_clock)(struct intel_crtc *, struct intel_crtc_config *);
 	int (*crtc_mode_set)(struct drm_crtc *crtc,
 			     int x, int y,
 			     struct drm_framebuffer *old_fb);
@@ -375,7 +391,8 @@ struct drm_i915_display_funcs {
 	void (*crtc_disable)(struct drm_crtc *crtc);
 	void (*off)(struct drm_crtc *crtc);
 	void (*write_eld)(struct drm_connector *connector,
-			  struct drm_crtc *crtc);
+			  struct drm_crtc *crtc,
+			  struct drm_display_mode *mode);
 	void (*fdi_link_train)(struct drm_crtc *crtc);
 	void (*init_clock_gating)(struct drm_device *dev);
 	int (*queue_flip)(struct drm_device *dev, struct drm_crtc *crtc,
@@ -395,6 +412,20 @@ struct drm_i915_display_funcs {
 struct intel_uncore_funcs {
 	void (*force_wake_get)(struct drm_i915_private *dev_priv);
 	void (*force_wake_put)(struct drm_i915_private *dev_priv);
+
+	uint8_t (*mmio_readb)(struct drm_i915_private *dev_priv, off_t offset, bool trace);
+	uint16_t (*mmio_readw)(struct drm_i915_private *dev_priv, off_t offset, bool trace);
+	uint32_t (*mmio_readl)(struct drm_i915_private *dev_priv, off_t offset, bool trace);
+	uint64_t (*mmio_readq)(struct drm_i915_private *dev_priv, off_t offset, bool trace);
+
+	void (*mmio_writeb)(struct drm_i915_private *dev_priv, off_t offset,
+			    uint8_t val, bool trace);
+	void (*mmio_writew)(struct drm_i915_private *dev_priv, off_t offset,
+			    uint16_t val, bool trace);
+	void (*mmio_writel)(struct drm_i915_private *dev_priv, off_t offset,
+			    uint32_t val, bool trace);
+	void (*mmio_writeq)(struct drm_i915_private *dev_priv, off_t offset,
+			    uint64_t val, bool trace);
 };
 
 struct intel_uncore {
@@ -404,6 +435,8 @@ struct intel_uncore {
 
 	unsigned fifo_count;
 	unsigned forcewake_count;
+
+	struct delayed_work force_wake_work;
 };
 
 #define DEV_INFO_FOR_EACH_FLAG(func, sep) \
@@ -420,7 +453,7 @@ struct intel_uncore {
 	func(is_ivybridge) sep \
 	func(is_valleyview) sep \
 	func(is_haswell) sep \
-	func(has_force_wake) sep \
+	func(is_preliminary) sep \
 	func(has_fbc) sep \
 	func(has_pipe_cxsr) sep \
 	func(has_hotplug) sep \
@@ -428,9 +461,6 @@ struct intel_uncore {
 	func(has_overlay) sep \
 	func(overlay_needs_physical) sep \
 	func(supports_tv) sep \
-	func(has_bsd_ring) sep \
-	func(has_blt_ring) sep \
-	func(has_vebox_ring) sep \
 	func(has_llc) sep \
 	func(has_ddi) sep \
 	func(has_fpga_dbg)
@@ -442,6 +472,7 @@ struct intel_device_info {
 	u32 display_mmio_offset;
 	u8 num_pipes:3;
 	u8 gen;
+	u8 ring_mask; /* Rings supported by the HW */
 	DEV_INFO_FOR_EACH_FLAG(DEFINE_FLAG, SEP_SEMICOLON);
 };
 
@@ -570,6 +601,13 @@ struct i915_vma {
 	/** This vma's place in the batchbuffer or on the eviction list */
 	struct list_head exec_list;
 
+	/**
+	 * Used for performing relocations during execbuffer insertion.
+	 */
+	struct hlist_node exec_node;
+	unsigned long exec_handle;
+	struct drm_i915_gem_exec_object2 *exec_entry;
+
 };
 
 struct i915_ctx_hang_stats {
@@ -578,6 +616,12 @@ struct i915_ctx_hang_stats {
 
 	/* This context had batch active when hang was declared */
 	unsigned batch_active;
+
+	/* Time when this context was last blamed for a GPU reset */
+	unsigned long guilty_ts;
+
+	/* This context is banned to submit more work */
+	bool banned;
 };
 
 /* This must match up with the value previously used for execbuf2.rsvd1. */
@@ -586,10 +630,13 @@ struct i915_hw_context {
 	struct kref ref;
 	int id;
 	bool is_initialized;
+	uint8_t remap_slice;
 	struct drm_i915_file_private *file_priv;
 	struct intel_ring_buffer *ring;
 	struct drm_i915_gem_object *obj;
 	struct i915_ctx_hang_stats hang_stats;
+
+	struct list_head link;
 };
 
 struct i915_fbc {
@@ -623,17 +670,9 @@ struct i915_fbc {
 	} no_fbc_reason;
 };
 
-enum no_psr_reason {
-	PSR_NO_SOURCE, /* Not supported on platform */
-	PSR_NO_SINK, /* Not supported by panel */
-	PSR_MODULE_PARAM,
-	PSR_CRTC_NOT_ACTIVE,
-	PSR_PWR_WELL_ENABLED,
-	PSR_NOT_TILED,
-	PSR_SPRITE_ENABLED,
-	PSR_S3D_ENABLED,
-	PSR_INTERLACED_ENABLED,
-	PSR_HSW_NOT_DDIA,
+struct i915_psr {
+	bool sink_support;
+	bool source_ok;
 };
 
 enum intel_pch {
@@ -823,17 +862,20 @@ struct intel_gen6_power_mgmt {
 	struct work_struct work;
 	u32 pm_iir;
 
-	/* On vlv we need to manually drop to Vmin with a delayed work. */
-	struct delayed_work vlv_work;
-
 	/* The below variables an all the rps hw state are protected by
 	 * dev->struct mutext. */
 	u8 cur_delay;
 	u8 min_delay;
 	u8 max_delay;
 	u8 rpe_delay;
+	u8 rp1_delay;
+	u8 rp0_delay;
 	u8 hw_max;
 
+	int last_adj;
+	enum { LOW_POWER, BETWEEN, HIGH_POWER } power;
+
+	bool enabled;
 	struct delayed_work delayed_resume_work;
 
 	/*
@@ -870,11 +912,21 @@ struct intel_ilk_power_mgmt {
 
 /* Power well structure for haswell */
 struct i915_power_well {
-	struct drm_device *device;
-	spinlock_t lock;
 	/* power well enable/disable usage count */
 	int count;
-	int i915_request;
+};
+
+#define I915_MAX_POWER_WELLS 1
+
+struct i915_power_domains {
+	/*
+	 * Power wells needed for initialization at driver init and suspend
+	 * time are on. They are kept on until after the first modeset.
+	 */
+	bool init_power_on;
+
+	struct mutex lock;
+	struct i915_power_well power_wells[I915_MAX_POWER_WELLS];
 };
 
 struct i915_dri1_state {
@@ -902,9 +954,11 @@ struct i915_ums_state {
 	int mm_suspended;
 };
 
+#define MAX_L3_SLICES 2
 struct intel_l3_parity {
-	u32 *remap_info;
+	u32 *remap_info[MAX_L3_SLICES];
 	struct work_struct error_work;
+	int which_slice;
 };
 
 struct i915_gem_mm {
@@ -942,6 +996,15 @@ struct i915_gem_mm {
 	struct delayed_work retire_work;
 
 	/**
+	 * When we detect an idle GPU, we want to turn on
+	 * powersaving features. So once we see that there
+	 * are no more requests outstanding and no more
+	 * arrive within a small period of time, we fire
+	 * off the idle_work.
+	 */
+	struct delayed_work idle_work;
+
+	/**
 	 * Are we in a non-interruptible section of code like
 	 * modesetting?
 	 */
@@ -979,6 +1042,9 @@ struct i915_gpu_error {
 	/* For hangcheck timer */
 #define DRM_I915_HANGCHECK_PERIOD 1500 /* in ms */
 #define DRM_I915_HANGCHECK_JIFFIES msecs_to_jiffies(DRM_I915_HANGCHECK_PERIOD)
+	/* Hang gpu twice in this window and your context gets banned */
+#define DRM_I915_CTX_BAN_PERIOD DIV_ROUND_UP(8*DRM_I915_HANGCHECK_PERIOD, 1000)
+
 	struct timer_list hangcheck_timer;
 
 	/* For reset and error_state handling. */
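(For scale, not spelled out by the patch: with the 1500 ms hangcheck period defined just above, DRM_I915_CTX_BAN_PERIOD evaluates to DIV_ROUND_UP(8 * 1500, 1000) = 12, i.e. eight hangcheck periods rounded up to whole seconds. Per the comment, a context blamed for a second hang inside that window gets banned; the guilty_ts and banned fields added to i915_ctx_hang_stats earlier in this diff carry that state.)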
@@ -987,7 +1053,8 @@ struct i915_gpu_error {
 	struct drm_i915_error_state *first_error;
 	struct work_struct work;
 
-	unsigned long last_reset;
+
+	unsigned long missed_irq_rings;
 
 	/**
 	 * State variable and reset counter controlling the reset flow
@@ -1027,6 +1094,9 @@ struct i915_gpu_error {
 
 	/* For gpu hang simulation. */
 	unsigned int stop_rings;
+
+	/* For missed irq/seqno simulation. */
+	unsigned int test_irq_rings;
 };
 
 enum modeset_restore {
@@ -1035,6 +1105,14 @@ enum modeset_restore {
 	MODESET_SUSPENDED,
 };
 
+struct ddi_vbt_port_info {
+	uint8_t hdmi_level_shift;
+
+	uint8_t supports_dvi:1;
+	uint8_t supports_hdmi:1;
+	uint8_t supports_dp:1;
+};
+
 struct intel_vbt_data {
 	struct drm_display_mode *lfp_lvds_vbt_mode; /* if any */
 	struct drm_display_mode *sdvo_lvds_vbt_mode; /* if any */
@@ -1060,10 +1138,17 @@ struct intel_vbt_data {
 	int edp_bpp;
 	struct edp_power_seq edp_pps;
 
+	/* MIPI DSI */
+	struct {
+		u16 panel_id;
+	} dsi;
+
 	int crt_ddc_pin;
 
 	int child_dev_num;
-	struct child_device_config *child_dev;
+	union child_device_config *child_dev;
+
+	struct ddi_vbt_port_info ddi_port_info[I915_MAX_PORTS];
 };
 
 enum intel_ddb_partitioning {
@@ -1079,6 +1164,15 @@ struct intel_wm_level {
 	uint32_t fbc_val;
 };
 
+struct hsw_wm_values {
+	uint32_t wm_pipe[3];
+	uint32_t wm_lp[3];
+	uint32_t wm_lp_spr[3];
+	uint32_t wm_linetime[3];
+	bool enable_fbc_wm;
+	enum intel_ddb_partitioning partitioning;
+};
+
 /*
  * This struct tracks the state needed for the Package C8+ feature.
  *
@@ -1148,6 +1242,36 @@ struct i915_package_c8 {
 	} regsave;
 };
 
+enum intel_pipe_crc_source {
+	INTEL_PIPE_CRC_SOURCE_NONE,
+	INTEL_PIPE_CRC_SOURCE_PLANE1,
+	INTEL_PIPE_CRC_SOURCE_PLANE2,
+	INTEL_PIPE_CRC_SOURCE_PF,
+	INTEL_PIPE_CRC_SOURCE_PIPE,
+	/* TV/DP on pre-gen5/vlv can't use the pipe source. */
+	INTEL_PIPE_CRC_SOURCE_TV,
+	INTEL_PIPE_CRC_SOURCE_DP_B,
+	INTEL_PIPE_CRC_SOURCE_DP_C,
+	INTEL_PIPE_CRC_SOURCE_DP_D,
+	INTEL_PIPE_CRC_SOURCE_AUTO,
+	INTEL_PIPE_CRC_SOURCE_MAX,
+};
+
+struct intel_pipe_crc_entry {
+	uint32_t frame;
+	uint32_t crc[5];
+};
+
+#define INTEL_PIPE_CRC_ENTRIES_NR 128
+struct intel_pipe_crc {
+	spinlock_t lock;
+	bool opened; /* exclusive access to the result file */
+	struct intel_pipe_crc_entry *entries;
+	enum intel_pipe_crc_source source;
+	int head, tail;
+	wait_queue_head_t wq;
+};
+
 typedef struct drm_i915_private {
 	struct drm_device *dev;
 	struct kmem_cache *slab;
@@ -1272,6 +1396,10 @@ typedef struct drm_i915_private {
 	struct drm_crtc *pipe_to_crtc_mapping[3];
 	wait_queue_head_t pending_flip_queue;
 
+#ifdef CONFIG_DEBUG_FS
+	struct intel_pipe_crc pipe_crc[I915_MAX_PIPES];
+#endif
+
 	int num_shared_dpll;
 	struct intel_shared_dpll shared_dplls[I915_NUM_PLLS];
 	struct intel_ddi_plls ddi_plls;
@@ -1297,17 +1425,18 @@ typedef struct drm_i915_private {
 	 * mchdev_lock in intel_pm.c */
 	struct intel_ilk_power_mgmt ips;
 
-	/* Haswell power well */
-	struct i915_power_well power_well;
+	struct i915_power_domains power_domains;
 
-	enum no_psr_reason no_psr_reason;
+	struct i915_psr psr;
 
 	struct i915_gpu_error gpu_error;
 
 	struct drm_i915_gem_object *vlv_pctx;
 
+#ifdef CONFIG_DRM_I915_FBDEV
 	/* list of fbdev register on this device */
 	struct intel_fbdev *fbdev;
+#endif
 
 	/*
 	 * The console may be contended at resume, but we don't
@@ -1320,6 +1449,7 @@ typedef struct drm_i915_private {
 
 	bool hw_contexts_disabled;
 	uint32_t hw_context_size;
+	struct list_head context_list;
 
 	u32 fdi_rx_config;
 
@@ -1337,6 +1467,9 @@ typedef struct drm_i915_private {
 		uint16_t spr_latency[5];
 		/* cursor */
 		uint16_t cur_latency[5];
+
+		/* current hardware state */
+		struct hsw_wm_values hw;
 	} wm;
 
 	struct i915_package_c8 pc8;
@@ -1400,8 +1533,6 @@ struct drm_i915_gem_object {
 	struct list_head ring_list;
 	/** Used in execbuf to temporarily hold a ref */
 	struct list_head obj_exec_link;
-	/** This object's place in the batchbuffer or on the eviction list */
-	struct list_head exec_list;
 
 	/**
 	 * This is set if the object is on the active lists (has pending
@@ -1487,13 +1618,6 @@ struct drm_i915_gem_object {
 	void *dma_buf_vmapping;
 	int vmapping_count;
 
-	/**
-	 * Used for performing relocations during execbuffer insertion.
-	 */
-	struct hlist_node exec_node;
-	unsigned long exec_handle;
-	struct drm_i915_gem_exec_object2 *exec_entry;
-
 	struct intel_ring_buffer *ring;
 
 	/** Breadcrumb of last rendering to the buffer. */
@@ -1505,11 +1629,14 @@ struct drm_i915_gem_object {
 	/** Current tiling stride for the object, if it's tiled. */
 	uint32_t stride;
 
+	/** References from framebuffers, locks out tiling changes. */
+	unsigned long framebuffer_references;
+
 	/** Record of address bit 17 of each page at last unbind. */
 	unsigned long *bit_17;
 
 	/** User space pin count and filp owning the pin */
-	uint32_t user_pin_count;
+	unsigned long user_pin_count;
 	struct drm_file *pin_filp;
 
 	/** for phy allocated objects */
@@ -1560,48 +1687,55 @@ struct drm_i915_gem_request {
 };
 
 struct drm_i915_file_private {
+	struct drm_i915_private *dev_priv;
+
 	struct {
 		spinlock_t lock;
 		struct list_head request_list;
+		struct delayed_work idle_work;
 	} mm;
 	struct idr context_idr;
 
 	struct i915_ctx_hang_stats hang_stats;
+	atomic_t rps_wait_boost;
 };
 
 #define INTEL_INFO(dev) (to_i915(dev)->info)
 
-#define IS_I830(dev) ((dev)->pci_device == 0x3577)
-#define IS_845G(dev) ((dev)->pci_device == 0x2562)
+#define IS_I830(dev) ((dev)->pdev->device == 0x3577)
+#define IS_845G(dev) ((dev)->pdev->device == 0x2562)
 #define IS_I85X(dev) (INTEL_INFO(dev)->is_i85x)
-#define IS_I865G(dev) ((dev)->pci_device == 0x2572)
+#define IS_I865G(dev) ((dev)->pdev->device == 0x2572)
 #define IS_I915G(dev) (INTEL_INFO(dev)->is_i915g)
-#define IS_I915GM(dev) ((dev)->pci_device == 0x2592)
-#define IS_I945G(dev) ((dev)->pci_device == 0x2772)
+#define IS_I915GM(dev) ((dev)->pdev->device == 0x2592)
+#define IS_I945G(dev) ((dev)->pdev->device == 0x2772)
 #define IS_I945GM(dev) (INTEL_INFO(dev)->is_i945gm)
 #define IS_BROADWATER(dev) (INTEL_INFO(dev)->is_broadwater)
 #define IS_CRESTLINE(dev) (INTEL_INFO(dev)->is_crestline)
-#define IS_GM45(dev) ((dev)->pci_device == 0x2A42)
+#define IS_GM45(dev) ((dev)->pdev->device == 0x2A42)
 #define IS_G4X(dev) (INTEL_INFO(dev)->is_g4x)
-#define IS_PINEVIEW_G(dev) ((dev)->pci_device == 0xa001)
-#define IS_PINEVIEW_M(dev) ((dev)->pci_device == 0xa011)
+#define IS_PINEVIEW_G(dev) ((dev)->pdev->device == 0xa001)
+#define IS_PINEVIEW_M(dev) ((dev)->pdev->device == 0xa011)
 #define IS_PINEVIEW(dev) (INTEL_INFO(dev)->is_pineview)
 #define IS_G33(dev) (INTEL_INFO(dev)->is_g33)
-#define IS_IRONLAKE_M(dev) ((dev)->pci_device == 0x0046)
+#define IS_IRONLAKE_M(dev) ((dev)->pdev->device == 0x0046)
 #define IS_IVYBRIDGE(dev) (INTEL_INFO(dev)->is_ivybridge)
-#define IS_IVB_GT1(dev) ((dev)->pci_device == 0x0156 || \
-			 (dev)->pci_device == 0x0152 || \
-			 (dev)->pci_device == 0x015a)
-#define IS_SNB_GT1(dev) ((dev)->pci_device == 0x0102 || \
-			 (dev)->pci_device == 0x0106 || \
-			 (dev)->pci_device == 0x010A)
+#define IS_IVB_GT1(dev) ((dev)->pdev->device == 0x0156 || \
+			 (dev)->pdev->device == 0x0152 || \
+			 (dev)->pdev->device == 0x015a)
+#define IS_SNB_GT1(dev) ((dev)->pdev->device == 0x0102 || \
+			 (dev)->pdev->device == 0x0106 || \
+			 (dev)->pdev->device == 0x010A)
 #define IS_VALLEYVIEW(dev) (INTEL_INFO(dev)->is_valleyview)
 #define IS_HASWELL(dev) (INTEL_INFO(dev)->is_haswell)
 #define IS_MOBILE(dev) (INTEL_INFO(dev)->is_mobile)
 #define IS_HSW_EARLY_SDV(dev) (IS_HASWELL(dev) && \
-			       ((dev)->pci_device & 0xFF00) == 0x0C00)
+			       ((dev)->pdev->device & 0xFF00) == 0x0C00)
 #define IS_ULT(dev) (IS_HASWELL(dev) && \
-		     ((dev)->pci_device & 0xFF00) == 0x0A00)
+		     ((dev)->pdev->device & 0xFF00) == 0x0A00)
+#define IS_HSW_GT3(dev) (IS_HASWELL(dev) && \
+			 ((dev)->pdev->device & 0x00F0) == 0x0020)
+#define IS_PRELIMINARY_HW(intel_info) ((intel_info)->is_preliminary)
 
 /*
  * The genX designation typically refers to the render engine, so render
@@ -1616,9 +1750,13 @@ struct drm_i915_file_private {
 #define IS_GEN6(dev) (INTEL_INFO(dev)->gen == 6)
 #define IS_GEN7(dev) (INTEL_INFO(dev)->gen == 7)
 
-#define HAS_BSD(dev) (INTEL_INFO(dev)->has_bsd_ring)
-#define HAS_BLT(dev) (INTEL_INFO(dev)->has_blt_ring)
-#define HAS_VEBOX(dev) (INTEL_INFO(dev)->has_vebox_ring)
+#define RENDER_RING (1<<RCS)
+#define BSD_RING (1<<VCS)
+#define BLT_RING (1<<BCS)
+#define VEBOX_RING (1<<VECS)
+#define HAS_BSD(dev) (INTEL_INFO(dev)->ring_mask & BSD_RING)
+#define HAS_BLT(dev) (INTEL_INFO(dev)->ring_mask & BLT_RING)
+#define HAS_VEBOX(dev) (INTEL_INFO(dev)->ring_mask & VEBOX_RING)
 #define HAS_LLC(dev) (INTEL_INFO(dev)->has_llc)
 #define HAS_WT(dev) (IS_HASWELL(dev) && to_i915(dev)->ellc_size)
 #define I915_NEED_GFX_HWS(dev) (INTEL_INFO(dev)->need_gfx_hws)
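Not from the patch, but to illustrate the new scheme: ring support is now a bitmask in intel_device_info.ring_mask (added in an earlier hunk) rather than three separate flags, and HAS_BSD()/HAS_BLT()/HAS_VEBOX() simply test bits. A self-contained sketch with stand-in names (none of the FAKE_* identifiers exist in the driver):

#include <stdio.h>

/* Stand-ins mirroring the defines above; RCS/VCS/BCS/VECS follow the usual
 * ring ordering, but this is illustrative only, not driver code. */
enum { RCS, VCS, BCS, VECS };
#define RENDER_RING (1 << RCS)
#define BSD_RING    (1 << VCS)
#define BLT_RING    (1 << BCS)
#define VEBOX_RING  (1 << VECS)

struct fake_device_info {
	unsigned char ring_mask; /* rings supported by the HW */
};

#define FAKE_HAS_BSD(info)   ((info)->ring_mask & BSD_RING)
#define FAKE_HAS_VEBOX(info) ((info)->ring_mask & VEBOX_RING)

int main(void)
{
	/* Hypothetical part with render, BSD and blitter rings but no VEBOX. */
	struct fake_device_info info = { .ring_mask = RENDER_RING | BSD_RING | BLT_RING };

	printf("BSD: %d, VEBOX: %d\n", !!FAKE_HAS_BSD(&info), !!FAKE_HAS_VEBOX(&info));
	return 0;
}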
@@ -1640,7 +1778,6 @@ struct drm_i915_file_private {
 #define SUPPORTS_DIGITAL_OUTPUTS(dev) (!IS_GEN2(dev) && !IS_PINEVIEW(dev))
 #define SUPPORTS_INTEGRATED_HDMI(dev) (IS_G4X(dev) || IS_GEN5(dev))
 #define SUPPORTS_INTEGRATED_DP(dev) (IS_G4X(dev) || IS_GEN5(dev))
-#define SUPPORTS_EDP(dev) (IS_IRONLAKE_M(dev))
 #define SUPPORTS_TV(dev) (INTEL_INFO(dev)->supports_tv)
 #define I915_HAS_HOTPLUG(dev) (INTEL_INFO(dev)->has_hotplug)
 
@@ -1653,6 +1790,7 @@ struct drm_i915_file_private {
 #define HAS_DDI(dev) (INTEL_INFO(dev)->has_ddi)
 #define HAS_POWER_WELL(dev) (IS_HASWELL(dev))
 #define HAS_FPGA_DBG_UNCLAIMED(dev) (INTEL_INFO(dev)->has_fpga_dbg)
+#define HAS_PSR(dev) (IS_HASWELL(dev))
 
 #define INTEL_PCH_DEVICE_ID_MASK 0xff00
 #define INTEL_PCH_IBX_DEVICE_ID_TYPE 0x3b00
@@ -1668,35 +1806,14 @@ struct drm_i915_file_private {
 #define HAS_PCH_NOP(dev) (INTEL_PCH_TYPE(dev) == PCH_NOP)
 #define HAS_PCH_SPLIT(dev) (INTEL_PCH_TYPE(dev) != PCH_NONE)
 
-#define HAS_FORCE_WAKE(dev) (INTEL_INFO(dev)->has_force_wake)
-
-#define HAS_L3_GPU_CACHE(dev) (IS_IVYBRIDGE(dev) || IS_HASWELL(dev))
+/* DPF == dynamic parity feature */
+#define HAS_L3_DPF(dev) (IS_IVYBRIDGE(dev) || IS_HASWELL(dev))
+#define NUM_L3_SLICES(dev) (IS_HSW_GT3(dev) ? 2 : HAS_L3_DPF(dev))
 
 #define GT_FREQUENCY_MULTIPLIER 50
 
 #include "i915_trace.h"
 
-/**
- * RC6 is a special power stage which allows the GPU to enter an very
- * low-voltage mode when idle, using down to 0V while at this stage. This
- * stage is entered automatically when the GPU is idle when RC6 support is
- * enabled, and as soon as new workload arises GPU wakes up automatically as well.
- *
- * There are different RC6 modes available in Intel GPU, which differentiate
- * among each other with the latency required to enter and leave RC6 and
- * voltage consumed by the GPU in different states.
- *
- * The combination of the following flags define which states GPU is allowed
- * to enter, while RC6 is the normal RC6 state, RC6p is the deep RC6, and
- * RC6pp is deepest RC6. Their support by hardware varies according to the
- * GPU, BIOS, chipset and platform. RC6 is usually the safest one and the one
- * which brings the most power savings; deeper states save more power, but
- * require higher latency to switch to and wake up.
- */
-#define INTEL_RC6_ENABLE (1<<0)
-#define INTEL_RC6p_ENABLE (1<<1)
-#define INTEL_RC6pp_ENABLE (1<<2)
-
 extern const struct drm_ioctl_desc i915_ioctls[];
 extern int i915_max_ioctl;
 extern unsigned int i915_fbpercrtc __always_unused;
@@ -1767,12 +1884,13 @@ extern void intel_uncore_early_sanitize(struct drm_device *dev);
 extern void intel_uncore_init(struct drm_device *dev);
 extern void intel_uncore_clear_errors(struct drm_device *dev);
 extern void intel_uncore_check_errors(struct drm_device *dev);
+extern void intel_uncore_fini(struct drm_device *dev);
 
 void
-i915_enable_pipestat(drm_i915_private_t *dev_priv, int pipe, u32 mask);
+i915_enable_pipestat(drm_i915_private_t *dev_priv, enum pipe pipe, u32 mask);
 
 void
-i915_disable_pipestat(drm_i915_private_t *dev_priv, int pipe, u32 mask);
+i915_disable_pipestat(drm_i915_private_t *dev_priv, enum pipe pipe, u32 mask);
 
 /* i915_gem.c */
 int i915_gem_init_ioctl(struct drm_device *dev, void *data,
@@ -1824,14 +1942,11 @@ int i915_gem_wait_ioctl(struct drm_device *dev, void *data,
 void i915_gem_load(struct drm_device *dev);
 void *i915_gem_object_alloc(struct drm_device *dev);
 void i915_gem_object_free(struct drm_i915_gem_object *obj);
-int i915_gem_init_object(struct drm_gem_object *obj);
 void i915_gem_object_init(struct drm_i915_gem_object *obj,
 			  const struct drm_i915_gem_object_ops *ops);
 struct drm_i915_gem_object *i915_gem_alloc_object(struct drm_device *dev,
 						  size_t size);
 void i915_gem_free_object(struct drm_gem_object *obj);
-struct i915_vma *i915_gem_vma_create(struct drm_i915_gem_object *obj,
-				     struct i915_address_space *vm);
 void i915_gem_vma_destroy(struct i915_vma *vma);
 
 int __must_check i915_gem_object_pin(struct drm_i915_gem_object *obj,
@@ -1870,9 +1985,8 @@ static inline void i915_gem_object_unpin_pages(struct drm_i915_gem_object *obj)
 int __must_check i915_mutex_lock_interruptible(struct drm_device *dev);
 int i915_gem_object_sync(struct drm_i915_gem_object *obj,
 			 struct intel_ring_buffer *to);
-void i915_gem_object_move_to_active(struct drm_i915_gem_object *obj,
-				    struct intel_ring_buffer *ring);
-
+void i915_vma_move_to_active(struct i915_vma *vma,
+			     struct intel_ring_buffer *ring);
 int i915_gem_dumb_create(struct drm_file *file_priv,
 			 struct drm_device *dev,
 			 struct drm_mode_create_dumb *args);
@@ -1913,7 +2027,7 @@ i915_gem_object_unpin_fence(struct drm_i915_gem_object *obj)
 	}
 }
 
-void i915_gem_retire_requests(struct drm_device *dev);
+bool i915_gem_retire_requests(struct drm_device *dev);
 void i915_gem_retire_requests_ring(struct intel_ring_buffer *ring);
 int __must_check i915_gem_check_wedge(struct i915_gpu_error *error,
 				      bool interruptible);
@@ -1933,11 +2047,11 @@ bool i915_gem_clflush_object(struct drm_i915_gem_object *obj, bool force);
 int __must_check i915_gem_object_finish_gpu(struct drm_i915_gem_object *obj);
 int __must_check i915_gem_init(struct drm_device *dev);
 int __must_check i915_gem_init_hw(struct drm_device *dev);
-void i915_gem_l3_remap(struct drm_device *dev);
+int i915_gem_l3_remap(struct intel_ring_buffer *ring, int slice);
 void i915_gem_init_swizzling(struct drm_device *dev);
 void i915_gem_cleanup_ringbuffer(struct drm_device *dev);
 int __must_check i915_gpu_idle(struct drm_device *dev);
-int __must_check i915_gem_idle(struct drm_device *dev);
+int __must_check i915_gem_suspend(struct drm_device *dev);
 int __i915_add_request(struct intel_ring_buffer *ring,
 		       struct drm_file *file,
 		       struct drm_i915_gem_object *batch_obj,
@@ -1964,6 +2078,7 @@ int i915_gem_attach_phys_object(struct drm_device *dev,
 void i915_gem_detach_phys_object(struct drm_device *dev,
 				 struct drm_i915_gem_object *obj);
 void i915_gem_free_all_phys_object(struct drm_device *dev);
+int i915_gem_open(struct drm_device *dev, struct drm_file *file);
 void i915_gem_release(struct drm_device *dev, struct drm_file *file);
 
 uint32_t
@@ -1995,6 +2110,9 @@ struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
 struct i915_vma *
 i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj,
 				  struct i915_address_space *vm);
+
+struct i915_vma *i915_gem_obj_to_ggtt(struct drm_i915_gem_object *obj);
+
 /* Some GGTT VM helpers */
 #define obj_to_ggtt(obj) \
 	(&((struct drm_i915_private *)(obj)->base.dev->dev_private)->gtt.base)
@@ -2031,7 +2149,6 @@ i915_gem_obj_ggtt_pin(struct drm_i915_gem_object *obj,
 	return i915_gem_object_pin(obj, obj_to_ggtt(obj), alignment,
 				   map_and_fenceable, nonblocking);
 }
-#undef obj_to_ggtt
 
 /* i915_gem_context.c */
 void i915_gem_context_init(struct drm_device *dev);
@@ -2094,6 +2211,7 @@ int __must_check i915_gem_evict_something(struct drm_device *dev,
 					  unsigned cache_level,
 					  bool mappable,
 					  bool nonblock);
+int i915_gem_evict_vm(struct i915_address_space *vm, bool do_idle);
 int i915_gem_evict_everything(struct drm_device *dev);
 
 /* i915_gem_stolen.c */
@@ -2133,6 +2251,11 @@ int i915_verify_lists(struct drm_device *dev);
 /* i915_debugfs.c */
 int i915_debugfs_init(struct drm_minor *minor);
 void i915_debugfs_cleanup(struct drm_minor *minor);
+#ifdef CONFIG_DEBUG_FS
+void intel_display_crc_init(struct drm_device *dev);
+#else
+static inline void intel_display_crc_init(struct drm_device *dev) {}
+#endif
 
 /* i915_gpu_error.c */
 __printf(2, 3)
@@ -2186,15 +2309,30 @@ static inline bool intel_gmbus_is_forced_bit(struct i2c_adapter *adapter)
 extern void intel_i2c_reset(struct drm_device *dev);
 
 /* intel_opregion.c */
+struct intel_encoder;
 extern int intel_opregion_setup(struct drm_device *dev);
 #ifdef CONFIG_ACPI
 extern void intel_opregion_init(struct drm_device *dev);
 extern void intel_opregion_fini(struct drm_device *dev);
 extern void intel_opregion_asle_intr(struct drm_device *dev);
+extern int intel_opregion_notify_encoder(struct intel_encoder *intel_encoder,
+					 bool enable);
+extern int intel_opregion_notify_adapter(struct drm_device *dev,
+					 pci_power_t state);
 #else
 static inline void intel_opregion_init(struct drm_device *dev) { return; }
 static inline void intel_opregion_fini(struct drm_device *dev) { return; }
 static inline void intel_opregion_asle_intr(struct drm_device *dev) { return; }
+static inline int
+intel_opregion_notify_encoder(struct intel_encoder *intel_encoder, bool enable)
+{
+	return 0;
+}
+static inline int
+intel_opregion_notify_adapter(struct drm_device *dev, pci_power_t state)
+{
+	return 0;
+}
 #endif
 
 /* intel_acpi.c */
@@ -2256,8 +2394,16 @@ int sandybridge_pcode_write(struct drm_i915_private *dev_priv, u8 mbox, u32 val)
 u32 vlv_punit_read(struct drm_i915_private *dev_priv, u8 addr);
 void vlv_punit_write(struct drm_i915_private *dev_priv, u8 addr, u32 val);
 u32 vlv_nc_read(struct drm_i915_private *dev_priv, u8 addr);
-u32 vlv_dpio_read(struct drm_i915_private *dev_priv, int reg);
-void vlv_dpio_write(struct drm_i915_private *dev_priv, int reg, u32 val);
+u32 vlv_gpio_nc_read(struct drm_i915_private *dev_priv, u32 reg);
+void vlv_gpio_nc_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
+u32 vlv_cck_read(struct drm_i915_private *dev_priv, u32 reg);
+void vlv_cck_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
+u32 vlv_ccu_read(struct drm_i915_private *dev_priv, u32 reg);
+void vlv_ccu_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
+u32 vlv_gps_core_read(struct drm_i915_private *dev_priv, u32 reg);
+void vlv_gps_core_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
+u32 vlv_dpio_read(struct drm_i915_private *dev_priv, enum pipe pipe, int reg);
+void vlv_dpio_write(struct drm_i915_private *dev_priv, enum pipe pipe, int reg, u32 val);
 u32 intel_sbi_read(struct drm_i915_private *dev_priv, u16 reg,
 		   enum intel_sbi_destination destination);
 void intel_sbi_write(struct drm_i915_private *dev_priv, u16 reg, u32 value,
@@ -2266,37 +2412,21 @@ void intel_sbi_write(struct drm_i915_private *dev_priv, u16 reg, u32 value,
 int vlv_gpu_freq(int ddr_freq, int val);
 int vlv_freq_opcode(int ddr_freq, int val);
 
-#define __i915_read(x) \
-	u##x i915_read##x(struct drm_i915_private *dev_priv, u32 reg, bool trace);
-__i915_read(8)
-__i915_read(16)
-__i915_read(32)
-__i915_read(64)
-#undef __i915_read
+#define I915_READ8(reg) dev_priv->uncore.funcs.mmio_readb(dev_priv, (reg), true)
+#define I915_WRITE8(reg, val) dev_priv->uncore.funcs.mmio_writeb(dev_priv, (reg), (val), true)
+
+#define I915_READ16(reg) dev_priv->uncore.funcs.mmio_readw(dev_priv, (reg), true)
+#define I915_WRITE16(reg, val) dev_priv->uncore.funcs.mmio_writew(dev_priv, (reg), (val), true)
+#define I915_READ16_NOTRACE(reg) dev_priv->uncore.funcs.mmio_readw(dev_priv, (reg), false)
+#define I915_WRITE16_NOTRACE(reg, val) dev_priv->uncore.funcs.mmio_writew(dev_priv, (reg), (val), false)
 
-#define __i915_write(x) \
-	void i915_write##x(struct drm_i915_private *dev_priv, u32 reg, u##x val, bool trace);
-__i915_write(8)
-__i915_write(16)
-__i915_write(32)
-__i915_write(64)
-#undef __i915_write
-
-#define I915_READ8(reg) i915_read8(dev_priv, (reg), true)
-#define I915_WRITE8(reg, val) i915_write8(dev_priv, (reg), (val), true)
-
-#define I915_READ16(reg) i915_read16(dev_priv, (reg), true)
-#define I915_WRITE16(reg, val) i915_write16(dev_priv, (reg), (val), true)
-#define I915_READ16_NOTRACE(reg) i915_read16(dev_priv, (reg), false)
-#define I915_WRITE16_NOTRACE(reg, val) i915_write16(dev_priv, (reg), (val), false)
-
-#define I915_READ(reg) i915_read32(dev_priv, (reg), true)
-#define I915_WRITE(reg, val) i915_write32(dev_priv, (reg), (val), true)
-#define I915_READ_NOTRACE(reg) i915_read32(dev_priv, (reg), false)
-#define I915_WRITE_NOTRACE(reg, val) i915_write32(dev_priv, (reg), (val), false)
-
-#define I915_WRITE64(reg, val) i915_write64(dev_priv, (reg), (val), true)
-#define I915_READ64(reg) i915_read64(dev_priv, (reg), true)
+#define I915_READ(reg) dev_priv->uncore.funcs.mmio_readl(dev_priv, (reg), true)
+#define I915_WRITE(reg, val) dev_priv->uncore.funcs.mmio_writel(dev_priv, (reg), (val), true)
+#define I915_READ_NOTRACE(reg) dev_priv->uncore.funcs.mmio_readl(dev_priv, (reg), false)
+#define I915_WRITE_NOTRACE(reg, val) dev_priv->uncore.funcs.mmio_writel(dev_priv, (reg), (val), false)
+
+#define I915_WRITE64(reg, val) dev_priv->uncore.funcs.mmio_writeq(dev_priv, (reg), (val), true)
+#define I915_READ64(reg) dev_priv->uncore.funcs.mmio_readq(dev_priv, (reg), true)
 
 #define POSTING_READ(reg) (void)I915_READ_NOTRACE(reg)
 #define POSTING_READ16(reg) (void)I915_READ16_NOTRACE(reg)
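One illustrative aside, not taken from the patch: the rewritten I915_READ/I915_WRITE family no longer calls fixed i915_read*/i915_write* functions but dispatches through the function pointers added to struct intel_uncore_funcs earlier in this diff, so each platform can install its own MMIO accessors (forcewake handling, tracing, and so on). A minimal standalone sketch of that dispatch pattern, with made-up names throughout:

#include <stdint.h>
#include <stdio.h>
#include <sys/types.h>

/* Fake stand-ins for dev_priv->uncore.funcs; none of these names exist in the driver. */
struct fake_uncore_funcs {
	uint32_t (*mmio_readl)(void *dev_priv, off_t offset, int trace);
	void (*mmio_writel)(void *dev_priv, off_t offset, uint32_t val, int trace);
};

struct fake_dev_priv {
	struct { struct fake_uncore_funcs funcs; } uncore;
	uint32_t regs[16]; /* stand-in for the MMIO BAR */
};

static uint32_t plain_readl(void *dp, off_t offset, int trace)
{
	return ((struct fake_dev_priv *)dp)->regs[offset / 4];
}

static void plain_writel(void *dp, off_t offset, uint32_t val, int trace)
{
	((struct fake_dev_priv *)dp)->regs[offset / 4] = val;
}

/* Shaped like the new I915_READ()/I915_WRITE(): they expand to an indirect call. */
#define FAKE_READ(dp, reg)       (dp)->uncore.funcs.mmio_readl((dp), (reg), 1)
#define FAKE_WRITE(dp, reg, val) (dp)->uncore.funcs.mmio_writel((dp), (reg), (val), 1)

int main(void)
{
	struct fake_dev_priv dev_priv = { 0 };

	/* A real driver would install gen-specific accessors here. */
	dev_priv.uncore.funcs.mmio_readl = plain_readl;
	dev_priv.uncore.funcs.mmio_writel = plain_writel;

	FAKE_WRITE(&dev_priv, 0x8, 0xdeadbeef);
	printf("0x%08x\n", FAKE_READ(&dev_priv, 0x8));
	return 0;
}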