author     Andrea Bastoni <bastoni@cs.unc.edu>  2010-05-30 19:16:45 -0400
committer  Andrea Bastoni <bastoni@cs.unc.edu>  2010-05-30 19:16:45 -0400
commit     ada47b5fe13d89735805b566185f4885f5a3f750 (patch)
tree       644b88f8a71896307d71438e9b3af49126ffb22b /drivers/gpu/drm/radeon
parent     43e98717ad40a4ae64545b5ba047c7b86aa44f4f (diff)
parent     3280f21d43ee541f97f8cda5792150d2dbec20d5 (diff)
Merge branch 'wip-2.6.34' into old-private-master (archived-private-master)
Diffstat (limited to 'drivers/gpu/drm/radeon')
-rw-r--r--  drivers/gpu/drm/radeon/Kconfig  12
-rw-r--r--  drivers/gpu/drm/radeon/Makefile  16
-rw-r--r--  drivers/gpu/drm/radeon/ObjectID.h  801
-rw-r--r--  drivers/gpu/drm/radeon/atom.c  233
-rw-r--r--  drivers/gpu/drm/radeon/atom.h  12
-rw-r--r--  drivers/gpu/drm/radeon/atombios.h  7483
-rw-r--r--  drivers/gpu/drm/radeon/atombios_crtc.c  766
-rw-r--r--  drivers/gpu/drm/radeon/atombios_dp.c  809
-rw-r--r--  drivers/gpu/drm/radeon/avivod.h  2
-rw-r--r--  drivers/gpu/drm/radeon/evergreen.c  765
-rw-r--r--  drivers/gpu/drm/radeon/evergreen_reg.h  176
-rw-r--r--  drivers/gpu/drm/radeon/mkregtable.c  4
-rw-r--r--  drivers/gpu/drm/radeon/r100.c  544
-rw-r--r--  drivers/gpu/drm/radeon/r100_track.h  20
-rw-r--r--  drivers/gpu/drm/radeon/r200.c  64
-rw-r--r--  drivers/gpu/drm/radeon/r300.c  302
-rw-r--r--  drivers/gpu/drm/radeon/r300_cmdbuf.c  286
-rw-r--r--  drivers/gpu/drm/radeon/r300_reg.h  3
-rw-r--r--  drivers/gpu/drm/radeon/r420.c  123
-rw-r--r--  drivers/gpu/drm/radeon/r500_reg.h  100
-rw-r--r--  drivers/gpu/drm/radeon/r520.c  42
-rw-r--r--  drivers/gpu/drm/radeon/r600.c  1462
-rw-r--r--  drivers/gpu/drm/radeon/r600_audio.c  243
-rw-r--r--  drivers/gpu/drm/radeon/r600_blit.c  2
-rw-r--r--  drivers/gpu/drm/radeon/r600_blit_kms.c  59
-rw-r--r--  drivers/gpu/drm/radeon/r600_blit_shaders.c  45
-rw-r--r--  drivers/gpu/drm/radeon/r600_cp.c  256
-rw-r--r--  drivers/gpu/drm/radeon/r600_cs.c  880
-rw-r--r--  drivers/gpu/drm/radeon/r600_hdmi.c  566
-rw-r--r--  drivers/gpu/drm/radeon/r600_reg.h  78
-rw-r--r--  drivers/gpu/drm/radeon/r600d.h  749
-rw-r--r--  drivers/gpu/drm/radeon/radeon.h  445
-rw-r--r--  drivers/gpu/drm/radeon/radeon_agp.c  31
-rw-r--r--  drivers/gpu/drm/radeon/radeon_asic.c  772
-rw-r--r--  drivers/gpu/drm/radeon/radeon_asic.h  415
-rw-r--r--  drivers/gpu/drm/radeon/radeon_atombios.c  1147
-rw-r--r--  drivers/gpu/drm/radeon/radeon_atpx_handler.c  258
-rw-r--r--  drivers/gpu/drm/radeon/radeon_benchmark.c  91
-rw-r--r--  drivers/gpu/drm/radeon/radeon_bios.c  51
-rw-r--r--  drivers/gpu/drm/radeon/radeon_clocks.c  47
-rw-r--r--  drivers/gpu/drm/radeon/radeon_combios.c  1038
-rw-r--r--  drivers/gpu/drm/radeon/radeon_connectors.c  249
-rw-r--r--  drivers/gpu/drm/radeon/radeon_cp.c  64
-rw-r--r--  drivers/gpu/drm/radeon/radeon_cs.c  28
-rw-r--r--  drivers/gpu/drm/radeon/radeon_cursor.c  50
-rw-r--r--  drivers/gpu/drm/radeon/radeon_device.c  621
-rw-r--r--  drivers/gpu/drm/radeon/radeon_display.c  427
-rw-r--r--  drivers/gpu/drm/radeon/radeon_drv.c  34
-rw-r--r--  drivers/gpu/drm/radeon/radeon_drv.h  51
-rw-r--r--  drivers/gpu/drm/radeon/radeon_encoders.c  800
-rw-r--r--  drivers/gpu/drm/radeon/radeon_family.h  8
-rw-r--r--  drivers/gpu/drm/radeon/radeon_fb.c  87
-rw-r--r--  drivers/gpu/drm/radeon/radeon_fence.c  66
-rw-r--r--  drivers/gpu/drm/radeon/radeon_fixed.h  17
-rw-r--r--  drivers/gpu/drm/radeon/radeon_gart.c  74
-rw-r--r--  drivers/gpu/drm/radeon/radeon_gem.c  147
-rw-r--r--  drivers/gpu/drm/radeon/radeon_i2c.c  899
-rw-r--r--  drivers/gpu/drm/radeon/radeon_ioc32.c  38
-rw-r--r--  drivers/gpu/drm/radeon/radeon_irq.c  10
-rw-r--r--  drivers/gpu/drm/radeon/radeon_irq_kms.c  83
-rw-r--r--  drivers/gpu/drm/radeon/radeon_kms.c  76
-rw-r--r--  drivers/gpu/drm/radeon/radeon_legacy_crtc.c  223
-rw-r--r--  drivers/gpu/drm/radeon/radeon_legacy_encoders.c  218
-rw-r--r--  drivers/gpu/drm/radeon/radeon_legacy_tv.c  43
-rw-r--r--  drivers/gpu/drm/radeon/radeon_mode.h  229
-rw-r--r--  drivers/gpu/drm/radeon/radeon_object.c  582
-rw-r--r--  drivers/gpu/drm/radeon/radeon_object.h  151
-rw-r--r--  drivers/gpu/drm/radeon/radeon_pm.c  436
-rw-r--r--  drivers/gpu/drm/radeon/radeon_reg.h  75
-rw-r--r--  drivers/gpu/drm/radeon/radeon_ring.c  242
-rw-r--r--  drivers/gpu/drm/radeon/radeon_state.c  208
-rw-r--r--  drivers/gpu/drm/radeon/radeon_test.c  61
-rw-r--r--  drivers/gpu/drm/radeon/radeon_ttm.c  122
-rw-r--r--  drivers/gpu/drm/radeon/reg_srcs/r200  2
-rw-r--r--  drivers/gpu/drm/radeon/reg_srcs/r300  2
-rw-r--r--  drivers/gpu/drm/radeon/reg_srcs/r420  797
-rw-r--r--  drivers/gpu/drm/radeon/reg_srcs/r600  762
-rw-r--r--  drivers/gpu/drm/radeon/reg_srcs/rs600  70
-rw-r--r--  drivers/gpu/drm/radeon/reg_srcs/rv515  9
-rw-r--r--  drivers/gpu/drm/radeon/rs400.c  92
-rw-r--r--  drivers/gpu/drm/radeon/rs600.c  285
-rw-r--r--  drivers/gpu/drm/radeon/rs600d.h  165
-rw-r--r--  drivers/gpu/drm/radeon/rs690.c  174
-rw-r--r--  drivers/gpu/drm/radeon/rs690d.h  3
-rw-r--r--  drivers/gpu/drm/radeon/rv515.c  96
-rw-r--r--  drivers/gpu/drm/radeon/rv770.c  406
-rw-r--r--  drivers/gpu/drm/radeon/rv770d.h  2
87 files changed, 22724 insertions, 7758 deletions
diff --git a/drivers/gpu/drm/radeon/Kconfig b/drivers/gpu/drm/radeon/Kconfig
index 5982321be4d5..1c02d23f6fcc 100644
--- a/drivers/gpu/drm/radeon/Kconfig
+++ b/drivers/gpu/drm/radeon/Kconfig
@@ -1,10 +1,14 @@
1config DRM_RADEON_KMS 1config DRM_RADEON_KMS
2 bool "Enable modesetting on radeon by default" 2 bool "Enable modesetting on radeon by default - NEW DRIVER"
3 depends on DRM_RADEON 3 depends on DRM_RADEON
4 help 4 help
5 Choose this option if you want kernel modesetting enabled by default, 5 Choose this option if you want kernel modesetting enabled by default.
6 and you have a new enough userspace to support this. Running old 6
7 userspaces with this enabled will cause pain. 7 This is a completely new driver. It's only part of the existing drm
8 for compatibility reasons. It requires an entirely different graphics
9 stack above it and works very differently from the old drm stack.
10 i.e. don't enable this unless you know what you are doing it may
11 cause issues or bugs compared to the previous userspace driver stack.
8 12
9 When kernel modesetting is enabled the IOCTL of radeon/drm 13 When kernel modesetting is enabled the IOCTL of radeon/drm
10 driver are considered as invalid and an error message is printed 14 driver are considered as invalid and an error message is printed
diff --git a/drivers/gpu/drm/radeon/Makefile b/drivers/gpu/drm/radeon/Makefile
index b5713eedd6e1..3c91312dea9a 100644
--- a/drivers/gpu/drm/radeon/Makefile
+++ b/drivers/gpu/drm/radeon/Makefile
@@ -24,9 +24,15 @@ $(obj)/rv515_reg_safe.h: $(src)/reg_srcs/rv515 $(obj)/mkregtable
24$(obj)/r300_reg_safe.h: $(src)/reg_srcs/r300 $(obj)/mkregtable 24$(obj)/r300_reg_safe.h: $(src)/reg_srcs/r300 $(obj)/mkregtable
25 $(call if_changed,mkregtable) 25 $(call if_changed,mkregtable)
26 26
27$(obj)/r420_reg_safe.h: $(src)/reg_srcs/r420 $(obj)/mkregtable
28 $(call if_changed,mkregtable)
29
27$(obj)/rs600_reg_safe.h: $(src)/reg_srcs/rs600 $(obj)/mkregtable 30$(obj)/rs600_reg_safe.h: $(src)/reg_srcs/rs600 $(obj)/mkregtable
28 $(call if_changed,mkregtable) 31 $(call if_changed,mkregtable)
29 32
33$(obj)/r600_reg_safe.h: $(src)/reg_srcs/r600 $(obj)/mkregtable
34 $(call if_changed,mkregtable)
35
30$(obj)/r100.o: $(obj)/r100_reg_safe.h $(obj)/rn50_reg_safe.h 36$(obj)/r100.o: $(obj)/r100_reg_safe.h $(obj)/rn50_reg_safe.h
31 37
32$(obj)/r200.o: $(obj)/r200_reg_safe.h 38$(obj)/r200.o: $(obj)/r200_reg_safe.h
@@ -35,12 +41,16 @@ $(obj)/rv515.o: $(obj)/rv515_reg_safe.h
35 41
36$(obj)/r300.o: $(obj)/r300_reg_safe.h 42$(obj)/r300.o: $(obj)/r300_reg_safe.h
37 43
44$(obj)/r420.o: $(obj)/r420_reg_safe.h
45
38$(obj)/rs600.o: $(obj)/rs600_reg_safe.h 46$(obj)/rs600.o: $(obj)/rs600_reg_safe.h
39 47
48$(obj)/r600_cs.o: $(obj)/r600_reg_safe.h
49
40radeon-y := radeon_drv.o radeon_cp.o radeon_state.o radeon_mem.o \ 50radeon-y := radeon_drv.o radeon_cp.o radeon_state.o radeon_mem.o \
41 radeon_irq.o r300_cmdbuf.o r600_cp.o 51 radeon_irq.o r300_cmdbuf.o r600_cp.o
42# add KMS driver 52# add KMS driver
43radeon-y += radeon_device.o radeon_kms.o \ 53radeon-y += radeon_device.o radeon_asic.o radeon_kms.o \
44 radeon_atombios.o radeon_agp.o atombios_crtc.o radeon_combios.o \ 54 radeon_atombios.o radeon_agp.o atombios_crtc.o radeon_combios.o \
45 atom.o radeon_fence.o radeon_ttm.o radeon_object.o radeon_gart.o \ 55 atom.o radeon_fence.o radeon_ttm.o radeon_object.o radeon_gart.o \
46 radeon_legacy_crtc.o radeon_legacy_encoders.o radeon_connectors.o \ 56 radeon_legacy_crtc.o radeon_legacy_encoders.o radeon_connectors.o \
@@ -49,8 +59,10 @@ radeon-y += radeon_device.o radeon_kms.o \
49 radeon_cs.o radeon_bios.o radeon_benchmark.o r100.o r300.o r420.o \ 59 radeon_cs.o radeon_bios.o radeon_benchmark.o r100.o r300.o r420.o \
50 rs400.o rs600.o rs690.o rv515.o r520.o r600.o rv770.o radeon_test.o \ 60 rs400.o rs600.o rs690.o rv515.o r520.o r600.o rv770.o radeon_test.o \
51 r200.o radeon_legacy_tv.o r600_cs.o r600_blit.o r600_blit_shaders.o \ 61 r200.o radeon_legacy_tv.o r600_cs.o r600_blit.o r600_blit_shaders.o \
52 r600_blit_kms.o radeon_pm.o 62 r600_blit_kms.o radeon_pm.o atombios_dp.o r600_audio.o r600_hdmi.o \
63 evergreen.o
53 64
54radeon-$(CONFIG_COMPAT) += radeon_ioc32.o 65radeon-$(CONFIG_COMPAT) += radeon_ioc32.o
66radeon-$(CONFIG_VGA_SWITCHEROO) += radeon_atpx_handler.o
55 67
56obj-$(CONFIG_DRM_RADEON)+= radeon.o 68obj-$(CONFIG_DRM_RADEON)+= radeon.o
diff --git a/drivers/gpu/drm/radeon/ObjectID.h b/drivers/gpu/drm/radeon/ObjectID.h
index 6d0183c61d3b..c714179d1bfa 100644
--- a/drivers/gpu/drm/radeon/ObjectID.h
+++ b/drivers/gpu/drm/radeon/ObjectID.h
@@ -1,5 +1,5 @@
1/* 1/*
2* Copyright 2006-2007 Advanced Micro Devices, Inc. 2* Copyright 2006-2007 Advanced Micro Devices, Inc.
3* 3*
4* Permission is hereby granted, free of charge, to any person obtaining a 4* Permission is hereby granted, free of charge, to any person obtaining a
5* copy of this software and associated documentation files (the "Software"), 5* copy of this software and associated documentation files (the "Software"),
@@ -41,14 +41,14 @@
41/****************************************************/ 41/****************************************************/
42/* Encoder Object ID Definition */ 42/* Encoder Object ID Definition */
43/****************************************************/ 43/****************************************************/
44#define ENCODER_OBJECT_ID_NONE 0x00 44#define ENCODER_OBJECT_ID_NONE 0x00
45 45
46/* Radeon Class Display Hardware */ 46/* Radeon Class Display Hardware */
47#define ENCODER_OBJECT_ID_INTERNAL_LVDS 0x01 47#define ENCODER_OBJECT_ID_INTERNAL_LVDS 0x01
48#define ENCODER_OBJECT_ID_INTERNAL_TMDS1 0x02 48#define ENCODER_OBJECT_ID_INTERNAL_TMDS1 0x02
49#define ENCODER_OBJECT_ID_INTERNAL_TMDS2 0x03 49#define ENCODER_OBJECT_ID_INTERNAL_TMDS2 0x03
50#define ENCODER_OBJECT_ID_INTERNAL_DAC1 0x04 50#define ENCODER_OBJECT_ID_INTERNAL_DAC1 0x04
51#define ENCODER_OBJECT_ID_INTERNAL_DAC2 0x05 /* TV/CV DAC */ 51#define ENCODER_OBJECT_ID_INTERNAL_DAC2 0x05 /* TV/CV DAC */
52#define ENCODER_OBJECT_ID_INTERNAL_SDVOA 0x06 52#define ENCODER_OBJECT_ID_INTERNAL_SDVOA 0x06
53#define ENCODER_OBJECT_ID_INTERNAL_SDVOB 0x07 53#define ENCODER_OBJECT_ID_INTERNAL_SDVOB 0x07
54 54
@@ -56,11 +56,11 @@
56#define ENCODER_OBJECT_ID_SI170B 0x08 56#define ENCODER_OBJECT_ID_SI170B 0x08
57#define ENCODER_OBJECT_ID_CH7303 0x09 57#define ENCODER_OBJECT_ID_CH7303 0x09
58#define ENCODER_OBJECT_ID_CH7301 0x0A 58#define ENCODER_OBJECT_ID_CH7301 0x0A
59#define ENCODER_OBJECT_ID_INTERNAL_DVO1 0x0B /* This belongs to Radeon Class Display Hardware */ 59#define ENCODER_OBJECT_ID_INTERNAL_DVO1 0x0B /* This belongs to Radeon Class Display Hardware */
60#define ENCODER_OBJECT_ID_EXTERNAL_SDVOA 0x0C 60#define ENCODER_OBJECT_ID_EXTERNAL_SDVOA 0x0C
61#define ENCODER_OBJECT_ID_EXTERNAL_SDVOB 0x0D 61#define ENCODER_OBJECT_ID_EXTERNAL_SDVOB 0x0D
62#define ENCODER_OBJECT_ID_TITFP513 0x0E 62#define ENCODER_OBJECT_ID_TITFP513 0x0E
63#define ENCODER_OBJECT_ID_INTERNAL_LVTM1 0x0F /* not used for Radeon */ 63#define ENCODER_OBJECT_ID_INTERNAL_LVTM1 0x0F /* not used for Radeon */
64#define ENCODER_OBJECT_ID_VT1623 0x10 64#define ENCODER_OBJECT_ID_VT1623 0x10
65#define ENCODER_OBJECT_ID_HDMI_SI1930 0x11 65#define ENCODER_OBJECT_ID_HDMI_SI1930 0x11
66#define ENCODER_OBJECT_ID_HDMI_INTERNAL 0x12 66#define ENCODER_OBJECT_ID_HDMI_INTERNAL 0x12
@@ -68,9 +68,9 @@
68#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1 0x13 68#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1 0x13
69#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1 0x14 69#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1 0x14
70#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1 0x15 70#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1 0x15
71#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2 0x16 /* Shared with CV/TV and CRT */ 71#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2 0x16 /* Shared with CV/TV and CRT */
72#define ENCODER_OBJECT_ID_SI178 0X17 /* External TMDS (dual link, no HDCP.) */ 72#define ENCODER_OBJECT_ID_SI178 0X17 /* External TMDS (dual link, no HDCP.) */
73#define ENCODER_OBJECT_ID_MVPU_FPGA 0x18 /* MVPU FPGA chip */ 73#define ENCODER_OBJECT_ID_MVPU_FPGA 0x18 /* MVPU FPGA chip */
74#define ENCODER_OBJECT_ID_INTERNAL_DDI 0x19 74#define ENCODER_OBJECT_ID_INTERNAL_DDI 0x19
75#define ENCODER_OBJECT_ID_VT1625 0x1A 75#define ENCODER_OBJECT_ID_VT1625 0x1A
76#define ENCODER_OBJECT_ID_HDMI_SI1932 0x1B 76#define ENCODER_OBJECT_ID_HDMI_SI1932 0x1B
@@ -86,7 +86,7 @@
86/****************************************************/ 86/****************************************************/
87/* Connector Object ID Definition */ 87/* Connector Object ID Definition */
88/****************************************************/ 88/****************************************************/
89#define CONNECTOR_OBJECT_ID_NONE 0x00 89#define CONNECTOR_OBJECT_ID_NONE 0x00
90#define CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I 0x01 90#define CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I 0x01
91#define CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I 0x02 91#define CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I 0x02
92#define CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D 0x03 92#define CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D 0x03
@@ -96,7 +96,7 @@
96#define CONNECTOR_OBJECT_ID_SVIDEO 0x07 96#define CONNECTOR_OBJECT_ID_SVIDEO 0x07
97#define CONNECTOR_OBJECT_ID_YPbPr 0x08 97#define CONNECTOR_OBJECT_ID_YPbPr 0x08
98#define CONNECTOR_OBJECT_ID_D_CONNECTOR 0x09 98#define CONNECTOR_OBJECT_ID_D_CONNECTOR 0x09
99#define CONNECTOR_OBJECT_ID_9PIN_DIN 0x0A /* Supports both CV & TV */ 99#define CONNECTOR_OBJECT_ID_9PIN_DIN 0x0A /* Supports both CV & TV */
100#define CONNECTOR_OBJECT_ID_SCART 0x0B 100#define CONNECTOR_OBJECT_ID_SCART 0x0B
101#define CONNECTOR_OBJECT_ID_HDMI_TYPE_A 0x0C 101#define CONNECTOR_OBJECT_ID_HDMI_TYPE_A 0x0C
102#define CONNECTOR_OBJECT_ID_HDMI_TYPE_B 0x0D 102#define CONNECTOR_OBJECT_ID_HDMI_TYPE_B 0x0D
@@ -106,6 +106,8 @@
106#define CONNECTOR_OBJECT_ID_CROSSFIRE 0x11 106#define CONNECTOR_OBJECT_ID_CROSSFIRE 0x11
107#define CONNECTOR_OBJECT_ID_HARDCODE_DVI 0x12 107#define CONNECTOR_OBJECT_ID_HARDCODE_DVI 0x12
108#define CONNECTOR_OBJECT_ID_DISPLAYPORT 0x13 108#define CONNECTOR_OBJECT_ID_DISPLAYPORT 0x13
109#define CONNECTOR_OBJECT_ID_eDP 0x14
110#define CONNECTOR_OBJECT_ID_MXM 0x15
109 111
110/* deleted */ 112/* deleted */
111 113
@@ -116,6 +118,14 @@
116#define ROUTER_OBJECT_ID_I2C_EXTENDER_CNTL 0x01 118#define ROUTER_OBJECT_ID_I2C_EXTENDER_CNTL 0x01
117 119
118/****************************************************/ 120/****************************************************/
121/* Generic Object ID Definition */
122/****************************************************/
123#define GENERIC_OBJECT_ID_NONE 0x00
124#define GENERIC_OBJECT_ID_GLSYNC 0x01
125#define GENERIC_OBJECT_ID_PX2_NON_DRIVABLE 0x02
126#define GENERIC_OBJECT_ID_MXM_OPM 0x03
127
128/****************************************************/
119/* Graphics Object ENUM ID Definition */ 129/* Graphics Object ENUM ID Definition */
120/****************************************************/ 130/****************************************************/
121#define GRAPH_OBJECT_ENUM_ID1 0x01 131#define GRAPH_OBJECT_ENUM_ID1 0x01
@@ -124,6 +134,7 @@
124#define GRAPH_OBJECT_ENUM_ID4 0x04 134#define GRAPH_OBJECT_ENUM_ID4 0x04
125#define GRAPH_OBJECT_ENUM_ID5 0x05 135#define GRAPH_OBJECT_ENUM_ID5 0x05
126#define GRAPH_OBJECT_ENUM_ID6 0x06 136#define GRAPH_OBJECT_ENUM_ID6 0x06
137#define GRAPH_OBJECT_ENUM_ID7 0x07
127 138
128/****************************************************/ 139/****************************************************/
129/* Graphics Object ID Bit definition */ 140/* Graphics Object ID Bit definition */
@@ -133,35 +144,35 @@
133#define RESERVED1_ID_MASK 0x0800 144#define RESERVED1_ID_MASK 0x0800
134#define OBJECT_TYPE_MASK 0x7000 145#define OBJECT_TYPE_MASK 0x7000
135#define RESERVED2_ID_MASK 0x8000 146#define RESERVED2_ID_MASK 0x8000
136 147
137#define OBJECT_ID_SHIFT 0x00 148#define OBJECT_ID_SHIFT 0x00
138#define ENUM_ID_SHIFT 0x08 149#define ENUM_ID_SHIFT 0x08
139#define OBJECT_TYPE_SHIFT 0x0C 150#define OBJECT_TYPE_SHIFT 0x0C
140 151
152
141/****************************************************/ 153/****************************************************/
142/* Graphics Object family definition */ 154/* Graphics Object family definition */
143/****************************************************/ 155/****************************************************/
144#define CONSTRUCTOBJECTFAMILYID(GRAPHICS_OBJECT_TYPE, GRAPHICS_OBJECT_ID) \ 156#define CONSTRUCTOBJECTFAMILYID(GRAPHICS_OBJECT_TYPE, GRAPHICS_OBJECT_ID) (GRAPHICS_OBJECT_TYPE << OBJECT_TYPE_SHIFT | \
145 (GRAPHICS_OBJECT_TYPE << OBJECT_TYPE_SHIFT | \ 157 GRAPHICS_OBJECT_ID << OBJECT_ID_SHIFT)
146 GRAPHICS_OBJECT_ID << OBJECT_ID_SHIFT)
147/****************************************************/ 158/****************************************************/
148/* GPU Object ID definition - Shared with BIOS */ 159/* GPU Object ID definition - Shared with BIOS */
149/****************************************************/ 160/****************************************************/
150#define GPU_ENUM_ID1 (GRAPH_OBJECT_TYPE_GPU << OBJECT_TYPE_SHIFT |\ 161#define GPU_ENUM_ID1 ( GRAPH_OBJECT_TYPE_GPU << OBJECT_TYPE_SHIFT |\
151 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT) 162 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT)
152 163
153/****************************************************/ 164/****************************************************/
154/* Encoder Object ID definition - Shared with BIOS */ 165/* Encoder Object ID definition - Shared with BIOS */
155/****************************************************/ 166/****************************************************/
156/* 167/*
157#define ENCODER_INTERNAL_LVDS_ENUM_ID1 0x2101 168#define ENCODER_INTERNAL_LVDS_ENUM_ID1 0x2101
158#define ENCODER_INTERNAL_TMDS1_ENUM_ID1 0x2102 169#define ENCODER_INTERNAL_TMDS1_ENUM_ID1 0x2102
159#define ENCODER_INTERNAL_TMDS2_ENUM_ID1 0x2103 170#define ENCODER_INTERNAL_TMDS2_ENUM_ID1 0x2103
160#define ENCODER_INTERNAL_DAC1_ENUM_ID1 0x2104 171#define ENCODER_INTERNAL_DAC1_ENUM_ID1 0x2104
161#define ENCODER_INTERNAL_DAC2_ENUM_ID1 0x2105 172#define ENCODER_INTERNAL_DAC2_ENUM_ID1 0x2105
162#define ENCODER_INTERNAL_SDVOA_ENUM_ID1 0x2106 173#define ENCODER_INTERNAL_SDVOA_ENUM_ID1 0x2106
163#define ENCODER_INTERNAL_SDVOB_ENUM_ID1 0x2107 174#define ENCODER_INTERNAL_SDVOB_ENUM_ID1 0x2107
164#define ENCODER_SIL170B_ENUM_ID1 0x2108 175#define ENCODER_SIL170B_ENUM_ID1 0x2108
165#define ENCODER_CH7303_ENUM_ID1 0x2109 176#define ENCODER_CH7303_ENUM_ID1 0x2109
166#define ENCODER_CH7301_ENUM_ID1 0x210A 177#define ENCODER_CH7301_ENUM_ID1 0x210A
167#define ENCODER_INTERNAL_DVO1_ENUM_ID1 0x210B 178#define ENCODER_INTERNAL_DVO1_ENUM_ID1 0x210B
@@ -175,8 +186,8 @@
175#define ENCODER_INTERNAL_KLDSCP_TMDS1_ENUM_ID1 0x2113 186#define ENCODER_INTERNAL_KLDSCP_TMDS1_ENUM_ID1 0x2113
176#define ENCODER_INTERNAL_KLDSCP_DVO1_ENUM_ID1 0x2114 187#define ENCODER_INTERNAL_KLDSCP_DVO1_ENUM_ID1 0x2114
177#define ENCODER_INTERNAL_KLDSCP_DAC1_ENUM_ID1 0x2115 188#define ENCODER_INTERNAL_KLDSCP_DAC1_ENUM_ID1 0x2115
178#define ENCODER_INTERNAL_KLDSCP_DAC2_ENUM_ID1 0x2116 189#define ENCODER_INTERNAL_KLDSCP_DAC2_ENUM_ID1 0x2116
179#define ENCODER_SI178_ENUM_ID1 0x2117 190#define ENCODER_SI178_ENUM_ID1 0x2117
180#define ENCODER_MVPU_FPGA_ENUM_ID1 0x2118 191#define ENCODER_MVPU_FPGA_ENUM_ID1 0x2118
181#define ENCODER_INTERNAL_DDI_ENUM_ID1 0x2119 192#define ENCODER_INTERNAL_DDI_ENUM_ID1 0x2119
182#define ENCODER_VT1625_ENUM_ID1 0x211A 193#define ENCODER_VT1625_ENUM_ID1 0x211A
@@ -185,205 +196,169 @@
185#define ENCODER_DP_DP501_ENUM_ID1 0x211D 196#define ENCODER_DP_DP501_ENUM_ID1 0x211D
186#define ENCODER_INTERNAL_UNIPHY_ENUM_ID1 0x211E 197#define ENCODER_INTERNAL_UNIPHY_ENUM_ID1 0x211E
187*/ 198*/
188#define ENCODER_INTERNAL_LVDS_ENUM_ID1 \ 199#define ENCODER_INTERNAL_LVDS_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
189 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 200 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
190 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 201 ENCODER_OBJECT_ID_INTERNAL_LVDS << OBJECT_ID_SHIFT)
191 ENCODER_OBJECT_ID_INTERNAL_LVDS << OBJECT_ID_SHIFT) 202
192 203#define ENCODER_INTERNAL_TMDS1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
193#define ENCODER_INTERNAL_TMDS1_ENUM_ID1 \ 204 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
194 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 205 ENCODER_OBJECT_ID_INTERNAL_TMDS1 << OBJECT_ID_SHIFT)
195 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 206
196 ENCODER_OBJECT_ID_INTERNAL_TMDS1 << OBJECT_ID_SHIFT) 207#define ENCODER_INTERNAL_TMDS2_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
197 208 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
198#define ENCODER_INTERNAL_TMDS2_ENUM_ID1 \ 209 ENCODER_OBJECT_ID_INTERNAL_TMDS2 << OBJECT_ID_SHIFT)
199 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 210
200 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 211#define ENCODER_INTERNAL_DAC1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
201 ENCODER_OBJECT_ID_INTERNAL_TMDS2 << OBJECT_ID_SHIFT) 212 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
202 213 ENCODER_OBJECT_ID_INTERNAL_DAC1 << OBJECT_ID_SHIFT)
203#define ENCODER_INTERNAL_DAC1_ENUM_ID1 \ 214
204 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 215#define ENCODER_INTERNAL_DAC2_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
205 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 216 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
206 ENCODER_OBJECT_ID_INTERNAL_DAC1 << OBJECT_ID_SHIFT) 217 ENCODER_OBJECT_ID_INTERNAL_DAC2 << OBJECT_ID_SHIFT)
207 218
208#define ENCODER_INTERNAL_DAC2_ENUM_ID1 \ 219#define ENCODER_INTERNAL_SDVOA_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
209 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 220 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
210 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 221 ENCODER_OBJECT_ID_INTERNAL_SDVOA << OBJECT_ID_SHIFT)
211 ENCODER_OBJECT_ID_INTERNAL_DAC2 << OBJECT_ID_SHIFT) 222
212 223#define ENCODER_INTERNAL_SDVOA_ENUM_ID2 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
213#define ENCODER_INTERNAL_SDVOA_ENUM_ID1 \ 224 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
214 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 225 ENCODER_OBJECT_ID_INTERNAL_SDVOA << OBJECT_ID_SHIFT)
215 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 226
216 ENCODER_OBJECT_ID_INTERNAL_SDVOA << OBJECT_ID_SHIFT) 227#define ENCODER_INTERNAL_SDVOB_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
217 228 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
218#define ENCODER_INTERNAL_SDVOA_ENUM_ID2 \ 229 ENCODER_OBJECT_ID_INTERNAL_SDVOB << OBJECT_ID_SHIFT)
219 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 230
220 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 231#define ENCODER_SIL170B_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
221 ENCODER_OBJECT_ID_INTERNAL_SDVOA << OBJECT_ID_SHIFT) 232 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
222 233 ENCODER_OBJECT_ID_SI170B << OBJECT_ID_SHIFT)
223#define ENCODER_INTERNAL_SDVOB_ENUM_ID1 \ 234
224 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 235#define ENCODER_CH7303_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
225 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 236 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
226 ENCODER_OBJECT_ID_INTERNAL_SDVOB << OBJECT_ID_SHIFT) 237 ENCODER_OBJECT_ID_CH7303 << OBJECT_ID_SHIFT)
227 238
228#define ENCODER_SIL170B_ENUM_ID1 \ 239#define ENCODER_CH7301_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
229 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 240 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
230 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 241 ENCODER_OBJECT_ID_CH7301 << OBJECT_ID_SHIFT)
231 ENCODER_OBJECT_ID_SI170B << OBJECT_ID_SHIFT) 242
232 243#define ENCODER_INTERNAL_DVO1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
233#define ENCODER_CH7303_ENUM_ID1 \ 244 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
234 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 245 ENCODER_OBJECT_ID_INTERNAL_DVO1 << OBJECT_ID_SHIFT)
235 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 246
236 ENCODER_OBJECT_ID_CH7303 << OBJECT_ID_SHIFT) 247#define ENCODER_EXTERNAL_SDVOA_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
237 248 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
238#define ENCODER_CH7301_ENUM_ID1 \ 249 ENCODER_OBJECT_ID_EXTERNAL_SDVOA << OBJECT_ID_SHIFT)
239 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 250
240 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 251#define ENCODER_EXTERNAL_SDVOA_ENUM_ID2 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
241 ENCODER_OBJECT_ID_CH7301 << OBJECT_ID_SHIFT) 252 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
242 253 ENCODER_OBJECT_ID_EXTERNAL_SDVOA << OBJECT_ID_SHIFT)
243#define ENCODER_INTERNAL_DVO1_ENUM_ID1 \ 254
244 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 255
245 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 256#define ENCODER_EXTERNAL_SDVOB_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
246 ENCODER_OBJECT_ID_INTERNAL_DVO1 << OBJECT_ID_SHIFT) 257 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
247 258 ENCODER_OBJECT_ID_EXTERNAL_SDVOB << OBJECT_ID_SHIFT)
248#define ENCODER_EXTERNAL_SDVOA_ENUM_ID1 \ 259
249 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 260
250 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 261#define ENCODER_TITFP513_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
251 ENCODER_OBJECT_ID_EXTERNAL_SDVOA << OBJECT_ID_SHIFT) 262 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
252 263 ENCODER_OBJECT_ID_TITFP513 << OBJECT_ID_SHIFT)
253#define ENCODER_EXTERNAL_SDVOA_ENUM_ID2 \ 264
254 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 265#define ENCODER_INTERNAL_LVTM1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
255 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 266 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
256 ENCODER_OBJECT_ID_EXTERNAL_SDVOA << OBJECT_ID_SHIFT) 267 ENCODER_OBJECT_ID_INTERNAL_LVTM1 << OBJECT_ID_SHIFT)
257 268
258#define ENCODER_EXTERNAL_SDVOB_ENUM_ID1 \ 269#define ENCODER_VT1623_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
259 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 270 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
260 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 271 ENCODER_OBJECT_ID_VT1623 << OBJECT_ID_SHIFT)
261 ENCODER_OBJECT_ID_EXTERNAL_SDVOB << OBJECT_ID_SHIFT) 272
262 273#define ENCODER_HDMI_SI1930_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
263#define ENCODER_TITFP513_ENUM_ID1 \ 274 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
264 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 275 ENCODER_OBJECT_ID_HDMI_SI1930 << OBJECT_ID_SHIFT)
265 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 276
266 ENCODER_OBJECT_ID_TITFP513 << OBJECT_ID_SHIFT) 277#define ENCODER_HDMI_INTERNAL_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
267 278 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
268#define ENCODER_INTERNAL_LVTM1_ENUM_ID1 \ 279 ENCODER_OBJECT_ID_HDMI_INTERNAL << OBJECT_ID_SHIFT)
269 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 280
270 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 281#define ENCODER_INTERNAL_KLDSCP_TMDS1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
271 ENCODER_OBJECT_ID_INTERNAL_LVTM1 << OBJECT_ID_SHIFT) 282 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
272 283 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1 << OBJECT_ID_SHIFT)
273#define ENCODER_VT1623_ENUM_ID1 \ 284
274 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 285
275 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 286#define ENCODER_INTERNAL_KLDSCP_TMDS1_ENUM_ID2 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
276 ENCODER_OBJECT_ID_VT1623 << OBJECT_ID_SHIFT) 287 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
277 288 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1 << OBJECT_ID_SHIFT)
278#define ENCODER_HDMI_SI1930_ENUM_ID1 \ 289
279 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 290
280 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 291#define ENCODER_INTERNAL_KLDSCP_DVO1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
281 ENCODER_OBJECT_ID_HDMI_SI1930 << OBJECT_ID_SHIFT) 292 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
282 293 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1 << OBJECT_ID_SHIFT)
283#define ENCODER_HDMI_INTERNAL_ENUM_ID1 \ 294
284 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 295#define ENCODER_INTERNAL_KLDSCP_DAC1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
285 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 296 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
286 ENCODER_OBJECT_ID_HDMI_INTERNAL << OBJECT_ID_SHIFT) 297 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1 << OBJECT_ID_SHIFT)
287 298
288#define ENCODER_INTERNAL_KLDSCP_TMDS1_ENUM_ID1 \ 299#define ENCODER_INTERNAL_KLDSCP_DAC2_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
289 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 300 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
290 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 301 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2 << OBJECT_ID_SHIFT) // Shared with CV/TV and CRT
291 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1 << OBJECT_ID_SHIFT) 302
292 303#define ENCODER_SI178_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
293#define ENCODER_INTERNAL_KLDSCP_TMDS1_ENUM_ID2 \ 304 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
294 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 305 ENCODER_OBJECT_ID_SI178 << OBJECT_ID_SHIFT)
295 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 306
296 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1 << OBJECT_ID_SHIFT) 307#define ENCODER_MVPU_FPGA_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
297 308 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
298#define ENCODER_INTERNAL_KLDSCP_DVO1_ENUM_ID1 \ 309 ENCODER_OBJECT_ID_MVPU_FPGA << OBJECT_ID_SHIFT)
299 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 310
300 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 311#define ENCODER_INTERNAL_DDI_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
301 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1 << OBJECT_ID_SHIFT) 312 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
302 313 ENCODER_OBJECT_ID_INTERNAL_DDI << OBJECT_ID_SHIFT)
303#define ENCODER_INTERNAL_KLDSCP_DAC1_ENUM_ID1 \ 314
304 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 315#define ENCODER_VT1625_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
305 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 316 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
306 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1 << OBJECT_ID_SHIFT) 317 ENCODER_OBJECT_ID_VT1625 << OBJECT_ID_SHIFT)
307 318
308#define ENCODER_INTERNAL_KLDSCP_DAC2_ENUM_ID1 \ 319#define ENCODER_HDMI_SI1932_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
309 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 320 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
310 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 321 ENCODER_OBJECT_ID_HDMI_SI1932 << OBJECT_ID_SHIFT)
311 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2 << OBJECT_ID_SHIFT) /* Shared with CV/TV and CRT */ 322
312 323#define ENCODER_DP_DP501_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
313#define ENCODER_SI178_ENUM_ID1 \ 324 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
314 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 325 ENCODER_OBJECT_ID_DP_DP501 << OBJECT_ID_SHIFT)
315 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 326
316 ENCODER_OBJECT_ID_SI178 << OBJECT_ID_SHIFT) 327#define ENCODER_DP_AN9801_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
317 328 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
318#define ENCODER_MVPU_FPGA_ENUM_ID1 \ 329 ENCODER_OBJECT_ID_DP_AN9801 << OBJECT_ID_SHIFT)
319 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 330
320 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 331#define ENCODER_INTERNAL_UNIPHY_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
321 ENCODER_OBJECT_ID_MVPU_FPGA << OBJECT_ID_SHIFT) 332 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
322 333 ENCODER_OBJECT_ID_INTERNAL_UNIPHY << OBJECT_ID_SHIFT)
323#define ENCODER_INTERNAL_DDI_ENUM_ID1 \ 334
324 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 335#define ENCODER_INTERNAL_UNIPHY_ENUM_ID2 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
325 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 336 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
326 ENCODER_OBJECT_ID_INTERNAL_DDI << OBJECT_ID_SHIFT) 337 ENCODER_OBJECT_ID_INTERNAL_UNIPHY << OBJECT_ID_SHIFT)
327 338
328#define ENCODER_VT1625_ENUM_ID1 \ 339#define ENCODER_INTERNAL_KLDSCP_LVTMA_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
329 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 340 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
330 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 341 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA << OBJECT_ID_SHIFT)
331 ENCODER_OBJECT_ID_VT1625 << OBJECT_ID_SHIFT) 342
332 343#define ENCODER_INTERNAL_UNIPHY1_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
333#define ENCODER_HDMI_SI1932_ENUM_ID1 \ 344 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
334 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 345 ENCODER_OBJECT_ID_INTERNAL_UNIPHY1 << OBJECT_ID_SHIFT)
335 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 346
336 ENCODER_OBJECT_ID_HDMI_SI1932 << OBJECT_ID_SHIFT) 347#define ENCODER_INTERNAL_UNIPHY1_ENUM_ID2 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
337 348 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
338#define ENCODER_DP_DP501_ENUM_ID1 \ 349 ENCODER_OBJECT_ID_INTERNAL_UNIPHY1 << OBJECT_ID_SHIFT)
339 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 350
340 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 351#define ENCODER_INTERNAL_UNIPHY2_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
341 ENCODER_OBJECT_ID_DP_DP501 << OBJECT_ID_SHIFT) 352 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
342 353 ENCODER_OBJECT_ID_INTERNAL_UNIPHY2 << OBJECT_ID_SHIFT)
343#define ENCODER_DP_AN9801_ENUM_ID1 \ 354
344 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 355#define ENCODER_INTERNAL_UNIPHY2_ENUM_ID2 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
345 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 356 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
346 ENCODER_OBJECT_ID_DP_AN9801 << OBJECT_ID_SHIFT) 357 ENCODER_OBJECT_ID_INTERNAL_UNIPHY2 << OBJECT_ID_SHIFT)
347 358
348#define ENCODER_INTERNAL_UNIPHY_ENUM_ID1 \ 359#define ENCODER_GENERAL_EXTERNAL_DVO_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
349 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\ 360 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
350 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 361 ENCODER_OBJECT_ID_GENERAL_EXTERNAL_DVO << OBJECT_ID_SHIFT)
351 ENCODER_OBJECT_ID_INTERNAL_UNIPHY << OBJECT_ID_SHIFT)
352
353#define ENCODER_INTERNAL_UNIPHY_ENUM_ID2 \
354 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
355 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
356 ENCODER_OBJECT_ID_INTERNAL_UNIPHY << OBJECT_ID_SHIFT)
357
358#define ENCODER_INTERNAL_KLDSCP_LVTMA_ENUM_ID1 \
359 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
360 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
361 ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA << OBJECT_ID_SHIFT)
362
363#define ENCODER_INTERNAL_UNIPHY1_ENUM_ID1 \
364 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
365 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
366 ENCODER_OBJECT_ID_INTERNAL_UNIPHY1 << OBJECT_ID_SHIFT)
367
368#define ENCODER_INTERNAL_UNIPHY1_ENUM_ID2 \
369 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
370 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
371 ENCODER_OBJECT_ID_INTERNAL_UNIPHY1 << OBJECT_ID_SHIFT)
372
373#define ENCODER_INTERNAL_UNIPHY2_ENUM_ID1 \
374 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
375 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
376 ENCODER_OBJECT_ID_INTERNAL_UNIPHY2 << OBJECT_ID_SHIFT)
377
378#define ENCODER_INTERNAL_UNIPHY2_ENUM_ID2 \
379 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
380 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
381 ENCODER_OBJECT_ID_INTERNAL_UNIPHY2 << OBJECT_ID_SHIFT)
382
383#define ENCODER_GENERAL_EXTERNAL_DVO_ENUM_ID1 \
384 (GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
385 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
386 ENCODER_OBJECT_ID_GENERAL_EXTERNAL_DVO << OBJECT_ID_SHIFT)
387 362
388/****************************************************/ 363/****************************************************/
389/* Connector Object ID definition - Shared with BIOS */ 364/* Connector Object ID definition - Shared with BIOS */
@@ -406,167 +381,253 @@
406#define CONNECTOR_7PIN_DIN_ENUM_ID1 0x310F 381#define CONNECTOR_7PIN_DIN_ENUM_ID1 0x310F
407#define CONNECTOR_PCIE_CONNECTOR_ENUM_ID1 0x3110 382#define CONNECTOR_PCIE_CONNECTOR_ENUM_ID1 0x3110
408*/ 383*/
409#define CONNECTOR_LVDS_ENUM_ID1 \ 384#define CONNECTOR_LVDS_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
410 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 385 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
411 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 386 CONNECTOR_OBJECT_ID_LVDS << OBJECT_ID_SHIFT)
412 CONNECTOR_OBJECT_ID_LVDS << OBJECT_ID_SHIFT) 387
413 388#define CONNECTOR_LVDS_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
414#define CONNECTOR_SINGLE_LINK_DVI_I_ENUM_ID1 \ 389 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
415 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 390 CONNECTOR_OBJECT_ID_LVDS << OBJECT_ID_SHIFT)
416 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 391
417 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I << OBJECT_ID_SHIFT) 392#define CONNECTOR_eDP_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
418 393 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
419#define CONNECTOR_SINGLE_LINK_DVI_I_ENUM_ID2 \ 394 CONNECTOR_OBJECT_ID_eDP << OBJECT_ID_SHIFT)
420 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 395
421 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 396#define CONNECTOR_eDP_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
422 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I << OBJECT_ID_SHIFT) 397 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
423 398 CONNECTOR_OBJECT_ID_eDP << OBJECT_ID_SHIFT)
424#define CONNECTOR_DUAL_LINK_DVI_I_ENUM_ID1 \ 399
425 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 400#define CONNECTOR_SINGLE_LINK_DVI_I_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
426 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 401 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
427 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I << OBJECT_ID_SHIFT) 402 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I << OBJECT_ID_SHIFT)
428 403
429#define CONNECTOR_DUAL_LINK_DVI_I_ENUM_ID2 \ 404#define CONNECTOR_SINGLE_LINK_DVI_I_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
430 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 405 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
431 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 406 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I << OBJECT_ID_SHIFT)
432 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I << OBJECT_ID_SHIFT) 407
433 408#define CONNECTOR_DUAL_LINK_DVI_I_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
434#define CONNECTOR_SINGLE_LINK_DVI_D_ENUM_ID1 \ 409 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
435 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 410 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I << OBJECT_ID_SHIFT)
436 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 411
437 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D << OBJECT_ID_SHIFT) 412#define CONNECTOR_DUAL_LINK_DVI_I_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
438 413 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
439#define CONNECTOR_SINGLE_LINK_DVI_D_ENUM_ID2 \ 414 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I << OBJECT_ID_SHIFT)
440 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 415
441 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 416#define CONNECTOR_SINGLE_LINK_DVI_D_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
442 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D << OBJECT_ID_SHIFT) 417 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
443 418 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D << OBJECT_ID_SHIFT)
444#define CONNECTOR_DUAL_LINK_DVI_D_ENUM_ID1 \ 419
445 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 420#define CONNECTOR_SINGLE_LINK_DVI_D_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
446 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 421 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
447 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D << OBJECT_ID_SHIFT) 422 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D << OBJECT_ID_SHIFT)
448 423
449#define CONNECTOR_VGA_ENUM_ID1 \ 424#define CONNECTOR_DUAL_LINK_DVI_D_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
450 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 425 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
451 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 426 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D << OBJECT_ID_SHIFT)
452 CONNECTOR_OBJECT_ID_VGA << OBJECT_ID_SHIFT) 427
453 428#define CONNECTOR_DUAL_LINK_DVI_D_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
454#define CONNECTOR_VGA_ENUM_ID2 \ 429 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
455 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 430 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D << OBJECT_ID_SHIFT)
456 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 431
457 CONNECTOR_OBJECT_ID_VGA << OBJECT_ID_SHIFT) 432#define CONNECTOR_DUAL_LINK_DVI_D_ENUM_ID3 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
458 433 GRAPH_OBJECT_ENUM_ID3 << ENUM_ID_SHIFT |\
459#define CONNECTOR_COMPOSITE_ENUM_ID1 \ 434 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D << OBJECT_ID_SHIFT)
460 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 435
461 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 436#define CONNECTOR_VGA_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
462 CONNECTOR_OBJECT_ID_COMPOSITE << OBJECT_ID_SHIFT) 437 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
463 438 CONNECTOR_OBJECT_ID_VGA << OBJECT_ID_SHIFT)
464#define CONNECTOR_SVIDEO_ENUM_ID1 \ 439
465 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 440#define CONNECTOR_VGA_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
466 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 441 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
467 CONNECTOR_OBJECT_ID_SVIDEO << OBJECT_ID_SHIFT) 442 CONNECTOR_OBJECT_ID_VGA << OBJECT_ID_SHIFT)
468 443
469#define CONNECTOR_YPbPr_ENUM_ID1 \ 444#define CONNECTOR_COMPOSITE_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
470 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 445 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
471 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 446 CONNECTOR_OBJECT_ID_COMPOSITE << OBJECT_ID_SHIFT)
472 CONNECTOR_OBJECT_ID_YPbPr << OBJECT_ID_SHIFT) 447
473 448#define CONNECTOR_COMPOSITE_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
474#define CONNECTOR_D_CONNECTOR_ENUM_ID1 \ 449 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
475 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 450 CONNECTOR_OBJECT_ID_COMPOSITE << OBJECT_ID_SHIFT)
476 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 451
477 CONNECTOR_OBJECT_ID_D_CONNECTOR << OBJECT_ID_SHIFT) 452#define CONNECTOR_SVIDEO_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
478 453 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
479#define CONNECTOR_9PIN_DIN_ENUM_ID1 \ 454 CONNECTOR_OBJECT_ID_SVIDEO << OBJECT_ID_SHIFT)
480 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 455
481 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 456#define CONNECTOR_SVIDEO_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
482 CONNECTOR_OBJECT_ID_9PIN_DIN << OBJECT_ID_SHIFT) 457 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
483 458 CONNECTOR_OBJECT_ID_SVIDEO << OBJECT_ID_SHIFT)
484#define CONNECTOR_SCART_ENUM_ID1 \ 459
485 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 460#define CONNECTOR_YPbPr_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
486 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 461 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
487 CONNECTOR_OBJECT_ID_SCART << OBJECT_ID_SHIFT) 462 CONNECTOR_OBJECT_ID_YPbPr << OBJECT_ID_SHIFT)
488 463
489#define CONNECTOR_HDMI_TYPE_A_ENUM_ID1 \ 464#define CONNECTOR_YPbPr_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
490 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 465 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
491 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 466 CONNECTOR_OBJECT_ID_YPbPr << OBJECT_ID_SHIFT)
492 CONNECTOR_OBJECT_ID_HDMI_TYPE_A << OBJECT_ID_SHIFT) 467
493 468#define CONNECTOR_D_CONNECTOR_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
494#define CONNECTOR_HDMI_TYPE_B_ENUM_ID1 \ 469 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
495 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 470 CONNECTOR_OBJECT_ID_D_CONNECTOR << OBJECT_ID_SHIFT)
496 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 471
497 CONNECTOR_OBJECT_ID_HDMI_TYPE_B << OBJECT_ID_SHIFT) 472#define CONNECTOR_D_CONNECTOR_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
498 473 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
499#define CONNECTOR_7PIN_DIN_ENUM_ID1 \ 474 CONNECTOR_OBJECT_ID_D_CONNECTOR << OBJECT_ID_SHIFT)
500 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 475
501 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 476#define CONNECTOR_9PIN_DIN_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
502 CONNECTOR_OBJECT_ID_7PIN_DIN << OBJECT_ID_SHIFT) 477 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
503 478 CONNECTOR_OBJECT_ID_9PIN_DIN << OBJECT_ID_SHIFT)
504#define CONNECTOR_PCIE_CONNECTOR_ENUM_ID1 \ 479
505 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 480#define CONNECTOR_9PIN_DIN_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
506 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 481 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
507 CONNECTOR_OBJECT_ID_PCIE_CONNECTOR << OBJECT_ID_SHIFT) 482 CONNECTOR_OBJECT_ID_9PIN_DIN << OBJECT_ID_SHIFT)
508 483
509#define CONNECTOR_PCIE_CONNECTOR_ENUM_ID2 \ 484#define CONNECTOR_SCART_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
510 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 485 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
511 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 486 CONNECTOR_OBJECT_ID_SCART << OBJECT_ID_SHIFT)
512 CONNECTOR_OBJECT_ID_PCIE_CONNECTOR << OBJECT_ID_SHIFT) 487
513 488#define CONNECTOR_SCART_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
514#define CONNECTOR_CROSSFIRE_ENUM_ID1 \ 489 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
515 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 490 CONNECTOR_OBJECT_ID_SCART << OBJECT_ID_SHIFT)
516 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 491
517 CONNECTOR_OBJECT_ID_CROSSFIRE << OBJECT_ID_SHIFT) 492#define CONNECTOR_HDMI_TYPE_A_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
518 493 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
519#define CONNECTOR_CROSSFIRE_ENUM_ID2 \ 494 CONNECTOR_OBJECT_ID_HDMI_TYPE_A << OBJECT_ID_SHIFT)
520 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 495
521 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 496#define CONNECTOR_HDMI_TYPE_A_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
522 CONNECTOR_OBJECT_ID_CROSSFIRE << OBJECT_ID_SHIFT) 497 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
523 498 CONNECTOR_OBJECT_ID_HDMI_TYPE_A << OBJECT_ID_SHIFT)
524#define CONNECTOR_HARDCODE_DVI_ENUM_ID1 \ 499
525 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 500#define CONNECTOR_HDMI_TYPE_A_ENUM_ID3 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
526 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 501 GRAPH_OBJECT_ENUM_ID3 << ENUM_ID_SHIFT |\
527 CONNECTOR_OBJECT_ID_HARDCODE_DVI << OBJECT_ID_SHIFT) 502 CONNECTOR_OBJECT_ID_HDMI_TYPE_A << OBJECT_ID_SHIFT)
528 503
529#define CONNECTOR_HARDCODE_DVI_ENUM_ID2 \ 504#define CONNECTOR_HDMI_TYPE_B_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
530 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 505 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
531 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 506 CONNECTOR_OBJECT_ID_HDMI_TYPE_B << OBJECT_ID_SHIFT)
532 CONNECTOR_OBJECT_ID_HARDCODE_DVI << OBJECT_ID_SHIFT) 507
533 508#define CONNECTOR_HDMI_TYPE_B_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
534#define CONNECTOR_DISPLAYPORT_ENUM_ID1 \ 509 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
535 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 510 CONNECTOR_OBJECT_ID_HDMI_TYPE_B << OBJECT_ID_SHIFT)
536 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 511
537 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT) 512#define CONNECTOR_7PIN_DIN_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
538 513 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
539#define CONNECTOR_DISPLAYPORT_ENUM_ID2 \ 514 CONNECTOR_OBJECT_ID_7PIN_DIN << OBJECT_ID_SHIFT)
540 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 515#define CONNECTOR_7PIN_DIN_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
541 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\ 516 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
542 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT) 517 CONNECTOR_OBJECT_ID_7PIN_DIN << OBJECT_ID_SHIFT)
543 518
544#define CONNECTOR_DISPLAYPORT_ENUM_ID3 \ 519#define CONNECTOR_PCIE_CONNECTOR_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
545 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 520 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
546 GRAPH_OBJECT_ENUM_ID3 << ENUM_ID_SHIFT |\ 521 CONNECTOR_OBJECT_ID_PCIE_CONNECTOR << OBJECT_ID_SHIFT)
547 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT) 522
548 523#define CONNECTOR_PCIE_CONNECTOR_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
549#define CONNECTOR_DISPLAYPORT_ENUM_ID4 \ 524 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
550 (GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\ 525 CONNECTOR_OBJECT_ID_PCIE_CONNECTOR << OBJECT_ID_SHIFT)
551 GRAPH_OBJECT_ENUM_ID4 << ENUM_ID_SHIFT |\ 526
552 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT) 527#define CONNECTOR_CROSSFIRE_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
528 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
529 CONNECTOR_OBJECT_ID_CROSSFIRE << OBJECT_ID_SHIFT)
530
531#define CONNECTOR_CROSSFIRE_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
532 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
533 CONNECTOR_OBJECT_ID_CROSSFIRE << OBJECT_ID_SHIFT)
534
535
536#define CONNECTOR_HARDCODE_DVI_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
537 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
538 CONNECTOR_OBJECT_ID_HARDCODE_DVI << OBJECT_ID_SHIFT)
539
540#define CONNECTOR_HARDCODE_DVI_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
541 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
542 CONNECTOR_OBJECT_ID_HARDCODE_DVI << OBJECT_ID_SHIFT)
543
544#define CONNECTOR_DISPLAYPORT_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
545 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
546 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT)
547
548#define CONNECTOR_DISPLAYPORT_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
549 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
550 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT)
551
552#define CONNECTOR_DISPLAYPORT_ENUM_ID3 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
553 GRAPH_OBJECT_ENUM_ID3 << ENUM_ID_SHIFT |\
554 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT)
555
556#define CONNECTOR_DISPLAYPORT_ENUM_ID4 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
557 GRAPH_OBJECT_ENUM_ID4 << ENUM_ID_SHIFT |\
558 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT)
559
560#define CONNECTOR_DISPLAYPORT_ENUM_ID5 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
561 GRAPH_OBJECT_ENUM_ID5 << ENUM_ID_SHIFT |\
562 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT)
563
564#define CONNECTOR_DISPLAYPORT_ENUM_ID6 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
565 GRAPH_OBJECT_ENUM_ID6 << ENUM_ID_SHIFT |\
566 CONNECTOR_OBJECT_ID_DISPLAYPORT << OBJECT_ID_SHIFT)
567
568#define CONNECTOR_MXM_ENUM_ID1 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
569 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
570 CONNECTOR_OBJECT_ID_MXM << OBJECT_ID_SHIFT) //Mapping to MXM_DP_A
571
572#define CONNECTOR_MXM_ENUM_ID2 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
573 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
574 CONNECTOR_OBJECT_ID_MXM << OBJECT_ID_SHIFT) //Mapping to MXM_DP_B
575
576#define CONNECTOR_MXM_ENUM_ID3 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
577 GRAPH_OBJECT_ENUM_ID3 << ENUM_ID_SHIFT |\
578 CONNECTOR_OBJECT_ID_MXM << OBJECT_ID_SHIFT) //Mapping to MXM_DP_C
579
580#define CONNECTOR_MXM_ENUM_ID4 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
581 GRAPH_OBJECT_ENUM_ID4 << ENUM_ID_SHIFT |\
582 CONNECTOR_OBJECT_ID_MXM << OBJECT_ID_SHIFT) //Mapping to MXM_DP_D
583
584#define CONNECTOR_MXM_ENUM_ID5 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
585 GRAPH_OBJECT_ENUM_ID5 << ENUM_ID_SHIFT |\
586 CONNECTOR_OBJECT_ID_MXM << OBJECT_ID_SHIFT) //Mapping to MXM_LVDS_TXxx
587
588#define CONNECTOR_MXM_ENUM_ID6 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
589 GRAPH_OBJECT_ENUM_ID6 << ENUM_ID_SHIFT |\
590 CONNECTOR_OBJECT_ID_MXM << OBJECT_ID_SHIFT) //Mapping to MXM_LVDS_UXxx
591
592#define CONNECTOR_MXM_ENUM_ID7 ( GRAPH_OBJECT_TYPE_CONNECTOR << OBJECT_TYPE_SHIFT |\
593 GRAPH_OBJECT_ENUM_ID7 << ENUM_ID_SHIFT |\
594 CONNECTOR_OBJECT_ID_MXM << OBJECT_ID_SHIFT) //Mapping to MXM_DAC
553 595
554/****************************************************/ 596/****************************************************/
555/* Router Object ID definition - Shared with BIOS */ 597/* Router Object ID definition - Shared with BIOS */
556/****************************************************/ 598/****************************************************/
557#define ROUTER_I2C_EXTENDER_CNTL_ENUM_ID1 \ 599#define ROUTER_I2C_EXTENDER_CNTL_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ROUTER << OBJECT_TYPE_SHIFT |\
558 (GRAPH_OBJECT_TYPE_ROUTER << OBJECT_TYPE_SHIFT |\ 600 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
559 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\ 601 ROUTER_OBJECT_ID_I2C_EXTENDER_CNTL << OBJECT_ID_SHIFT)
560 ROUTER_OBJECT_ID_I2C_EXTENDER_CNTL << OBJECT_ID_SHIFT)
561 602
562/* deleted */ 603/* deleted */
563 604
564/****************************************************/ 605/****************************************************/
606/* Generic Object ID definition - Shared with BIOS */
607/****************************************************/
608#define GENERICOBJECT_GLSYNC_ENUM_ID1 (GRAPH_OBJECT_TYPE_GENERIC << OBJECT_TYPE_SHIFT |\
609 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
610 GENERIC_OBJECT_ID_GLSYNC << OBJECT_ID_SHIFT)
611
612#define GENERICOBJECT_PX2_NON_DRIVABLE_ID1 (GRAPH_OBJECT_TYPE_GENERIC << OBJECT_TYPE_SHIFT |\
613 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
614 GENERIC_OBJECT_ID_PX2_NON_DRIVABLE<< OBJECT_ID_SHIFT)
615
616#define GENERICOBJECT_PX2_NON_DRIVABLE_ID2 (GRAPH_OBJECT_TYPE_GENERIC << OBJECT_TYPE_SHIFT |\
617 GRAPH_OBJECT_ENUM_ID2 << ENUM_ID_SHIFT |\
618 GENERIC_OBJECT_ID_PX2_NON_DRIVABLE<< OBJECT_ID_SHIFT)
619
620#define GENERICOBJECT_MXM_OPM_ENUM_ID1 (GRAPH_OBJECT_TYPE_GENERIC << OBJECT_TYPE_SHIFT |\
621 GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
622 GENERIC_OBJECT_ID_MXM_OPM << OBJECT_ID_SHIFT)
623
624/****************************************************/
565/* Object Cap definition - Shared with BIOS */ 625/* Object Cap definition - Shared with BIOS */
566/****************************************************/ 626/****************************************************/
567#define GRAPHICS_OBJECT_CAP_I2C 0x00000001L 627#define GRAPHICS_OBJECT_CAP_I2C 0x00000001L
568#define GRAPHICS_OBJECT_CAP_TABLE_ID 0x00000002L 628#define GRAPHICS_OBJECT_CAP_TABLE_ID 0x00000002L
569 629
630
570#define GRAPHICS_OBJECT_I2CCOMMAND_TABLE_ID 0x01 631#define GRAPHICS_OBJECT_I2CCOMMAND_TABLE_ID 0x01
571#define GRAPHICS_OBJECT_HOTPLUGDETECTIONINTERUPT_TABLE_ID 0x02 632#define GRAPHICS_OBJECT_HOTPLUGDETECTIONINTERUPT_TABLE_ID 0x02
572#define GRAPHICS_OBJECT_ENCODER_OUTPUT_PROTECTION_TABLE_ID 0x03 633#define GRAPHICS_OBJECT_ENCODER_OUTPUT_PROTECTION_TABLE_ID 0x03
@@ -575,4 +636,8 @@
575#pragma pack() 636#pragma pack()
576#endif 637#endif
577 638
578#endif /*GRAPHICTYPE */ 639#endif /*GRAPHICTYPE */
640
641
642
643
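The object IDs composed in this header pack three fields into one 16-bit word using the shifts shown above (OBJECT_ID_SHIFT 0x00, ENUM_ID_SHIFT 0x08, OBJECT_TYPE_SHIFT 0x0C, with OBJECT_TYPE_MASK 0x7000). Below is only a minimal decoding sketch in plain C; the enum-ID and object-ID field widths are assumptions inferred from the legacy 0x21xx/0x31xx values in the commented-out block, not taken verbatim from this header.

#include <stdio.h>
#include <stdint.h>

#define OBJECT_ID_SHIFT    0x00
#define ENUM_ID_SHIFT      0x08
#define OBJECT_TYPE_SHIFT  0x0C

/* Split a composed graphics object ID (encoder, connector, router, ...)
 * back into its type, enum (instance) and object-ID fields. */
static void decode_object_id(uint16_t obj)
{
	unsigned type      = (obj >> OBJECT_TYPE_SHIFT) & 0x7;  /* OBJECT_TYPE_MASK 0x7000 */
	unsigned enum_id   = (obj >> ENUM_ID_SHIFT) & 0x7;      /* assumed 3-bit enum field */
	unsigned object_id = (obj >> OBJECT_ID_SHIFT) & 0xFF;   /* assumed 8-bit object ID  */

	printf("type=0x%X enum=0x%X id=0x%02X\n", type, enum_id, object_id);
}

int main(void)
{
	decode_object_id(0x2101); /* legacy ENCODER_INTERNAL_LVDS_ENUM_ID1: type 2, enum 1, id 0x01 */
	decode_object_id(0x310F); /* legacy CONNECTOR_7PIN_DIN_ENUM_ID1:    type 3, enum 1, id 0x0F */
	return 0;
}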
diff --git a/drivers/gpu/drm/radeon/atom.c b/drivers/gpu/drm/radeon/atom.c
index d67c42555ab9..1d569830ed99 100644
--- a/drivers/gpu/drm/radeon/atom.c
+++ b/drivers/gpu/drm/radeon/atom.c
@@ -24,6 +24,8 @@
24 24
25#include <linux/module.h> 25#include <linux/module.h>
26#include <linux/sched.h> 26#include <linux/sched.h>
27#include <linux/slab.h>
28#include <asm/unaligned.h>
27 29
28#define ATOM_DEBUG 30#define ATOM_DEBUG
29 31
@@ -51,14 +53,17 @@
51 53
52typedef struct { 54typedef struct {
53 struct atom_context *ctx; 55 struct atom_context *ctx;
54
55 uint32_t *ps, *ws; 56 uint32_t *ps, *ws;
56 int ps_shift; 57 int ps_shift;
57 uint16_t start; 58 uint16_t start;
59 unsigned last_jump;
60 unsigned long last_jump_jiffies;
61 bool abort;
58} atom_exec_context; 62} atom_exec_context;
59 63
60int atom_debug = 0; 64int atom_debug = 0;
61void atom_execute_table(struct atom_context *ctx, int index, uint32_t * params); 65static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params);
66int atom_execute_table(struct atom_context *ctx, int index, uint32_t * params);
62 67
63static uint32_t atom_arg_mask[8] = 68static uint32_t atom_arg_mask[8] =
64 { 0xFFFFFFFF, 0xFFFF, 0xFFFF00, 0xFFFF0000, 0xFF, 0xFF00, 0xFF0000, 69 { 0xFFFFFFFF, 0xFFFF, 0xFFFF00, 0xFFFF0000, 0xFF, 0xFF00, 0xFF0000,
@@ -211,7 +216,9 @@ static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
211 case ATOM_ARG_PS: 216 case ATOM_ARG_PS:
212 idx = U8(*ptr); 217 idx = U8(*ptr);
213 (*ptr)++; 218 (*ptr)++;
214 val = le32_to_cpu(ctx->ps[idx]); 219 /* get_unaligned_le32 avoids unaligned accesses from atombios
220 * tables, noticed on a DEC Alpha. */
221 val = get_unaligned_le32((u32 *)&ctx->ps[idx]);
215 if (print) 222 if (print)
216 DEBUG("PS[0x%02X,0x%04X]", idx, val); 223 DEBUG("PS[0x%02X,0x%04X]", idx, val);
217 break; 224 break;
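
The hunk above replaces a plain le32_to_cpu() load of the parameter space with get_unaligned_le32(): AtomBIOS tables are byte-packed, so a dword in the PS array is not guaranteed to sit on a 4-byte boundary, and a direct 32-bit load faults on strict-alignment machines such as Alpha. A minimal sketch of the same pattern (the helper name below is illustrative, not from this patch):

/* Sketch: read a possibly unaligned little-endian dword out of a
 * byte-packed BIOS blob; read_bios_dword() is a made-up helper name. */
#include <linux/types.h>
#include <asm/unaligned.h>

static u32 read_bios_dword(const u8 *bios, unsigned int byte_offset)
{
    /* safe on strict-alignment CPUs and handles the table's
     * little-endian byte order */
    return get_unaligned_le32(bios + byte_offset);
}
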
@@ -245,6 +252,9 @@ static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
245 case ATOM_WS_ATTRIBUTES: 252 case ATOM_WS_ATTRIBUTES:
246 val = gctx->io_attr; 253 val = gctx->io_attr;
247 break; 254 break;
255 case ATOM_WS_REGPTR:
256 val = gctx->reg_block;
257 break;
248 default: 258 default:
249 val = ctx->ws[idx]; 259 val = ctx->ws[idx];
250 } 260 }
@@ -263,10 +273,10 @@ static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
263 case ATOM_ARG_FB: 273 case ATOM_ARG_FB:
264 idx = U8(*ptr); 274 idx = U8(*ptr);
265 (*ptr)++; 275 (*ptr)++;
276 val = gctx->scratch[((gctx->fb_base + idx) / 4)];
266 if (print) 277 if (print)
267 DEBUG("FB[0x%02X]", idx); 278 DEBUG("FB[0x%02X]", idx);
268 printk(KERN_INFO "FB access is not implemented.\n"); 279 break;
269 return 0;
270 case ATOM_ARG_IMM: 280 case ATOM_ARG_IMM:
271 switch (align) { 281 switch (align) {
272 case ATOM_SRC_DWORD: 282 case ATOM_SRC_DWORD:
@@ -384,6 +394,32 @@ static uint32_t atom_get_src(atom_exec_context *ctx, uint8_t attr, int *ptr)
384 return atom_get_src_int(ctx, attr, ptr, NULL, 1); 394 return atom_get_src_int(ctx, attr, ptr, NULL, 1);
385} 395}
386 396
397static uint32_t atom_get_src_direct(atom_exec_context *ctx, uint8_t align, int *ptr)
398{
399 uint32_t val = 0xCDCDCDCD;
400
401 switch (align) {
402 case ATOM_SRC_DWORD:
403 val = U32(*ptr);
404 (*ptr) += 4;
405 break;
406 case ATOM_SRC_WORD0:
407 case ATOM_SRC_WORD8:
408 case ATOM_SRC_WORD16:
409 val = U16(*ptr);
410 (*ptr) += 2;
411 break;
412 case ATOM_SRC_BYTE0:
413 case ATOM_SRC_BYTE8:
414 case ATOM_SRC_BYTE16:
415 case ATOM_SRC_BYTE24:
416 val = U8(*ptr);
417 (*ptr)++;
418 break;
419 }
420 return val;
421}
422
387static uint32_t atom_get_dst(atom_exec_context *ctx, int arg, uint8_t attr, 423static uint32_t atom_get_dst(atom_exec_context *ctx, int arg, uint8_t attr,
388 int *ptr, uint32_t *saved, int print) 424 int *ptr, uint32_t *saved, int print)
389{ 425{
@@ -481,6 +517,9 @@ static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
481 case ATOM_WS_ATTRIBUTES: 517 case ATOM_WS_ATTRIBUTES:
482 gctx->io_attr = val; 518 gctx->io_attr = val;
483 break; 519 break;
520 case ATOM_WS_REGPTR:
521 gctx->reg_block = val;
522 break;
484 default: 523 default:
485 ctx->ws[idx] = val; 524 ctx->ws[idx] = val;
486 } 525 }
@@ -488,9 +527,9 @@ static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
488 case ATOM_ARG_FB: 527 case ATOM_ARG_FB:
489 idx = U8(*ptr); 528 idx = U8(*ptr);
490 (*ptr)++; 529 (*ptr)++;
530 gctx->scratch[((gctx->fb_base + idx) / 4)] = val;
491 DEBUG("FB[0x%02X]", idx); 531 DEBUG("FB[0x%02X]", idx);
492 printk(KERN_INFO "FB access is not implemented.\n"); 532 break;
493 return;
494 case ATOM_ARG_PLL: 533 case ATOM_ARG_PLL:
495 idx = U8(*ptr); 534 idx = U8(*ptr);
496 (*ptr)++; 535 (*ptr)++;
@@ -568,12 +607,17 @@ static void atom_op_beep(atom_exec_context *ctx, int *ptr, int arg)
568static void atom_op_calltable(atom_exec_context *ctx, int *ptr, int arg) 607static void atom_op_calltable(atom_exec_context *ctx, int *ptr, int arg)
569{ 608{
570 int idx = U8((*ptr)++); 609 int idx = U8((*ptr)++);
610 int r = 0;
611
571 if (idx < ATOM_TABLE_NAMES_CNT) 612 if (idx < ATOM_TABLE_NAMES_CNT)
572 SDEBUG(" table: %d (%s)\n", idx, atom_table_names[idx]); 613 SDEBUG(" table: %d (%s)\n", idx, atom_table_names[idx]);
573 else 614 else
574 SDEBUG(" table: %d\n", idx); 615 SDEBUG(" table: %d\n", idx);
575 if (U16(ctx->ctx->cmd_table + 4 + 2 * idx)) 616 if (U16(ctx->ctx->cmd_table + 4 + 2 * idx))
576 atom_execute_table(ctx->ctx, idx, ctx->ps + ctx->ps_shift); 617 r = atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift);
618 if (r) {
619 ctx->abort = true;
620 }
577} 621}
578 622
579static void atom_op_clear(atom_exec_context *ctx, int *ptr, int arg) 623static void atom_op_clear(atom_exec_context *ctx, int *ptr, int arg)
@@ -607,7 +651,7 @@ static void atom_op_delay(atom_exec_context *ctx, int *ptr, int arg)
607 uint8_t count = U8((*ptr)++); 651 uint8_t count = U8((*ptr)++);
608 SDEBUG(" count: %d\n", count); 652 SDEBUG(" count: %d\n", count);
609 if (arg == ATOM_UNIT_MICROSEC) 653 if (arg == ATOM_UNIT_MICROSEC)
610 schedule_timeout_uninterruptible(usecs_to_jiffies(count)); 654 udelay(count);
611 else 655 else
612 schedule_timeout_uninterruptible(msecs_to_jiffies(count)); 656 schedule_timeout_uninterruptible(msecs_to_jiffies(count));
613} 657}
@@ -637,6 +681,8 @@ static void atom_op_eot(atom_exec_context *ctx, int *ptr, int arg)
637static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg) 681static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
638{ 682{
639 int execute = 0, target = U16(*ptr); 683 int execute = 0, target = U16(*ptr);
684 unsigned long cjiffies;
685
640 (*ptr) += 2; 686 (*ptr) += 2;
641 switch (arg) { 687 switch (arg) {
642 case ATOM_COND_ABOVE: 688 case ATOM_COND_ABOVE:
@@ -664,8 +710,25 @@ static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
664 if (arg != ATOM_COND_ALWAYS) 710 if (arg != ATOM_COND_ALWAYS)
665 SDEBUG(" taken: %s\n", execute ? "yes" : "no"); 711 SDEBUG(" taken: %s\n", execute ? "yes" : "no");
666 SDEBUG(" target: 0x%04X\n", target); 712 SDEBUG(" target: 0x%04X\n", target);
667 if (execute) 713 if (execute) {
714 if (ctx->last_jump == (ctx->start + target)) {
715 cjiffies = jiffies;
716 if (time_after(cjiffies, ctx->last_jump_jiffies)) {
717 cjiffies -= ctx->last_jump_jiffies;
718 if ((jiffies_to_msecs(cjiffies) > 1000)) {
719 DRM_ERROR("atombios stuck in loop for more than 1sec aborting\n");
720 ctx->abort = true;
721 }
722 } else {
 723 					/* jiffies wrapped around; reset the timestamp and wait a little longer */
724 ctx->last_jump_jiffies = jiffies;
725 }
726 } else {
727 ctx->last_jump = ctx->start + target;
728 ctx->last_jump_jiffies = jiffies;
729 }
668 *ptr = ctx->start + target; 730 *ptr = ctx->start + target;
731 }
669} 732}
670 733
671static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg) 734static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
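
The jump handler above gains a simple watchdog: it remembers the target of the last taken jump and the jiffies value when it was first taken, and if the same jump keeps firing for more than a second of wall time it sets ctx->abort so a buggy AtomBIOS table cannot hang the kernel in an infinite loop. A stripped-down sketch of that timeout check (helper name is illustrative; the 1000 ms limit mirrors the hunk):

/* Sketch of the jiffies-based timeout used above. */
#include <linux/types.h>
#include <linux/jiffies.h>

static bool same_jump_timed_out(unsigned long first_seen)
{
    unsigned long now = jiffies;

    /* time_after() copes with jiffies wrap-around */
    if (time_after(now, first_seen) &&
        jiffies_to_msecs(now - first_seen) > 1000)
        return true;
    return false;
}
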
@@ -676,7 +739,7 @@ static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
676 SDEBUG(" dst: "); 739 SDEBUG(" dst: ");
677 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); 740 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
678 SDEBUG(" src1: "); 741 SDEBUG(" src1: ");
679 src1 = atom_get_src(ctx, attr, ptr); 742 src1 = atom_get_src_direct(ctx, ((attr >> 3) & 7), ptr);
680 SDEBUG(" src2: "); 743 SDEBUG(" src2: ");
681 src2 = atom_get_src(ctx, attr, ptr); 744 src2 = atom_get_src(ctx, attr, ptr);
682 dst &= src1; 745 dst &= src1;
@@ -808,7 +871,7 @@ static void atom_op_setregblock(atom_exec_context *ctx, int *ptr, int arg)
808 SDEBUG(" base: 0x%04X\n", ctx->ctx->reg_block); 871 SDEBUG(" base: 0x%04X\n", ctx->ctx->reg_block);
809} 872}
810 873
811static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg) 874static void atom_op_shift_left(atom_exec_context *ctx, int *ptr, int arg)
812{ 875{
813 uint8_t attr = U8((*ptr)++), shift; 876 uint8_t attr = U8((*ptr)++), shift;
814 uint32_t saved, dst; 877 uint32_t saved, dst;
@@ -817,14 +880,14 @@ static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg)
817 attr |= atom_def_dst[attr >> 3] << 6; 880 attr |= atom_def_dst[attr >> 3] << 6;
818 SDEBUG(" dst: "); 881 SDEBUG(" dst: ");
819 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); 882 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
820 shift = U8((*ptr)++); 883 shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
821 SDEBUG(" shift: %d\n", shift); 884 SDEBUG(" shift: %d\n", shift);
822 dst <<= shift; 885 dst <<= shift;
823 SDEBUG(" dst: "); 886 SDEBUG(" dst: ");
824 atom_put_dst(ctx, arg, attr, &dptr, dst, saved); 887 atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
825} 888}
826 889
827static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg) 890static void atom_op_shift_right(atom_exec_context *ctx, int *ptr, int arg)
828{ 891{
829 uint8_t attr = U8((*ptr)++), shift; 892 uint8_t attr = U8((*ptr)++), shift;
830 uint32_t saved, dst; 893 uint32_t saved, dst;
@@ -833,9 +896,47 @@ static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg)
833 attr |= atom_def_dst[attr >> 3] << 6; 896 attr |= atom_def_dst[attr >> 3] << 6;
834 SDEBUG(" dst: "); 897 SDEBUG(" dst: ");
835 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); 898 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
836 shift = U8((*ptr)++); 899 shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
900 SDEBUG(" shift: %d\n", shift);
901 dst >>= shift;
902 SDEBUG(" dst: ");
903 atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
904}
905
906static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg)
907{
908 uint8_t attr = U8((*ptr)++), shift;
909 uint32_t saved, dst;
910 int dptr = *ptr;
911 uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];
912 SDEBUG(" dst: ");
913 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
 914 	/* the op needs the full dst value */
915 dst = saved;
916 shift = atom_get_src(ctx, attr, ptr);
917 SDEBUG(" shift: %d\n", shift);
918 dst <<= shift;
919 dst &= atom_arg_mask[dst_align];
920 dst >>= atom_arg_shift[dst_align];
921 SDEBUG(" dst: ");
922 atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
923}
924
925static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg)
926{
927 uint8_t attr = U8((*ptr)++), shift;
928 uint32_t saved, dst;
929 int dptr = *ptr;
930 uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];
931 SDEBUG(" dst: ");
932 dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
 933 	/* the op needs the full dst value */
934 dst = saved;
935 shift = atom_get_src(ctx, attr, ptr);
837 SDEBUG(" shift: %d\n", shift); 936 SDEBUG(" shift: %d\n", shift);
838 dst >>= shift; 937 dst >>= shift;
938 dst &= atom_arg_mask[dst_align];
939 dst >>= atom_arg_shift[dst_align];
839 SDEBUG(" dst: "); 940 SDEBUG(" dst: ");
840 atom_put_dst(ctx, arg, attr, &dptr, dst, saved); 941 atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
841} 942}
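
Note the split above: the original handlers are kept (renamed atom_op_shift_left/atom_op_shift_right) for the opcodes that shift the already-extracted destination field, while the new atom_op_shl/atom_op_shr start from the full saved dword, shift that, and only then mask and realign the result into the destination field via atom_arg_mask[]/atom_arg_shift[]. A small worked sketch of the mask-and-realign step; the mask (bits [23:8]) and position are assumptions standing in for one alignment's table entries:

/* Sketch with concrete numbers, mirroring the new shl handler. */
#include <stdint.h>

static uint32_t shl_midword_field(uint32_t saved, unsigned int shift)
{
    uint32_t dst = saved;     /* start from the full dword ...      */

    dst <<= shift;            /* ... shift the whole value ...      */
    dst &= 0x00FFFF00;        /* ... keep only the field's bits ... */
    return dst >> 8;          /* ... and realign them to bit 0      */
}
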
@@ -936,18 +1037,18 @@ static struct {
936 atom_op_or, ATOM_ARG_FB}, { 1037 atom_op_or, ATOM_ARG_FB}, {
937 atom_op_or, ATOM_ARG_PLL}, { 1038 atom_op_or, ATOM_ARG_PLL}, {
938 atom_op_or, ATOM_ARG_MC}, { 1039 atom_op_or, ATOM_ARG_MC}, {
939 atom_op_shl, ATOM_ARG_REG}, { 1040 atom_op_shift_left, ATOM_ARG_REG}, {
940 atom_op_shl, ATOM_ARG_PS}, { 1041 atom_op_shift_left, ATOM_ARG_PS}, {
941 atom_op_shl, ATOM_ARG_WS}, { 1042 atom_op_shift_left, ATOM_ARG_WS}, {
942 atom_op_shl, ATOM_ARG_FB}, { 1043 atom_op_shift_left, ATOM_ARG_FB}, {
943 atom_op_shl, ATOM_ARG_PLL}, { 1044 atom_op_shift_left, ATOM_ARG_PLL}, {
944 atom_op_shl, ATOM_ARG_MC}, { 1045 atom_op_shift_left, ATOM_ARG_MC}, {
945 atom_op_shr, ATOM_ARG_REG}, { 1046 atom_op_shift_right, ATOM_ARG_REG}, {
946 atom_op_shr, ATOM_ARG_PS}, { 1047 atom_op_shift_right, ATOM_ARG_PS}, {
947 atom_op_shr, ATOM_ARG_WS}, { 1048 atom_op_shift_right, ATOM_ARG_WS}, {
948 atom_op_shr, ATOM_ARG_FB}, { 1049 atom_op_shift_right, ATOM_ARG_FB}, {
949 atom_op_shr, ATOM_ARG_PLL}, { 1050 atom_op_shift_right, ATOM_ARG_PLL}, {
950 atom_op_shr, ATOM_ARG_MC}, { 1051 atom_op_shift_right, ATOM_ARG_MC}, {
951 atom_op_mul, ATOM_ARG_REG}, { 1052 atom_op_mul, ATOM_ARG_REG}, {
952 atom_op_mul, ATOM_ARG_PS}, { 1053 atom_op_mul, ATOM_ARG_PS}, {
953 atom_op_mul, ATOM_ARG_WS}, { 1054 atom_op_mul, ATOM_ARG_WS}, {
@@ -1040,15 +1141,16 @@ static struct {
1040 atom_op_shr, ATOM_ARG_MC}, { 1141 atom_op_shr, ATOM_ARG_MC}, {
1041atom_op_debug, 0},}; 1142atom_op_debug, 0},};
1042 1143
1043void atom_execute_table(struct atom_context *ctx, int index, uint32_t * params) 1144static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params)
1044{ 1145{
1045 int base = CU16(ctx->cmd_table + 4 + 2 * index); 1146 int base = CU16(ctx->cmd_table + 4 + 2 * index);
1046 int len, ws, ps, ptr; 1147 int len, ws, ps, ptr;
1047 unsigned char op; 1148 unsigned char op;
1048 atom_exec_context ectx; 1149 atom_exec_context ectx;
1150 int ret = 0;
1049 1151
1050 if (!base) 1152 if (!base)
1051 return; 1153 return -EINVAL;
1052 1154
1053 len = CU16(base + ATOM_CT_SIZE_PTR); 1155 len = CU16(base + ATOM_CT_SIZE_PTR);
1054 ws = CU8(base + ATOM_CT_WS_PTR); 1156 ws = CU8(base + ATOM_CT_WS_PTR);
@@ -1057,12 +1159,12 @@ void atom_execute_table(struct atom_context *ctx, int index, uint32_t * params)
1057 1159
1058 SDEBUG(">> execute %04X (len %d, WS %d, PS %d)\n", base, len, ws, ps); 1160 SDEBUG(">> execute %04X (len %d, WS %d, PS %d)\n", base, len, ws, ps);
1059 1161
1060 /* reset reg block */
1061 ctx->reg_block = 0;
1062 ectx.ctx = ctx; 1162 ectx.ctx = ctx;
1063 ectx.ps_shift = ps / 4; 1163 ectx.ps_shift = ps / 4;
1064 ectx.start = base; 1164 ectx.start = base;
1065 ectx.ps = params; 1165 ectx.ps = params;
1166 ectx.abort = false;
1167 ectx.last_jump = 0;
1066 if (ws) 1168 if (ws)
1067 ectx.ws = kzalloc(4 * ws, GFP_KERNEL); 1169 ectx.ws = kzalloc(4 * ws, GFP_KERNEL);
1068 else 1170 else
@@ -1075,6 +1177,12 @@ void atom_execute_table(struct atom_context *ctx, int index, uint32_t * params)
1075 SDEBUG("%s @ 0x%04X\n", atom_op_names[op], ptr - 1); 1177 SDEBUG("%s @ 0x%04X\n", atom_op_names[op], ptr - 1);
1076 else 1178 else
1077 SDEBUG("[%d] @ 0x%04X\n", op, ptr - 1); 1179 SDEBUG("[%d] @ 0x%04X\n", op, ptr - 1);
1180 if (ectx.abort) {
1181 DRM_ERROR("atombios stuck executing %04X (len %d, WS %d, PS %d) @ 0x%04X\n",
1182 base, len, ws, ps, ptr - 1);
1183 ret = -EINVAL;
1184 goto free;
1185 }
1078 1186
1079 if (op < ATOM_OP_CNT && op > 0) 1187 if (op < ATOM_OP_CNT && op > 0)
1080 opcode_table[op].func(&ectx, &ptr, 1188 opcode_table[op].func(&ectx, &ptr,
@@ -1088,8 +1196,26 @@ void atom_execute_table(struct atom_context *ctx, int index, uint32_t * params)
1088 debug_depth--; 1196 debug_depth--;
1089 SDEBUG("<<\n"); 1197 SDEBUG("<<\n");
1090 1198
1199free:
1091 if (ws) 1200 if (ws)
1092 kfree(ectx.ws); 1201 kfree(ectx.ws);
1202 return ret;
1203}
1204
1205int atom_execute_table(struct atom_context *ctx, int index, uint32_t * params)
1206{
1207 int r;
1208
1209 mutex_lock(&ctx->mutex);
1210 /* reset reg block */
1211 ctx->reg_block = 0;
1212 /* reset fb window */
1213 ctx->fb_base = 0;
1214 /* reset io mode */
1215 ctx->io_mode = ATOM_IO_MM;
1216 r = atom_execute_table_locked(ctx, index, params);
1217 mutex_unlock(&ctx->mutex);
1218 return r;
1093} 1219}
1094 1220
1095static int atom_iio_len[] = { 1, 2, 3, 3, 3, 3, 4, 4, 4, 3 }; 1221static int atom_iio_len[] = { 1, 2, 3, 3, 3, 3, 4, 4, 4, 3 };
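
Table execution is now split into a locked worker and a public atom_execute_table() wrapper that serializes callers on ctx->mutex and resets the per-call register block, FB window and IO mode, and the whole path finally reports an error instead of returning void. A hypothetical call-site sketch (function name and arguments are illustrative, not taken from this patch; the usual drm/atom headers are assumed):

/* Hypothetical call site: check the new int return value so an aborted
 * AtomBIOS table is treated as a failure rather than ignored. */
static int run_init_table_example(struct atom_context *ctx, uint32_t *args)
{
    int r = atom_execute_table(ctx, ATOM_CMD_INIT, args);

    if (r)
        DRM_ERROR("AtomBIOS init table failed: %d\n", r);
    return r;
}
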
@@ -1173,9 +1299,7 @@ int atom_asic_init(struct atom_context *ctx)
1173 1299
1174 if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT)) 1300 if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT))
1175 return 1; 1301 return 1;
1176 atom_execute_table(ctx, ATOM_CMD_INIT, ps); 1302 return atom_execute_table(ctx, ATOM_CMD_INIT, ps);
1177
1178 return 0;
1179} 1303}
1180 1304
1181void atom_destroy(struct atom_context *ctx) 1305void atom_destroy(struct atom_context *ctx)
@@ -1185,12 +1309,16 @@ void atom_destroy(struct atom_context *ctx)
1185 kfree(ctx); 1309 kfree(ctx);
1186} 1310}
1187 1311
1188void atom_parse_data_header(struct atom_context *ctx, int index, 1312bool atom_parse_data_header(struct atom_context *ctx, int index,
1189 uint16_t * size, uint8_t * frev, uint8_t * crev, 1313 uint16_t * size, uint8_t * frev, uint8_t * crev,
1190 uint16_t * data_start) 1314 uint16_t * data_start)
1191{ 1315{
1192 int offset = index * 2 + 4; 1316 int offset = index * 2 + 4;
1193 int idx = CU16(ctx->data_table + offset); 1317 int idx = CU16(ctx->data_table + offset);
1318 u16 *mdt = (u16 *)(ctx->bios + ctx->data_table + 4);
1319
1320 if (!mdt[index])
1321 return false;
1194 1322
1195 if (size) 1323 if (size)
1196 *size = CU16(idx); 1324 *size = CU16(idx);
@@ -1199,18 +1327,47 @@ void atom_parse_data_header(struct atom_context *ctx, int index,
1199 if (crev) 1327 if (crev)
1200 *crev = CU8(idx + 3); 1328 *crev = CU8(idx + 3);
1201 *data_start = idx; 1329 *data_start = idx;
1202 return; 1330 return true;
1203} 1331}
1204 1332
1205void atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t * frev, 1333bool atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t * frev,
1206 uint8_t * crev) 1334 uint8_t * crev)
1207{ 1335{
1208 int offset = index * 2 + 4; 1336 int offset = index * 2 + 4;
1209 int idx = CU16(ctx->cmd_table + offset); 1337 int idx = CU16(ctx->cmd_table + offset);
1338 u16 *mct = (u16 *)(ctx->bios + ctx->cmd_table + 4);
1339
1340 if (!mct[index])
1341 return false;
1210 1342
1211 if (frev) 1343 if (frev)
1212 *frev = CU8(idx + 2); 1344 *frev = CU8(idx + 2);
1213 if (crev) 1345 if (crev)
1214 *crev = CU8(idx + 3); 1346 *crev = CU8(idx + 3);
1215 return; 1347 return true;
1348}
1349
1350int atom_allocate_fb_scratch(struct atom_context *ctx)
1351{
1352 int index = GetIndexIntoMasterTable(DATA, VRAM_UsageByFirmware);
1353 uint16_t data_offset;
1354 int usage_bytes = 0;
1355 struct _ATOM_VRAM_USAGE_BY_FIRMWARE *firmware_usage;
1356
1357 if (atom_parse_data_header(ctx, index, NULL, NULL, NULL, &data_offset)) {
1358 firmware_usage = (struct _ATOM_VRAM_USAGE_BY_FIRMWARE *)(ctx->bios + data_offset);
1359
1360 DRM_DEBUG("atom firmware requested %08x %dkb\n",
1361 firmware_usage->asFirmwareVramReserveInfo[0].ulStartAddrUsedByFirmware,
1362 firmware_usage->asFirmwareVramReserveInfo[0].usFirmwareUseInKb);
1363
1364 usage_bytes = firmware_usage->asFirmwareVramReserveInfo[0].usFirmwareUseInKb * 1024;
1365 }
1366 if (usage_bytes == 0)
1367 usage_bytes = 20 * 1024;
1368 /* allocate some scratch memory */
1369 ctx->scratch = kzalloc(usage_bytes, GFP_KERNEL);
1370 if (!ctx->scratch)
1371 return -ENOMEM;
1372 return 0;
1216} 1373}
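
atom_parse_data_header() and atom_parse_cmd_header() now return false when the master table has no entry for the requested index, and the new atom_allocate_fb_scratch() uses exactly that to size ctx->scratch, the buffer backing the ATOM_ARG_FB reads and writes enabled earlier in this patch. A hypothetical caller sketch (table index and debug text are illustrative; the usual drm/atom headers are assumed):

/* Hypothetical call site: skip tables that this particular VBIOS
 * simply does not provide instead of parsing garbage offsets. */
static void dump_data_table_example(struct atom_context *ctx, int index)
{
    uint16_t size, data_offset;
    uint8_t frev, crev;

    if (!atom_parse_data_header(ctx, index, &size, &frev, &crev,
                                &data_offset))
        return;    /* table not present in this VBIOS */

    DRM_DEBUG("table %d: size %u, frev %u, crev %u, offset 0x%04x\n",
              index, size, frev, crev, data_offset);
}
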
diff --git a/drivers/gpu/drm/radeon/atom.h b/drivers/gpu/drm/radeon/atom.h
index e6eb38f2bcae..cd1b64ab5ca7 100644
--- a/drivers/gpu/drm/radeon/atom.h
+++ b/drivers/gpu/drm/radeon/atom.h
@@ -91,6 +91,7 @@
91#define ATOM_WS_AND_MASK 0x45 91#define ATOM_WS_AND_MASK 0x45
92#define ATOM_WS_FB_WINDOW 0x46 92#define ATOM_WS_FB_WINDOW 0x46
93#define ATOM_WS_ATTRIBUTES 0x47 93#define ATOM_WS_ATTRIBUTES 0x47
94#define ATOM_WS_REGPTR 0x48
94 95
95#define ATOM_IIO_NOP 0 96#define ATOM_IIO_NOP 0
96#define ATOM_IIO_START 1 97#define ATOM_IIO_START 1
@@ -120,6 +121,7 @@ struct card_info {
120 121
121struct atom_context { 122struct atom_context {
122 struct card_info *card; 123 struct card_info *card;
124 struct mutex mutex;
123 void *bios; 125 void *bios;
124 uint32_t cmd_table, data_table; 126 uint32_t cmd_table, data_table;
125 uint16_t *iio; 127 uint16_t *iio;
@@ -132,16 +134,20 @@ struct atom_context {
132 uint8_t shift; 134 uint8_t shift;
133 int cs_equal, cs_above; 135 int cs_equal, cs_above;
134 int io_mode; 136 int io_mode;
137 uint32_t *scratch;
135}; 138};
136 139
137extern int atom_debug; 140extern int atom_debug;
138 141
139struct atom_context *atom_parse(struct card_info *, void *); 142struct atom_context *atom_parse(struct card_info *, void *);
140void atom_execute_table(struct atom_context *, int, uint32_t *); 143int atom_execute_table(struct atom_context *, int, uint32_t *);
141int atom_asic_init(struct atom_context *); 144int atom_asic_init(struct atom_context *);
142void atom_destroy(struct atom_context *); 145void atom_destroy(struct atom_context *);
143void atom_parse_data_header(struct atom_context *ctx, int index, uint16_t *size, uint8_t *frev, uint8_t *crev, uint16_t *data_start); 146bool atom_parse_data_header(struct atom_context *ctx, int index, uint16_t *size,
144void atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t *frev, uint8_t *crev); 147 uint8_t *frev, uint8_t *crev, uint16_t *data_start);
148bool atom_parse_cmd_header(struct atom_context *ctx, int index,
149 uint8_t *frev, uint8_t *crev);
150int atom_allocate_fb_scratch(struct atom_context *ctx);
145#include "atom-types.h" 151#include "atom-types.h"
146#include "atombios.h" 152#include "atombios.h"
147#include "ObjectID.h" 153#include "ObjectID.h"
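
The context grows a mutex and a scratch pointer, so whoever creates it has to initialize both before any command table that takes the lock or touches ATOM_ARG_FB scratch space runs. A minimal setup sketch, assuming the caller owns the context (the helper name is hypothetical; the real setup lives in the radeon code outside this excerpt):

/* Sketch: initialize the new atom_context fields after atom_parse(). */
#include <linux/mutex.h>
#include "atom.h"

static int atom_context_setup_example(struct atom_context *ctx)
{
    mutex_init(&ctx->mutex);
    return atom_allocate_fb_scratch(ctx);  /* allocates ctx->scratch */
}
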
diff --git a/drivers/gpu/drm/radeon/atombios.h b/drivers/gpu/drm/radeon/atombios.h
index c11ddddfb3b6..27e2c715be11 100644
--- a/drivers/gpu/drm/radeon/atombios.h
+++ b/drivers/gpu/drm/radeon/atombios.h
@@ -1,5 +1,5 @@
1/* 1/*
2 * Copyright 2006-2007 Advanced Micro Devices, Inc. 2 * Copyright 2006-2007 Advanced Micro Devices, Inc.
3 * 3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a 4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"), 5 * copy of this software and associated documentation files (the "Software"),
@@ -20,10 +20,12 @@
20 * OTHER DEALINGS IN THE SOFTWARE. 20 * OTHER DEALINGS IN THE SOFTWARE.
21 */ 21 */
22 22
23/****************************************************************************/ 23
24/****************************************************************************/
24/*Portion I: Definitions shared between VBIOS and Driver */ 25/*Portion I: Definitions shared between VBIOS and Driver */
25/****************************************************************************/ 26/****************************************************************************/
26 27
28
27#ifndef _ATOMBIOS_H 29#ifndef _ATOMBIOS_H
28#define _ATOMBIOS_H 30#define _ATOMBIOS_H
29 31
@@ -40,39 +42,46 @@
40#endif 42#endif
41 43
42#ifdef _H2INC 44#ifdef _H2INC
43#ifndef ULONG 45 #ifndef ULONG
44typedef unsigned long ULONG; 46 typedef unsigned long ULONG;
45#endif 47 #endif
46 48
47#ifndef UCHAR 49 #ifndef UCHAR
48typedef unsigned char UCHAR; 50 typedef unsigned char UCHAR;
49#endif 51 #endif
50 52
51#ifndef USHORT 53 #ifndef USHORT
52typedef unsigned short USHORT; 54 typedef unsigned short USHORT;
53#endif 55 #endif
54#endif 56#endif
55 57
56#define ATOM_DAC_A 0 58#define ATOM_DAC_A 0
57#define ATOM_DAC_B 1 59#define ATOM_DAC_B 1
58#define ATOM_EXT_DAC 2 60#define ATOM_EXT_DAC 2
59 61
60#define ATOM_CRTC1 0 62#define ATOM_CRTC1 0
61#define ATOM_CRTC2 1 63#define ATOM_CRTC2 1
64#define ATOM_CRTC3 2
65#define ATOM_CRTC4 3
66#define ATOM_CRTC5 4
67#define ATOM_CRTC6 5
68#define ATOM_CRTC_INVALID 0xFF
62 69
63#define ATOM_DIGA 0 70#define ATOM_DIGA 0
64#define ATOM_DIGB 1 71#define ATOM_DIGB 1
65 72
66#define ATOM_PPLL1 0 73#define ATOM_PPLL1 0
67#define ATOM_PPLL2 1 74#define ATOM_PPLL2 1
75#define ATOM_DCPLL 2
76#define ATOM_PPLL_INVALID 0xFF
68 77
69#define ATOM_SCALER1 0 78#define ATOM_SCALER1 0
70#define ATOM_SCALER2 1 79#define ATOM_SCALER2 1
71 80
72#define ATOM_SCALER_DISABLE 0 81#define ATOM_SCALER_DISABLE 0
73#define ATOM_SCALER_CENTER 1 82#define ATOM_SCALER_CENTER 1
74#define ATOM_SCALER_EXPANSION 2 83#define ATOM_SCALER_EXPANSION 2
75#define ATOM_SCALER_MULTI_EX 3 84#define ATOM_SCALER_MULTI_EX 3
76 85
77#define ATOM_DISABLE 0 86#define ATOM_DISABLE 0
78#define ATOM_ENABLE 1 87#define ATOM_ENABLE 1
@@ -82,6 +91,7 @@ typedef unsigned short USHORT;
82#define ATOM_LCD_SELFTEST_START (ATOM_DISABLE+5) 91#define ATOM_LCD_SELFTEST_START (ATOM_DISABLE+5)
83#define ATOM_LCD_SELFTEST_STOP (ATOM_ENABLE+5) 92#define ATOM_LCD_SELFTEST_STOP (ATOM_ENABLE+5)
84#define ATOM_ENCODER_INIT (ATOM_DISABLE+7) 93#define ATOM_ENCODER_INIT (ATOM_DISABLE+7)
94#define ATOM_GET_STATUS (ATOM_DISABLE+8)
85 95
86#define ATOM_BLANKING 1 96#define ATOM_BLANKING 1
87#define ATOM_BLANKING_OFF 0 97#define ATOM_BLANKING_OFF 0
@@ -114,7 +124,7 @@ typedef unsigned short USHORT;
114#define ATOM_DAC2_CV ATOM_DAC1_CV 124#define ATOM_DAC2_CV ATOM_DAC1_CV
115#define ATOM_DAC2_NTSC ATOM_DAC1_NTSC 125#define ATOM_DAC2_NTSC ATOM_DAC1_NTSC
116#define ATOM_DAC2_PAL ATOM_DAC1_PAL 126#define ATOM_DAC2_PAL ATOM_DAC1_PAL
117 127
118#define ATOM_PM_ON 0 128#define ATOM_PM_ON 0
119#define ATOM_PM_STANDBY 1 129#define ATOM_PM_STANDBY 1
120#define ATOM_PM_SUSPEND 2 130#define ATOM_PM_SUSPEND 2
@@ -134,6 +144,7 @@ typedef unsigned short USHORT;
134#define ATOM_PANEL_MISC_TEMPORAL 0x00000040 144#define ATOM_PANEL_MISC_TEMPORAL 0x00000040
135#define ATOM_PANEL_MISC_API_ENABLED 0x00000080 145#define ATOM_PANEL_MISC_API_ENABLED 0x00000080
136 146
147
137#define MEMTYPE_DDR1 "DDR1" 148#define MEMTYPE_DDR1 "DDR1"
138#define MEMTYPE_DDR2 "DDR2" 149#define MEMTYPE_DDR2 "DDR2"
139#define MEMTYPE_DDR3 "DDR3" 150#define MEMTYPE_DDR3 "DDR3"
@@ -145,19 +156,19 @@ typedef unsigned short USHORT;
145 156
146/* Maximum size of that FireGL flag string */ 157/* Maximum size of that FireGL flag string */
147 158
148#define ATOM_FIREGL_FLAG_STRING "FGL" /* Flag used to enable FireGL Support */ 159#define ATOM_FIREGL_FLAG_STRING "FGL" //Flag used to enable FireGL Support
149#define ATOM_MAX_SIZE_OF_FIREGL_FLAG_STRING 3 /* sizeof( ATOM_FIREGL_FLAG_STRING ) */ 160#define ATOM_MAX_SIZE_OF_FIREGL_FLAG_STRING 3 //sizeof( ATOM_FIREGL_FLAG_STRING )
150 161
151#define ATOM_FAKE_DESKTOP_STRING "DSK" /* Flag used to enable mobile ASIC on Desktop */ 162#define ATOM_FAKE_DESKTOP_STRING "DSK" //Flag used to enable mobile ASIC on Desktop
152#define ATOM_MAX_SIZE_OF_FAKE_DESKTOP_STRING ATOM_MAX_SIZE_OF_FIREGL_FLAG_STRING 163#define ATOM_MAX_SIZE_OF_FAKE_DESKTOP_STRING ATOM_MAX_SIZE_OF_FIREGL_FLAG_STRING
153 164
154#define ATOM_M54T_FLAG_STRING "M54T" /* Flag used to enable M54T Support */ 165#define ATOM_M54T_FLAG_STRING "M54T" //Flag used to enable M54T Support
155#define ATOM_MAX_SIZE_OF_M54T_FLAG_STRING 4 /* sizeof( ATOM_M54T_FLAG_STRING ) */ 166#define ATOM_MAX_SIZE_OF_M54T_FLAG_STRING 4 //sizeof( ATOM_M54T_FLAG_STRING )
156 167
157#define HW_ASSISTED_I2C_STATUS_FAILURE 2 168#define HW_ASSISTED_I2C_STATUS_FAILURE 2
158#define HW_ASSISTED_I2C_STATUS_SUCCESS 1 169#define HW_ASSISTED_I2C_STATUS_SUCCESS 1
159 170
160#pragma pack(1)                        /* BIOS data must use byte alignment */         171#pragma pack(1)                        /* BIOS data must use byte alignment */
161 172
162/* Define offset to location of ROM header. */ 173/* Define offset to location of ROM header. */
163 174
@@ -165,367 +176,410 @@ typedef unsigned short USHORT;
165#define OFFSET_TO_ATOM_ROM_IMAGE_SIZE 0x00000002L 176#define OFFSET_TO_ATOM_ROM_IMAGE_SIZE 0x00000002L
166 177
167#define OFFSET_TO_ATOMBIOS_ASIC_BUS_MEM_TYPE 0x94 178#define OFFSET_TO_ATOMBIOS_ASIC_BUS_MEM_TYPE 0x94
168#define MAXSIZE_OF_ATOMBIOS_ASIC_BUS_MEM_TYPE 20 /* including the terminator 0x0! */ 179#define MAXSIZE_OF_ATOMBIOS_ASIC_BUS_MEM_TYPE 20 /* including the terminator 0x0! */
169#define OFFSET_TO_GET_ATOMBIOS_STRINGS_NUMBER 0x002f 180#define OFFSET_TO_GET_ATOMBIOS_STRINGS_NUMBER 0x002f
170#define OFFSET_TO_GET_ATOMBIOS_STRINGS_START 0x006e 181#define OFFSET_TO_GET_ATOMBIOS_STRINGS_START 0x006e
171 182
172/* Common header for all ROM Data tables. 183/* Common header for all ROM Data tables.
173 Every table pointed _ATOM_MASTER_DATA_TABLE has this common header. 184 Every table pointed _ATOM_MASTER_DATA_TABLE has this common header.
174 And the pointer actually points to this header. */ 185 And the pointer actually points to this header. */
175 186
176typedef struct _ATOM_COMMON_TABLE_HEADER { 187typedef struct _ATOM_COMMON_TABLE_HEADER
177 USHORT usStructureSize; 188{
178 UCHAR ucTableFormatRevision; /*Change it when the Parser is not backward compatible */ 189 USHORT usStructureSize;
179 UCHAR ucTableContentRevision; /*Change it only when the table needs to change but the firmware */ 190 UCHAR ucTableFormatRevision; /*Change it when the Parser is not backward compatible */
180 /*Image can't be updated, while Driver needs to carry the new table! */ 191 UCHAR ucTableContentRevision; /*Change it only when the table needs to change but the firmware */
181} ATOM_COMMON_TABLE_HEADER; 192 /*Image can't be updated, while Driver needs to carry the new table! */
182 193}ATOM_COMMON_TABLE_HEADER;
183typedef struct _ATOM_ROM_HEADER { 194
184 ATOM_COMMON_TABLE_HEADER sHeader; 195typedef struct _ATOM_ROM_HEADER
185 UCHAR uaFirmWareSignature[4]; /*Signature to distinguish between Atombios and non-atombios, 196{
186 atombios should init it as "ATOM", don't change the position */ 197 ATOM_COMMON_TABLE_HEADER sHeader;
187 USHORT usBiosRuntimeSegmentAddress; 198 UCHAR uaFirmWareSignature[4]; /*Signature to distinguish between Atombios and non-atombios,
188 USHORT usProtectedModeInfoOffset; 199 atombios should init it as "ATOM", don't change the position */
189 USHORT usConfigFilenameOffset; 200 USHORT usBiosRuntimeSegmentAddress;
190 USHORT usCRC_BlockOffset; 201 USHORT usProtectedModeInfoOffset;
191 USHORT usBIOS_BootupMessageOffset; 202 USHORT usConfigFilenameOffset;
192 USHORT usInt10Offset; 203 USHORT usCRC_BlockOffset;
193 USHORT usPciBusDevInitCode; 204 USHORT usBIOS_BootupMessageOffset;
194 USHORT usIoBaseAddress; 205 USHORT usInt10Offset;
195 USHORT usSubsystemVendorID; 206 USHORT usPciBusDevInitCode;
196 USHORT usSubsystemID; 207 USHORT usIoBaseAddress;
197 USHORT usPCI_InfoOffset; 208 USHORT usSubsystemVendorID;
198 USHORT usMasterCommandTableOffset; /*Offset for SW to get all command table offsets, Don't change the position */ 209 USHORT usSubsystemID;
199 USHORT usMasterDataTableOffset; /*Offset for SW to get all data table offsets, Don't change the position */ 210 USHORT usPCI_InfoOffset;
200 UCHAR ucExtendedFunctionCode; 211 USHORT usMasterCommandTableOffset; /*Offset for SW to get all command table offsets, Don't change the position */
201 UCHAR ucReserved; 212 USHORT usMasterDataTableOffset; /*Offset for SW to get all data table offsets, Don't change the position */
202} ATOM_ROM_HEADER; 213 UCHAR ucExtendedFunctionCode;
214 UCHAR ucReserved;
215}ATOM_ROM_HEADER;
203 216
204/*==============================Command Table Portion==================================== */ 217/*==============================Command Table Portion==================================== */
205 218
206#ifdef UEFI_BUILD 219#ifdef UEFI_BUILD
207#define UTEMP USHORT 220 #define UTEMP USHORT
208#define USHORT void* 221 #define USHORT void*
209#endif 222#endif
210 223
211typedef struct _ATOM_MASTER_LIST_OF_COMMAND_TABLES { 224typedef struct _ATOM_MASTER_LIST_OF_COMMAND_TABLES{
212 USHORT ASIC_Init; /* Function Table, used by various SW components,latest version 1.1 */ 225 USHORT ASIC_Init; //Function Table, used by various SW components,latest version 1.1
213 USHORT GetDisplaySurfaceSize; /* Atomic Table, Used by Bios when enabling HW ICON */ 226 USHORT GetDisplaySurfaceSize; //Atomic Table, Used by Bios when enabling HW ICON
214 USHORT ASIC_RegistersInit; /* Atomic Table, indirectly used by various SW components,called from ASIC_Init */ 227 USHORT ASIC_RegistersInit; //Atomic Table, indirectly used by various SW components,called from ASIC_Init
215 USHORT VRAM_BlockVenderDetection; /* Atomic Table, used only by Bios */ 228 USHORT VRAM_BlockVenderDetection; //Atomic Table, used only by Bios
216 USHORT DIGxEncoderControl; /* Only used by Bios */ 229 USHORT DIGxEncoderControl; //Only used by Bios
217 USHORT MemoryControllerInit; /* Atomic Table, indirectly used by various SW components,called from ASIC_Init */ 230 USHORT MemoryControllerInit; //Atomic Table, indirectly used by various SW components,called from ASIC_Init
218 USHORT EnableCRTCMemReq; /* Function Table,directly used by various SW components,latest version 2.1 */ 231 USHORT EnableCRTCMemReq; //Function Table,directly used by various SW components,latest version 2.1
219 USHORT MemoryParamAdjust; /* Atomic Table, indirectly used by various SW components,called from SetMemoryClock if needed */ 232 USHORT MemoryParamAdjust; //Atomic Table, indirectly used by various SW components,called from SetMemoryClock if needed
220 USHORT DVOEncoderControl; /* Function Table,directly used by various SW components,latest version 1.2 */ 233 USHORT DVOEncoderControl; //Function Table,directly used by various SW components,latest version 1.2
221 USHORT GPIOPinControl; /* Atomic Table, only used by Bios */ 234 USHORT GPIOPinControl; //Atomic Table, only used by Bios
222 USHORT SetEngineClock; /*Function Table,directly used by various SW components,latest version 1.1 */ 235 USHORT SetEngineClock; //Function Table,directly used by various SW components,latest version 1.1
223 USHORT SetMemoryClock; /* Function Table,directly used by various SW components,latest version 1.1 */ 236 USHORT SetMemoryClock; //Function Table,directly used by various SW components,latest version 1.1
224 USHORT SetPixelClock; /*Function Table,directly used by various SW components,latest version 1.2 */ 237 USHORT SetPixelClock; //Function Table,directly used by various SW components,latest version 1.2
225 USHORT DynamicClockGating; /* Atomic Table, indirectly used by various SW components,called from ASIC_Init */ 238 USHORT DynamicClockGating; //Atomic Table, indirectly used by various SW components,called from ASIC_Init
226 USHORT ResetMemoryDLL; /* Atomic Table, indirectly used by various SW components,called from SetMemoryClock */ 239 USHORT ResetMemoryDLL; //Atomic Table, indirectly used by various SW components,called from SetMemoryClock
227 USHORT ResetMemoryDevice; /* Atomic Table, indirectly used by various SW components,called from SetMemoryClock */ 240 USHORT ResetMemoryDevice; //Atomic Table, indirectly used by various SW components,called from SetMemoryClock
228 USHORT MemoryPLLInit; 241 USHORT MemoryPLLInit;
229 USHORT AdjustDisplayPll; /* only used by Bios */ 242 USHORT AdjustDisplayPll; //only used by Bios
230 USHORT AdjustMemoryController; /* Atomic Table, indirectly used by various SW components,called from SetMemoryClock */ 243 USHORT AdjustMemoryController; //Atomic Table, indirectly used by various SW components,called from SetMemoryClock
231 USHORT EnableASIC_StaticPwrMgt; /* Atomic Table, only used by Bios */ 244 USHORT EnableASIC_StaticPwrMgt; //Atomic Table, only used by Bios
232 USHORT ASIC_StaticPwrMgtStatusChange; /* Obsolete, only used by Bios */ 245 USHORT ASIC_StaticPwrMgtStatusChange; //Obsolete , only used by Bios
233 USHORT DAC_LoadDetection; /* Atomic Table, directly used by various SW components,latest version 1.2 */ 246 USHORT DAC_LoadDetection; //Atomic Table, directly used by various SW components,latest version 1.2
234 USHORT LVTMAEncoderControl; /* Atomic Table,directly used by various SW components,latest version 1.3 */ 247 USHORT LVTMAEncoderControl; //Atomic Table,directly used by various SW components,latest version 1.3
235 USHORT LCD1OutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 248 USHORT LCD1OutputControl; //Atomic Table, directly used by various SW components,latest version 1.1
236 USHORT DAC1EncoderControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 249 USHORT DAC1EncoderControl; //Atomic Table, directly used by various SW components,latest version 1.1
237 USHORT DAC2EncoderControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 250 USHORT DAC2EncoderControl; //Atomic Table, directly used by various SW components,latest version 1.1
238 USHORT DVOOutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 251 USHORT DVOOutputControl; //Atomic Table, directly used by various SW components,latest version 1.1
239 USHORT CV1OutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 252 USHORT CV1OutputControl; //Atomic Table, Atomic Table, Obsolete from Ry6xx, use DAC2 Output instead
240 USHORT GetConditionalGoldenSetting; /* only used by Bios */ 253 USHORT GetConditionalGoldenSetting; //only used by Bios
241 USHORT TVEncoderControl; /* Function Table,directly used by various SW components,latest version 1.1 */ 254 USHORT TVEncoderControl; //Function Table,directly used by various SW components,latest version 1.1
242 USHORT TMDSAEncoderControl; /* Atomic Table, directly used by various SW components,latest version 1.3 */ 255 USHORT TMDSAEncoderControl; //Atomic Table, directly used by various SW components,latest version 1.3
243 USHORT LVDSEncoderControl; /* Atomic Table, directly used by various SW components,latest version 1.3 */ 256 USHORT LVDSEncoderControl; //Atomic Table, directly used by various SW components,latest version 1.3
244 USHORT TV1OutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 257 USHORT TV1OutputControl; //Atomic Table, Obsolete from Ry6xx, use DAC2 Output instead
245 USHORT EnableScaler; /* Atomic Table, used only by Bios */ 258 USHORT EnableScaler; //Atomic Table, used only by Bios
246 USHORT BlankCRTC; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 259 USHORT BlankCRTC; //Atomic Table, directly used by various SW components,latest version 1.1
247 USHORT EnableCRTC; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 260 USHORT EnableCRTC; //Atomic Table, directly used by various SW components,latest version 1.1
248 USHORT GetPixelClock; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 261 USHORT GetPixelClock; //Atomic Table, directly used by various SW components,latest version 1.1
249 USHORT EnableVGA_Render; /* Function Table,directly used by various SW components,latest version 1.1 */ 262 USHORT EnableVGA_Render; //Function Table,directly used by various SW components,latest version 1.1
250 USHORT EnableVGA_Access; /* Obsolete , only used by Bios */ 263 USHORT GetSCLKOverMCLKRatio; //Atomic Table, only used by Bios
251 USHORT SetCRTC_Timing; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 264 USHORT SetCRTC_Timing; //Atomic Table, directly used by various SW components,latest version 1.1
252 USHORT SetCRTC_OverScan; /* Atomic Table, used by various SW components,latest version 1.1 */ 265 USHORT SetCRTC_OverScan; //Atomic Table, used by various SW components,latest version 1.1
253 USHORT SetCRTC_Replication; /* Atomic Table, used only by Bios */ 266 USHORT SetCRTC_Replication; //Atomic Table, used only by Bios
254 USHORT SelectCRTC_Source; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 267 USHORT SelectCRTC_Source; //Atomic Table, directly used by various SW components,latest version 1.1
255 USHORT EnableGraphSurfaces; /* Atomic Table, used only by Bios */ 268 USHORT EnableGraphSurfaces; //Atomic Table, used only by Bios
256 USHORT UpdateCRTC_DoubleBufferRegisters; 269 USHORT UpdateCRTC_DoubleBufferRegisters;
257 USHORT LUT_AutoFill; /* Atomic Table, only used by Bios */ 270 USHORT LUT_AutoFill; //Atomic Table, only used by Bios
258 USHORT EnableHW_IconCursor; /* Atomic Table, only used by Bios */ 271 USHORT EnableHW_IconCursor; //Atomic Table, only used by Bios
259 USHORT GetMemoryClock; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 272 USHORT GetMemoryClock; //Atomic Table, directly used by various SW components,latest version 1.1
260 USHORT GetEngineClock; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 273 USHORT GetEngineClock; //Atomic Table, directly used by various SW components,latest version 1.1
261 USHORT SetCRTC_UsingDTDTiming; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 274 USHORT SetCRTC_UsingDTDTiming; //Atomic Table, directly used by various SW components,latest version 1.1
262 USHORT ExternalEncoderControl; /* Atomic Table, directly used by various SW components,latest version 2.1 */ 275 USHORT ExternalEncoderControl; //Atomic Table, directly used by various SW components,latest version 2.1
263 USHORT LVTMAOutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 276 USHORT LVTMAOutputControl; //Atomic Table, directly used by various SW components,latest version 1.1
264 USHORT VRAM_BlockDetectionByStrap; /* Atomic Table, used only by Bios */ 277 USHORT VRAM_BlockDetectionByStrap; //Atomic Table, used only by Bios
265 USHORT MemoryCleanUp; /* Atomic Table, only used by Bios */ 278 USHORT MemoryCleanUp; //Atomic Table, only used by Bios
266 USHORT ProcessI2cChannelTransaction; /* Function Table,only used by Bios */ 279 USHORT ProcessI2cChannelTransaction; //Function Table,only used by Bios
267 USHORT WriteOneByteToHWAssistedI2C; /* Function Table,indirectly used by various SW components */ 280 USHORT WriteOneByteToHWAssistedI2C; //Function Table,indirectly used by various SW components
268 USHORT ReadHWAssistedI2CStatus; /* Atomic Table, indirectly used by various SW components */ 281 USHORT ReadHWAssistedI2CStatus; //Atomic Table, indirectly used by various SW components
269 USHORT SpeedFanControl; /* Function Table,indirectly used by various SW components,called from ASIC_Init */ 282 USHORT SpeedFanControl; //Function Table,indirectly used by various SW components,called from ASIC_Init
270 USHORT PowerConnectorDetection; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 283 USHORT PowerConnectorDetection; //Atomic Table, directly used by various SW components,latest version 1.1
271 USHORT MC_Synchronization; /* Atomic Table, indirectly used by various SW components,called from SetMemoryClock */ 284 USHORT MC_Synchronization; //Atomic Table, indirectly used by various SW components,called from SetMemoryClock
272 USHORT ComputeMemoryEnginePLL; /* Atomic Table, indirectly used by various SW components,called from SetMemory/EngineClock */ 285 USHORT ComputeMemoryEnginePLL; //Atomic Table, indirectly used by various SW components,called from SetMemory/EngineClock
273 USHORT MemoryRefreshConversion; /* Atomic Table, indirectly used by various SW components,called from SetMemory or SetEngineClock */ 286 USHORT MemoryRefreshConversion; //Atomic Table, indirectly used by various SW components,called from SetMemory or SetEngineClock
274 USHORT VRAM_GetCurrentInfoBlock; /* Atomic Table, used only by Bios */ 287 USHORT VRAM_GetCurrentInfoBlock; //Atomic Table, used only by Bios
275 USHORT DynamicMemorySettings; /* Atomic Table, indirectly used by various SW components,called from SetMemoryClock */ 288 USHORT DynamicMemorySettings; //Atomic Table, indirectly used by various SW components,called from SetMemoryClock
276 USHORT MemoryTraining; /* Atomic Table, used only by Bios */ 289 USHORT MemoryTraining; //Atomic Table, used only by Bios
277 USHORT EnableSpreadSpectrumOnPPLL; /* Atomic Table, directly used by various SW components,latest version 1.2 */ 290 USHORT EnableSpreadSpectrumOnPPLL; //Atomic Table, directly used by various SW components,latest version 1.2
278 USHORT TMDSAOutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 291 USHORT TMDSAOutputControl; //Atomic Table, directly used by various SW components,latest version 1.1
279 USHORT SetVoltage; /* Function Table,directly and/or indirectly used by various SW components,latest version 1.1 */ 292 USHORT SetVoltage; //Function Table,directly and/or indirectly used by various SW components,latest version 1.1
280 USHORT DAC1OutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 293 USHORT DAC1OutputControl; //Atomic Table, directly used by various SW components,latest version 1.1
281 USHORT DAC2OutputControl; /* Atomic Table, directly used by various SW components,latest version 1.1 */ 294 USHORT DAC2OutputControl; //Atomic Table, directly used by various SW components,latest version 1.1
282 USHORT SetupHWAssistedI2CStatus; /* Function Table,only used by Bios, obsolete soon.Switch to use "ReadEDIDFromHWAssistedI2C" */ 295 USHORT SetupHWAssistedI2CStatus; //Function Table,only used by Bios, obsolete soon.Switch to use "ReadEDIDFromHWAssistedI2C"
283 USHORT ClockSource; /* Atomic Table, indirectly used by various SW components,called from ASIC_Init */ 296 USHORT ClockSource; //Atomic Table, indirectly used by various SW components,called from ASIC_Init
284 USHORT MemoryDeviceInit; /* Atomic Table, indirectly used by various SW components,called from SetMemoryClock */ 297 USHORT MemoryDeviceInit; //Atomic Table, indirectly used by various SW components,called from SetMemoryClock
285 USHORT EnableYUV; /* Atomic Table, indirectly used by various SW components,called from EnableVGARender */ 298 USHORT EnableYUV; //Atomic Table, indirectly used by various SW components,called from EnableVGARender
286 USHORT DIG1EncoderControl; /* Atomic Table,directly used by various SW components,latest version 1.1 */ 299 USHORT DIG1EncoderControl; //Atomic Table,directly used by various SW components,latest version 1.1
287 USHORT DIG2EncoderControl; /* Atomic Table,directly used by various SW components,latest version 1.1 */ 300 USHORT DIG2EncoderControl; //Atomic Table,directly used by various SW components,latest version 1.1
288 USHORT DIG1TransmitterControl; /* Atomic Table,directly used by various SW components,latest version 1.1 */ 301 USHORT DIG1TransmitterControl; //Atomic Table,directly used by various SW components,latest version 1.1
289 USHORT DIG2TransmitterControl; /* Atomic Table,directly used by various SW components,latest version 1.1 */ 302 USHORT DIG2TransmitterControl; //Atomic Table,directly used by various SW components,latest version 1.1
290 USHORT ProcessAuxChannelTransaction; /* Function Table,only used by Bios */ 303 USHORT ProcessAuxChannelTransaction; //Function Table,only used by Bios
291 USHORT DPEncoderService; /* Function Table,only used by Bios */ 304 USHORT DPEncoderService; //Function Table,only used by Bios
292} ATOM_MASTER_LIST_OF_COMMAND_TABLES; 305}ATOM_MASTER_LIST_OF_COMMAND_TABLES;
293 306
294/* For backward compatible */ 307// For backward compatible
295#define ReadEDIDFromHWAssistedI2C ProcessI2cChannelTransaction 308#define ReadEDIDFromHWAssistedI2C ProcessI2cChannelTransaction
296#define UNIPHYTransmitterControl DIG1TransmitterControl 309#define UNIPHYTransmitterControl DIG1TransmitterControl
297#define LVTMATransmitterControl DIG2TransmitterControl 310#define LVTMATransmitterControl DIG2TransmitterControl
298#define SetCRTC_DPM_State GetConditionalGoldenSetting 311#define SetCRTC_DPM_State GetConditionalGoldenSetting
299#define SetUniphyInstance ASIC_StaticPwrMgtStatusChange 312#define SetUniphyInstance ASIC_StaticPwrMgtStatusChange
313#define HPDInterruptService ReadHWAssistedI2CStatus
314#define EnableVGA_Access GetSCLKOverMCLKRatio
300 315
301typedef struct _ATOM_MASTER_COMMAND_TABLE { 316typedef struct _ATOM_MASTER_COMMAND_TABLE
302 ATOM_COMMON_TABLE_HEADER sHeader; 317{
303 ATOM_MASTER_LIST_OF_COMMAND_TABLES ListOfCommandTables; 318 ATOM_COMMON_TABLE_HEADER sHeader;
304} ATOM_MASTER_COMMAND_TABLE; 319 ATOM_MASTER_LIST_OF_COMMAND_TABLES ListOfCommandTables;
305 320}ATOM_MASTER_COMMAND_TABLE;
306/****************************************************************************/ 321
307/* Structures used in every command table */ 322/****************************************************************************/
308/****************************************************************************/ 323// Structures used in every command table
309typedef struct _ATOM_TABLE_ATTRIBUTE { 324/****************************************************************************/
325typedef struct _ATOM_TABLE_ATTRIBUTE
326{
310#if ATOM_BIG_ENDIAN 327#if ATOM_BIG_ENDIAN
311 USHORT UpdatedByUtility:1; /* [15]=Table updated by utility flag */ 328 USHORT UpdatedByUtility:1; //[15]=Table updated by utility flag
312 USHORT PS_SizeInBytes:7; /* [14:8]=Size of parameter space in Bytes (multiple of a dword), */ 329 USHORT PS_SizeInBytes:7; //[14:8]=Size of parameter space in Bytes (multiple of a dword),
313 USHORT WS_SizeInBytes:8; /* [7:0]=Size of workspace in Bytes (in multiple of a dword), */ 330 USHORT WS_SizeInBytes:8; //[7:0]=Size of workspace in Bytes (in multiple of a dword),
314#else 331#else
315 USHORT WS_SizeInBytes:8; /* [7:0]=Size of workspace in Bytes (in multiple of a dword), */ 332 USHORT WS_SizeInBytes:8; //[7:0]=Size of workspace in Bytes (in multiple of a dword),
316 USHORT PS_SizeInBytes:7; /* [14:8]=Size of parameter space in Bytes (multiple of a dword), */ 333 USHORT PS_SizeInBytes:7; //[14:8]=Size of parameter space in Bytes (multiple of a dword),
317 USHORT UpdatedByUtility:1; /* [15]=Table updated by utility flag */ 334 USHORT UpdatedByUtility:1; //[15]=Table updated by utility flag
318#endif 335#endif
319} ATOM_TABLE_ATTRIBUTE; 336}ATOM_TABLE_ATTRIBUTE;
320 337
321typedef union _ATOM_TABLE_ATTRIBUTE_ACCESS { 338typedef union _ATOM_TABLE_ATTRIBUTE_ACCESS
322 ATOM_TABLE_ATTRIBUTE sbfAccess; 339{
323 USHORT susAccess; 340 ATOM_TABLE_ATTRIBUTE sbfAccess;
324} ATOM_TABLE_ATTRIBUTE_ACCESS; 341 USHORT susAccess;
325 342}ATOM_TABLE_ATTRIBUTE_ACCESS;
326/****************************************************************************/ 343
327/* Common header for all command tables. */ 344/****************************************************************************/
328/* Every table pointed by _ATOM_MASTER_COMMAND_TABLE has this common header. */ 345// Common header for all command tables.
329/* And the pointer actually points to this header. */ 346// Every table pointed by _ATOM_MASTER_COMMAND_TABLE has this common header.
330/****************************************************************************/ 347// And the pointer actually points to this header.
331typedef struct _ATOM_COMMON_ROM_COMMAND_TABLE_HEADER { 348/****************************************************************************/
332 ATOM_COMMON_TABLE_HEADER CommonHeader; 349typedef struct _ATOM_COMMON_ROM_COMMAND_TABLE_HEADER
333 ATOM_TABLE_ATTRIBUTE TableAttribute; 350{
334} ATOM_COMMON_ROM_COMMAND_TABLE_HEADER; 351 ATOM_COMMON_TABLE_HEADER CommonHeader;
352 ATOM_TABLE_ATTRIBUTE TableAttribute;
353}ATOM_COMMON_ROM_COMMAND_TABLE_HEADER;
335 354
336/****************************************************************************/ 355/****************************************************************************/
337/* Structures used by ComputeMemoryEnginePLLTable */ 356// Structures used by ComputeMemoryEnginePLLTable
338/****************************************************************************/ 357/****************************************************************************/
339#define COMPUTE_MEMORY_PLL_PARAM 1 358#define COMPUTE_MEMORY_PLL_PARAM 1
340#define COMPUTE_ENGINE_PLL_PARAM 2 359#define COMPUTE_ENGINE_PLL_PARAM 2
341 360
342typedef struct _COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS { 361typedef struct _COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS
343 ULONG ulClock; /* When returen, it's the re-calculated clock based on given Fb_div Post_Div and ref_div */ 362{
344 UCHAR ucAction; /* 0:reserved //1:Memory //2:Engine */ 363 ULONG ulClock; //When returen, it's the re-calculated clock based on given Fb_div Post_Div and ref_div
345 UCHAR ucReserved; /* may expand to return larger Fbdiv later */ 364 UCHAR ucAction; //0:reserved //1:Memory //2:Engine
346 UCHAR ucFbDiv; /* return value */ 365 UCHAR ucReserved; //may expand to return larger Fbdiv later
347 UCHAR ucPostDiv; /* return value */ 366 UCHAR ucFbDiv; //return value
348} COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS; 367 UCHAR ucPostDiv; //return value
349 368}COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS;
350typedef struct _COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V2 { 369
351 ULONG ulClock; /* When return, [23:0] return real clock */ 370typedef struct _COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V2
352 UCHAR ucAction; /* 0:reserved;COMPUTE_MEMORY_PLL_PARAM:Memory;COMPUTE_ENGINE_PLL_PARAM:Engine. it return ref_div to be written to register */ 371{
353 USHORT usFbDiv; /* return Feedback value to be written to register */ 372 ULONG ulClock; //When return, [23:0] return real clock
354 UCHAR ucPostDiv; /* return post div to be written to register */ 373 UCHAR ucAction; //0:reserved;COMPUTE_MEMORY_PLL_PARAM:Memory;COMPUTE_ENGINE_PLL_PARAM:Engine. it return ref_div to be written to register
355} COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V2; 374 USHORT usFbDiv; //return Feedback value to be written to register
375 UCHAR ucPostDiv; //return post div to be written to register
376}COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V2;
356#define COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_PS_ALLOCATION COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS 377#define COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_PS_ALLOCATION COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS
357 378
358#define SET_CLOCK_FREQ_MASK 0x00FFFFFF /* Clock change tables only take bit [23:0] as the requested clock value */ 379
359#define USE_NON_BUS_CLOCK_MASK 0x01000000 /* Applicable to both memory and engine clock change, when set, it uses another clock as the temporary clock (engine uses memory and vice versa) */ 380#define SET_CLOCK_FREQ_MASK 0x00FFFFFF //Clock change tables only take bit [23:0] as the requested clock value
360#define USE_MEMORY_SELF_REFRESH_MASK 0x02000000 /* Only applicable to memory clock change, when set, using memory self refresh during clock transition */ 381#define USE_NON_BUS_CLOCK_MASK 0x01000000 //Applicable to both memory and engine clock change, when set, it uses another clock as the temporary clock (engine uses memory and vice versa)
361#define SKIP_INTERNAL_MEMORY_PARAMETER_CHANGE 0x04000000 /* Only applicable to memory clock change, when set, the table will skip predefined internal memory parameter change */ 382#define USE_MEMORY_SELF_REFRESH_MASK 0x02000000 //Only applicable to memory clock change, when set, using memory self refresh during clock transition
362#define FIRST_TIME_CHANGE_CLOCK 0x08000000 /* Applicable to both memory and engine clock change,when set, it means this is 1st time to change clock after ASIC bootup */ 383#define SKIP_INTERNAL_MEMORY_PARAMETER_CHANGE 0x04000000 //Only applicable to memory clock change, when set, the table will skip predefined internal memory parameter change
363#define SKIP_SW_PROGRAM_PLL 0x10000000 /* Applicable to both memory and engine clock change, when set, it means the table will not program SPLL/MPLL */ 384#define FIRST_TIME_CHANGE_CLOCK 0x08000000 //Applicable to both memory and engine clock change,when set, it means this is 1st time to change clock after ASIC bootup
385#define SKIP_SW_PROGRAM_PLL 0x10000000 //Applicable to both memory and engine clock change, when set, it means the table will not program SPLL/MPLL
364#define USE_SS_ENABLED_PIXEL_CLOCK USE_NON_BUS_CLOCK_MASK 386#define USE_SS_ENABLED_PIXEL_CLOCK USE_NON_BUS_CLOCK_MASK
365 387
366#define b3USE_NON_BUS_CLOCK_MASK 0x01 /* Applicable to both memory and engine clock change, when set, it uses another clock as the temporary clock (engine uses memory and vice versa) */ 388#define b3USE_NON_BUS_CLOCK_MASK 0x01 //Applicable to both memory and engine clock change, when set, it uses another clock as the temporary clock (engine uses memory and vice versa)
367#define b3USE_MEMORY_SELF_REFRESH 0x02 /* Only applicable to memory clock change, when set, using memory self refresh during clock transition */ 389#define b3USE_MEMORY_SELF_REFRESH 0x02 //Only applicable to memory clock change, when set, using memory self refresh during clock transition
368#define b3SKIP_INTERNAL_MEMORY_PARAMETER_CHANGE 0x04 /* Only applicable to memory clock change, when set, the table will skip predefined internal memory parameter change */ 390#define b3SKIP_INTERNAL_MEMORY_PARAMETER_CHANGE 0x04 //Only applicable to memory clock change, when set, the table will skip predefined internal memory parameter change
369#define b3FIRST_TIME_CHANGE_CLOCK 0x08 /* Applicable to both memory and engine clock change,when set, it means this is 1st time to change clock after ASIC bootup */ 391#define b3FIRST_TIME_CHANGE_CLOCK 0x08 //Applicable to both memory and engine clock change,when set, it means this is 1st time to change clock after ASIC bootup
370#define b3SKIP_SW_PROGRAM_PLL 0x10 /* Applicable to both memory and engine clock change, when set, it means the table will not program SPLL/MPLL */ 392#define b3SKIP_SW_PROGRAM_PLL 0x10 //Applicable to both memory and engine clock change, when set, it means the table will not program SPLL/MPLL
371 393
372typedef struct _ATOM_COMPUTE_CLOCK_FREQ { 394typedef struct _ATOM_COMPUTE_CLOCK_FREQ
395{
373#if ATOM_BIG_ENDIAN 396#if ATOM_BIG_ENDIAN
374 ULONG ulComputeClockFlag:8; /* =1: COMPUTE_MEMORY_PLL_PARAM, =2: COMPUTE_ENGINE_PLL_PARAM */ 397 ULONG ulComputeClockFlag:8; // =1: COMPUTE_MEMORY_PLL_PARAM, =2: COMPUTE_ENGINE_PLL_PARAM
375 ULONG ulClockFreq:24; /* in unit of 10kHz */ 398 ULONG ulClockFreq:24; // in unit of 10kHz
376#else 399#else
377 ULONG ulClockFreq:24; /* in unit of 10kHz */ 400 ULONG ulClockFreq:24; // in unit of 10kHz
378 ULONG ulComputeClockFlag:8; /* =1: COMPUTE_MEMORY_PLL_PARAM, =2: COMPUTE_ENGINE_PLL_PARAM */ 401 ULONG ulComputeClockFlag:8; // =1: COMPUTE_MEMORY_PLL_PARAM, =2: COMPUTE_ENGINE_PLL_PARAM
379#endif 402#endif
380} ATOM_COMPUTE_CLOCK_FREQ; 403}ATOM_COMPUTE_CLOCK_FREQ;
381 404
382typedef struct _ATOM_S_MPLL_FB_DIVIDER { 405typedef struct _ATOM_S_MPLL_FB_DIVIDER
383 USHORT usFbDivFrac; 406{
384 USHORT usFbDiv; 407 USHORT usFbDivFrac;
385} ATOM_S_MPLL_FB_DIVIDER; 408 USHORT usFbDiv;
386 409}ATOM_S_MPLL_FB_DIVIDER;
387typedef struct _COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V3 {
388 union {
389 ATOM_COMPUTE_CLOCK_FREQ ulClock; /* Input Parameter */
390 ATOM_S_MPLL_FB_DIVIDER ulFbDiv; /* Output Parameter */
391 };
392 UCHAR ucRefDiv; /* Output Parameter */
393 UCHAR ucPostDiv; /* Output Parameter */
394 UCHAR ucCntlFlag; /* Output Parameter */
395 UCHAR ucReserved;
396} COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V3;
397 410
398/* ucCntlFlag */ 411typedef struct _COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V3
412{
413 union
414 {
415 ATOM_COMPUTE_CLOCK_FREQ ulClock; //Input Parameter
416 ATOM_S_MPLL_FB_DIVIDER ulFbDiv; //Output Parameter
417 };
418 UCHAR ucRefDiv; //Output Parameter
419 UCHAR ucPostDiv; //Output Parameter
420 UCHAR ucCntlFlag; //Output Parameter
421 UCHAR ucReserved;
422}COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V3;
423
424// ucCntlFlag
399#define ATOM_PLL_CNTL_FLAG_PLL_POST_DIV_EN 1 425#define ATOM_PLL_CNTL_FLAG_PLL_POST_DIV_EN 1
400#define ATOM_PLL_CNTL_FLAG_MPLL_VCO_MODE 2 426#define ATOM_PLL_CNTL_FLAG_MPLL_VCO_MODE 2
401#define ATOM_PLL_CNTL_FLAG_FRACTION_DISABLE 4 427#define ATOM_PLL_CNTL_FLAG_FRACTION_DISABLE 4
428#define ATOM_PLL_CNTL_FLAG_SPLL_ISPARE_9 8
402 429
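The V2/V3 blocks above are in/out parameter blocks: the caller writes a target clock into the union, and after the command table runs the same bytes hold the computed feedback divider, with ucRefDiv/ucPostDiv (and ucCntlFlag for V3) returned alongside, as the "return ... to be written to register" comments say. The sketch below is illustrative only, not driver code; it assumes this header is included for the UCHAR/USHORT/ULONG typedefs and that COMPUTE_ENGINE_PLL_PARAM, referenced by the ucAction comment above, is defined earlier in the file.

static void example_compute_engine_pll(COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V3 *args)
{
	/* Input: request a 600 MHz engine clock; clocks are in 10 kHz units. */
	args->ulClock.ulClockFreq        = 60000;
	args->ulClock.ulComputeClockFlag = COMPUTE_ENGINE_PLL_PARAM; /* assumed define from earlier in this header */

	/* ... the ComputeMemoryEnginePLL command table would be executed here ... */

	/* Output: the union now holds the feedback divider instead of the request;     */
	/* args->ulFbDiv.usFbDiv / usFbDivFrac, args->ucRefDiv and args->ucPostDiv      */
	/* are the values to be programmed into the PLL registers.                      */
}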
403typedef struct _DYNAMICE_MEMORY_SETTINGS_PARAMETER {
404 ATOM_COMPUTE_CLOCK_FREQ ulClock;
405 ULONG ulReserved[2];
406} DYNAMICE_MEMORY_SETTINGS_PARAMETER;
407
408typedef struct _DYNAMICE_ENGINE_SETTINGS_PARAMETER {
409 ATOM_COMPUTE_CLOCK_FREQ ulClock;
410 ULONG ulMemoryClock;
411 ULONG ulReserved;
412} DYNAMICE_ENGINE_SETTINGS_PARAMETER;
413 430
414/****************************************************************************/ 431// V4 are only used for APU which PLL outside GPU
415/* Structures used by SetEngineClockTable */ 432typedef struct _COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V4
416/****************************************************************************/ 433{
417typedef struct _SET_ENGINE_CLOCK_PARAMETERS { 434#if ATOM_BIG_ENDIAN
418 ULONG ulTargetEngineClock; /* In 10Khz unit */ 435 ULONG ucPostDiv; //return parameter: post divider which is used to program to register directly
419} SET_ENGINE_CLOCK_PARAMETERS; 436 ULONG ulClock:24; //Input= target clock, output = actual clock
420 437#else
421typedef struct _SET_ENGINE_CLOCK_PS_ALLOCATION { 438 ULONG ulClock:24; //Input= target clock, output = actual clock
422 ULONG ulTargetEngineClock; /* In 10Khz unit */ 439 ULONG ucPostDiv; //return parameter: post divider which is used to program to register directly
423 COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_PS_ALLOCATION sReserved; 440#endif
424} SET_ENGINE_CLOCK_PS_ALLOCATION; 441}COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_V4;
425 442
426/****************************************************************************/ 443typedef struct _DYNAMICE_MEMORY_SETTINGS_PARAMETER
427/* Structures used by SetMemoryClockTable */ 444{
428/****************************************************************************/ 445 ATOM_COMPUTE_CLOCK_FREQ ulClock;
429typedef struct _SET_MEMORY_CLOCK_PARAMETERS { 446 ULONG ulReserved[2];
430 ULONG ulTargetMemoryClock; /* In 10Khz unit */ 447}DYNAMICE_MEMORY_SETTINGS_PARAMETER;
431} SET_MEMORY_CLOCK_PARAMETERS;
432 448
433typedef struct _SET_MEMORY_CLOCK_PS_ALLOCATION { 449typedef struct _DYNAMICE_ENGINE_SETTINGS_PARAMETER
434 ULONG ulTargetMemoryClock; /* In 10Khz unit */ 450{
435 COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_PS_ALLOCATION sReserved; 451 ATOM_COMPUTE_CLOCK_FREQ ulClock;
436} SET_MEMORY_CLOCK_PS_ALLOCATION; 452 ULONG ulMemoryClock;
453 ULONG ulReserved;
454}DYNAMICE_ENGINE_SETTINGS_PARAMETER;
455
456/****************************************************************************/
457// Structures used by SetEngineClockTable
458/****************************************************************************/
459typedef struct _SET_ENGINE_CLOCK_PARAMETERS
460{
461 ULONG ulTargetEngineClock; //In 10Khz unit
462}SET_ENGINE_CLOCK_PARAMETERS;
437 463
438/****************************************************************************/ 464typedef struct _SET_ENGINE_CLOCK_PS_ALLOCATION
439/* Structures used by ASIC_Init.ctb */ 465{
440/****************************************************************************/ 466 ULONG ulTargetEngineClock; //In 10Khz unit
441typedef struct _ASIC_INIT_PARAMETERS { 467 COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_PS_ALLOCATION sReserved;
442 ULONG ulDefaultEngineClock; /* In 10Khz unit */ 468}SET_ENGINE_CLOCK_PS_ALLOCATION;
443 ULONG ulDefaultMemoryClock; /* In 10Khz unit */ 469
444} ASIC_INIT_PARAMETERS; 470/****************************************************************************/
471// Structures used by SetMemoryClockTable
472/****************************************************************************/
473typedef struct _SET_MEMORY_CLOCK_PARAMETERS
474{
475 ULONG ulTargetMemoryClock; //In 10Khz unit
476}SET_MEMORY_CLOCK_PARAMETERS;
445 477
446typedef struct _ASIC_INIT_PS_ALLOCATION { 478typedef struct _SET_MEMORY_CLOCK_PS_ALLOCATION
447 ASIC_INIT_PARAMETERS sASICInitClocks; 479{
448 SET_ENGINE_CLOCK_PS_ALLOCATION sReserved; /* Caller doesn't need to init this structure */ 480 ULONG ulTargetMemoryClock; //In 10Khz unit
449} ASIC_INIT_PS_ALLOCATION; 481 COMPUTE_MEMORY_ENGINE_PLL_PARAMETERS_PS_ALLOCATION sReserved;
482}SET_MEMORY_CLOCK_PS_ALLOCATION;
483
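Both set-clock tables above take their target in 10 kHz units, so a 400 MHz memory clock is passed as 40000. A minimal, hypothetical packing (assuming this header is included) might look like:

static void example_set_memory_clock(void)
{
	SET_MEMORY_CLOCK_PS_ALLOCATION args = {0};

	args.ulTargetMemoryClock = 40000; /* 400 MHz expressed in 10 kHz units */
	/* sReserved is allocation consumed by the command table itself; it is left zeroed here. */
	(void)args;
}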
484/****************************************************************************/
485// Structures used by ASIC_Init.ctb
486/****************************************************************************/
487typedef struct _ASIC_INIT_PARAMETERS
488{
489 ULONG ulDefaultEngineClock; //In 10Khz unit
490 ULONG ulDefaultMemoryClock; //In 10Khz unit
491}ASIC_INIT_PARAMETERS;
450 492
451/****************************************************************************/ 493typedef struct _ASIC_INIT_PS_ALLOCATION
452/* Structure used by DynamicClockGatingTable.ctb */ 494{
453/****************************************************************************/ 495 ASIC_INIT_PARAMETERS sASICInitClocks;
454typedef struct _DYNAMIC_CLOCK_GATING_PARAMETERS { 496 SET_ENGINE_CLOCK_PS_ALLOCATION sReserved; //Caller doesn't need to init this structure
455 UCHAR ucEnable; /* ATOM_ENABLE or ATOM_DISABLE */ 497}ASIC_INIT_PS_ALLOCATION;
456 UCHAR ucPadding[3]; 498
457} DYNAMIC_CLOCK_GATING_PARAMETERS; 499/****************************************************************************/
500// Structure used by DynamicClockGatingTable.ctb
501/****************************************************************************/
502typedef struct _DYNAMIC_CLOCK_GATING_PARAMETERS
503{
504 UCHAR ucEnable; // ATOM_ENABLE or ATOM_DISABLE
505 UCHAR ucPadding[3];
506}DYNAMIC_CLOCK_GATING_PARAMETERS;
458#define DYNAMIC_CLOCK_GATING_PS_ALLOCATION DYNAMIC_CLOCK_GATING_PARAMETERS 507#define DYNAMIC_CLOCK_GATING_PS_ALLOCATION DYNAMIC_CLOCK_GATING_PARAMETERS
459 508
460/****************************************************************************/ 509/****************************************************************************/
461/* Structure used by EnableASIC_StaticPwrMgtTable.ctb */ 510// Structure used by EnableASIC_StaticPwrMgtTable.ctb
462/****************************************************************************/ 511/****************************************************************************/
463typedef struct _ENABLE_ASIC_STATIC_PWR_MGT_PARAMETERS { 512typedef struct _ENABLE_ASIC_STATIC_PWR_MGT_PARAMETERS
464 UCHAR ucEnable; /* ATOM_ENABLE or ATOM_DISABLE */ 513{
465 UCHAR ucPadding[3]; 514 UCHAR ucEnable; // ATOM_ENABLE or ATOM_DISABLE
466} ENABLE_ASIC_STATIC_PWR_MGT_PARAMETERS; 515 UCHAR ucPadding[3];
516}ENABLE_ASIC_STATIC_PWR_MGT_PARAMETERS;
467#define ENABLE_ASIC_STATIC_PWR_MGT_PS_ALLOCATION ENABLE_ASIC_STATIC_PWR_MGT_PARAMETERS 517#define ENABLE_ASIC_STATIC_PWR_MGT_PS_ALLOCATION ENABLE_ASIC_STATIC_PWR_MGT_PARAMETERS
468 518
469/****************************************************************************/ 519/****************************************************************************/
470/* Structures used by DAC_LoadDetectionTable.ctb */ 520// Structures used by DAC_LoadDetectionTable.ctb
471/****************************************************************************/ 521/****************************************************************************/
472typedef struct _DAC_LOAD_DETECTION_PARAMETERS { 522typedef struct _DAC_LOAD_DETECTION_PARAMETERS
473 USHORT usDeviceID; /* {ATOM_DEVICE_CRTx_SUPPORT,ATOM_DEVICE_TVx_SUPPORT,ATOM_DEVICE_CVx_SUPPORT} */ 523{
474 UCHAR ucDacType; /* {ATOM_DAC_A,ATOM_DAC_B, ATOM_EXT_DAC} */ 524 USHORT usDeviceID; //{ATOM_DEVICE_CRTx_SUPPORT,ATOM_DEVICE_TVx_SUPPORT,ATOM_DEVICE_CVx_SUPPORT}
475 UCHAR ucMisc; /* Valid only when table revision =1.3 and above */ 525 UCHAR ucDacType; //{ATOM_DAC_A,ATOM_DAC_B, ATOM_EXT_DAC}
476} DAC_LOAD_DETECTION_PARAMETERS; 526 UCHAR ucMisc; //Valid only when table revision =1.3 and above
527}DAC_LOAD_DETECTION_PARAMETERS;
477 528
478/* DAC_LOAD_DETECTION_PARAMETERS.ucMisc */ 529// DAC_LOAD_DETECTION_PARAMETERS.ucMisc
479#define DAC_LOAD_MISC_YPrPb 0x01 530#define DAC_LOAD_MISC_YPrPb 0x01
480 531
481typedef struct _DAC_LOAD_DETECTION_PS_ALLOCATION { 532typedef struct _DAC_LOAD_DETECTION_PS_ALLOCATION
482 DAC_LOAD_DETECTION_PARAMETERS sDacload; 533{
483 ULONG Reserved[2]; /* Don't set this one, allocation for EXT DAC */ 534 DAC_LOAD_DETECTION_PARAMETERS sDacload;
484} DAC_LOAD_DETECTION_PS_ALLOCATION; 535 ULONG Reserved[2];// Don't set this one, allocation for EXT DAC
485 536}DAC_LOAD_DETECTION_PS_ALLOCATION;
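As an illustration of the load-detection block, the sketch below probes CRT1 through DAC A. ATOM_DEVICE_CRT1_SUPPORT and ATOM_DAC_A are assumed to be the definitions referenced by the comments above (they live elsewhere in this header), and the combination chosen here is hypothetical rather than taken from the driver.

static void example_dac_load_detect(void)
{
	DAC_LOAD_DETECTION_PS_ALLOCATION args = {0};

	args.sDacload.usDeviceID = ATOM_DEVICE_CRT1_SUPPORT; /* output to probe                                  */
	args.sDacload.ucDacType  = ATOM_DAC_A;               /* DAC that drives the probe                        */
	args.sDacload.ucMisc     = 0;                        /* DAC_LOAD_MISC_YPrPb only for component, rev 1.3+ */
	/* Reserved[2] is extra allocation for the external-DAC case; the caller does not set it. */
	(void)args;
}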
486/****************************************************************************/ 537
487/* Structures used by DAC1EncoderControlTable.ctb and DAC2EncoderControlTable.ctb */ 538/****************************************************************************/
488/****************************************************************************/ 539// Structures used by DAC1EncoderControlTable.ctb and DAC2EncoderControlTable.ctb
489typedef struct _DAC_ENCODER_CONTROL_PARAMETERS { 540/****************************************************************************/
490 USHORT usPixelClock; /* in 10KHz; for bios convenient */ 541typedef struct _DAC_ENCODER_CONTROL_PARAMETERS
491 UCHAR ucDacStandard; /* See definition of ATOM_DACx_xxx, For DEC3.0, bit 7 used as internal flag to indicate DAC2 (==1) or DAC1 (==0) */ 542{
492 UCHAR ucAction; /* 0: turn off encoder */ 543 USHORT usPixelClock; // in 10KHz; for bios convenient
493 /* 1: setup and turn on encoder */ 544 UCHAR ucDacStandard; // See definition of ATOM_DACx_xxx, For DEC3.0, bit 7 used as internal flag to indicate DAC2 (==1) or DAC1 (==0)
494 /* 7: ATOM_ENCODER_INIT Initialize DAC */ 545 UCHAR ucAction; // 0: turn off encoder
495} DAC_ENCODER_CONTROL_PARAMETERS; 546 // 1: setup and turn on encoder
547 // 7: ATOM_ENCODER_INIT Initialize DAC
548}DAC_ENCODER_CONTROL_PARAMETERS;
496 549
497#define DAC_ENCODER_CONTROL_PS_ALLOCATION DAC_ENCODER_CONTROL_PARAMETERS 550#define DAC_ENCODER_CONTROL_PS_ALLOCATION DAC_ENCODER_CONTROL_PARAMETERS
498 551
499/****************************************************************************/ 552/****************************************************************************/
500/* Structures used by DIG1EncoderControlTable */ 553// Structures used by DIG1EncoderControlTable
501/* DIG2EncoderControlTable */ 554// DIG2EncoderControlTable
502/* ExternalEncoderControlTable */ 555// ExternalEncoderControlTable
503/****************************************************************************/ 556/****************************************************************************/
504typedef struct _DIG_ENCODER_CONTROL_PARAMETERS { 557typedef struct _DIG_ENCODER_CONTROL_PARAMETERS
505 USHORT usPixelClock; /* in 10KHz; for bios convenient */ 558{
506 UCHAR ucConfig; 559 USHORT usPixelClock; // in 10KHz; for bios convenient
507 /* [2] Link Select: */ 560 UCHAR ucConfig;
508 /* =0: PHY linkA if bfLane<3 */ 561 // [2] Link Select:
509 /* =1: PHY linkB if bfLanes<3 */ 562 // =0: PHY linkA if bfLane<3
510 /* =0: PHY linkA+B if bfLanes=3 */ 563 // =1: PHY linkB if bfLanes<3
511 /* [3] Transmitter Sel */ 564 // =0: PHY linkA+B if bfLanes=3
512 /* =0: UNIPHY or PCIEPHY */ 565 // [3] Transmitter Sel
513 /* =1: LVTMA */ 566 // =0: UNIPHY or PCIEPHY
514 UCHAR ucAction; /* =0: turn off encoder */ 567 // =1: LVTMA
515 /* =1: turn on encoder */ 568 UCHAR ucAction; // =0: turn off encoder
516 UCHAR ucEncoderMode; 569 // =1: turn on encoder
517 /* =0: DP encoder */ 570 UCHAR ucEncoderMode;
518 /* =1: LVDS encoder */ 571 // =0: DP encoder
519 /* =2: DVI encoder */ 572 // =1: LVDS encoder
520 /* =3: HDMI encoder */ 573 // =2: DVI encoder
521 /* =4: SDVO encoder */ 574 // =3: HDMI encoder
522 UCHAR ucLaneNum; /* how many lanes to enable */ 575 // =4: SDVO encoder
523 UCHAR ucReserved[2]; 576 UCHAR ucLaneNum; // how many lanes to enable
524} DIG_ENCODER_CONTROL_PARAMETERS; 577 UCHAR ucReserved[2];
578}DIG_ENCODER_CONTROL_PARAMETERS;
525#define DIG_ENCODER_CONTROL_PS_ALLOCATION DIG_ENCODER_CONTROL_PARAMETERS 579#define DIG_ENCODER_CONTROL_PS_ALLOCATION DIG_ENCODER_CONTROL_PARAMETERS
526#define EXTERNAL_ENCODER_CONTROL_PARAMETER DIG_ENCODER_CONTROL_PARAMETERS 580#define EXTERNAL_ENCODER_CONTROL_PARAMETER DIG_ENCODER_CONTROL_PARAMETERS
527 581
528/* ucConfig */ 582//ucConfig
529#define ATOM_ENCODER_CONFIG_DPLINKRATE_MASK 0x01 583#define ATOM_ENCODER_CONFIG_DPLINKRATE_MASK 0x01
530#define ATOM_ENCODER_CONFIG_DPLINKRATE_1_62GHZ 0x00 584#define ATOM_ENCODER_CONFIG_DPLINKRATE_1_62GHZ 0x00
531#define ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ 0x01 585#define ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ 0x01
@@ -539,52 +593,57 @@ typedef struct _DIG_ENCODER_CONTROL_PARAMETERS {
539#define ATOM_ENCODER_CONFIG_LVTMA 0x08 593#define ATOM_ENCODER_CONFIG_LVTMA 0x08
540#define ATOM_ENCODER_CONFIG_TRANSMITTER1 0x00 594#define ATOM_ENCODER_CONFIG_TRANSMITTER1 0x00
541#define ATOM_ENCODER_CONFIG_TRANSMITTER2 0x08 595#define ATOM_ENCODER_CONFIG_TRANSMITTER2 0x08
542#define ATOM_ENCODER_CONFIG_DIGB 0x80 /* VBIOS Internal use, outside SW should set this bit=0 */ 596#define ATOM_ENCODER_CONFIG_DIGB 0x80 // VBIOS Internal use, outside SW should set this bit=0
543/* ucAction */ 597// ucAction
544/* ATOM_ENABLE: Enable Encoder */ 598// ATOM_ENABLE: Enable Encoder
545/* ATOM_DISABLE: Disable Encoder */ 599// ATOM_DISABLE: Disable Encoder
546 600
547/* ucEncoderMode */ 601//ucEncoderMode
548#define ATOM_ENCODER_MODE_DP 0 602#define ATOM_ENCODER_MODE_DP 0
549#define ATOM_ENCODER_MODE_LVDS 1 603#define ATOM_ENCODER_MODE_LVDS 1
550#define ATOM_ENCODER_MODE_DVI 2 604#define ATOM_ENCODER_MODE_DVI 2
551#define ATOM_ENCODER_MODE_HDMI 3 605#define ATOM_ENCODER_MODE_HDMI 3
552#define ATOM_ENCODER_MODE_SDVO 4 606#define ATOM_ENCODER_MODE_SDVO 4
607#define ATOM_ENCODER_MODE_DP_AUDIO 5
553#define ATOM_ENCODER_MODE_TV 13 608#define ATOM_ENCODER_MODE_TV 13
554#define ATOM_ENCODER_MODE_CV 14 609#define ATOM_ENCODER_MODE_CV 14
555#define ATOM_ENCODER_MODE_CRT 15 610#define ATOM_ENCODER_MODE_CRT 15
556 611
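Tying the ucConfig bit descriptions and the ATOM_ENCODER_MODE_* values together, a hypothetical "turn on a 4-lane DP encoder on PHY link A of a UNIPHY transmitter" request could be packed as below. Only flags visible in this hunk are used; link select and transmitter select are left at 0, which the comments above define as link A and UNIPHY/PCIEPHY, and ATOM_ENABLE is assumed to be defined earlier in the header.

static void example_dig_encoder_enable(void)
{
	DIG_ENCODER_CONTROL_PS_ALLOCATION args = {0};

	args.usPixelClock  = 16200;                                  /* 162 MHz in 10 kHz units                 */
	args.ucConfig      = ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ; /* bit0 = 1: 2.7 GHz DP link rate          */
	                                                             /* bit2 = 0: PHY link A, bit3 = 0: UNIPHY  */
	args.ucAction      = ATOM_ENABLE;                            /* 1: setup and turn on encoder            */
	args.ucEncoderMode = ATOM_ENCODER_MODE_DP;
	args.ucLaneNum     = 4;
	(void)args;
}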
557typedef struct _ATOM_DIG_ENCODER_CONFIG_V2 { 612typedef struct _ATOM_DIG_ENCODER_CONFIG_V2
613{
558#if ATOM_BIG_ENDIAN 614#if ATOM_BIG_ENDIAN
559 UCHAR ucReserved1:2; 615 UCHAR ucReserved1:2;
560 UCHAR ucTransmitterSel:2; /* =0: UniphyAB, =1: UniphyCD =2: UniphyEF */ 616 UCHAR ucTransmitterSel:2; // =0: UniphyAB, =1: UniphyCD =2: UniphyEF
561 UCHAR ucLinkSel:1; /* =0: linkA/C/E =1: linkB/D/F */ 617 UCHAR ucLinkSel:1; // =0: linkA/C/E =1: linkB/D/F
562 UCHAR ucReserved:1; 618 UCHAR ucReserved:1;
563 UCHAR ucDPLinkRate:1; /* =0: 1.62Ghz, =1: 2.7Ghz */ 619 UCHAR ucDPLinkRate:1; // =0: 1.62Ghz, =1: 2.7Ghz
564#else 620#else
565 UCHAR ucDPLinkRate:1; /* =0: 1.62Ghz, =1: 2.7Ghz */ 621 UCHAR ucDPLinkRate:1; // =0: 1.62Ghz, =1: 2.7Ghz
566 UCHAR ucReserved:1; 622 UCHAR ucReserved:1;
567 UCHAR ucLinkSel:1; /* =0: linkA/C/E =1: linkB/D/F */ 623 UCHAR ucLinkSel:1; // =0: linkA/C/E =1: linkB/D/F
568 UCHAR ucTransmitterSel:2; /* =0: UniphyAB, =1: UniphyCD =2: UniphyEF */ 624 UCHAR ucTransmitterSel:2; // =0: UniphyAB, =1: UniphyCD =2: UniphyEF
569 UCHAR ucReserved1:2; 625 UCHAR ucReserved1:2;
570#endif 626#endif
571} ATOM_DIG_ENCODER_CONFIG_V2; 627}ATOM_DIG_ENCODER_CONFIG_V2;
572 628
573typedef struct _DIG_ENCODER_CONTROL_PARAMETERS_V2 {
574 USHORT usPixelClock; /* in 10KHz; for bios convenient */
575 ATOM_DIG_ENCODER_CONFIG_V2 acConfig;
576 UCHAR ucAction;
577 UCHAR ucEncoderMode;
578 /* =0: DP encoder */
579 /* =1: LVDS encoder */
580 /* =2: DVI encoder */
581 /* =3: HDMI encoder */
582 /* =4: SDVO encoder */
583 UCHAR ucLaneNum; /* how many lanes to enable */
584 UCHAR ucReserved[2];
585} DIG_ENCODER_CONTROL_PARAMETERS_V2;
586 629
587/* ucConfig */ 630typedef struct _DIG_ENCODER_CONTROL_PARAMETERS_V2
631{
632 USHORT usPixelClock; // in 10KHz; for bios convenient
633 ATOM_DIG_ENCODER_CONFIG_V2 acConfig;
634 UCHAR ucAction;
635 UCHAR ucEncoderMode;
636 // =0: DP encoder
637 // =1: LVDS encoder
638 // =2: DVI encoder
639 // =3: HDMI encoder
640 // =4: SDVO encoder
641 UCHAR ucLaneNum; // how many lanes to enable
642 UCHAR ucStatus; // = DP_LINK_TRAINING_COMPLETE or DP_LINK_TRAINING_INCOMPLETE, only used by VBIOS with command ATOM_ENCODER_CMD_QUERY_DP_LINK_TRAINING_STATUS
643 UCHAR ucReserved;
644}DIG_ENCODER_CONTROL_PARAMETERS_V2;
645
646//ucConfig
588#define ATOM_ENCODER_CONFIG_V2_DPLINKRATE_MASK 0x01 647#define ATOM_ENCODER_CONFIG_V2_DPLINKRATE_MASK 0x01
589#define ATOM_ENCODER_CONFIG_V2_DPLINKRATE_1_62GHZ 0x00 648#define ATOM_ENCODER_CONFIG_V2_DPLINKRATE_1_62GHZ 0x00
590#define ATOM_ENCODER_CONFIG_V2_DPLINKRATE_2_70GHZ 0x01 649#define ATOM_ENCODER_CONFIG_V2_DPLINKRATE_2_70GHZ 0x01
@@ -596,58 +655,122 @@ typedef struct _DIG_ENCODER_CONTROL_PARAMETERS_V2 {
596#define ATOM_ENCODER_CONFIG_V2_TRANSMITTER2 0x08 655#define ATOM_ENCODER_CONFIG_V2_TRANSMITTER2 0x08
597#define ATOM_ENCODER_CONFIG_V2_TRANSMITTER3 0x10 656#define ATOM_ENCODER_CONFIG_V2_TRANSMITTER3 0x10
598 657
599/****************************************************************************/ 658// ucAction:
600/* Structures used by UNIPHYTransmitterControlTable */ 659// ATOM_DISABLE
601/* LVTMATransmitterControlTable */ 660// ATOM_ENABLE
602/* DVOOutputControlTable */ 661#define ATOM_ENCODER_CMD_DP_LINK_TRAINING_START 0x08
603/****************************************************************************/ 662#define ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1 0x09
604typedef struct _ATOM_DP_VS_MODE { 663#define ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2 0x0a
605 UCHAR ucLaneSel; 664#define ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE 0x0b
606 UCHAR ucLaneSet; 665#define ATOM_ENCODER_CMD_DP_VIDEO_OFF 0x0c
607} ATOM_DP_VS_MODE; 666#define ATOM_ENCODER_CMD_DP_VIDEO_ON 0x0d
608 667#define ATOM_ENCODER_CMD_QUERY_DP_LINK_TRAINING_STATUS 0x0e
609typedef struct _DIG_TRANSMITTER_CONTROL_PARAMETERS { 668#define ATOM_ENCODER_CMD_SETUP 0x0f
610 union { 669
611 USHORT usPixelClock; /* in 10KHz; for bios convenient */ 670// ucStatus
612 USHORT usInitInfo; /* when init uniphy,lower 8bit is used for connector type defined in objectid.h */ 671#define ATOM_ENCODER_STATUS_LINK_TRAINING_COMPLETE 0x10
613 ATOM_DP_VS_MODE asMode; /* DP Voltage swing mode */ 672#define ATOM_ENCODER_STATUS_LINK_TRAINING_INCOMPLETE 0x00
673
674// Following function ENABLE sub-function will be used by driver when TMDS/HDMI/LVDS is used, disable function will be used by driver
675typedef struct _ATOM_DIG_ENCODER_CONFIG_V3
676{
677#if ATOM_BIG_ENDIAN
678 UCHAR ucReserved1:1;
679 UCHAR ucDigSel:3; // =0: DIGA/B/C/D/E/F
680 UCHAR ucReserved:3;
681 UCHAR ucDPLinkRate:1; // =0: 1.62Ghz, =1: 2.7Ghz
682#else
683 UCHAR ucDPLinkRate:1; // =0: 1.62Ghz, =1: 2.7Ghz
684 UCHAR ucReserved:3;
685 UCHAR ucDigSel:3; // =0: DIGA/B/C/D/E/F
686 UCHAR ucReserved1:1;
687#endif
688}ATOM_DIG_ENCODER_CONFIG_V3;
689
690#define ATOM_ENCODER_CONFIG_V3_ENCODER_SEL 0x70
691
692
693typedef struct _DIG_ENCODER_CONTROL_PARAMETERS_V3
694{
695 USHORT usPixelClock; // in 10KHz; for bios convenient
696 ATOM_DIG_ENCODER_CONFIG_V3 acConfig;
697 UCHAR ucAction;
698 UCHAR ucEncoderMode;
699 // =0: DP encoder
700 // =1: LVDS encoder
701 // =2: DVI encoder
702 // =3: HDMI encoder
703 // =4: SDVO encoder
704 // =5: DP audio
705 UCHAR ucLaneNum; // how many lanes to enable
706 UCHAR ucBitPerColor; // only valid for DP mode when ucAction = ATOM_ENCODER_CMD_SETUP
707 UCHAR ucReserved;
708}DIG_ENCODER_CONTROL_PARAMETERS_V3;
709
710
711// define ucBitPerColor:
712#define PANEL_BPC_UNDEFINE 0x00
713#define PANEL_6BIT_PER_COLOR 0x01
714#define PANEL_8BIT_PER_COLOR 0x02
715#define PANEL_10BIT_PER_COLOR 0x03
716#define PANEL_12BIT_PER_COLOR 0x04
717#define PANEL_16BIT_PER_COLOR 0x05
718
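In the V3 layout the per-PHY selection collapses into ucDigSel, and ucBitPerColor is only consulted for DP when ucAction is ATOM_ENCODER_CMD_SETUP, as the comments note. A hypothetical setup of the second DIG encoder for 8-bit-per-colour DP might look like this; ucDigSel = 1 is read here as "DIGB", following the "=0: DIGA/B/C/D/E/F" comment.

static void example_dig_encoder_setup_v3(void)
{
	DIG_ENCODER_CONTROL_PARAMETERS_V3 args = {0};

	args.usPixelClock          = 16200;                /* 162 MHz in 10 kHz units         */
	args.acConfig.ucDPLinkRate = 0;                    /* 1.62 GHz per-lane link rate     */
	args.acConfig.ucDigSel     = 1;                    /* second DIG encoder              */
	args.ucAction              = ATOM_ENCODER_CMD_SETUP;
	args.ucEncoderMode         = ATOM_ENCODER_MODE_DP;
	args.ucLaneNum             = 4;
	args.ucBitPerColor         = PANEL_8BIT_PER_COLOR; /* only read for DP with CMD_SETUP */
	(void)args;
}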
719/****************************************************************************/
720// Structures used by UNIPHYTransmitterControlTable
721// LVTMATransmitterControlTable
722// DVOOutputControlTable
723/****************************************************************************/
724typedef struct _ATOM_DP_VS_MODE
725{
726 UCHAR ucLaneSel;
727 UCHAR ucLaneSet;
728}ATOM_DP_VS_MODE;
729
730typedef struct _DIG_TRANSMITTER_CONTROL_PARAMETERS
731{
732 union
733 {
734 USHORT usPixelClock; // in 10KHz; for bios convenient
735 USHORT usInitInfo; // when init uniphy,lower 8bit is used for connector type defined in objectid.h
736 ATOM_DP_VS_MODE asMode; // DP Voltage swing mode
614 }; 737 };
615 UCHAR ucConfig; 738 UCHAR ucConfig;
616 /* [0]=0: 4 lane Link, */ 739 // [0]=0: 4 lane Link,
617 /* =1: 8 lane Link ( Dual Links TMDS ) */ 740 // =1: 8 lane Link ( Dual Links TMDS )
618 /* [1]=0: InCoherent mode */ 741 // [1]=0: InCoherent mode
619 /* =1: Coherent Mode */ 742 // =1: Coherent Mode
620 /* [2] Link Select: */ 743 // [2] Link Select:
621 /* =0: PHY linkA if bfLane<3 */ 744 // =0: PHY linkA if bfLane<3
622 /* =1: PHY linkB if bfLanes<3 */ 745 // =1: PHY linkB if bfLanes<3
623 /* =0: PHY linkA+B if bfLanes=3 */ 746 // =0: PHY linkA+B if bfLanes=3
624 /* [5:4]PCIE lane Sel */ 747 // [5:4]PCIE lane Sel
625 /* =0: lane 0~3 or 0~7 */ 748 // =0: lane 0~3 or 0~7
626 /* =1: lane 4~7 */ 749 // =1: lane 4~7
627 /* =2: lane 8~11 or 8~15 */ 750 // =2: lane 8~11 or 8~15
628 /* =3: lane 12~15 */ 751 // =3: lane 12~15
629 UCHAR ucAction; /* =0: turn off encoder */ 752 UCHAR ucAction; // =0: turn off encoder
630 /* =1: turn on encoder */ 753 // =1: turn on encoder
631 UCHAR ucReserved[4]; 754 UCHAR ucReserved[4];
632} DIG_TRANSMITTER_CONTROL_PARAMETERS; 755}DIG_TRANSMITTER_CONTROL_PARAMETERS;
633 756
634#define DIG_TRANSMITTER_CONTROL_PS_ALLOCATION DIG_TRANSMITTER_CONTROL_PARAMETERS 757#define DIG_TRANSMITTER_CONTROL_PS_ALLOCATION DIG_TRANSMITTER_CONTROL_PARAMETERS
635 758
636/* ucInitInfo */ 759//ucInitInfo
637#define ATOM_TRAMITTER_INITINFO_CONNECTOR_MASK 0x00ff 760#define ATOM_TRAMITTER_INITINFO_CONNECTOR_MASK 0x00ff
638 761
639/* ucConfig */ 762//ucConfig
640#define ATOM_TRANSMITTER_CONFIG_8LANE_LINK 0x01 763#define ATOM_TRANSMITTER_CONFIG_8LANE_LINK 0x01
641#define ATOM_TRANSMITTER_CONFIG_COHERENT 0x02 764#define ATOM_TRANSMITTER_CONFIG_COHERENT 0x02
642#define ATOM_TRANSMITTER_CONFIG_LINK_SEL_MASK 0x04 765#define ATOM_TRANSMITTER_CONFIG_LINK_SEL_MASK 0x04
643#define ATOM_TRANSMITTER_CONFIG_LINKA 0x00 766#define ATOM_TRANSMITTER_CONFIG_LINKA 0x00
644#define ATOM_TRANSMITTER_CONFIG_LINKB 0x04 767#define ATOM_TRANSMITTER_CONFIG_LINKB 0x04
645#define ATOM_TRANSMITTER_CONFIG_LINKA_B 0x00 768#define ATOM_TRANSMITTER_CONFIG_LINKA_B 0x00
646#define ATOM_TRANSMITTER_CONFIG_LINKB_A 0x04 769#define ATOM_TRANSMITTER_CONFIG_LINKB_A 0x04
647 770
648#define ATOM_TRANSMITTER_CONFIG_ENCODER_SEL_MASK 0x08 /* only used when ATOM_TRANSMITTER_ACTION_ENABLE */ 771#define ATOM_TRANSMITTER_CONFIG_ENCODER_SEL_MASK 0x08 // only used when ATOM_TRANSMITTER_ACTION_ENABLE
649#define ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER 0x00 /* only used when ATOM_TRANSMITTER_ACTION_ENABLE */ 772#define ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER 0x00 // only used when ATOM_TRANSMITTER_ACTION_ENABLE
650#define ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER 0x08 /* only used when ATOM_TRANSMITTER_ACTION_ENABLE */ 773#define ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER 0x08 // only used when ATOM_TRANSMITTER_ACTION_ENABLE
651 774
652#define ATOM_TRANSMITTER_CONFIG_CLKSRC_MASK 0x30 775#define ATOM_TRANSMITTER_CONFIG_CLKSRC_MASK 0x30
653#define ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL 0x00 776#define ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL 0x00
@@ -661,7 +784,7 @@ typedef struct _DIG_TRANSMITTER_CONTROL_PARAMETERS {
661#define ATOM_TRANSMITTER_CONFIG_LANE_8_15 0x80 784#define ATOM_TRANSMITTER_CONFIG_LANE_8_15 0x80
662#define ATOM_TRANSMITTER_CONFIG_LANE_12_15 0xc0 785#define ATOM_TRANSMITTER_CONFIG_LANE_12_15 0xc0
663 786
664/* ucAction */ 787//ucAction
665#define ATOM_TRANSMITTER_ACTION_DISABLE 0 788#define ATOM_TRANSMITTER_ACTION_DISABLE 0
666#define ATOM_TRANSMITTER_ACTION_ENABLE 1 789#define ATOM_TRANSMITTER_ACTION_ENABLE 1
667#define ATOM_TRANSMITTER_ACTION_LCD_BLOFF 2 790#define ATOM_TRANSMITTER_ACTION_LCD_BLOFF 2
@@ -674,93 +797,168 @@ typedef struct _DIG_TRANSMITTER_CONTROL_PARAMETERS {
674#define ATOM_TRANSMITTER_ACTION_ENABLE_OUTPUT 9 797#define ATOM_TRANSMITTER_ACTION_ENABLE_OUTPUT 9
675#define ATOM_TRANSMITTER_ACTION_SETUP 10 798#define ATOM_TRANSMITTER_ACTION_SETUP 10
676#define ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH 11 799#define ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH 11
800#define ATOM_TRANSMITTER_ACTION_POWER_ON 12
801#define ATOM_TRANSMITTER_ACTION_POWER_OFF 13
677 802
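Putting the transmitter-control bits and actions together, the sketch below requests a coherent, dual-link (8-lane) TMDS output fed from DIG1, built only from the flags defined above; it is an illustration, not a copy of driver code. For the init path, note that usInitInfo instead carries the connector type from ObjectID.h in its low byte.

static void example_dig_transmitter_enable(void)
{
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION args = {0};

	args.usPixelClock = 30000;                                /* 300 MHz: beyond single-link TMDS        */
	args.ucConfig     = ATOM_TRANSMITTER_CONFIG_8LANE_LINK |  /* bit0: 8 lanes (dual-link TMDS)          */
	                    ATOM_TRANSMITTER_CONFIG_COHERENT   |  /* bit1: coherent mode                     */
	                    ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER; /* bit3 = 0: DIG1, used with ACTION_ENABLE */
	args.ucAction     = ATOM_TRANSMITTER_ACTION_ENABLE;
	(void)args;
}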
678/* Following are used for DigTransmitterControlTable ver1.2 */ 803// Following are used for DigTransmitterControlTable ver1.2
679typedef struct _ATOM_DIG_TRANSMITTER_CONFIG_V2 { 804typedef struct _ATOM_DIG_TRANSMITTER_CONFIG_V2
805{
680#if ATOM_BIG_ENDIAN 806#if ATOM_BIG_ENDIAN
681 UCHAR ucTransmitterSel:2; /* bit7:6: =0 Dig Transmitter 1 ( Uniphy AB ) */ 807 UCHAR ucTransmitterSel:2; //bit7:6: =0 Dig Transmitter 1 ( Uniphy AB )
682 /* =1 Dig Transmitter 2 ( Uniphy CD ) */ 808 // =1 Dig Transmitter 2 ( Uniphy CD )
683 /* =2 Dig Transmitter 3 ( Uniphy EF ) */ 809 // =2 Dig Transmitter 3 ( Uniphy EF )
684 UCHAR ucReserved:1; 810 UCHAR ucReserved:1;
685 UCHAR fDPConnector:1; /* bit4=0: DP connector =1: None DP connector */ 811 UCHAR fDPConnector:1; //bit4=0: DP connector =1: None DP connector
686 UCHAR ucEncoderSel:1; /* bit3=0: Data/Clk path source from DIGA( DIG inst0 ). =1: Data/clk path source from DIGB ( DIG inst1 ) */ 812 UCHAR ucEncoderSel:1; //bit3=0: Data/Clk path source from DIGA( DIG inst0 ). =1: Data/clk path source from DIGB ( DIG inst1 )
687 UCHAR ucLinkSel:1; /* bit2=0: Uniphy LINKA or C or E when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is A or C or E */ 813 UCHAR ucLinkSel:1; //bit2=0: Uniphy LINKA or C or E when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is A or C or E
688 /* =1: Uniphy LINKB or D or F when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is B or D or F */ 814 // =1: Uniphy LINKB or D or F when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is B or D or F
689 815
690 UCHAR fCoherentMode:1; /* bit1=1: Coherent Mode ( for DVI/HDMI mode ) */ 816 UCHAR fCoherentMode:1; //bit1=1: Coherent Mode ( for DVI/HDMI mode )
691 UCHAR fDualLinkConnector:1; /* bit0=1: Dual Link DVI connector */ 817 UCHAR fDualLinkConnector:1; //bit0=1: Dual Link DVI connector
692#else 818#else
693 UCHAR fDualLinkConnector:1; /* bit0=1: Dual Link DVI connector */ 819 UCHAR fDualLinkConnector:1; //bit0=1: Dual Link DVI connector
694 UCHAR fCoherentMode:1; /* bit1=1: Coherent Mode ( for DVI/HDMI mode ) */ 820 UCHAR fCoherentMode:1; //bit1=1: Coherent Mode ( for DVI/HDMI mode )
695 UCHAR ucLinkSel:1; /* bit2=0: Uniphy LINKA or C or E when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is A or C or E */ 821 UCHAR ucLinkSel:1; //bit2=0: Uniphy LINKA or C or E when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is A or C or E
696 /* =1: Uniphy LINKB or D or F when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is B or D or F */ 822 // =1: Uniphy LINKB or D or F when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is B or D or F
697 UCHAR ucEncoderSel:1; /* bit3=0: Data/Clk path source from DIGA( DIG inst0 ). =1: Data/clk path source from DIGB ( DIG inst1 ) */ 823 UCHAR ucEncoderSel:1; //bit3=0: Data/Clk path source from DIGA( DIG inst0 ). =1: Data/clk path source from DIGB ( DIG inst1 )
698 UCHAR fDPConnector:1; /* bit4=0: DP connector =1: None DP connector */ 824 UCHAR fDPConnector:1; //bit4=0: DP connector =1: None DP connector
699 UCHAR ucReserved:1; 825 UCHAR ucReserved:1;
700 UCHAR ucTransmitterSel:2; /* bit7:6: =0 Dig Transmitter 1 ( Uniphy AB ) */ 826 UCHAR ucTransmitterSel:2; //bit7:6: =0 Dig Transmitter 1 ( Uniphy AB )
701 /* =1 Dig Transmitter 2 ( Uniphy CD ) */ 827 // =1 Dig Transmitter 2 ( Uniphy CD )
702 /* =2 Dig Transmitter 3 ( Uniphy EF ) */ 828 // =2 Dig Transmitter 3 ( Uniphy EF )
703#endif 829#endif
704} ATOM_DIG_TRANSMITTER_CONFIG_V2; 830}ATOM_DIG_TRANSMITTER_CONFIG_V2;
705 831
706/* ucConfig */ 832//ucConfig
707/* Bit0 */ 833//Bit0
708#define ATOM_TRANSMITTER_CONFIG_V2_DUAL_LINK_CONNECTOR 0x01 834#define ATOM_TRANSMITTER_CONFIG_V2_DUAL_LINK_CONNECTOR 0x01
709 835
710/* Bit1 */ 836//Bit1
711#define ATOM_TRANSMITTER_CONFIG_V2_COHERENT 0x02 837#define ATOM_TRANSMITTER_CONFIG_V2_COHERENT 0x02
712 838
713/* Bit2 */ 839//Bit2
714#define ATOM_TRANSMITTER_CONFIG_V2_LINK_SEL_MASK 0x04 840#define ATOM_TRANSMITTER_CONFIG_V2_LINK_SEL_MASK 0x04
715#define ATOM_TRANSMITTER_CONFIG_V2_LINKA 0x00 841#define ATOM_TRANSMITTER_CONFIG_V2_LINKA 0x00
716#define ATOM_TRANSMITTER_CONFIG_V2_LINKB 0x04 842#define ATOM_TRANSMITTER_CONFIG_V2_LINKB 0x04
717 843
718/* Bit3 */ 844// Bit3
719#define ATOM_TRANSMITTER_CONFIG_V2_ENCODER_SEL_MASK 0x08 845#define ATOM_TRANSMITTER_CONFIG_V2_ENCODER_SEL_MASK 0x08
720#define ATOM_TRANSMITTER_CONFIG_V2_DIG1_ENCODER 0x00 /* only used when ucAction == ATOM_TRANSMITTER_ACTION_ENABLE or ATOM_TRANSMITTER_ACTION_SETUP */ 846#define ATOM_TRANSMITTER_CONFIG_V2_DIG1_ENCODER 0x00 // only used when ucAction == ATOM_TRANSMITTER_ACTION_ENABLE or ATOM_TRANSMITTER_ACTION_SETUP
721#define ATOM_TRANSMITTER_CONFIG_V2_DIG2_ENCODER 0x08 /* only used when ucAction == ATOM_TRANSMITTER_ACTION_ENABLE or ATOM_TRANSMITTER_ACTION_SETUP */ 847#define ATOM_TRANSMITTER_CONFIG_V2_DIG2_ENCODER 0x08 // only used when ucAction == ATOM_TRANSMITTER_ACTION_ENABLE or ATOM_TRANSMITTER_ACTION_SETUP
722 848
723/* Bit4 */ 849// Bit4
724#define ATOM_TRASMITTER_CONFIG_V2_DP_CONNECTOR 0x10 850#define ATOM_TRASMITTER_CONFIG_V2_DP_CONNECTOR 0x10
725 851
726/* Bit7:6 */ 852// Bit7:6
727#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER_SEL_MASK 0xC0 853#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER_SEL_MASK 0xC0
728#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER1 0x00 /* AB */ 854#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER1 0x00 //AB
729#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER2 0x40 /* CD */ 855#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER2 0x40 //CD
730#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER3 0x80 /* EF */ 856#define ATOM_TRANSMITTER_CONFIG_V2_TRANSMITTER3 0x80 //EF
731 857
732typedef struct _DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 { 858typedef struct _DIG_TRANSMITTER_CONTROL_PARAMETERS_V2
733 union { 859{
734 USHORT usPixelClock; /* in 10KHz; for bios convenient */ 860 union
735 USHORT usInitInfo; /* when init uniphy,lower 8bit is used for connector type defined in objectid.h */ 861 {
736 ATOM_DP_VS_MODE asMode; /* DP Voltage swing mode */ 862 USHORT usPixelClock; // in 10KHz; for bios convenient
863 USHORT usInitInfo; // when init uniphy,lower 8bit is used for connector type defined in objectid.h
864 ATOM_DP_VS_MODE asMode; // DP Voltage swing mode
737 }; 865 };
738 ATOM_DIG_TRANSMITTER_CONFIG_V2 acConfig; 866 ATOM_DIG_TRANSMITTER_CONFIG_V2 acConfig;
739 UCHAR ucAction; /* define as ATOM_TRANSMITER_ACTION_XXX */ 867 UCHAR ucAction; // define as ATOM_TRANSMITER_ACTION_XXX
740 UCHAR ucReserved[4]; 868 UCHAR ucReserved[4];
741} DIG_TRANSMITTER_CONTROL_PARAMETERS_V2; 869}DIG_TRANSMITTER_CONTROL_PARAMETERS_V2;
742 870
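The V2 block replaces the flat ucConfig byte with the ATOM_DIG_TRANSMITTER_CONFIG_V2 bitfield, so the same kind of dual-link request can be written field by field. A hypothetical example, using only the meanings given in the bitfield comments above:

static void example_dig_transmitter_enable_v2(void)
{
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 args = {0};

	args.usPixelClock                = 30000; /* 300 MHz in 10 kHz units               */
	args.acConfig.fDualLinkConnector = 1;     /* dual-link DVI connector               */
	args.acConfig.fCoherentMode      = 1;     /* coherent mode (DVI/HDMI)              */
	args.acConfig.ucLinkSel          = 0;     /* master link is LINKA/C/E              */
	args.acConfig.ucEncoderSel       = 0;     /* data/clock path from DIGA (DIG inst0) */
	args.acConfig.ucTransmitterSel   = 1;     /* Dig Transmitter 2 (Uniphy CD)         */
	args.ucAction                    = ATOM_TRANSMITTER_ACTION_ENABLE;
	(void)args;
}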
743/****************************************************************************/ 871typedef struct _ATOM_DIG_TRANSMITTER_CONFIG_V3
744/* Structures used by DAC1OuputControlTable */ 872{
745/* DAC2OuputControlTable */ 873#if ATOM_BIG_ENDIAN
746/* LVTMAOutputControlTable (Before DEC30) */ 874 UCHAR ucTransmitterSel:2; //bit7:6: =0 Dig Transmitter 1 ( Uniphy AB )
747/* TMDSAOutputControlTable (Before DEC30) */ 875 // =1 Dig Transmitter 2 ( Uniphy CD )
748/****************************************************************************/ 876 // =2 Dig Transmitter 3 ( Uniphy EF )
749typedef struct _DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS { 877 UCHAR ucRefClkSource:2; //bit5:4: PPLL1 =0, PPLL2=1, EXT_CLK=2
750 UCHAR ucAction; /* Possible input:ATOM_ENABLE||ATOMDISABLE */ 878 UCHAR ucEncoderSel:1; //bit3=0: Data/Clk path source from DIGA/C/E. =1: Data/clk path source from DIGB/D/F
751 /* When the display is LCD, in addition to above: */ 879 UCHAR ucLinkSel:1; //bit2=0: Uniphy LINKA or C or E when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is A or C or E
752 /* ATOM_LCD_BLOFF|| ATOM_LCD_BLON ||ATOM_LCD_BL_BRIGHTNESS_CONTROL||ATOM_LCD_SELFTEST_START|| */ 880 // =1: Uniphy LINKB or D or F when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is B or D or F
753 /* ATOM_LCD_SELFTEST_STOP */ 881 UCHAR fCoherentMode:1; //bit1=1: Coherent Mode ( for DVI/HDMI mode )
882 UCHAR fDualLinkConnector:1; //bit0=1: Dual Link DVI connector
883#else
884 UCHAR fDualLinkConnector:1; //bit0=1: Dual Link DVI connector
885 UCHAR fCoherentMode:1; //bit1=1: Coherent Mode ( for DVI/HDMI mode )
886 UCHAR ucLinkSel:1; //bit2=0: Uniphy LINKA or C or E when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is A or C or E
887 // =1: Uniphy LINKB or D or F when fDualLinkConnector=0. when fDualLinkConnector=1, it means master link of dual link is B or D or F
888 UCHAR ucEncoderSel:1; //bit3=0: Data/Clk path source from DIGA/C/E. =1: Data/clk path source from DIGB/D/F
889 UCHAR ucRefClkSource:2; //bit5:4: PPLL1 =0, PPLL2=1, EXT_CLK=2
890 UCHAR ucTransmitterSel:2; //bit7:6: =0 Dig Transmitter 1 ( Uniphy AB )
891 // =1 Dig Transmitter 2 ( Uniphy CD )
892 // =2 Dig Transmitter 3 ( Uniphy EF )
893#endif
894}ATOM_DIG_TRANSMITTER_CONFIG_V3;
754 895
755 UCHAR aucPadding[3]; /* padding to DWORD aligned */ 896typedef struct _DIG_TRANSMITTER_CONTROL_PARAMETERS_V3
756} DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS; 897{
898 union
899 {
900 USHORT usPixelClock; // in 10KHz; for bios convenient
901 USHORT usInitInfo; // when init uniphy,lower 8bit is used for connector type defined in objectid.h
902 ATOM_DP_VS_MODE asMode; // DP Voltage swing mode
903 };
904 ATOM_DIG_TRANSMITTER_CONFIG_V3 acConfig;
905 UCHAR ucAction; // define as ATOM_TRANSMITER_ACTION_XXX
906 UCHAR ucLaneNum;
907 UCHAR ucReserved[3];
908}DIG_TRANSMITTER_CONTROL_PARAMETERS_V3;
909
910//ucConfig
911//Bit0
912#define ATOM_TRANSMITTER_CONFIG_V3_DUAL_LINK_CONNECTOR 0x01
913
914//Bit1
915#define ATOM_TRANSMITTER_CONFIG_V3_COHERENT 0x02
916
917//Bit2
918#define ATOM_TRANSMITTER_CONFIG_V3_LINK_SEL_MASK 0x04
919#define ATOM_TRANSMITTER_CONFIG_V3_LINKA 0x00
920#define ATOM_TRANSMITTER_CONFIG_V3_LINKB 0x04
921
922// Bit3
923#define ATOM_TRANSMITTER_CONFIG_V3_ENCODER_SEL_MASK 0x08
924#define ATOM_TRANSMITTER_CONFIG_V3_DIG1_ENCODER 0x00
925#define ATOM_TRANSMITTER_CONFIG_V3_DIG2_ENCODER 0x08
926
927// Bit5:4
928#define ATOM_TRASMITTER_CONFIG_V3_REFCLK_SEL_MASK 0x30
929#define ATOM_TRASMITTER_CONFIG_V3_P1PLL 0x00
930#define ATOM_TRASMITTER_CONFIG_V3_P2PLL 0x10
931#define ATOM_TRASMITTER_CONFIG_V3_REFCLK_SRC_EXT 0x20
932
933// Bit7:6
934#define ATOM_TRANSMITTER_CONFIG_V3_TRANSMITTER_SEL_MASK 0xC0
935#define ATOM_TRANSMITTER_CONFIG_V3_TRANSMITTER1 0x00 //AB
936#define ATOM_TRANSMITTER_CONFIG_V3_TRANSMITTER2 0x40 //CD
937#define ATOM_TRANSMITTER_CONFIG_V3_TRANSMITTER3 0x80 //EF
938
939/****************************************************************************/
940// Structures used by DAC1OuputControlTable
941// DAC2OuputControlTable
942// LVTMAOutputControlTable (Before DEC30)
943// TMDSAOutputControlTable (Before DEC30)
944/****************************************************************************/
945typedef struct _DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS
946{
947 UCHAR ucAction; // Possible input:ATOM_ENABLE||ATOMDISABLE
948 // When the display is LCD, in addition to above:
949 // ATOM_LCD_BLOFF|| ATOM_LCD_BLON ||ATOM_LCD_BL_BRIGHTNESS_CONTROL||ATOM_LCD_SELFTEST_START||
950 // ATOM_LCD_SELFTEST_STOP
951
952 UCHAR aucPadding[3]; // padding to DWORD aligned
953}DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS;
757 954
758#define DISPLAY_DEVICE_OUTPUT_CONTROL_PS_ALLOCATION DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS 955#define DISPLAY_DEVICE_OUTPUT_CONTROL_PS_ALLOCATION DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS
759 956
760#define CRT1_OUTPUT_CONTROL_PARAMETERS DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS 957
958#define CRT1_OUTPUT_CONTROL_PARAMETERS DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS
761#define CRT1_OUTPUT_CONTROL_PS_ALLOCATION DISPLAY_DEVICE_OUTPUT_CONTROL_PS_ALLOCATION 959#define CRT1_OUTPUT_CONTROL_PS_ALLOCATION DISPLAY_DEVICE_OUTPUT_CONTROL_PS_ALLOCATION
762 960
763#define CRT2_OUTPUT_CONTROL_PARAMETERS DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS 961#define CRT2_OUTPUT_CONTROL_PARAMETERS DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS
764#define CRT2_OUTPUT_CONTROL_PS_ALLOCATION DISPLAY_DEVICE_OUTPUT_CONTROL_PS_ALLOCATION 962#define CRT2_OUTPUT_CONTROL_PS_ALLOCATION DISPLAY_DEVICE_OUTPUT_CONTROL_PS_ALLOCATION
765 963
766#define CV1_OUTPUT_CONTROL_PARAMETERS DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS 964#define CV1_OUTPUT_CONTROL_PARAMETERS DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS
@@ -782,397 +980,550 @@ typedef struct _DISPLAY_DEVICE_OUTPUT_CONTROL_PARAMETERS {
782#define DVO_OUTPUT_CONTROL_PS_ALLOCATION DIG_TRANSMITTER_CONTROL_PS_ALLOCATION 980#define DVO_OUTPUT_CONTROL_PS_ALLOCATION DIG_TRANSMITTER_CONTROL_PS_ALLOCATION
783#define DVO_OUTPUT_CONTROL_PARAMETERS_V3 DIG_TRANSMITTER_CONTROL_PARAMETERS 981#define DVO_OUTPUT_CONTROL_PARAMETERS_V3 DIG_TRANSMITTER_CONTROL_PARAMETERS
784 982
785/****************************************************************************/ 983/****************************************************************************/
786/* Structures used by BlankCRTCTable */ 984// Structures used by BlankCRTCTable
787/****************************************************************************/ 985/****************************************************************************/
788typedef struct _BLANK_CRTC_PARAMETERS { 986typedef struct _BLANK_CRTC_PARAMETERS
789 UCHAR ucCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 987{
790 UCHAR ucBlanking; /* ATOM_BLANKING or ATOM_BLANKINGOFF */ 988 UCHAR ucCRTC; // ATOM_CRTC1 or ATOM_CRTC2
791 USHORT usBlackColorRCr; 989 UCHAR ucBlanking; // ATOM_BLANKING or ATOM_BLANKINGOFF
792 USHORT usBlackColorGY; 990 USHORT usBlackColorRCr;
793 USHORT usBlackColorBCb; 991 USHORT usBlackColorGY;
794} BLANK_CRTC_PARAMETERS; 992 USHORT usBlackColorBCb;
993}BLANK_CRTC_PARAMETERS;
795#define BLANK_CRTC_PS_ALLOCATION BLANK_CRTC_PARAMETERS 994#define BLANK_CRTC_PS_ALLOCATION BLANK_CRTC_PARAMETERS
796 995
797/****************************************************************************/ 996/****************************************************************************/
798/* Structures used by EnableCRTCTable */ 997// Structures used by EnableCRTCTable
799/* EnableCRTCMemReqTable */ 998// EnableCRTCMemReqTable
800/* UpdateCRTC_DoubleBufferRegistersTable */ 999// UpdateCRTC_DoubleBufferRegistersTable
801/****************************************************************************/ 1000/****************************************************************************/
802typedef struct _ENABLE_CRTC_PARAMETERS { 1001typedef struct _ENABLE_CRTC_PARAMETERS
803 UCHAR ucCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 1002{
804 UCHAR ucEnable; /* ATOM_ENABLE or ATOM_DISABLE */ 1003 UCHAR ucCRTC; // ATOM_CRTC1 or ATOM_CRTC2
805 UCHAR ucPadding[2]; 1004 UCHAR ucEnable; // ATOM_ENABLE or ATOM_DISABLE
806} ENABLE_CRTC_PARAMETERS; 1005 UCHAR ucPadding[2];
1006}ENABLE_CRTC_PARAMETERS;
807#define ENABLE_CRTC_PS_ALLOCATION ENABLE_CRTC_PARAMETERS 1007#define ENABLE_CRTC_PS_ALLOCATION ENABLE_CRTC_PARAMETERS
808 1008
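The blank/enable CRTC blocks are the simplest parameter structures in this file. A minimal packing to switch the first controller on, assuming ATOM_ENABLE and ATOM_CRTC1 are defined earlier in the header:

static void example_enable_crtc(void)
{
	ENABLE_CRTC_PS_ALLOCATION args = {0};

	args.ucCRTC   = 0;           /* ATOM_CRTC1 */
	args.ucEnable = ATOM_ENABLE;
	(void)args;
}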
809/****************************************************************************/ 1009/****************************************************************************/
810/* Structures used by SetCRTC_OverScanTable */ 1010// Structures used by SetCRTC_OverScanTable
811/****************************************************************************/ 1011/****************************************************************************/
812typedef struct _SET_CRTC_OVERSCAN_PARAMETERS { 1012typedef struct _SET_CRTC_OVERSCAN_PARAMETERS
813 USHORT usOverscanRight; /* right */ 1013{
814 USHORT usOverscanLeft; /* left */ 1014 USHORT usOverscanRight; // right
815 USHORT usOverscanBottom; /* bottom */ 1015 USHORT usOverscanLeft; // left
816 USHORT usOverscanTop; /* top */ 1016 USHORT usOverscanBottom; // bottom
817 UCHAR ucCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 1017 USHORT usOverscanTop; // top
818 UCHAR ucPadding[3]; 1018 UCHAR ucCRTC; // ATOM_CRTC1 or ATOM_CRTC2
819} SET_CRTC_OVERSCAN_PARAMETERS; 1019 UCHAR ucPadding[3];
1020}SET_CRTC_OVERSCAN_PARAMETERS;
820#define SET_CRTC_OVERSCAN_PS_ALLOCATION SET_CRTC_OVERSCAN_PARAMETERS 1021#define SET_CRTC_OVERSCAN_PS_ALLOCATION SET_CRTC_OVERSCAN_PARAMETERS
821 1022
822/****************************************************************************/ 1023/****************************************************************************/
823/* Structures used by SetCRTC_ReplicationTable */ 1024// Structures used by SetCRTC_ReplicationTable
824/****************************************************************************/ 1025/****************************************************************************/
825typedef struct _SET_CRTC_REPLICATION_PARAMETERS { 1026typedef struct _SET_CRTC_REPLICATION_PARAMETERS
826 UCHAR ucH_Replication; /* horizontal replication */ 1027{
827 UCHAR ucV_Replication; /* vertical replication */ 1028 UCHAR ucH_Replication; // horizontal replication
828 UCHAR usCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 1029 UCHAR ucV_Replication; // vertical replication
829 UCHAR ucPadding; 1030 UCHAR usCRTC; // ATOM_CRTC1 or ATOM_CRTC2
830} SET_CRTC_REPLICATION_PARAMETERS; 1031 UCHAR ucPadding;
1032}SET_CRTC_REPLICATION_PARAMETERS;
831#define SET_CRTC_REPLICATION_PS_ALLOCATION SET_CRTC_REPLICATION_PARAMETERS 1033#define SET_CRTC_REPLICATION_PS_ALLOCATION SET_CRTC_REPLICATION_PARAMETERS
832 1034
833/****************************************************************************/ 1035/****************************************************************************/
834/* Structures used by SelectCRTC_SourceTable */ 1036// Structures used by SelectCRTC_SourceTable
835/****************************************************************************/ 1037/****************************************************************************/
836typedef struct _SELECT_CRTC_SOURCE_PARAMETERS { 1038typedef struct _SELECT_CRTC_SOURCE_PARAMETERS
837 UCHAR ucCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 1039{
838 UCHAR ucDevice; /* ATOM_DEVICE_CRT1|ATOM_DEVICE_CRT2|.... */ 1040 UCHAR ucCRTC; // ATOM_CRTC1 or ATOM_CRTC2
839 UCHAR ucPadding[2]; 1041 UCHAR ucDevice; // ATOM_DEVICE_CRT1|ATOM_DEVICE_CRT2|....
840} SELECT_CRTC_SOURCE_PARAMETERS; 1042 UCHAR ucPadding[2];
1043}SELECT_CRTC_SOURCE_PARAMETERS;
841#define SELECT_CRTC_SOURCE_PS_ALLOCATION SELECT_CRTC_SOURCE_PARAMETERS 1044#define SELECT_CRTC_SOURCE_PS_ALLOCATION SELECT_CRTC_SOURCE_PARAMETERS
842 1045
843typedef struct _SELECT_CRTC_SOURCE_PARAMETERS_V2 { 1046typedef struct _SELECT_CRTC_SOURCE_PARAMETERS_V2
844 UCHAR ucCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 1047{
845 UCHAR ucEncoderID; /* DAC1/DAC2/TVOUT/DIG1/DIG2/DVO */ 1048 UCHAR ucCRTC; // ATOM_CRTC1 or ATOM_CRTC2
846 UCHAR ucEncodeMode; /* Encoding mode, only valid when using DIG1/DIG2/DVO */ 1049 UCHAR ucEncoderID; // DAC1/DAC2/TVOUT/DIG1/DIG2/DVO
847 UCHAR ucPadding; 1050 UCHAR ucEncodeMode; // Encoding mode, only valid when using DIG1/DIG2/DVO
848} SELECT_CRTC_SOURCE_PARAMETERS_V2; 1051 UCHAR ucPadding;
849 1052}SELECT_CRTC_SOURCE_PARAMETERS_V2;
850/* ucEncoderID */ 1053
851/* #define ASIC_INT_DAC1_ENCODER_ID 0x00 */ 1054//ucEncoderID
852/* #define ASIC_INT_TV_ENCODER_ID 0x02 */ 1055//#define ASIC_INT_DAC1_ENCODER_ID 0x00
853/* #define ASIC_INT_DIG1_ENCODER_ID 0x03 */ 1056//#define ASIC_INT_TV_ENCODER_ID 0x02
854/* #define ASIC_INT_DAC2_ENCODER_ID 0x04 */ 1057//#define ASIC_INT_DIG1_ENCODER_ID 0x03
855/* #define ASIC_EXT_TV_ENCODER_ID 0x06 */ 1058//#define ASIC_INT_DAC2_ENCODER_ID 0x04
856/* #define ASIC_INT_DVO_ENCODER_ID 0x07 */ 1059//#define ASIC_EXT_TV_ENCODER_ID 0x06
857/* #define ASIC_INT_DIG2_ENCODER_ID 0x09 */ 1060//#define ASIC_INT_DVO_ENCODER_ID 0x07
858/* #define ASIC_EXT_DIG_ENCODER_ID 0x05 */ 1061//#define ASIC_INT_DIG2_ENCODER_ID 0x09
859 1062//#define ASIC_EXT_DIG_ENCODER_ID 0x05
860/* ucEncodeMode */ 1063
861/* #define ATOM_ENCODER_MODE_DP 0 */ 1064//ucEncodeMode
862/* #define ATOM_ENCODER_MODE_LVDS 1 */ 1065//#define ATOM_ENCODER_MODE_DP 0
863/* #define ATOM_ENCODER_MODE_DVI 2 */ 1066//#define ATOM_ENCODER_MODE_LVDS 1
864/* #define ATOM_ENCODER_MODE_HDMI 3 */ 1067//#define ATOM_ENCODER_MODE_DVI 2
865/* #define ATOM_ENCODER_MODE_SDVO 4 */ 1068//#define ATOM_ENCODER_MODE_HDMI 3
866/* #define ATOM_ENCODER_MODE_TV 13 */ 1069//#define ATOM_ENCODER_MODE_SDVO 4
867/* #define ATOM_ENCODER_MODE_CV 14 */ 1070//#define ATOM_ENCODER_MODE_TV 13
868/* #define ATOM_ENCODER_MODE_CRT 15 */ 1071//#define ATOM_ENCODER_MODE_CV 14
869 1072//#define ATOM_ENCODER_MODE_CRT 15
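For the V2 source selection, the encoder id and encode mode follow the two commented lists directly above; the sketch below routes CRTC1 to the internal DIG1 encoder in HDMI mode. The 0x03 value is taken from that commented list, and ATOM_CRTC1 is assumed to be defined earlier in this header.

static void example_select_crtc_source_v2(void)
{
	SELECT_CRTC_SOURCE_PARAMETERS_V2 args = {0};

	args.ucCRTC       = 0;                      /* ATOM_CRTC1                                   */
	args.ucEncoderID  = 0x03;                   /* ASIC_INT_DIG1_ENCODER_ID from the list above */
	args.ucEncodeMode = ATOM_ENCODER_MODE_HDMI; /* only meaningful for DIG1/DIG2/DVO            */
	(void)args;
}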
870/****************************************************************************/ 1073
871/* Structures used by SetPixelClockTable */ 1074/****************************************************************************/
872/* GetPixelClockTable */ 1075// Structures used by SetPixelClockTable
873/****************************************************************************/ 1076// GetPixelClockTable
874/* Major revision=1., Minor revision=1 */ 1077/****************************************************************************/
875typedef struct _PIXEL_CLOCK_PARAMETERS { 1078//Major revision=1., Minor revision=1
876 USHORT usPixelClock; /* in 10kHz unit; for bios convenient = (RefClk*FB_Div)/(Ref_Div*Post_Div) */ 1079typedef struct _PIXEL_CLOCK_PARAMETERS
877 /* 0 means disable PPLL */ 1080{
878 USHORT usRefDiv; /* Reference divider */ 1081 USHORT usPixelClock; // in 10kHz unit; for bios convenient = (RefClk*FB_Div)/(Ref_Div*Post_Div)
879 USHORT usFbDiv; /* feedback divider */ 1082 // 0 means disable PPLL
880 UCHAR ucPostDiv; /* post divider */ 1083 USHORT usRefDiv; // Reference divider
881 UCHAR ucFracFbDiv; /* fractional feedback divider */ 1084 USHORT usFbDiv; // feedback divider
882 UCHAR ucPpll; /* ATOM_PPLL1 or ATOM_PPL2 */ 1085 UCHAR ucPostDiv; // post divider
883 UCHAR ucRefDivSrc; /* ATOM_PJITTER or ATO_NONPJITTER */ 1086 UCHAR ucFracFbDiv; // fractional feedback divider
884 UCHAR ucCRTC; /* Which CRTC uses this Ppll */ 1087 UCHAR ucPpll; // ATOM_PPLL1 or ATOM_PPL2
885 UCHAR ucPadding; 1088 UCHAR ucRefDivSrc; // ATOM_PJITTER or ATO_NONPJITTER
886} PIXEL_CLOCK_PARAMETERS; 1089 UCHAR ucCRTC; // Which CRTC uses this Ppll
887 1090 UCHAR ucPadding;
888/* Major revision=1., Minor revision=2, add ucMiscIfno */ 1091}PIXEL_CLOCK_PARAMETERS;
889/* ucMiscInfo: */ 1092
1093//Major revision=1., Minor revision=2, add ucMiscIfno
1094//ucMiscInfo:
890#define MISC_FORCE_REPROG_PIXEL_CLOCK 0x1 1095#define MISC_FORCE_REPROG_PIXEL_CLOCK 0x1
891#define MISC_DEVICE_INDEX_MASK 0xF0 1096#define MISC_DEVICE_INDEX_MASK 0xF0
892#define MISC_DEVICE_INDEX_SHIFT 4 1097#define MISC_DEVICE_INDEX_SHIFT 4
893 1098
894typedef struct _PIXEL_CLOCK_PARAMETERS_V2 { 1099typedef struct _PIXEL_CLOCK_PARAMETERS_V2
895 USHORT usPixelClock; /* in 10kHz unit; for bios convenient = (RefClk*FB_Div)/(Ref_Div*Post_Div) */ 1100{
896 /* 0 means disable PPLL */ 1101 USHORT usPixelClock; // in 10kHz unit; for bios convenient = (RefClk*FB_Div)/(Ref_Div*Post_Div)
897 USHORT usRefDiv; /* Reference divider */ 1102 // 0 means disable PPLL
898 USHORT usFbDiv; /* feedback divider */ 1103 USHORT usRefDiv; // Reference divider
899 UCHAR ucPostDiv; /* post divider */ 1104 USHORT usFbDiv; // feedback divider
900 UCHAR ucFracFbDiv; /* fractional feedback divider */ 1105 UCHAR ucPostDiv; // post divider
901 UCHAR ucPpll; /* ATOM_PPLL1 or ATOM_PPL2 */ 1106 UCHAR ucFracFbDiv; // fractional feedback divider
902 UCHAR ucRefDivSrc; /* ATOM_PJITTER or ATO_NONPJITTER */ 1107 UCHAR ucPpll; // ATOM_PPLL1 or ATOM_PPL2
903 UCHAR ucCRTC; /* Which CRTC uses this Ppll */ 1108 UCHAR ucRefDivSrc; // ATOM_PJITTER or ATO_NONPJITTER
904 UCHAR ucMiscInfo; /* Different bits for different purpose, bit [7:4] as device index, bit[0]=Force prog */ 1109 UCHAR ucCRTC; // Which CRTC uses this Ppll
905} PIXEL_CLOCK_PARAMETERS_V2; 1110 UCHAR ucMiscInfo; // Different bits for different purpose, bit [7:4] as device index, bit[0]=Force prog
906 1111}PIXEL_CLOCK_PARAMETERS_V2;
907/* Major revision=1., Minor revision=3, structure/definition change */ 1112
908/* ucEncoderMode: */ 1113//Major revision=1., Minor revision=3, structure/definition change
909/* ATOM_ENCODER_MODE_DP */ 1114//ucEncoderMode:
910/* ATOM_ENOCDER_MODE_LVDS */ 1115//ATOM_ENCODER_MODE_DP
911/* ATOM_ENOCDER_MODE_DVI */ 1116//ATOM_ENOCDER_MODE_LVDS
912/* ATOM_ENOCDER_MODE_HDMI */ 1117//ATOM_ENOCDER_MODE_DVI
913/* ATOM_ENOCDER_MODE_SDVO */ 1118//ATOM_ENOCDER_MODE_HDMI
914/* ATOM_ENCODER_MODE_TV 13 */ 1119//ATOM_ENOCDER_MODE_SDVO
915/* ATOM_ENCODER_MODE_CV 14 */ 1120//ATOM_ENCODER_MODE_TV 13
916/* ATOM_ENCODER_MODE_CRT 15 */ 1121//ATOM_ENCODER_MODE_CV 14
917 1122//ATOM_ENCODER_MODE_CRT 15
918/* ucDVOConfig */ 1123
919/* #define DVO_ENCODER_CONFIG_RATE_SEL 0x01 */ 1124//ucDVOConfig
920/* #define DVO_ENCODER_CONFIG_DDR_SPEED 0x00 */ 1125//#define DVO_ENCODER_CONFIG_RATE_SEL 0x01
921/* #define DVO_ENCODER_CONFIG_SDR_SPEED 0x01 */ 1126//#define DVO_ENCODER_CONFIG_DDR_SPEED 0x00
922/* #define DVO_ENCODER_CONFIG_OUTPUT_SEL 0x0c */ 1127//#define DVO_ENCODER_CONFIG_SDR_SPEED 0x01
923/* #define DVO_ENCODER_CONFIG_LOW12BIT 0x00 */ 1128//#define DVO_ENCODER_CONFIG_OUTPUT_SEL 0x0c
924/* #define DVO_ENCODER_CONFIG_UPPER12BIT 0x04 */ 1129//#define DVO_ENCODER_CONFIG_LOW12BIT 0x00
925/* #define DVO_ENCODER_CONFIG_24BIT 0x08 */ 1130//#define DVO_ENCODER_CONFIG_UPPER12BIT 0x04
926 1131//#define DVO_ENCODER_CONFIG_24BIT 0x08
927/* ucMiscInfo: also changed, see below */ 1132
1133//ucMiscInfo: also changed, see below
928#define PIXEL_CLOCK_MISC_FORCE_PROG_PPLL 0x01 1134#define PIXEL_CLOCK_MISC_FORCE_PROG_PPLL 0x01
929#define PIXEL_CLOCK_MISC_VGA_MODE 0x02 1135#define PIXEL_CLOCK_MISC_VGA_MODE 0x02
930#define PIXEL_CLOCK_MISC_CRTC_SEL_MASK 0x04 1136#define PIXEL_CLOCK_MISC_CRTC_SEL_MASK 0x04
931#define PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1 0x00 1137#define PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1 0x00
932#define PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2 0x04 1138#define PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2 0x04
933#define PIXEL_CLOCK_MISC_USE_ENGINE_FOR_DISPCLK 0x08 1139#define PIXEL_CLOCK_MISC_USE_ENGINE_FOR_DISPCLK 0x08
1140#define PIXEL_CLOCK_MISC_REF_DIV_SRC 0x10
1141// V1.4 for RoadRunner
1142#define PIXEL_CLOCK_V4_MISC_SS_ENABLE 0x10
1143#define PIXEL_CLOCK_V4_MISC_COHERENT_MODE 0x20
934 1144
935typedef struct _PIXEL_CLOCK_PARAMETERS_V3 { 1145typedef struct _PIXEL_CLOCK_PARAMETERS_V3
936 USHORT usPixelClock; /* in 10kHz unit; for bios convenient = (RefClk*FB_Div)/(Ref_Div*Post_Div) */ 1146{
937 /* 0 means disable PPLL. For VGA PPLL,make sure this value is not 0. */ 1147 USHORT usPixelClock; // in 10kHz unit; for bios convenient = (RefClk*FB_Div)/(Ref_Div*Post_Div)
938 USHORT usRefDiv; /* Reference divider */ 1148 // 0 means disable PPLL. For VGA PPLL,make sure this value is not 0.
939 USHORT usFbDiv; /* feedback divider */ 1149 USHORT usRefDiv; // Reference divider
940 UCHAR ucPostDiv; /* post divider */ 1150 USHORT usFbDiv; // feedback divider
941 UCHAR ucFracFbDiv; /* fractional feedback divider */ 1151 UCHAR ucPostDiv; // post divider
942 UCHAR ucPpll; /* ATOM_PPLL1 or ATOM_PPL2 */ 1152 UCHAR ucFracFbDiv; // fractional feedback divider
943 UCHAR ucTransmitterId; /* graphic encoder id defined in objectId.h */ 1153 UCHAR ucPpll; // ATOM_PPLL1 or ATOM_PPL2
944 union { 1154 UCHAR ucTransmitterId; // graphic encoder id defined in objectId.h
945 UCHAR ucEncoderMode; /* encoder type defined as ATOM_ENCODER_MODE_DP/DVI/HDMI/ */ 1155 union
946 UCHAR ucDVOConfig; /* when use DVO, need to know SDR/DDR, 12bit or 24bit */ 1156 {
1157 UCHAR ucEncoderMode; // encoder type defined as ATOM_ENCODER_MODE_DP/DVI/HDMI/
1158 UCHAR ucDVOConfig; // when use DVO, need to know SDR/DDR, 12bit or 24bit
947 }; 1159 };
948 UCHAR ucMiscInfo; /* bit[0]=Force program, bit[1]= set pclk for VGA, b[2]= CRTC sel */ 1160 UCHAR ucMiscInfo; // bit[0]=Force program, bit[1]= set pclk for VGA, b[2]= CRTC sel
949 /* bit[3]=0:use PPLL for dispclk source, =1: use engine clock for dispclock source */ 1161 // bit[3]=0:use PPLL for dispclk source, =1: use engine clock for dispclock source
950} PIXEL_CLOCK_PARAMETERS_V3; 1162 // bit[4]=0:use XTALIN as the source of reference divider,=1 use the pre-defined clock as the source of reference divider
1163}PIXEL_CLOCK_PARAMETERS_V3;
951 1164
952#define PIXEL_CLOCK_PARAMETERS_LAST PIXEL_CLOCK_PARAMETERS_V2 1165#define PIXEL_CLOCK_PARAMETERS_LAST PIXEL_CLOCK_PARAMETERS_V2
953#define GET_PIXEL_CLOCK_PS_ALLOCATION PIXEL_CLOCK_PARAMETERS_LAST 1166#define GET_PIXEL_CLOCK_PS_ALLOCATION PIXEL_CLOCK_PARAMETERS_LAST
954 1167
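The divider fields relate to the requested clock through the formula quoted in the usPixelClock comment, usPixelClock = RefClk * FB_Div / (Ref_Div * Post_Div), everything in 10 kHz units. A worked example with a hypothetical 27 MHz reference clock (2700): 2700 * 60 / (2 * 6) = 13500, i.e. 135 MHz, which could be packed into the V3 block as follows (ATOM_PPLL1 is assumed to be defined earlier in the header, and the transmitter id is a placeholder).

static void example_set_pixel_clock_v3(void)
{
	PIXEL_CLOCK_PARAMETERS_V3 args = {0};

	args.usPixelClock    = 13500;                 /* 135 MHz = 2700 * 60 / (2 * 6)                   */
	args.usRefDiv        = 2;
	args.usFbDiv         = 60;
	args.ucPostDiv       = 6;
	args.ucFracFbDiv     = 0;
	args.ucPpll          = ATOM_PPLL1;            /* assumed define from earlier in this header      */
	args.ucTransmitterId = 0;                     /* encoder object id from objectId.h (placeholder) */
	args.ucEncoderMode   = ATOM_ENCODER_MODE_DVI;
	args.ucMiscInfo      = PIXEL_CLOCK_MISC_FORCE_PROG_PPLL;
	(void)args;
}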
955/****************************************************************************/ 1168typedef struct _PIXEL_CLOCK_PARAMETERS_V5
956/* Structures used by AdjustDisplayPllTable */ 1169{
957/****************************************************************************/ 1170 UCHAR ucCRTC; // ATOM_CRTC1~6, indicate the CRTC controller to
958typedef struct _ADJUST_DISPLAY_PLL_PARAMETERS { 1171 // drive the pixel clock. not used for DCPLL case.
1172 union{
1173 UCHAR ucReserved;
1174 UCHAR ucFracFbDiv; // [gphan] temporary to prevent build problem. remove it after driver code is changed.
1175 };
1176 USHORT usPixelClock; // target the pixel clock to drive the CRTC timing
1177 // 0 means disable PPLL/DCPLL.
1178 USHORT usFbDiv; // feedback divider integer part.
1179 UCHAR ucPostDiv; // post divider.
1180 UCHAR ucRefDiv; // Reference divider
1181 UCHAR ucPpll; // ATOM_PPLL1/ATOM_PPLL2/ATOM_DCPLL
1182 UCHAR ucTransmitterID; // ASIC encoder id defined in objectId.h,
1183 // indicate which graphic encoder will be used.
1184 UCHAR ucEncoderMode; // Encoder mode:
1185 UCHAR ucMiscInfo; // bit[0]= Force program PPLL
1186 // bit[1]= when VGA timing is used.
1187 // bit[3:2]= HDMI panel bit depth: =0: 24bpp =1:30bpp, =2:32bpp
1188 // bit[4]= RefClock source for PPLL.
1189 // =0: XTLAIN( default mode )
1190 // =1: other external clock source, which is pre-defined
1191 // by VBIOS depend on the feature required.
1192 // bit[7:5]: reserved.
1193 ULONG ulFbDivDecFrac; // 20 bit feedback divider decimal fraction part, range from 1~999999 ( 0.000001 to 0.999999 )
1194
1195}PIXEL_CLOCK_PARAMETERS_V5;
1196
1197#define PIXEL_CLOCK_V5_MISC_FORCE_PROG_PPLL 0x01
1198#define PIXEL_CLOCK_V5_MISC_VGA_MODE 0x02
1199#define PIXEL_CLOCK_V5_MISC_HDMI_BPP_MASK 0x0c
1200#define PIXEL_CLOCK_V5_MISC_HDMI_24BPP 0x00
1201#define PIXEL_CLOCK_V5_MISC_HDMI_30BPP 0x04
1202#define PIXEL_CLOCK_V5_MISC_HDMI_32BPP 0x08
1203#define PIXEL_CLOCK_V5_MISC_REF_DIV_SRC 0x10
1204
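// Illustrative sketch, not part of the original header: how a caller might fill
// PIXEL_CLOCK_PARAMETERS_V5 before executing the SetPixelClock command table.
// The divider values are placeholders (real drivers compute them from the target
// clock and the PLL constraints); ATOM_CRTC1, ATOM_PPLL1 and ATOM_ENCODER_MODE_HDMI
// are assumed to be the constants defined earlier in this header.
static inline void example_fill_pixel_clock_v5(PIXEL_CLOCK_PARAMETERS_V5 *args)
{
  args->ucCRTC          = ATOM_CRTC1;
  args->usPixelClock    = 16200;              // 162.00 MHz, in 10 kHz units
  args->usFbDiv         = 60;                 // placeholder integer feedback divider
  args->ulFbDivDecFrac  = 0;                  // no fractional part
  args->ucPostDiv       = 4;                  // placeholder post divider
  args->ucRefDiv        = 1;                  // placeholder reference divider
  args->ucPpll          = ATOM_PPLL1;
  args->ucTransmitterID = 0;                  // encoder object id from ObjectID.h (placeholder)
  args->ucEncoderMode   = ATOM_ENCODER_MODE_HDMI;
  args->ucMiscInfo      = PIXEL_CLOCK_V5_MISC_FORCE_PROG_PPLL |
                          PIXEL_CLOCK_V5_MISC_HDMI_30BPP;   // bit[3:2] = 01b: 30 bpp deep color
}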
typedef struct _GET_DISP_PLL_STATUS_INPUT_PARAMETERS_V2
{
  PIXEL_CLOCK_PARAMETERS_V3 sDispClkInput;
}GET_DISP_PLL_STATUS_INPUT_PARAMETERS_V2;

typedef struct _GET_DISP_PLL_STATUS_OUTPUT_PARAMETERS_V2
{
  UCHAR ucStatus;
  UCHAR ucRefDivSrc;        // =1: reference clock source from XTALIN, =0: source from PCIE ref clock
  UCHAR ucReserved[2];
}GET_DISP_PLL_STATUS_OUTPUT_PARAMETERS_V2;

typedef struct _GET_DISP_PLL_STATUS_INPUT_PARAMETERS_V3
{
  PIXEL_CLOCK_PARAMETERS_V5 sDispClkInput;
}GET_DISP_PLL_STATUS_INPUT_PARAMETERS_V3;

/****************************************************************************/
// Structures used by AdjustDisplayPllTable
/****************************************************************************/
typedef struct _ADJUST_DISPLAY_PLL_PARAMETERS
{
  USHORT usPixelClock;
  UCHAR  ucTransmitterID;
  UCHAR  ucEncodeMode;
  union
  {
    UCHAR ucDVOConfig;      // if DVO, need passing link rate and output 12bit low or 24bit
    UCHAR ucConfig;         // if non-DVO, not defined yet
  };
  UCHAR  ucReserved[3];
}ADJUST_DISPLAY_PLL_PARAMETERS;

#define ADJUST_DISPLAY_CONFIG_SS_ENABLE     0x10
#define ADJUST_DISPLAY_PLL_PS_ALLOCATION    ADJUST_DISPLAY_PLL_PARAMETERS

typedef struct _ADJUST_DISPLAY_PLL_INPUT_PARAMETERS_V3
{
  USHORT usPixelClock;      // target pixel clock
  UCHAR  ucTransmitterID;   // transmitter id defined in objectid.h
  UCHAR  ucEncodeMode;      // encoder mode: CRT, LVDS, DP, TMDS or HDMI
  UCHAR  ucDispPllConfig;   // display pll configure parameter defined as following DISPPLL_CONFIG_XXXX
  UCHAR  ucReserved[3];
}ADJUST_DISPLAY_PLL_INPUT_PARAMETERS_V3;

// usDispPllConfig v1.2 for RoadRunner
#define DISPPLL_CONFIG_DVO_RATE_SEL         0x0001     // needed only when ucTransmitterID = DVO
#define DISPPLL_CONFIG_DVO_DDR_SPEED        0x0000     // needed only when ucTransmitterID = DVO
#define DISPPLL_CONFIG_DVO_SDR_SPEED        0x0001     // needed only when ucTransmitterID = DVO
#define DISPPLL_CONFIG_DVO_OUTPUT_SEL       0x000c     // needed only when ucTransmitterID = DVO
#define DISPPLL_CONFIG_DVO_LOW12BIT         0x0000     // needed only when ucTransmitterID = DVO
#define DISPPLL_CONFIG_DVO_UPPER12BIT       0x0004     // needed only when ucTransmitterID = DVO
#define DISPPLL_CONFIG_DVO_24BIT            0x0008     // needed only when ucTransmitterID = DVO
#define DISPPLL_CONFIG_SS_ENABLE            0x0010     // only used when ucEncoderMode = DP or LVDS
#define DISPPLL_CONFIG_COHERENT_MODE        0x0020     // only used when ucEncoderMode = TMDS or HDMI
#define DISPPLL_CONFIG_DUAL_LINK            0x0040     // only used when ucEncoderMode = TMDS or LVDS


typedef struct _ADJUST_DISPLAY_PLL_OUTPUT_PARAMETERS_V3
{
  ULONG ulDispPllFreq;      // return display PPLL freq which is used to generate the pixclock, and related idclk, symclk etc
  UCHAR ucRefDiv;           // if non-zero, it is used to calculate the other PPLL parameters fb_divider and post_div (if they are not given)
  UCHAR ucPostDiv;          // if non-zero, it is used to calculate the other PPLL parameter fb_divider
  UCHAR ucReserved[2];
}ADJUST_DISPLAY_PLL_OUTPUT_PARAMETERS_V3;

typedef struct _ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3
{
  union
  {
    ADJUST_DISPLAY_PLL_INPUT_PARAMETERS_V3  sInput;
    ADJUST_DISPLAY_PLL_OUTPUT_PARAMETERS_V3 sOutput;
  };
} ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3;

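// Illustrative sketch, not part of the original header: the V3 AdjustDisplayPll
// interface reuses a single parameter-space allocation for both directions --
// the caller fills sInput, asks the VBIOS to run AdjustDisplayPllTable, then
// reads the adjusted clock back from sOutput.  The pixel clock and transmitter
// id below are placeholders; ATOM_ENCODER_MODE_DVI is assumed to be the
// constant defined earlier in this header.
static inline ULONG example_adjust_display_pll_v3(ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 *args)
{
  args->sInput.usPixelClock    = 10800;                       // 108.00 MHz in 10 kHz units
  args->sInput.ucTransmitterID = 0;                           // encoder object id from ObjectID.h (placeholder)
  args->sInput.ucEncodeMode    = ATOM_ENCODER_MODE_DVI;
  args->sInput.ucDispPllConfig = DISPPLL_CONFIG_COHERENT_MODE |
                                 DISPPLL_CONFIG_DUAL_LINK;    // dual-link TMDS, coherent mode
  // ... here the driver would execute the AdjustDisplayPll command table ...
  return args->sOutput.ulDispPllFreq;                         // adjusted PPLL frequency chosen by the VBIOS
}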
/****************************************************************************/
// Structures used by EnableYUVTable
/****************************************************************************/
typedef struct _ENABLE_YUV_PARAMETERS
{
  UCHAR ucEnable;           // ATOM_ENABLE: Enable YUV or ATOM_DISABLE: Disable YUV (RGB)
  UCHAR ucCRTC;             // Which CRTC needs this YUV or RGB format
  UCHAR ucPadding[2];
}ENABLE_YUV_PARAMETERS;
#define ENABLE_YUV_PS_ALLOCATION ENABLE_YUV_PARAMETERS

/****************************************************************************/
// Structures used by GetMemoryClockTable
/****************************************************************************/
typedef struct _GET_MEMORY_CLOCK_PARAMETERS
{
  ULONG ulReturnMemoryClock;    // current memory speed in 10KHz unit
} GET_MEMORY_CLOCK_PARAMETERS;
#define GET_MEMORY_CLOCK_PS_ALLOCATION  GET_MEMORY_CLOCK_PARAMETERS

/****************************************************************************/
// Structures used by GetEngineClockTable
/****************************************************************************/
typedef struct _GET_ENGINE_CLOCK_PARAMETERS
{
  ULONG ulReturnEngineClock;    // current engine speed in 10KHz unit
} GET_ENGINE_CLOCK_PARAMETERS;
#define GET_ENGINE_CLOCK_PS_ALLOCATION  GET_ENGINE_CLOCK_PARAMETERS

/****************************************************************************/
// The following structures and constants may be obsolete
/****************************************************************************/
// Maximum 8 bytes; the data read in will be placed in the parameter space.
// A read operation is successful when the parameter space is non-zero, otherwise the read operation failed.
typedef struct _READ_EDID_FROM_HW_I2C_DATA_PARAMETERS
{
  USHORT usPrescale;        // Ratio between Engine clock and I2C clock
  USHORT usVRAMAddress;     // Address in Frame Buffer where to place raw EDID
  USHORT usStatus;          // When used as output: lower byte EDID checksum, high byte hardware status
                            // When used as input: lower byte as 'bytes to read': currently limited to 128 bytes or 1 byte
  UCHAR  ucSlaveAddr;       // Read from which slave
  UCHAR  ucLineNumber;      // Read from which HW assisted line
}READ_EDID_FROM_HW_I2C_DATA_PARAMETERS;
#define READ_EDID_FROM_HW_I2C_DATA_PS_ALLOCATION  READ_EDID_FROM_HW_I2C_DATA_PARAMETERS
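// Illustrative sketch, not part of the original header: asking the VBIOS to read
// a full 128-byte EDID block over an HW-assisted I2C line into a frame-buffer
// scratch area.  The prescale ratio, line number and VRAM offset are board
// dependent placeholders; 0xA0 is the standard DDC EDID slave address.
static inline void example_request_hw_i2c_edid(READ_EDID_FROM_HW_I2C_DATA_PARAMETERS *args)
{
  args->usPrescale    = 0x7f;     // engine clock / I2C clock ratio, placeholder
  args->usVRAMAddress = 0;        // frame-buffer offset that receives the raw EDID, placeholder
  args->usStatus      = 128;      // on input: number of bytes to read (128 or 1)
  args->ucSlaveAddr   = 0xA0;     // DDC EDID slave address
  args->ucLineNumber  = 0;        // which HW-assisted I2C line, placeholder
}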

#define ATOM_WRITE_I2C_FORMAT_PSOFFSET_PSDATABYTE                  0
#define ATOM_WRITE_I2C_FORMAT_PSOFFSET_PSTWODATABYTES              1
#define ATOM_WRITE_I2C_FORMAT_PSCOUNTER_PSOFFSET_IDDATABLOCK       2
#define ATOM_WRITE_I2C_FORMAT_PSCOUNTER_IDOFFSET_PLUS_IDDATABLOCK  3
#define ATOM_WRITE_I2C_FORMAT_IDCOUNTER_IDOFFSET_IDDATABLOCK       4

typedef struct _WRITE_ONE_BYTE_HW_I2C_DATA_PARAMETERS
{
  USHORT usPrescale;        // Ratio between Engine clock and I2C clock
  USHORT usByteOffset;      // Write to which byte
                            // Upper portion of usByteOffset is Format of data
                            // 1bytePS+offsetPS
                            // 2bytesPS+offsetPS
                            // blockID+offsetPS
                            // blockID+offsetID
                            // blockID+counterID+offsetID
  UCHAR  ucData;            // PS data1
  UCHAR  ucStatus;          // Status byte 1=success, 2=failure, also is used as PS data2
  UCHAR  ucSlaveAddr;       // Write to which slave
  UCHAR  ucLineNumber;      // Write from which HW assisted line
}WRITE_ONE_BYTE_HW_I2C_DATA_PARAMETERS;

#define WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION  WRITE_ONE_BYTE_HW_I2C_DATA_PARAMETERS

typedef struct _SET_UP_HW_I2C_DATA_PARAMETERS
{
  USHORT usPrescale;        // Ratio between Engine clock and I2C clock
  UCHAR  ucSlaveAddr;       // Write to which slave
  UCHAR  ucLineNumber;      // Write from which HW assisted line
}SET_UP_HW_I2C_DATA_PARAMETERS;


/**************************************************************************/
#define SPEED_FAN_CONTROL_PS_ALLOCATION   WRITE_ONE_BYTE_HW_I2C_DATA_PARAMETERS
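// Illustrative sketch, not part of the original header: SPEED_FAN_CONTROL reuses
// the one-byte HW I2C write, so driving an external I2C fan controller amounts
// to filling WRITE_ONE_BYTE_HW_I2C_DATA_PARAMETERS.  The slave address, register
// offset and duty-cycle value are placeholders for a hypothetical controller,
// and placing the ATOM_WRITE_I2C_FORMAT selector in the high byte of
// usByteOffset is an assumption based on the comment above.
static inline void example_fan_control_write(WRITE_ONE_BYTE_HW_I2C_DATA_PARAMETERS *args,
                                             UCHAR duty_cycle)
{
  args->usPrescale   = 0x7f;                                          // engine/I2C clock ratio, placeholder
  args->usByteOffset = (ATOM_WRITE_I2C_FORMAT_PSOFFSET_PSDATABYTE << 8) | 0x01;  // assumed: format in high byte, register 0x01 in low byte
  args->ucData       = duty_cycle;                                    // PS data1: fan duty cycle
  args->ucStatus     = 0;                                             // filled in by the VBIOS (1=success, 2=failure)
  args->ucSlaveAddr  = 0x5A;                                          // hypothetical fan controller slave address
  args->ucLineNumber = 0;                                             // HW assisted I2C line, placeholder
}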

/****************************************************************************/
// Structures used by PowerConnectorDetectionTable
/****************************************************************************/
typedef struct _POWER_CONNECTOR_DETECTION_PARAMETERS
{
  UCHAR  ucPowerConnectorStatus;    // Used for return value 0: detected, 1: not detected
  UCHAR  ucPwrBehaviorId;
  USHORT usPwrBudget;               // how much power currently boot to in unit of watt
}POWER_CONNECTOR_DETECTION_PARAMETERS;

typedef struct POWER_CONNECTOR_DETECTION_PS_ALLOCATION
{
  UCHAR  ucPowerConnectorStatus;    // Used for return value 0: detected, 1: not detected
  UCHAR  ucReserved;
  USHORT usPwrBudget;               // how much power currently boot to in unit of watt
  WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION sReserved;
}POWER_CONNECTOR_DETECTION_PS_ALLOCATION;

/****************************LVDS SS Command Table Definitions**********************/

/****************************************************************************/
// Structures used by EnableSpreadSpectrumOnPPLLTable
/****************************************************************************/
typedef struct _ENABLE_LVDS_SS_PARAMETERS
{
  USHORT usSpreadSpectrumPercentage;
  UCHAR  ucSpreadSpectrumType;           // Bit1=0 Down Spread,=1 Center Spread. Bit1=1 Ext. =0 Int. Others:TBD
  UCHAR  ucSpreadSpectrumStepSize_Delay; // bits3:2 SS_STEP_SIZE; bit 6:4 SS_DELAY
  UCHAR  ucEnable;                       // ATOM_ENABLE or ATOM_DISABLE
  UCHAR  ucPadding[3];
}ENABLE_LVDS_SS_PARAMETERS;

// ucTableFormatRevision=1, ucTableContentRevision=2
typedef struct _ENABLE_LVDS_SS_PARAMETERS_V2
{
  USHORT usSpreadSpectrumPercentage;
  UCHAR  ucSpreadSpectrumType;      // Bit1=0 Down Spread,=1 Center Spread. Bit1=1 Ext. =0 Int. Others:TBD
  UCHAR  ucSpreadSpectrumStep;      //
  UCHAR  ucEnable;                  // ATOM_ENABLE or ATOM_DISABLE
  UCHAR  ucSpreadSpectrumDelay;
  UCHAR  ucSpreadSpectrumRange;
  UCHAR  ucPadding;
}ENABLE_LVDS_SS_PARAMETERS_V2;

// This new structure is based on ENABLE_LVDS_SS_PARAMETERS but expands to SS on PPLL, so other devices can use SS.
typedef struct _ENABLE_SPREAD_SPECTRUM_ON_PPLL
{
  USHORT usSpreadSpectrumPercentage;
  UCHAR  ucSpreadSpectrumType;      // Bit1=0 Down Spread,=1 Center Spread. Bit1=1 Ext. =0 Int. Others:TBD
  UCHAR  ucSpreadSpectrumStep;      //
  UCHAR  ucEnable;                  // ATOM_ENABLE or ATOM_DISABLE
  UCHAR  ucSpreadSpectrumDelay;
  UCHAR  ucSpreadSpectrumRange;
  UCHAR  ucPpll;                    // ATOM_PPLL1/ATOM_PPLL2
}ENABLE_SPREAD_SPECTRUM_ON_PPLL;

typedef struct _ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2
{
  USHORT usSpreadSpectrumPercentage;
  UCHAR  ucSpreadSpectrumType;      // Bit[0]: 0-Down Spread, 1-Center Spread.
                                    // Bit[1]: 1-Ext. 0-Int.
                                    // Bit[3:2]: =0 P1PLL =1 P2PLL =2 DCPLL
                                    // Bits[7:4] reserved
  UCHAR  ucEnable;                  // ATOM_ENABLE or ATOM_DISABLE
  USHORT usSpreadSpectrumAmount;    // Includes SS_AMOUNT_FBDIV[7:0] and SS_AMOUNT_NFRAC_SLIP[11:8]
  USHORT usSpreadSpectrumStep;      // SS_STEP_SIZE_DSFRAC
}ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2;

#define ATOM_PPLL_SS_TYPE_V2_DOWN_SPREAD      0x00
#define ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD    0x01
#define ATOM_PPLL_SS_TYPE_V2_EXT_SPREAD       0x02
#define ATOM_PPLL_SS_TYPE_V2_PPLL_SEL_MASK    0x0c
#define ATOM_PPLL_SS_TYPE_V2_P1PLL            0x00
#define ATOM_PPLL_SS_TYPE_V2_P2PLL            0x04
#define ATOM_PPLL_SS_TYPE_V2_DCPLL            0x08
#define ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK     0x00FF
#define ATOM_PPLL_SS_AMOUNT_V2_FBDIV_SHIFT    0
#define ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK     0x0F00
#define ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT    8

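// Illustrative sketch, not part of the original header: packing the V2 spread
// spectrum arguments for the DCPLL using the type bits and the FBDIV/NFRAC
// amount fields defined above.  The percentage, amount and step values are
// placeholders; ATOM_ENABLE is assumed to be the constant defined earlier in
// this header.
static inline void example_enable_ss_on_dcpll(ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 *args,
                                              USHORT fbdiv_amount, USHORT nfrac_amount)
{
  args->usSpreadSpectrumPercentage = 25;      // placeholder, in the unit convention used by the caller
  args->ucSpreadSpectrumType = ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD |
                               ATOM_PPLL_SS_TYPE_V2_DCPLL;
  args->usSpreadSpectrumAmount =
      ((fbdiv_amount << ATOM_PPLL_SS_AMOUNT_V2_FBDIV_SHIFT) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK) |
      ((nfrac_amount << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) & ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK);
  args->usSpreadSpectrumStep = 0;             // SS_STEP_SIZE_DSFRAC, placeholder
  args->ucEnable = ATOM_ENABLE;
}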
#define ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION  ENABLE_SPREAD_SPECTRUM_ON_PPLL

/**************************************************************************/

typedef struct _SET_PIXEL_CLOCK_PS_ALLOCATION
{
  PIXEL_CLOCK_PARAMETERS sPCLKInput;
  ENABLE_SPREAD_SPECTRUM_ON_PPLL sReserved;   // Caller doesn't need to init this portion
}SET_PIXEL_CLOCK_PS_ALLOCATION;

#define ENABLE_VGA_RENDER_PS_ALLOCATION   SET_PIXEL_CLOCK_PS_ALLOCATION

/****************************************************************************/
// Structures used by ###
/****************************************************************************/
typedef struct _MEMORY_TRAINING_PARAMETERS
{
  ULONG ulTargetMemoryClock;    // In 10Khz unit
}MEMORY_TRAINING_PARAMETERS;
#define MEMORY_TRAINING_PS_ALLOCATION MEMORY_TRAINING_PARAMETERS

/****************************LVDS and other encoder command table definitions **********************/


/****************************************************************************/
// Structures used by LVDSEncoderControlTable   (Before DCE30)
//                    LVTMAEncoderControlTable  (Before DCE30)
//                    TMDSAEncoderControlTable  (Before DCE30)
/****************************************************************************/
typedef struct _LVDS_ENCODER_CONTROL_PARAMETERS
{
  USHORT usPixelClock;      // in 10KHz; for BIOS convenience
  UCHAR  ucMisc;            // bit0=0: Enable single link
                            //     =1: Enable dual link
                            // Bit1=0: 666RGB
                            //     =1: 888RGB
  UCHAR  ucAction;          // 0: turn off encoder
                            // 1: setup and turn on encoder
}LVDS_ENCODER_CONTROL_PARAMETERS;

#define LVDS_ENCODER_CONTROL_PS_ALLOCATION  LVDS_ENCODER_CONTROL_PARAMETERS

#define TMDS1_ENCODER_CONTROL_PARAMETERS    LVDS_ENCODER_CONTROL_PARAMETERS
#define TMDS1_ENCODER_CONTROL_PS_ALLOCATION TMDS1_ENCODER_CONTROL_PARAMETERS

#define TMDS2_ENCODER_CONTROL_PARAMETERS    TMDS1_ENCODER_CONTROL_PARAMETERS
#define TMDS2_ENCODER_CONTROL_PS_ALLOCATION TMDS2_ENCODER_CONTROL_PARAMETERS


// ucTableFormatRevision=1, ucTableContentRevision=2
typedef struct _LVDS_ENCODER_CONTROL_PARAMETERS_V2
{
  USHORT usPixelClock;      // in 10KHz; for BIOS convenience
  UCHAR  ucMisc;            // see PANEL_ENCODER_MISC_xx definitions below
  UCHAR  ucAction;          // 0: turn off encoder
                            // 1: setup and turn on encoder
  UCHAR  ucTruncate;        // bit0=0: Disable truncate
                            //     =1: Enable truncate
                            // bit4=0: 666RGB
                            //     =1: 888RGB
  UCHAR  ucSpatial;         // bit0=0: Disable spatial dithering
                            //     =1: Enable spatial dithering
                            // bit4=0: 666RGB
                            //     =1: 888RGB
  UCHAR  ucTemporal;        // bit0=0: Disable temporal dithering
                            //     =1: Enable temporal dithering
                            // bit4=0: 666RGB
                            //     =1: 888RGB
                            // bit5=0: Gray level 2
                            //     =1: Gray level 4
  UCHAR  ucFRC;             // bit4=0: 25FRC_SEL pattern E
                            //     =1: 25FRC_SEL pattern F
                            // bit6:5=0: 50FRC_SEL pattern A
                            //       =1: 50FRC_SEL pattern B
                            //       =2: 50FRC_SEL pattern C
                            //       =3: 50FRC_SEL pattern D
                            // bit7=0: 75FRC_SEL pattern E
                            //     =1: 75FRC_SEL pattern F
}LVDS_ENCODER_CONTROL_PARAMETERS_V2;

#define LVDS_ENCODER_CONTROL_PS_ALLOCATION_V2  LVDS_ENCODER_CONTROL_PARAMETERS_V2

#define TMDS1_ENCODER_CONTROL_PARAMETERS_V2    LVDS_ENCODER_CONTROL_PARAMETERS_V2
#define TMDS1_ENCODER_CONTROL_PS_ALLOCATION_V2 TMDS1_ENCODER_CONTROL_PARAMETERS_V2

#define TMDS2_ENCODER_CONTROL_PARAMETERS_V2    TMDS1_ENCODER_CONTROL_PARAMETERS_V2
#define TMDS2_ENCODER_CONTROL_PS_ALLOCATION_V2 TMDS2_ENCODER_CONTROL_PARAMETERS_V2

@@ -1185,38 +1536,42 @@ typedef struct _LVDS_ENCODER_CONTROL_PARAMETERS_V2 {
#define TMDS2_ENCODER_CONTROL_PARAMETERS_V3    LVDS_ENCODER_CONTROL_PARAMETERS_V3
#define TMDS2_ENCODER_CONTROL_PS_ALLOCATION_V3 TMDS2_ENCODER_CONTROL_PARAMETERS_V3

/****************************************************************************/
// Structures used by ###
/****************************************************************************/
typedef struct _ENABLE_EXTERNAL_TMDS_ENCODER_PARAMETERS
{
  UCHAR ucEnable;           // Enable or Disable External TMDS encoder
  UCHAR ucMisc;             // Bit0=0: Enable Single link; =1: Enable Dual link; Bit1 {=0: 666RGB, =1: 888RGB}
  UCHAR ucPadding[2];
}ENABLE_EXTERNAL_TMDS_ENCODER_PARAMETERS;

typedef struct _ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION
{
  ENABLE_EXTERNAL_TMDS_ENCODER_PARAMETERS    sXTmdsEncoder;
  WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION   sReserved;     // Caller doesn't need to init this portion
}ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION;

#define ENABLE_EXTERNAL_TMDS_ENCODER_PARAMETERS_V2  LVDS_ENCODER_CONTROL_PARAMETERS_V2

typedef struct _ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION_V2
{
  ENABLE_EXTERNAL_TMDS_ENCODER_PARAMETERS_V2 sXTmdsEncoder;
  WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION   sReserved;     // Caller doesn't need to init this portion
}ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION_V2;

typedef struct _EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION
{
  DIG_ENCODER_CONTROL_PARAMETERS             sDigEncoder;
  WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION   sReserved;
}EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION;

/****************************************************************************/
// Structures used by DVOEncoderControlTable
/****************************************************************************/
// ucTableFormatRevision=1, ucTableContentRevision=3

// ucDVOConfig:
#define DVO_ENCODER_CONFIG_RATE_SEL         0x01
#define DVO_ENCODER_CONFIG_DDR_SPEED        0x00
#define DVO_ENCODER_CONFIG_SDR_SPEED        0x01
@@ -1225,21 +1580,22 @@ typedef struct _EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION {
#define DVO_ENCODER_CONFIG_UPPER12BIT       0x04
#define DVO_ENCODER_CONFIG_24BIT            0x08

typedef struct _DVO_ENCODER_CONTROL_PARAMETERS_V3
{
  USHORT usPixelClock;
  UCHAR  ucDVOConfig;
  UCHAR  ucAction;          // ATOM_ENABLE/ATOM_DISABLE/ATOM_HPD_INIT
  UCHAR  ucReseved[4];
}DVO_ENCODER_CONTROL_PARAMETERS_V3;
#define DVO_ENCODER_CONTROL_PS_ALLOCATION_V3  DVO_ENCODER_CONTROL_PARAMETERS_V3

// ucTableFormatRevision=1
// ucTableContentRevision=3  structure is not changed but usMisc adds bit 1 as another input for
//                           bit1=0: non-coherent mode
//                               =1: coherent mode

//==========================================================================================
// Only change is here next time when changing encoder parameter definitions again!
#define LVDS_ENCODER_CONTROL_PARAMETERS_LAST    LVDS_ENCODER_CONTROL_PARAMETERS_V3
#define LVDS_ENCODER_CONTROL_PS_ALLOCATION_LAST LVDS_ENCODER_CONTROL_PARAMETERS_LAST

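// Illustrative sketch, not part of the original header: building ucDVOConfig for
// DVO_ENCODER_CONTROL_PARAMETERS_V3 from the DVO_ENCODER_CONFIG_xxx bits above --
// here SDR speed with the upper 12-bit output.  ATOM_ENABLE is assumed to be the
// constant defined earlier in this header; the pixel clock is a placeholder.
static inline void example_dvo_encoder_control_v3(DVO_ENCODER_CONTROL_PARAMETERS_V3 *args)
{
  args->usPixelClock = 16500;                           // 165.00 MHz in 10 kHz units, placeholder
  args->ucDVOConfig  = DVO_ENCODER_CONFIG_SDR_SPEED |
                       DVO_ENCODER_CONFIG_UPPER12BIT;   // rate select + output select bits
  args->ucAction     = ATOM_ENABLE;
}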
@@ -1252,7 +1608,7 @@ typedef struct _DVO_ENCODER_CONTROL_PARAMETERS_V3 {
#define DVO_ENCODER_CONTROL_PARAMETERS_LAST     DVO_ENCODER_CONTROL_PARAMETERS
#define DVO_ENCODER_CONTROL_PS_ALLOCATION_LAST  DVO_ENCODER_CONTROL_PS_ALLOCATION

//==========================================================================================
#define PANEL_ENCODER_MISC_DUAL               0x01
#define PANEL_ENCODER_MISC_COHERENT           0x02
#define PANEL_ENCODER_MISC_TMDS_LINKB         0x04
@@ -1281,159 +1637,159 @@ typedef struct _DVO_ENCODER_CONTROL_PARAMETERS_V3 {
#define PANEL_ENCODER_75FRC_E                 0x00
#define PANEL_ENCODER_75FRC_F                 0x80

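// Illustrative sketch, not part of the original header: programming a dual-link,
// coherent panel through LVDS_ENCODER_CONTROL_PARAMETERS_V2 using the
// PANEL_ENCODER_MISC_xx bits above.  ATOM_ENABLE is assumed to be the constant
// defined earlier in this header; the pixel clock is a placeholder and the
// dithering/FRC fields are simply left disabled.
static inline void example_lvds_encoder_control_v2(LVDS_ENCODER_CONTROL_PARAMETERS_V2 *args)
{
  args->usPixelClock = 26850;                       // 268.50 MHz in 10 kHz units, placeholder
  args->ucMisc       = PANEL_ENCODER_MISC_DUAL |
                       PANEL_ENCODER_MISC_COHERENT; // dual link, coherent mode
  args->ucAction     = ATOM_ENABLE;                 // 1: setup and turn on encoder
  args->ucTruncate   = 0;                           // no truncation
  args->ucSpatial    = 0;                           // no spatial dithering
  args->ucTemporal   = 0;                           // no temporal dithering
  args->ucFRC        = 0;                           // default FRC patterns
}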
/****************************************************************************/
// Structures used by SetVoltageTable
/****************************************************************************/
#define SET_VOLTAGE_TYPE_ASIC_VDDC            1
#define SET_VOLTAGE_TYPE_ASIC_MVDDC           2
#define SET_VOLTAGE_TYPE_ASIC_MVDDQ           3
#define SET_VOLTAGE_TYPE_ASIC_VDDCI           4
#define SET_VOLTAGE_INIT_MODE                 5
#define SET_VOLTAGE_GET_MAX_VOLTAGE           6   // Gets the Max. voltage for the soldered Asic

#define SET_ASIC_VOLTAGE_MODE_ALL_SOURCE      0x1
#define SET_ASIC_VOLTAGE_MODE_SOURCE_A        0x2
#define SET_ASIC_VOLTAGE_MODE_SOURCE_B        0x4

#define SET_ASIC_VOLTAGE_MODE_SET_VOLTAGE     0x0
#define SET_ASIC_VOLTAGE_MODE_GET_GPIOVAL     0x1
#define SET_ASIC_VOLTAGE_MODE_GET_GPIOMASK    0x2

typedef struct _SET_VOLTAGE_PARAMETERS
{
  UCHAR ucVoltageType;      // To tell which voltage to set up, VDDC/MVDDC/MVDDQ
  UCHAR ucVoltageMode;      // To set all, to set source A or source B or ...
  UCHAR ucVoltageIndex;     // An index to tell which voltage level
  UCHAR ucReserved;
}SET_VOLTAGE_PARAMETERS;
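// Illustrative sketch, not part of the original header: selecting a core (VDDC)
// voltage level through the SetVoltage interface.  The voltage index is a
// placeholder -- it refers to an entry in the board's voltage table, not to a
// value in millivolts.
static inline void example_set_vddc_level(SET_VOLTAGE_PARAMETERS *args, UCHAR voltage_index)
{
  args->ucVoltageType  = SET_VOLTAGE_TYPE_ASIC_VDDC;
  args->ucVoltageMode  = SET_ASIC_VOLTAGE_MODE_SET_VOLTAGE;
  args->ucVoltageIndex = voltage_index;     // index into the voltage table, placeholder
  args->ucReserved     = 0;
}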

typedef struct _SET_VOLTAGE_PARAMETERS_V2
{
  UCHAR  ucVoltageType;     // To tell which voltage to set up, VDDC/MVDDC/MVDDQ
  UCHAR  ucVoltageMode;     // Not used, maybe use for state machine for different power mode
  USHORT usVoltageLevel;    // real voltage level
}SET_VOLTAGE_PARAMETERS_V2;

typedef struct _SET_VOLTAGE_PS_ALLOCATION
{
  SET_VOLTAGE_PARAMETERS sASICSetVoltage;
  WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION sReserved;
}SET_VOLTAGE_PS_ALLOCATION;

/****************************************************************************/
// Structures used by TVEncoderControlTable
/****************************************************************************/
typedef struct _TV_ENCODER_CONTROL_PARAMETERS
{
  USHORT usPixelClock;      // in 10KHz; for BIOS convenience
  UCHAR  ucTvStandard;      // See definition "ATOM_TV_NTSC ..."
  UCHAR  ucAction;          // 0: turn off encoder
                            // 1: setup and turn on encoder
}TV_ENCODER_CONTROL_PARAMETERS;

typedef struct _TV_ENCODER_CONTROL_PS_ALLOCATION
{
  TV_ENCODER_CONTROL_PARAMETERS sTVEncoder;
  WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION sReserved;   // Don't set this one
}TV_ENCODER_CONTROL_PS_ALLOCATION;

//==============================Data Table Portion====================================

/****************************************************************************/
// Structure used in Data.mtb
/****************************************************************************/
typedef struct _ATOM_MASTER_LIST_OF_DATA_TABLES
{
  USHORT UtilityPipeLine;          // Offset for the utility to get parser info, don't change this position!
  USHORT MultimediaCapabilityInfo; // Only used by MM Lib, latest version 1.1, not configurable from Bios, need to include the table to build Bios
  USHORT MultimediaConfigInfo;     // Only used by MM Lib, latest version 2.1, not configurable from Bios, need to include the table to build Bios
  USHORT StandardVESA_Timing;      // Only used by Bios
  USHORT FirmwareInfo;             // Shared by various SW components, latest version 1.4
  USHORT DAC_Info;                 // Will be obsolete from R600
  USHORT LVDS_Info;                // Shared by various SW components, latest version 1.1
  USHORT TMDS_Info;                // Will be obsolete from R600
  USHORT AnalogTV_Info;            // Shared by various SW components, latest version 1.1
  USHORT SupportedDevicesInfo;     // Will be obsolete from R600
  USHORT GPIO_I2C_Info;            // Shared by various SW components, latest version 1.2 will be used from R600
  USHORT VRAM_UsageByFirmware;     // Shared by various SW components, latest version 1.3 will be used from R600
  USHORT GPIO_Pin_LUT;             // Shared by various SW components, latest version 1.1
  USHORT VESA_ToInternalModeLUT;   // Only used by Bios
  USHORT ComponentVideoInfo;       // Shared by various SW components, latest version 2.1 will be used from R600
  USHORT PowerPlayInfo;            // Shared by various SW components, latest version 2.1, new design from R600
  USHORT CompassionateData;        // Will be obsolete from R600
  USHORT SaveRestoreInfo;          // Only used by Bios
  USHORT PPLL_SS_Info;             // Shared by various SW components, latest version 1.2, used to call SS_Info, changed to new name because of internal ASIC SS info
  USHORT OemInfo;                  // Defined and used by external SW, should be obsolete soon
  USHORT XTMDS_Info;               // Will be obsolete from R600
  USHORT MclkSS_Info;              // Shared by various SW components, latest version 1.1, only enabled when ext SS chip is used
  USHORT Object_Header;            // Shared by various SW components, latest version 1.1
  USHORT IndirectIOAccess;         // Only used by Bios, this table position can't change at all!!
  USHORT MC_InitParameter;         // Only used by command table
  USHORT ASIC_VDDC_Info;           // Will be obsolete from R600
  USHORT ASIC_InternalSS_Info;     // New table name from R600, used to be called "ASIC_MVDDC_Info"
  USHORT TV_VideoMode;             // Only used by command table
  USHORT VRAM_Info;                // Only used by command table, latest version 1.3
  USHORT MemoryTrainingInfo;       // Used for VBIOS and Diag utility for memory training purpose since R600. The new table rev starts from 2.1
  USHORT IntegratedSystemInfo;     // Shared by various SW components
  USHORT ASIC_ProfilingInfo;       // New table name from R600, used to be called "ASIC_VDDCI_Info" for pre-R600
  USHORT VoltageObjectInfo;        // Shared by various SW components, latest version 1.1
  USHORT PowerSourceInfo;          // Shared by various SW components, latest version 1.1
}ATOM_MASTER_LIST_OF_DATA_TABLES;

typedef struct _ATOM_MASTER_DATA_TABLE
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ATOM_MASTER_LIST_OF_DATA_TABLES ListOfDataTables;
}ATOM_MASTER_DATA_TABLE;

/****************************************************************************/
// Structure used in MultimediaCapabilityInfoTable
/****************************************************************************/
typedef struct _ATOM_MULTIMEDIA_CAPABILITY_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ULONG ulSignature;        // HW info table signature string "$ATI"
  UCHAR ucI2C_Type;         // I2C type (normal GP_IO, ImpactTV GP_IO, Dedicated I2C pin, etc)
  UCHAR ucTV_OutInfo;       // Type of TV out supported (3:0) and video out crystal frequency (6:4) and TV data port (7)
  UCHAR ucVideoPortInfo;    // Provides the video port capabilities
  UCHAR ucHostPortInfo;     // Provides host port configuration information
}ATOM_MULTIMEDIA_CAPABILITY_INFO;

/****************************************************************************/
// Structure used in MultimediaConfigInfoTable
/****************************************************************************/
typedef struct _ATOM_MULTIMEDIA_CONFIG_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ULONG ulSignature;        // MM info table signature string "$MMT"
  UCHAR ucTunerInfo;        // Type of tuner installed on the adapter (4:0) and video input for tuner (7:5)
  UCHAR ucAudioChipInfo;    // List the audio chip type (3:0) product type (4) and OEM revision (7:5)
  UCHAR ucProductID;        // Defined as OEM ID or ATI board ID dependent on product type setting
  UCHAR ucMiscInfo1;        // Tuner voltage (1:0) HW teletext support (3:2) FM audio decoder (5:4) reserved (6) audio scrambling (7)
  UCHAR ucMiscInfo2;        // I2S input config (0) I2S output config (1) I2S Audio Chip (4:2) SPDIF Output Config (5) reserved (7:6)
  UCHAR ucMiscInfo3;        // Video Decoder Type (3:0) Video In Standard/Crystal (7:4)
  UCHAR ucMiscInfo4;        // Video Decoder Host Config (2:0) reserved (7:3)
  UCHAR ucVideoInput0Info;  // Video Input 0 Type (1:0) F/B setting (2) physical connector ID (5:3) reserved (7:6)
  UCHAR ucVideoInput1Info;  // Video Input 1 Type (1:0) F/B setting (2) physical connector ID (5:3) reserved (7:6)
  UCHAR ucVideoInput2Info;  // Video Input 2 Type (1:0) F/B setting (2) physical connector ID (5:3) reserved (7:6)
  UCHAR ucVideoInput3Info;  // Video Input 3 Type (1:0) F/B setting (2) physical connector ID (5:3) reserved (7:6)
  UCHAR ucVideoInput4Info;  // Video Input 4 Type (1:0) F/B setting (2) physical connector ID (5:3) reserved (7:6)
}ATOM_MULTIMEDIA_CONFIG_INFO;

/****************************************************************************/
// Structures used in FirmwareInfoTable
/****************************************************************************/

// usBIOSCapability Definition:
// Bit 0 = 0: Bios image is not Posted, =1: Bios image is Posted;
// Bit 1 = 0: Dual CRTC is not supported, =1: Dual CRTC is supported;
// Bit 2 = 0: Extended Desktop is not supported, =1: Extended Desktop is supported;
// Others: Reserved
#define ATOM_BIOS_INFO_ATOM_FIRMWARE_POSTED         0x0001
#define ATOM_BIOS_INFO_DUAL_CRTC_SUPPORT            0x0002
#define ATOM_BIOS_INFO_EXTENDED_DESKTOP_SUPPORT     0x0004
#define ATOM_BIOS_INFO_MEMORY_CLOCK_SS_SUPPORT      0x0008     // (valid from v1.1 ~ v1.4): =1: memclk SS enable, =0: memclk SS disable.
#define ATOM_BIOS_INFO_ENGINE_CLOCK_SS_SUPPORT      0x0010     // (valid from v1.1 ~ v1.4): =1: engclk SS enable, =0: engclk SS disable.
#define ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU         0x0020
#define ATOM_BIOS_INFO_WMI_SUPPORT                  0x0040
#define ATOM_BIOS_INFO_PPMODE_ASSIGNGED_BY_SYSTEM   0x0080
@@ -1441,242 +1797,292 @@ typedef struct _ATOM_MULTIMEDIA_CONFIG_INFO {
#define ATOM_BIOS_INFO_HYPERMEMORY_SIZE_MASK        0x1E00
#define ATOM_BIOS_INFO_VPOST_WITHOUT_FIRST_MODE_SET 0x2000
#define ATOM_BIOS_INFO_BIOS_SCRATCH6_SCL2_REDEFINE  0x4000
#define ATOM_BIOS_INFO_MEMORY_CLOCK_EXT_SS_SUPPORT  0x0008     // (valid from v2.1): =1: memclk ss enable with external ss chip
#define ATOM_BIOS_INFO_ENGINE_CLOCK_EXT_SS_SUPPORT  0x0010     // (valid from v2.1): =1: engclk ss enable with external ss chip

1445#ifndef _H2INC 1803#ifndef _H2INC
1446 1804
1447/* Please don't add or expand this bitfield structure below, this one will retire soon.! */ 1805//Please don't add or expand this bitfield structure below, this one will retire soon.!
1448typedef struct _ATOM_FIRMWARE_CAPABILITY { 1806typedef struct _ATOM_FIRMWARE_CAPABILITY
1807{
1449#if ATOM_BIG_ENDIAN 1808#if ATOM_BIG_ENDIAN
1450 USHORT Reserved:3; 1809 USHORT Reserved:3;
1451 USHORT HyperMemory_Size:4; 1810 USHORT HyperMemory_Size:4;
1452 USHORT HyperMemory_Support:1; 1811 USHORT HyperMemory_Support:1;
1453 USHORT PPMode_Assigned:1; 1812 USHORT PPMode_Assigned:1;
1454 USHORT WMI_SUPPORT:1; 1813 USHORT WMI_SUPPORT:1;
1455 USHORT GPUControlsBL:1; 1814 USHORT GPUControlsBL:1;
1456 USHORT EngineClockSS_Support:1; 1815 USHORT EngineClockSS_Support:1;
1457 USHORT MemoryClockSS_Support:1; 1816 USHORT MemoryClockSS_Support:1;
1458 USHORT ExtendedDesktopSupport:1; 1817 USHORT ExtendedDesktopSupport:1;
1459 USHORT DualCRTC_Support:1; 1818 USHORT DualCRTC_Support:1;
1460 USHORT FirmwarePosted:1; 1819 USHORT FirmwarePosted:1;
1461#else 1820#else
1462 USHORT FirmwarePosted:1; 1821 USHORT FirmwarePosted:1;
1463 USHORT DualCRTC_Support:1; 1822 USHORT DualCRTC_Support:1;
1464 USHORT ExtendedDesktopSupport:1; 1823 USHORT ExtendedDesktopSupport:1;
1465 USHORT MemoryClockSS_Support:1; 1824 USHORT MemoryClockSS_Support:1;
1466 USHORT EngineClockSS_Support:1; 1825 USHORT EngineClockSS_Support:1;
1467 USHORT GPUControlsBL:1; 1826 USHORT GPUControlsBL:1;
1468 USHORT WMI_SUPPORT:1; 1827 USHORT WMI_SUPPORT:1;
1469 USHORT PPMode_Assigned:1; 1828 USHORT PPMode_Assigned:1;
1470 USHORT HyperMemory_Support:1; 1829 USHORT HyperMemory_Support:1;
1471 USHORT HyperMemory_Size:4; 1830 USHORT HyperMemory_Size:4;
1472 USHORT Reserved:3; 1831 USHORT Reserved:3;
1473#endif 1832#endif
1474} ATOM_FIRMWARE_CAPABILITY; 1833}ATOM_FIRMWARE_CAPABILITY;
1475 1834
1476typedef union _ATOM_FIRMWARE_CAPABILITY_ACCESS { 1835typedef union _ATOM_FIRMWARE_CAPABILITY_ACCESS
1477 ATOM_FIRMWARE_CAPABILITY sbfAccess; 1836{
1478 USHORT susAccess; 1837 ATOM_FIRMWARE_CAPABILITY sbfAccess;
1479} ATOM_FIRMWARE_CAPABILITY_ACCESS; 1838 USHORT susAccess;
1839}ATOM_FIRMWARE_CAPABILITY_ACCESS;
1480 1840
1481#else 1841#else
1482 1842
1483typedef union _ATOM_FIRMWARE_CAPABILITY_ACCESS { 1843typedef union _ATOM_FIRMWARE_CAPABILITY_ACCESS
1484 USHORT susAccess; 1844{
1485} ATOM_FIRMWARE_CAPABILITY_ACCESS; 1845 USHORT susAccess;
1846}ATOM_FIRMWARE_CAPABILITY_ACCESS;
1486 1847
1487#endif 1848#endif
1488 1849
typedef struct _ATOM_FIRMWARE_INFO
{
  ATOM_COMMON_TABLE_HEADER        sHeader;
  ULONG                           ulFirmwareRevision;
  ULONG                           ulDefaultEngineClock;        //In 10Khz unit
  ULONG                           ulDefaultMemoryClock;        //In 10Khz unit
  ULONG                           ulDriverTargetEngineClock;   //In 10Khz unit
  ULONG                           ulDriverTargetMemoryClock;   //In 10Khz unit
  ULONG                           ulMaxEngineClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxMemoryClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxPixelClockPLL_Output;   //In 10Khz unit
  ULONG                           ulASICMaxEngineClock;        //In 10Khz unit
  ULONG                           ulASICMaxMemoryClock;        //In 10Khz unit
  UCHAR                           ucASICMaxTemperature;
  UCHAR                           ucPadding[3];                //Don't use them
  ULONG                           aulReservedForBIOS[3];       //Don't use them
  USHORT                          usMinEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Output;  //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Output;  //In 10Khz unit
  USHORT                          usMaxPixelClock;             //In 10Khz unit, Max. Pclk
  USHORT                          usMinPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMaxPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMinPixelClockPLL_Output;   //In 10Khz unit, the definitions above can't change!!!
  ATOM_FIRMWARE_CAPABILITY_ACCESS usFirmwareCapability;
  USHORT                          usReferenceClock;            //In 10Khz unit
  USHORT                          usPM_RTS_Location;           //RTS PM4 starting location in ROM in 1Kb unit
  UCHAR                           ucPM_RTS_StreamSize;         //RTS PM4 packets in Kb unit
  UCHAR                           ucDesign_ID;                 //Indicate what is the board design
  UCHAR                           ucMemoryModule_ID;           //Indicate what is the board design
} ATOM_FIRMWARE_INFO;
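/* Illustrative only, not part of the VBIOS tables: once an ATOM_FIRMWARE_INFO
   table has been located in the BIOS image (however the driver does that), the
   _ACCESS union above lets usFirmwareCapability be read either as a raw USHORT
   or, when the bitfield form is compiled in, as individual capability bits.
   The helper name below is this document's addition. */
static int example_firmware_posted(const ATOM_FIRMWARE_INFO *fw_info)
{
#ifndef _H2INC
    /* Bitfield view: check whether the VBIOS reports the adapter as posted. */
    return fw_info->usFirmwareCapability.sbfAccess.FirmwarePosted;
#else
    /* Raw view: FirmwarePosted occupies the least-significant bit in both
       byte orders declared above. */
    return (fw_info->usFirmwareCapability.susAccess & 0x1) != 0;
#endif
}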

typedef struct _ATOM_FIRMWARE_INFO_V1_2
{
  ATOM_COMMON_TABLE_HEADER        sHeader;
  ULONG                           ulFirmwareRevision;
  ULONG                           ulDefaultEngineClock;        //In 10Khz unit
  ULONG                           ulDefaultMemoryClock;        //In 10Khz unit
  ULONG                           ulDriverTargetEngineClock;   //In 10Khz unit
  ULONG                           ulDriverTargetMemoryClock;   //In 10Khz unit
  ULONG                           ulMaxEngineClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxMemoryClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxPixelClockPLL_Output;   //In 10Khz unit
  ULONG                           ulASICMaxEngineClock;        //In 10Khz unit
  ULONG                           ulASICMaxMemoryClock;        //In 10Khz unit
  UCHAR                           ucASICMaxTemperature;
  UCHAR                           ucMinAllowedBL_Level;
  UCHAR                           ucPadding[2];                //Don't use them
  ULONG                           aulReservedForBIOS[2];       //Don't use them
  ULONG                           ulMinPixelClockPLL_Output;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Output;  //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Output;  //In 10Khz unit
  USHORT                          usMaxPixelClock;             //In 10Khz unit, Max. Pclk
  USHORT                          usMinPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMaxPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMinPixelClockPLL_Output;   //In 10Khz unit - lower 16bit of ulMinPixelClockPLL_Output
  ATOM_FIRMWARE_CAPABILITY_ACCESS usFirmwareCapability;
  USHORT                          usReferenceClock;            //In 10Khz unit
  USHORT                          usPM_RTS_Location;           //RTS PM4 starting location in ROM in 1Kb unit
  UCHAR                           ucPM_RTS_StreamSize;         //RTS PM4 packets in Kb unit
  UCHAR                           ucDesign_ID;                 //Indicate what is the board design
  UCHAR                           ucMemoryModule_ID;           //Indicate what is the board design
} ATOM_FIRMWARE_INFO_V1_2;

typedef struct _ATOM_FIRMWARE_INFO_V1_3
{
  ATOM_COMMON_TABLE_HEADER        sHeader;
  ULONG                           ulFirmwareRevision;
  ULONG                           ulDefaultEngineClock;        //In 10Khz unit
  ULONG                           ulDefaultMemoryClock;        //In 10Khz unit
  ULONG                           ulDriverTargetEngineClock;   //In 10Khz unit
  ULONG                           ulDriverTargetMemoryClock;   //In 10Khz unit
  ULONG                           ulMaxEngineClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxMemoryClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxPixelClockPLL_Output;   //In 10Khz unit
  ULONG                           ulASICMaxEngineClock;        //In 10Khz unit
  ULONG                           ulASICMaxMemoryClock;        //In 10Khz unit
  UCHAR                           ucASICMaxTemperature;
  UCHAR                           ucMinAllowedBL_Level;
  UCHAR                           ucPadding[2];                //Don't use them
  ULONG                           aulReservedForBIOS;          //Don't use them
  ULONG                           ul3DAccelerationEngineClock; //In 10Khz unit
  ULONG                           ulMinPixelClockPLL_Output;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Output;  //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Output;  //In 10Khz unit
  USHORT                          usMaxPixelClock;             //In 10Khz unit, Max. Pclk
  USHORT                          usMinPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMaxPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMinPixelClockPLL_Output;   //In 10Khz unit - lower 16bit of ulMinPixelClockPLL_Output
  ATOM_FIRMWARE_CAPABILITY_ACCESS usFirmwareCapability;
  USHORT                          usReferenceClock;            //In 10Khz unit
  USHORT                          usPM_RTS_Location;           //RTS PM4 starting location in ROM in 1Kb unit
  UCHAR                           ucPM_RTS_StreamSize;         //RTS PM4 packets in Kb unit
  UCHAR                           ucDesign_ID;                 //Indicate what is the board design
  UCHAR                           ucMemoryModule_ID;           //Indicate what is the board design
} ATOM_FIRMWARE_INFO_V1_3;

typedef struct _ATOM_FIRMWARE_INFO_V1_4
{
  ATOM_COMMON_TABLE_HEADER        sHeader;
  ULONG                           ulFirmwareRevision;
  ULONG                           ulDefaultEngineClock;        //In 10Khz unit
  ULONG                           ulDefaultMemoryClock;        //In 10Khz unit
  ULONG                           ulDriverTargetEngineClock;   //In 10Khz unit
  ULONG                           ulDriverTargetMemoryClock;   //In 10Khz unit
  ULONG                           ulMaxEngineClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxMemoryClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxPixelClockPLL_Output;   //In 10Khz unit
  ULONG                           ulASICMaxEngineClock;        //In 10Khz unit
  ULONG                           ulASICMaxMemoryClock;        //In 10Khz unit
  UCHAR                           ucASICMaxTemperature;
  UCHAR                           ucMinAllowedBL_Level;
  USHORT                          usBootUpVDDCVoltage;         //In MV unit
  USHORT                          usLcdMinPixelClockPLL_Output; //In MHz unit
  USHORT                          usLcdMaxPixelClockPLL_Output; //In MHz unit
  ULONG                           ul3DAccelerationEngineClock; //In 10Khz unit
  ULONG                           ulMinPixelClockPLL_Output;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Output;  //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Output;  //In 10Khz unit
  USHORT                          usMaxPixelClock;             //In 10Khz unit, Max. Pclk
  USHORT                          usMinPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMaxPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMinPixelClockPLL_Output;   //In 10Khz unit - lower 16bit of ulMinPixelClockPLL_Output
  ATOM_FIRMWARE_CAPABILITY_ACCESS usFirmwareCapability;
  USHORT                          usReferenceClock;            //In 10Khz unit
  USHORT                          usPM_RTS_Location;           //RTS PM4 starting location in ROM in 1Kb unit
  UCHAR                           ucPM_RTS_StreamSize;         //RTS PM4 packets in Kb unit
  UCHAR                           ucDesign_ID;                 //Indicate what is the board design
  UCHAR                           ucMemoryModule_ID;           //Indicate what is the board design
} ATOM_FIRMWARE_INFO_V1_4;

// the structure below is to be used from Cypress
typedef struct _ATOM_FIRMWARE_INFO_V2_1
{
  ATOM_COMMON_TABLE_HEADER        sHeader;
  ULONG                           ulFirmwareRevision;
  ULONG                           ulDefaultEngineClock;        //In 10Khz unit
  ULONG                           ulDefaultMemoryClock;        //In 10Khz unit
  ULONG                           ulReserved1;
  ULONG                           ulReserved2;
  ULONG                           ulMaxEngineClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxMemoryClockPLL_Output;  //In 10Khz unit
  ULONG                           ulMaxPixelClockPLL_Output;   //In 10Khz unit
  ULONG                           ulBinaryAlteredInfo;         //Was ulASICMaxEngineClock
  ULONG                           ulDefaultDispEngineClkFreq;  //In 10Khz unit
  UCHAR                           ucReserved1;                 //Was ucASICMaxTemperature;
  UCHAR                           ucMinAllowedBL_Level;
  USHORT                          usBootUpVDDCVoltage;         //In MV unit
  USHORT                          usLcdMinPixelClockPLL_Output; //In MHz unit
  USHORT                          usLcdMaxPixelClockPLL_Output; //In MHz unit
  ULONG                           ulReserved4;                 //Was ulAsicMaximumVoltage
  ULONG                           ulMinPixelClockPLL_Output;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxEngineClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinEngineClockPLL_Output;  //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMaxMemoryClockPLL_Input;   //In 10Khz unit
  USHORT                          usMinMemoryClockPLL_Output;  //In 10Khz unit
  USHORT                          usMaxPixelClock;             //In 10Khz unit, Max. Pclk
  USHORT                          usMinPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMaxPixelClockPLL_Input;    //In 10Khz unit
  USHORT                          usMinPixelClockPLL_Output;   //In 10Khz unit - lower 16bit of ulMinPixelClockPLL_Output
  ATOM_FIRMWARE_CAPABILITY_ACCESS usFirmwareCapability;
  USHORT                          usCoreReferenceClock;        //In 10Khz unit
  USHORT                          usMemoryReferenceClock;      //In 10Khz unit
  USHORT                          usUniphyDPModeExtClkFreq;    //In 10Khz unit; if it is 0, in DP mode Uniphy input clock comes from the internal PPLL, otherwise from the external spread clock
  UCHAR                           ucMemoryModule_ID;           //Indicate what is the board design
  UCHAR                           ucReserved4[3];
} ATOM_FIRMWARE_INFO_V2_1;


#define ATOM_FIRMWARE_INFO_LAST  ATOM_FIRMWARE_INFO_V2_1

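/* Illustrative only: the clock fields above are stored in 10 kHz units, so a
   value of 72000 means 720 MHz.  A driver-side conversion is just a multiply;
   the helper below is this document's sketch, not a VBIOS interface. */
static ULONG example_default_engine_clock_in_khz(const ATOM_FIRMWARE_INFO_V2_1 *fw_info)
{
    /* ulDefaultEngineClock is in 10 kHz units; multiply by 10 to get kHz. */
    return fw_info->ulDefaultEngineClock * 10;
}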
/****************************************************************************/
// Structures used in IntegratedSystemInfoTable
/****************************************************************************/
#define IGP_CAP_FLAG_DYNAMIC_CLOCK_EN      0x2
#define IGP_CAP_FLAG_AC_CARD               0x4
#define IGP_CAP_FLAG_SDVO_CARD             0x8
#define IGP_CAP_FLAG_POSTDIV_BY_2_MODE     0x10

typedef struct _ATOM_INTEGRATED_SYSTEM_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ULONG  ulBootUpEngineClock;          //in 10kHz unit
  ULONG  ulBootUpMemoryClock;          //in 10kHz unit
  ULONG  ulMaxSystemMemoryClock;       //in 10kHz unit
  ULONG  ulMinSystemMemoryClock;       //in 10kHz unit
  UCHAR  ucNumberOfCyclesInPeriodHi;
  UCHAR  ucLCDTimingSel;               //=0: not valid; !=0: select this timing descriptor from LCD EDID.
  USHORT usReserved1;
  USHORT usInterNBVoltageLow;          //An intermediate PWM value to set the voltage
  USHORT usInterNBVoltageHigh;         //Another intermediate PWM value to set the voltage
  ULONG  ulReserved[2];

  USHORT usFSBClock;                   //In MHz unit
  USHORT usCapabilityFlag;             //Bit0=1 indicates the fake HDMI support, Bit1=0/1 for Dynamic clocking dis/enable
                                       //Bit[3:2]== 0:No PCIE card, 1:AC card, 2:SDVO card
                                       //Bit[4]==1: P/2 mode, ==0: P/1 mode
  USHORT usPCIENBCfgReg7;              //bit[7:0]=MUX_Sel, bit[9:8]=MUX_SEL_LEVEL2, bit[10]=Lane_Reversal
  USHORT usK8MemoryClock;              //in MHz unit
  USHORT usK8SyncStartDelay;           //in 0.01 us unit
  USHORT usK8DataReturnTime;           //in 0.01 us unit
  UCHAR  ucMaxNBVoltage;
  UCHAR  ucMinNBVoltage;
  UCHAR  ucMemoryType;                 //[7:4]=1:DDR1;=2:DDR2;=3:DDR3.[3:0] is reserved
  UCHAR  ucNumberOfCyclesInPeriod;     //CG.FVTHROT_PWM_CTRL_REG0.NumberOfCyclesInPeriod
  UCHAR  ucStartingPWM_HighTime;       //CG.FVTHROT_PWM_CTRL_REG0.StartingPWM_HighTime
  UCHAR  ucHTLinkWidth;                //16 bit vs. 8 bit
  UCHAR  ucMaxNBVoltageHigh;
  UCHAR  ucMinNBVoltageHigh;
} ATOM_INTEGRATED_SYSTEM_INFO;

/* Explanation on entries in ATOM_INTEGRATED_SYSTEM_INFO
ulBootUpMemoryClock:    For Intel IGP, it's the UMA system memory clock.
                        For AMD IGP, it's 0 if no SidePort memory is installed, or the boot-up SidePort memory clock.
ulMaxSystemMemoryClock: For Intel IGP, it's the max freq from memory SPD if memory runs in ASYNC mode, otherwise (SYNC mode) it's 0.
                        For AMD IGP, for now this can be 0.
ulMinSystemMemoryClock: For Intel IGP, it's 133MHz if memory runs in ASYNC mode, otherwise (SYNC mode) it's 0.
                        For AMD IGP, for now this can be 0.

usFSBClock:             For Intel IGP, it's the FSB freq.
                        For AMD IGP, it's the HT link speed.

usK8MemoryClock:        For AMD IGP only. For RevF CPU, set it to 200.
@@ -1687,98 +2093,113 @@ VC:Voltage Control
ucMaxNBVoltage:         Voltage regulator dependent PWM value. Low 8 bits of the value for the max voltage. Set this one to 0xFF if VC without PWM. Set this to 0x0 if no VC at all.
ucMinNBVoltage:         Voltage regulator dependent PWM value. Low 8 bits of the value for the min voltage. Set this one to 0x00 if VC without PWM or no VC at all.

ucNumberOfCyclesInPeriod:   Indicates how many cycles when PWM duty is 100%. Low 8 bits of the value.
ucNumberOfCyclesInPeriodHi: Indicates how many cycles when PWM duty is 100%. High 8 bits of the value. If the PWM has an inverter, set bit [7]==1, otherwise set it to 0.

ucMaxNBVoltageHigh:     Voltage regulator dependent PWM value. High 8 bits of the value for the max voltage. Set this one to 0xFF if VC without PWM. Set this to 0x0 if no VC at all.
ucMinNBVoltageHigh:     Voltage regulator dependent PWM value. High 8 bits of the value for the min voltage. Set this one to 0x00 if VC without PWM or no VC at all.


usInterNBVoltageLow:    Voltage regulator dependent PWM value. The value makes the voltage >=Min NB voltage but <=InterNBVoltageHigh. Set this to 0x0000 if VC without PWM or no VC at all.
usInterNBVoltageHigh:   Voltage regulator dependent PWM value. The value makes the voltage >=InterNBVoltageLow but <=Max NB voltage. Set this to 0x0000 if VC without PWM or no VC at all.
*/


/*
The following IGP table is introduced from RS780, which is supposed to be put by SBIOS in FB before the IGP VBIOS starts VPOST;
Then VBIOS will copy the whole structure to its image so all GPU SW components can access this data structure to get whatever they need.
Enough reservation should allow us to never change table revisions. Whenever needed, a GPU SW component can use the reserved portion for new data entries.

SW components can access the IGP system info structure in the same way as before.
*/

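/* Illustrative only: decoding two of the packed fields in
   ATOM_INTEGRATED_SYSTEM_INFO according to the comments above.  The helper
   names are this document's additions, not part of the VBIOS interface. */
static UCHAR example_igp_memory_type(const ATOM_INTEGRATED_SYSTEM_INFO *info)
{
    /* ucMemoryType[7:4]: 1=DDR1, 2=DDR2, 3=DDR3; [3:0] is reserved. */
    return (info->ucMemoryType >> 4) & 0xF;
}

static UCHAR example_igp_pcie_card_type(const ATOM_INTEGRATED_SYSTEM_INFO *info)
{
    /* usCapabilityFlag Bit[3:2]: 0=no PCIE card, 1=AC card, 2=SDVO card. */
    return (UCHAR)((info->usCapabilityFlag >> 2) & 0x3);
}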

typedef struct _ATOM_INTEGRATED_SYSTEM_INFO_V2
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ULONG  ulBootUpEngineClock;          //in 10kHz unit
  ULONG  ulReserved1[2];               //must be 0x0 for the reserved
  ULONG  ulBootUpUMAClock;             //in 10kHz unit
  ULONG  ulBootUpSidePortClock;        //in 10kHz unit
  ULONG  ulMinSidePortClock;           //in 10kHz unit
  ULONG  ulReserved2[6];               //must be 0x0 for the reserved
  ULONG  ulSystemConfig;               //see explanation below
  ULONG  ulBootUpReqDisplayVector;
  ULONG  ulOtherDisplayMisc;
  ULONG  ulDDISlot1Config;
  ULONG  ulDDISlot2Config;
  UCHAR  ucMemoryType;                 //[3:0]=1:DDR1;=2:DDR2;=3:DDR3.[7:4] is reserved
  UCHAR  ucUMAChannelNumber;
  UCHAR  ucDockingPinBit;
  UCHAR  ucDockingPinPolarity;
  ULONG  ulDockingPinCFGInfo;
  ULONG  ulCPUCapInfo;
  USHORT usNumberOfCyclesInPeriod;
  USHORT usMaxNBVoltage;
  USHORT usMinNBVoltage;
  USHORT usBootUpNBVoltage;
  ULONG  ulHTLinkFreq;                 //in 10Khz
  USHORT usMinHTLinkWidth;
  USHORT usMaxHTLinkWidth;
  USHORT usUMASyncStartDelay;
  USHORT usUMADataReturnTime;
  USHORT usLinkStatusZeroTime;
  USHORT usDACEfuse;                   //for storing the bandgap value (for RS880 only)
  ULONG  ulHighVoltageHTLinkFreq;      //in 10Khz
  ULONG  ulLowVoltageHTLinkFreq;       //in 10Khz
  USHORT usMaxUpStreamHTLinkWidth;
  USHORT usMaxDownStreamHTLinkWidth;
  USHORT usMinUpStreamHTLinkWidth;
  USHORT usMinDownStreamHTLinkWidth;
  USHORT usFirmwareVersion;            //0 means FW is not supported. Otherwise it's the FW version loaded by SBIOS and the driver should enable FW.
  USHORT usFullT0Time;                 //Input to calculate minimum HT link change time required by NB P-State. Unit is 0.01us.
  ULONG  ulReserved3[96];              //must be 0x0
} ATOM_INTEGRATED_SYSTEM_INFO_V2;

/*
ulBootUpEngineClock:   Boot-up Engine Clock in 10Khz;
ulBootUpUMAClock:      Boot-up UMA Clock in 10Khz; it must be 0x0 when UMA is not present
ulBootUpSidePortClock: Boot-up SidePort Clock in 10Khz; it must be 0x0 when SidePort Memory is not present; this can be equal to or less than the maximum supported SidePort memory clock

ulSystemConfig:
Bit[0]=1: PowerExpress mode, =0: Non-PowerExpress mode;
Bit[1]=1: system boots up in an AMD overdriven state or a user customized mode. In this case, the driver will just stick to this boot-up mode. No other PowerPlay state.
      =0: system boots up in driver control state. Power state depends on the PowerPlay table.
Bit[2]=1: PWM method is used on NB voltage control. =0: GPIO method is used.
Bit[3]=1: Only one power state (Performance) will be supported.
      =0: Multiple power states supported from the PowerPlay table.
Bit[4]=1: CLMC is supported and enabled on the current system.
      =0: CLMC is not supported or enabled on the current system. SBIOS needs to support HT link/freq change through the ATIF interface.
Bit[5]=1: Enable CDLW for all driver control power states. Max HT width is from SBIOS, while Min HT width is determined by display requirements.
      =0: CDLW is disabled. If CLMC is enabled, Min HT width will be set equal to Max HT width. If CLMC is disabled, Max HT width will be applied.
Bit[6]=1: High voltage requested for all power states. In this case, voltage will be forced at 1.1v and PowerPlay table voltage drop/throttling requests will be ignored.
      =0: Voltage settings are determined by the PowerPlay table.
Bit[7]=1: Enable CLMC as hybrid mode. CDLD and CILR will be disabled in this case and legacy C1E is used. This is a workaround for a CPU (Griffin) performance issue.
      =0: Enable CLMC as regular mode; CDLD and CILR will be enabled.
Bit[8]=1: CDLF is supported and enabled on the current system.
      =0: CDLF is not supported or enabled on the current system.
Bit[9]=1: DLL Shut Down feature is enabled on the current system.
      =0: DLL Shut Down feature is not enabled or supported on the current system.

ulBootUpReqDisplayVector: This dword is a bit vector that indicates which display devices are requested during boot-up. Refer to ATOM_DEVICE_xxx_SUPPORT for the bit vector definitions.

ulOtherDisplayMisc:   [15:8]- Bootup LCD expansion selection; 0-center, 1-full panel size expansion;
                      [7:0] - Bootup TV standard selection; this is a bit vector to indicate what TV standards are supported by the system. Refer to the ucTVSupportedStd definition;

ulDDISlot1Config:     Describes the PCIE lane configuration on this DDI PCIE slot (ADD2 card) or connector (Mobile design).
                      [3:0] - Bit vector to indicate PCIE lane config of the DDI slot/connector on chassis (bit 0=1 lane 3:0; bit 1=1 lane 7:4; bit 2=1 lane 11:8; bit 3=1 lane 15:12)
                      [7:4] - Bit vector to indicate PCIE lane config of the same DDI slot/connector on docking station (bit 4=1 lane 3:0; bit 5=1 lane 7:4; bit 6=1 lane 11:8; bit 7=1 lane 15:12)
                      When a DDI connector is not "paired" (meaning the two connections are mutually exclusive - on chassis or docking, only one of them can be connected at one time)
                      in both chassis and docking, SBIOS has to duplicate the same PCIE lane info from chassis to docking or vice versa. For example:
                      if one DDI connector is only populated in docking with PCIE lanes 8-11 and there is no paired connection on chassis, SBIOS has to copy bit 6 to bit 2.

                      [15:8] - Lane configuration attribute;
                      [23:16]- Connector type, possible values:
                               CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D
                               CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D
                               CONNECTOR_OBJECT_ID_HDMI_TYPE_A
                               CONNECTOR_OBJECT_ID_DISPLAYPORT
                               CONNECTOR_OBJECT_ID_eDP
                      [31:24]- Reserved

ulDDISlot2Config:     Same as Slot1.
@@ -1787,29 +2208,31 @@ For IGP, Hypermemory is the only memory type showed in CCC.

ucUMAChannelNumber:   how many channels for the UMA;

ulDockingPinCFGInfo:  [15:0]-Bus/Device/Function # in CFG to read this docking pin; [31:16]-reg offset in CFG to read this pin
ucDockingPinBit:      which bit in this register to read the pin status;
ucDockingPinPolarity: polarity of the pin when docked;

ulCPUCapInfo:         [7:0]=1:Griffin; [7:0]=2:Greyhound; [7:0]=3:K8; other bits reserved for now and must be 0x0

usNumberOfCyclesInPeriod: Indicates how many cycles when PWM duty is 100%.

usMaxNBVoltage:       Max. voltage control value in either PWM or GPIO mode.
usMinNBVoltage:       Min. voltage control value in either PWM or GPIO mode.
                      GPIO mode: both usMaxNBVoltage & usMinNBVoltage have a valid value, ulSystemConfig.SYSTEM_CONFIG_USE_PWM_ON_VOLTAGE=0
                      PWM mode:  both usMaxNBVoltage & usMinNBVoltage have a valid value, ulSystemConfig.SYSTEM_CONFIG_USE_PWM_ON_VOLTAGE=1
                      GPU SW doesn't-control mode: usMaxNBVoltage & usMinNBVoltage=0 and ulSystemConfig.SYSTEM_CONFIG_USE_PWM_ON_VOLTAGE is ignored

usBootUpNBVoltage:    Boot-up voltage regulator dependent PWM value.

ulHTLinkFreq:         Bootup HT link frequency in 10Khz.
usMinHTLinkWidth:     Bootup minimum HT link width. If CDLW is disabled, this is equal to usMaxHTLinkWidth.
                      If CDLW is enabled, both upstream and downstream width should be the same during bootup.
usMaxHTLinkWidth:     Bootup maximum HT link width. If CDLW is disabled, this is equal to usMinHTLinkWidth.
                      If CDLW is enabled, both upstream and downstream width should be the same during bootup.

usUMASyncStartDelay:  Memory access latency, required for watermark calculation
usUMADataReturnTime:  Memory access latency, required for watermark calculation
usLinkStatusZeroTime: Memory access latency required for watermark calculation; set this to 0x0 for K8 CPUs, set a proper value in units of 0.01 us
for Griffin or Greyhound. SBIOS needs to convert to actual time by:
                      if T0Ttime [5:4]=00b, then usLinkStatusZeroTime=T0Ttime [3:0]*0.1us (0.0 to 1.5us)
                      if T0Ttime [5:4]=01b, then usLinkStatusZeroTime=T0Ttime [3:0]*0.5us (0.0 to 7.5us)
@@ -1817,7 +2240,7 @@ for Griffin or Greyhound. SBIOS needs to convert to actual time by:
                      if T0Ttime [5:4]=11b, and T0Ttime [3:0]=0x0 to 0xa, then usLinkStatusZeroTime=T0Ttime [3:0]*20us (0.0 to 200us)

ulHighVoltageHTLinkFreq:    HT link frequency for power state with low voltage. If boot up runs in HT1, this must be 0.
                            This must be less than or equal to ulHTLinkFreq (bootup frequency).
ulLowVoltageHTLinkFreq:     HT link frequency for power state with low voltage or voltage scaling 1.0v~1.1v. If boot up runs in HT1, this must be 0.
                            This must be less than or equal to ulHighVoltageHTLinkFreq.

@@ -1827,14 +2250,17 @@ usMinUpStreamHTLinkWidth: Asymmetric link width support in the future, to rep
usMinDownStreamHTLinkWidth: same as above.
*/


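/* A small illustration (not part of the table definitions) of the T0Ttime ->
   usLinkStatusZeroTime conversion described above.  The return value is in the
   same 0.01 us units as usLinkStatusZeroTime.  Only the encodings quoted in
   the visible diff context are handled; the T0Ttime[5:4]=10b case is elided by
   the hunk above, so it is left out here rather than guessed at. */
static USHORT example_t0t_to_link_status_zero_time(UCHAR t0t_time)
{
    UCHAR scale = (t0t_time >> 4) & 0x3;   /* T0Ttime[5:4] */
    UCHAR count = t0t_time & 0xF;          /* T0Ttime[3:0] */

    switch (scale) {
    case 0x0:   /* count * 0.1 us = count * 10 in 0.01 us units */
        return count * 10;
    case 0x1:   /* count * 0.5 us = count * 50 in 0.01 us units */
        return count * 50;
    case 0x3:   /* count * 20 us = count * 2000 in 0.01 us units, count limited to 0xa */
        return (count <= 0xa ? count : 0xa) * 2000;
    default:    /* encoding not covered by the visible context above */
        return 0;
    }
}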
#define SYSTEM_CONFIG_POWEREXPRESS_ENABLE           0x00000001
#define SYSTEM_CONFIG_RUN_AT_OVERDRIVE_ENGINE       0x00000002
#define SYSTEM_CONFIG_USE_PWM_ON_VOLTAGE            0x00000004
#define SYSTEM_CONFIG_PERFORMANCE_POWERSTATE_ONLY   0x00000008
#define SYSTEM_CONFIG_CLMC_ENABLED                  0x00000010
#define SYSTEM_CONFIG_CDLW_ENABLED                  0x00000020
#define SYSTEM_CONFIG_HIGH_VOLTAGE_REQUESTED        0x00000040
#define SYSTEM_CONFIG_CLMC_HYBRID_MODE_ENABLED      0x00000080
#define SYSTEM_CONFIG_CDLF_ENABLED                  0x00000100
#define SYSTEM_CONFIG_DLL_SHUTDOWN_ENABLED          0x00000200

#define IGP_DDI_SLOT_LANE_CONFIG_MASK               0x000000FF

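/* Illustrative only: how a driver might interpret ulSystemConfig with the
   SYSTEM_CONFIG_* masks above.  The helper name and the int return are this
   document's additions, not part of the VBIOS interface. */
static int example_uses_pwm_voltage_control(const ATOM_INTEGRATED_SYSTEM_INFO_V2 *info)
{
    ULONG cfg = info->ulSystemConfig;

    /* Bit[2]: 1 = PWM method on NB voltage control, 0 = GPIO method. */
    return (cfg & SYSTEM_CONFIG_USE_PWM_ON_VOLTAGE) != 0;
}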
@@ -1851,6 +2277,41 @@ usMinDownStreamHTLinkWidth: same as above.
1851 2277
1852#define IGP_DDI_SLOT_CONNECTOR_TYPE_MASK 0x00FF0000 2278#define IGP_DDI_SLOT_CONNECTOR_TYPE_MASK 0x00FF0000
1853 2279
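/* Illustrative only: pulling the connector object ID out of a DDI slot config
   dword, per the ulDDISlot1Config description above (connector type lives in
   bits [23:16]).  The local shift constant and helper name are this document's
   additions. */
static UCHAR example_ddi_slot_connector_type(ULONG ulDDISlotConfig)
{
    const ULONG connector_type_shift = 16;  /* bits [23:16] of the config dword */

    return (UCHAR)((ulDDISlotConfig & IGP_DDI_SLOT_CONNECTOR_TYPE_MASK) >> connector_type_shift);
}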
2280// IntegratedSystemInfoTable new Rev is V5 after V2, because of the real rev of V2 is v1.4. This rev is used for RR
2281typedef struct _ATOM_INTEGRATED_SYSTEM_INFO_V5
2282{
2283 ATOM_COMMON_TABLE_HEADER sHeader;
2284 ULONG ulBootUpEngineClock; //in 10kHz unit
2285 ULONG ulDentistVCOFreq; //Dentist VCO clock in 10kHz unit, the source of GPU SCLK, LCLK, UCLK and VCLK.
2286 ULONG ulLClockFreq; //GPU Lclk freq in 10kHz unit, have relationship with NCLK in NorthBridge
2287 ULONG ulBootUpUMAClock; //in 10kHz unit
2288 ULONG ulReserved1[8]; //must be 0x0 for the reserved
2289 ULONG ulBootUpReqDisplayVector;
2290 ULONG ulOtherDisplayMisc;
2291 ULONG ulReserved2[4]; //must be 0x0 for the reserved
2292 ULONG ulSystemConfig; //TBD
2293 ULONG ulCPUCapInfo; //TBD
2294 USHORT usMaxNBVoltage; //high NB voltage, calculated using current VDDNB (D24F2xDC) and VDDNB offset fuse;
2295 USHORT usMinNBVoltage; //low NB voltage, calculated using current VDDNB (D24F2xDC) and VDDNB offset fuse;
2296 USHORT usBootUpNBVoltage; //boot up NB voltage
2297 UCHAR ucHtcTmpLmt; //bit [22:16] of D24F3x64 Hardware Thermal Control (HTC) Register, may not be needed, TBD
2298 UCHAR ucTjOffset; //bit [28:22] of D24F3xE4 Thermtrip Status Register,may not be needed, TBD
2299 ULONG ulReserved3[4]; //must be 0x0 for the reserved
2300 ULONG ulDDISlot1Config; //see above ulDDISlot1Config definition
2301 ULONG ulDDISlot2Config;
2302 ULONG ulDDISlot3Config;
2303 ULONG ulDDISlot4Config;
2304 ULONG ulReserved4[4]; //must be 0x0 for the reserved
2305 UCHAR ucMemoryType; //[3:0]=1:DDR1;=2:DDR2;=3:DDR3.[7:4] is reserved
2306 UCHAR ucUMAChannelNumber;
2307 USHORT usReserved;
2308 ULONG ulReserved5[4]; //must be 0x0 for the reserved
2309 ULONG ulCSR_M3_ARB_CNTL_DEFAULT[10];//arrays with values for CSR M3 arbiter for default
2310 ULONG ulCSR_M3_ARB_CNTL_UVD[10]; //arrays with values for CSR M3 arbiter for UVD playback
2311 ULONG ulCSR_M3_ARB_CNTL_FS3D[10];//arrays with values for CSR M3 arbiter for Full Screen 3D applications
2312 ULONG ulReserved6[61]; //must be 0x0
2313}ATOM_INTEGRATED_SYSTEM_INFO_V5;
2314
1854#define ATOM_CRT_INT_ENCODER1_INDEX 0x00000000 2315#define ATOM_CRT_INT_ENCODER1_INDEX 0x00000000
1855#define ATOM_LCD_INT_ENCODER1_INDEX 0x00000001 2316#define ATOM_LCD_INT_ENCODER1_INDEX 0x00000001
1856#define ATOM_TV_INT_ENCODER1_INDEX 0x00000002 2317#define ATOM_TV_INT_ENCODER1_INDEX 0x00000002
@@ -1866,8 +2327,8 @@ usMinDownStreamHTLinkWidth: same as above.
1866#define ATOM_DFP_INT_ENCODER3_INDEX 0x0000000C 2327#define ATOM_DFP_INT_ENCODER3_INDEX 0x0000000C
1867#define ATOM_DFP_INT_ENCODER4_INDEX 0x0000000D 2328#define ATOM_DFP_INT_ENCODER4_INDEX 0x0000000D
1868 2329
1869/* define ASIC internal encoder id ( bit vector ) */ 2330// define ASIC internal encoder id ( bit vector ), used for CRTC_SourceSelTable
1870#define ASIC_INT_DAC1_ENCODER_ID 0x00 2331#define ASIC_INT_DAC1_ENCODER_ID 0x00
1871#define ASIC_INT_TV_ENCODER_ID 0x02 2332#define ASIC_INT_TV_ENCODER_ID 0x02
1872#define ASIC_INT_DIG1_ENCODER_ID 0x03 2333#define ASIC_INT_DIG1_ENCODER_ID 0x03
1873#define ASIC_INT_DAC2_ENCODER_ID 0x04 2334#define ASIC_INT_DAC2_ENCODER_ID 0x04
@@ -1875,10 +2336,24 @@ usMinDownStreamHTLinkWidth: same as above.
1875#define ASIC_INT_DVO_ENCODER_ID 0x07 2336#define ASIC_INT_DVO_ENCODER_ID 0x07
1876#define ASIC_INT_DIG2_ENCODER_ID 0x09 2337#define ASIC_INT_DIG2_ENCODER_ID 0x09
1877#define ASIC_EXT_DIG_ENCODER_ID 0x05 2338#define ASIC_EXT_DIG_ENCODER_ID 0x05
2339#define ASIC_EXT_DIG2_ENCODER_ID 0x08
2340#define ASIC_INT_DIG3_ENCODER_ID 0x0a
2341#define ASIC_INT_DIG4_ENCODER_ID 0x0b
2342#define ASIC_INT_DIG5_ENCODER_ID 0x0c
2343#define ASIC_INT_DIG6_ENCODER_ID 0x0d
1878 2344
1879/* define Encoder attribute */ 2345//define Encoder attribute
1880#define ATOM_ANALOG_ENCODER 0 2346#define ATOM_ANALOG_ENCODER 0
1881#define ATOM_DIGITAL_ENCODER 1 2347#define ATOM_DIGITAL_ENCODER 1
2348#define ATOM_DP_ENCODER 2
2349
2350#define ATOM_ENCODER_ENUM_MASK 0x70
2351#define ATOM_ENCODER_ENUM_ID1 0x00
2352#define ATOM_ENCODER_ENUM_ID2 0x10
2353#define ATOM_ENCODER_ENUM_ID3 0x20
2354#define ATOM_ENCODER_ENUM_ID4 0x30
2355#define ATOM_ENCODER_ENUM_ID5 0x40
2356#define ATOM_ENCODER_ENUM_ID6 0x50
1882 2357
1883#define ATOM_DEVICE_CRT1_INDEX 0x00000000 2358#define ATOM_DEVICE_CRT1_INDEX 0x00000000
1884#define ATOM_DEVICE_LCD1_INDEX 0x00000001 2359#define ATOM_DEVICE_LCD1_INDEX 0x00000001
@@ -1886,45 +2361,40 @@ usMinDownStreamHTLinkWidth: same as above.
1886#define ATOM_DEVICE_DFP1_INDEX 0x00000003 2361#define ATOM_DEVICE_DFP1_INDEX 0x00000003
1887#define ATOM_DEVICE_CRT2_INDEX 0x00000004 2362#define ATOM_DEVICE_CRT2_INDEX 0x00000004
1888#define ATOM_DEVICE_LCD2_INDEX 0x00000005 2363#define ATOM_DEVICE_LCD2_INDEX 0x00000005
1889#define ATOM_DEVICE_TV2_INDEX 0x00000006 2364#define ATOM_DEVICE_DFP6_INDEX 0x00000006
1890#define ATOM_DEVICE_DFP2_INDEX 0x00000007 2365#define ATOM_DEVICE_DFP2_INDEX 0x00000007
1891#define ATOM_DEVICE_CV_INDEX 0x00000008 2366#define ATOM_DEVICE_CV_INDEX 0x00000008
1892#define ATOM_DEVICE_DFP3_INDEX 0x00000009 2367#define ATOM_DEVICE_DFP3_INDEX 0x00000009
1893#define ATOM_DEVICE_DFP4_INDEX 0x0000000A 2368#define ATOM_DEVICE_DFP4_INDEX 0x0000000A
1894#define ATOM_DEVICE_DFP5_INDEX 0x0000000B 2369#define ATOM_DEVICE_DFP5_INDEX 0x0000000B
2370
1895#define ATOM_DEVICE_RESERVEDC_INDEX 0x0000000C 2371#define ATOM_DEVICE_RESERVEDC_INDEX 0x0000000C
1896#define ATOM_DEVICE_RESERVEDD_INDEX 0x0000000D 2372#define ATOM_DEVICE_RESERVEDD_INDEX 0x0000000D
1897#define ATOM_DEVICE_RESERVEDE_INDEX 0x0000000E 2373#define ATOM_DEVICE_RESERVEDE_INDEX 0x0000000E
1898#define ATOM_DEVICE_RESERVEDF_INDEX 0x0000000F 2374#define ATOM_DEVICE_RESERVEDF_INDEX 0x0000000F
1899#define ATOM_MAX_SUPPORTED_DEVICE_INFO (ATOM_DEVICE_DFP3_INDEX+1) 2375#define ATOM_MAX_SUPPORTED_DEVICE_INFO (ATOM_DEVICE_DFP3_INDEX+1)
1900#define ATOM_MAX_SUPPORTED_DEVICE_INFO_2 ATOM_MAX_SUPPORTED_DEVICE_INFO 2376#define ATOM_MAX_SUPPORTED_DEVICE_INFO_2 ATOM_MAX_SUPPORTED_DEVICE_INFO
1901#define ATOM_MAX_SUPPORTED_DEVICE_INFO_3 (ATOM_DEVICE_DFP5_INDEX + 1) 2377#define ATOM_MAX_SUPPORTED_DEVICE_INFO_3 (ATOM_DEVICE_DFP5_INDEX + 1 )
1902 2378
1903#define ATOM_MAX_SUPPORTED_DEVICE (ATOM_DEVICE_RESERVEDF_INDEX+1) 2379#define ATOM_MAX_SUPPORTED_DEVICE (ATOM_DEVICE_RESERVEDF_INDEX+1)
1904 2380
1905#define ATOM_DEVICE_CRT1_SUPPORT (0x1L << ATOM_DEVICE_CRT1_INDEX) 2381#define ATOM_DEVICE_CRT1_SUPPORT (0x1L << ATOM_DEVICE_CRT1_INDEX )
1906#define ATOM_DEVICE_LCD1_SUPPORT (0x1L << ATOM_DEVICE_LCD1_INDEX) 2382#define ATOM_DEVICE_LCD1_SUPPORT (0x1L << ATOM_DEVICE_LCD1_INDEX )
1907#define ATOM_DEVICE_TV1_SUPPORT (0x1L << ATOM_DEVICE_TV1_INDEX) 2383#define ATOM_DEVICE_TV1_SUPPORT (0x1L << ATOM_DEVICE_TV1_INDEX )
1908#define ATOM_DEVICE_DFP1_SUPPORT (0x1L << ATOM_DEVICE_DFP1_INDEX) 2384#define ATOM_DEVICE_DFP1_SUPPORT (0x1L << ATOM_DEVICE_DFP1_INDEX )
1909#define ATOM_DEVICE_CRT2_SUPPORT (0x1L << ATOM_DEVICE_CRT2_INDEX) 2385#define ATOM_DEVICE_CRT2_SUPPORT (0x1L << ATOM_DEVICE_CRT2_INDEX )
1910#define ATOM_DEVICE_LCD2_SUPPORT (0x1L << ATOM_DEVICE_LCD2_INDEX) 2386#define ATOM_DEVICE_LCD2_SUPPORT (0x1L << ATOM_DEVICE_LCD2_INDEX )
1911#define ATOM_DEVICE_TV2_SUPPORT (0x1L << ATOM_DEVICE_TV2_INDEX) 2387#define ATOM_DEVICE_DFP6_SUPPORT (0x1L << ATOM_DEVICE_DFP6_INDEX )
1912#define ATOM_DEVICE_DFP2_SUPPORT (0x1L << ATOM_DEVICE_DFP2_INDEX) 2388#define ATOM_DEVICE_DFP2_SUPPORT (0x1L << ATOM_DEVICE_DFP2_INDEX )
1913#define ATOM_DEVICE_CV_SUPPORT (0x1L << ATOM_DEVICE_CV_INDEX) 2389#define ATOM_DEVICE_CV_SUPPORT (0x1L << ATOM_DEVICE_CV_INDEX )
1914#define ATOM_DEVICE_DFP3_SUPPORT (0x1L << ATOM_DEVICE_DFP3_INDEX) 2390#define ATOM_DEVICE_DFP3_SUPPORT (0x1L << ATOM_DEVICE_DFP3_INDEX )
1915#define ATOM_DEVICE_DFP4_SUPPORT (0x1L << ATOM_DEVICE_DFP4_INDEX ) 2391#define ATOM_DEVICE_DFP4_SUPPORT (0x1L << ATOM_DEVICE_DFP4_INDEX )
1916#define ATOM_DEVICE_DFP5_SUPPORT (0x1L << ATOM_DEVICE_DFP5_INDEX) 2392#define ATOM_DEVICE_DFP5_SUPPORT (0x1L << ATOM_DEVICE_DFP5_INDEX )
1917 2393
#define ATOM_DEVICE_CRT_SUPPORT         (ATOM_DEVICE_CRT1_SUPPORT | ATOM_DEVICE_CRT2_SUPPORT)
#define ATOM_DEVICE_DFP_SUPPORT         (ATOM_DEVICE_DFP1_SUPPORT | ATOM_DEVICE_DFP2_SUPPORT | ATOM_DEVICE_DFP3_SUPPORT | ATOM_DEVICE_DFP4_SUPPORT | ATOM_DEVICE_DFP5_SUPPORT | ATOM_DEVICE_DFP6_SUPPORT)
#define ATOM_DEVICE_TV_SUPPORT          (ATOM_DEVICE_TV1_SUPPORT)
#define ATOM_DEVICE_LCD_SUPPORT         (ATOM_DEVICE_LCD1_SUPPORT | ATOM_DEVICE_LCD2_SUPPORT)
1928 2398
1929#define ATOM_DEVICE_CONNECTOR_TYPE_MASK 0x000000F0 2399#define ATOM_DEVICE_CONNECTOR_TYPE_MASK 0x000000F0
1930#define ATOM_DEVICE_CONNECTOR_TYPE_SHIFT 0x00000004 2400#define ATOM_DEVICE_CONNECTOR_TYPE_SHIFT 0x00000004
@@ -1942,6 +2412,7 @@ usMinDownStreamHTLinkWidth: same as above.
1942#define ATOM_DEVICE_CONNECTOR_CASE_1 0x0000000E 2412#define ATOM_DEVICE_CONNECTOR_CASE_1 0x0000000E
1943#define ATOM_DEVICE_CONNECTOR_DISPLAYPORT 0x0000000F 2413#define ATOM_DEVICE_CONNECTOR_DISPLAYPORT 0x0000000F
1944 2414
2415
1945#define ATOM_DEVICE_DAC_INFO_MASK 0x0000000F 2416#define ATOM_DEVICE_DAC_INFO_MASK 0x0000000F
1946#define ATOM_DEVICE_DAC_INFO_SHIFT 0x00000000 2417#define ATOM_DEVICE_DAC_INFO_SHIFT 0x00000000
1947#define ATOM_DEVICE_DAC_INFO_NODAC 0x00000000 2418#define ATOM_DEVICE_DAC_INFO_NODAC 0x00000000
@@ -1958,139 +2429,150 @@ usMinDownStreamHTLinkWidth: same as above.
1958#define ATOM_DEVICE_I2C_ID_SHIFT 0x00000004 2429#define ATOM_DEVICE_I2C_ID_SHIFT 0x00000004
1959#define ATOM_DEVICE_I2C_ID_IS_FOR_NON_MM_USE 0x00000001 2430#define ATOM_DEVICE_I2C_ID_IS_FOR_NON_MM_USE 0x00000001
1960#define ATOM_DEVICE_I2C_ID_IS_FOR_MM_USE 0x00000002 2431#define ATOM_DEVICE_I2C_ID_IS_FOR_MM_USE 0x00000002
1961#define ATOM_DEVICE_I2C_ID_IS_FOR_SDVO_USE 0x00000003 /* For IGP RS600 */ 2432#define ATOM_DEVICE_I2C_ID_IS_FOR_SDVO_USE 0x00000003 //For IGP RS600
1962#define ATOM_DEVICE_I2C_ID_IS_FOR_DAC_SCL 0x00000004 /* For IGP RS690 */ 2433#define ATOM_DEVICE_I2C_ID_IS_FOR_DAC_SCL 0x00000004 //For IGP RS690
1963 2434
1964#define ATOM_DEVICE_I2C_HARDWARE_CAP_MASK 0x00000080 2435#define ATOM_DEVICE_I2C_HARDWARE_CAP_MASK 0x00000080
1965#define ATOM_DEVICE_I2C_HARDWARE_CAP_SHIFT 0x00000007 2436#define ATOM_DEVICE_I2C_HARDWARE_CAP_SHIFT 0x00000007
1966#define ATOM_DEVICE_USES_SOFTWARE_ASSISTED_I2C 0x00000000 2437#define ATOM_DEVICE_USES_SOFTWARE_ASSISTED_I2C 0x00000000
1967#define ATOM_DEVICE_USES_HARDWARE_ASSISTED_I2C 0x00000001 2438#define ATOM_DEVICE_USES_HARDWARE_ASSISTED_I2C 0x00000001
1968 2439
// usDeviceSupport:
// Bit 0  = 0 - no CRT1 support; =1 - CRT1 is supported
// Bit 1  = 0 - no LCD1 support; =1 - LCD1 is supported
// Bit 2  = 0 - no TV1 support;  =1 - TV1 is supported
// Bit 3  = 0 - no DFP1 support; =1 - DFP1 is supported
// Bit 4  = 0 - no CRT2 support; =1 - CRT2 is supported
// Bit 5  = 0 - no LCD2 support; =1 - LCD2 is supported
// Bit 6  = 0 - no DFP6 support; =1 - DFP6 is supported
// Bit 7  = 0 - no DFP2 support; =1 - DFP2 is supported
// Bit 8  = 0 - no CV support;   =1 - CV is supported
// Bit 9  = 0 - no DFP3 support; =1 - DFP3 is supported
// Bit 10 = 0 - no DFP4 support; =1 - DFP4 is supported
// Bit 11 = 0 - no DFP5 support; =1 - DFP5 is supported
//
//
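/* Illustrative only: testing a usDeviceSupport-style bit vector against the
   ATOM_DEVICE_xxx_SUPPORT masks defined above.  The helper name is this
   document's addition. */
static int example_is_lcd_supported(USHORT usDeviceSupport)
{
    /* ATOM_DEVICE_LCD_SUPPORT covers both the LCD1 and LCD2 bits. */
    return (usDeviceSupport & ATOM_DEVICE_LCD_SUPPORT) != 0;
}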
1984
1985/* ucI2C_ConfigID */
1986/* [7:0] - I2C LINE Associate ID */
1987/* = 0 - no I2C */
1988/* [7] - HW_Cap = 1, [6:0]=HW assisted I2C ID(HW line selection) */
1989/* = 0, [6:0]=SW assisted I2C ID */
1990/* [6-4] - HW_ENGINE_ID = 1, HW engine for NON multimedia use */
1991/* = 2, HW engine for Multimedia use */
1992/* = 3-7 Reserved for future I2C engines */
1993/* [3-0] - I2C_LINE_MUX = A Mux number when it's HW assisted I2C or GPIO ID when it's SW I2C */
1994
1995typedef struct _ATOM_I2C_ID_CONFIG {
1996#if ATOM_BIG_ENDIAN
1997 UCHAR bfHW_Capable:1;
1998 UCHAR bfHW_EngineID:3;
1999 UCHAR bfI2C_LineMux:4;
2000#else
2001 UCHAR bfI2C_LineMux:4;
2002 UCHAR bfHW_EngineID:3;
2003 UCHAR bfHW_Capable:1;
2004#endif
2005} ATOM_I2C_ID_CONFIG;
2006
2007typedef union _ATOM_I2C_ID_CONFIG_ACCESS {
2008 ATOM_I2C_ID_CONFIG sbfAccess;
2009 UCHAR ucAccess;
2010} ATOM_I2C_ID_CONFIG_ACCESS;
2011 2455
2012/****************************************************************************/ 2456/****************************************************************************/
2013/* Structure used in GPIO_I2C_InfoTable */ 2457/* Structure used in MclkSS_InfoTable */
2014/****************************************************************************/ 2458/****************************************************************************/
2015typedef struct _ATOM_GPIO_I2C_ASSIGMENT { 2459// ucI2C_ConfigID
2016 USHORT usClkMaskRegisterIndex; 2460// [7:0] - I2C LINE Associate ID
2017 USHORT usClkEnRegisterIndex; 2461// = 0 - no I2C
2018 USHORT usClkY_RegisterIndex; 2462// [7] - HW_Cap = 1, [6:0]=HW assisted I2C ID(HW line selection)
2019 USHORT usClkA_RegisterIndex; 2463// = 0, [6:0]=SW assisted I2C ID
2020 USHORT usDataMaskRegisterIndex; 2464// [6-4] - HW_ENGINE_ID = 1, HW engine for NON multimedia use
2021 USHORT usDataEnRegisterIndex; 2465// = 2, HW engine for Multimedia use
2022 USHORT usDataY_RegisterIndex; 2466// = 3-7 Reserved for future I2C engines
2023 USHORT usDataA_RegisterIndex; 2467// [3-0] - I2C_LINE_MUX = A Mux number when it's HW assisted I2C or GPIO ID when it's SW I2C
2024 ATOM_I2C_ID_CONFIG_ACCESS sucI2cId; 2468
2025 UCHAR ucClkMaskShift; 2469typedef struct _ATOM_I2C_ID_CONFIG
2026 UCHAR ucClkEnShift; 2470{
2027 UCHAR ucClkY_Shift; 2471#if ATOM_BIG_ENDIAN
2028 UCHAR ucClkA_Shift; 2472 UCHAR bfHW_Capable:1;
2029 UCHAR ucDataMaskShift; 2473 UCHAR bfHW_EngineID:3;
2030 UCHAR ucDataEnShift; 2474 UCHAR bfI2C_LineMux:4;
2031 UCHAR ucDataY_Shift; 2475#else
2032 UCHAR ucDataA_Shift; 2476 UCHAR bfI2C_LineMux:4;
2033 UCHAR ucReserved1; 2477 UCHAR bfHW_EngineID:3;
2034 UCHAR ucReserved2; 2478 UCHAR bfHW_Capable:1;
2035} ATOM_GPIO_I2C_ASSIGMENT; 2479#endif
2036 2480}ATOM_I2C_ID_CONFIG;
2037typedef struct _ATOM_GPIO_I2C_INFO {
2038 ATOM_COMMON_TABLE_HEADER sHeader;
2039 ATOM_GPIO_I2C_ASSIGMENT asGPIO_Info[ATOM_MAX_SUPPORTED_DEVICE];
2040} ATOM_GPIO_I2C_INFO;
2041 2481
2042/****************************************************************************/ 2482typedef union _ATOM_I2C_ID_CONFIG_ACCESS
2043/* Common Structure used in other structures */ 2483{
2044/****************************************************************************/ 2484 ATOM_I2C_ID_CONFIG sbfAccess;
2485 UCHAR ucAccess;
2486}ATOM_I2C_ID_CONFIG_ACCESS;
2487
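For readers decoding these tables by hand: the union above lets the same byte be read either whole (ucAccess) or through the bit fields. A minimal illustrative helper, following the ucI2C_ConfigID layout described above (the function name is ours, and the header's UCHAR typedef is assumed to be in scope):

#include <stdio.h>
#include "atombios.h"	/* assumed to provide UCHAR and the types above */

/* Hypothetical helper: unpack a ucI2C_ConfigID byte per the layout above. */
static void decode_i2c_config(UCHAR config_byte)
{
	ATOM_I2C_ID_CONFIG_ACCESS id;

	id.ucAccess = config_byte;

	if (id.ucAccess == 0) {
		printf("no I2C line associated\n");
	} else if (id.sbfAccess.bfHW_Capable) {
		/* HW assisted I2C: [6:4] selects the engine, [3:0] the pad mux */
		printf("HW I2C, engine %u, line mux %u\n",
		       (unsigned)id.sbfAccess.bfHW_EngineID,
		       (unsigned)id.sbfAccess.bfI2C_LineMux);
	} else {
		/* SW (bit-banged) I2C: the low nibble is a GPIO ID */
		printf("SW I2C, GPIO ID %u\n",
		       (unsigned)id.sbfAccess.bfI2C_LineMux);
	}
}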
2488
2489/****************************************************************************/
2490// Structure used in GPIO_I2C_InfoTable
2491/****************************************************************************/
2492typedef struct _ATOM_GPIO_I2C_ASSIGMENT
2493{
2494 USHORT usClkMaskRegisterIndex;
2495 USHORT usClkEnRegisterIndex;
2496 USHORT usClkY_RegisterIndex;
2497 USHORT usClkA_RegisterIndex;
2498 USHORT usDataMaskRegisterIndex;
2499 USHORT usDataEnRegisterIndex;
2500 USHORT usDataY_RegisterIndex;
2501 USHORT usDataA_RegisterIndex;
2502 ATOM_I2C_ID_CONFIG_ACCESS sucI2cId;
2503 UCHAR ucClkMaskShift;
2504 UCHAR ucClkEnShift;
2505 UCHAR ucClkY_Shift;
2506 UCHAR ucClkA_Shift;
2507 UCHAR ucDataMaskShift;
2508 UCHAR ucDataEnShift;
2509 UCHAR ucDataY_Shift;
2510 UCHAR ucDataA_Shift;
2511 UCHAR ucReserved1;
2512 UCHAR ucReserved2;
2513}ATOM_GPIO_I2C_ASSIGMENT;
2514
2515typedef struct _ATOM_GPIO_I2C_INFO
2516{
2517 ATOM_COMMON_TABLE_HEADER sHeader;
2518 ATOM_GPIO_I2C_ASSIGMENT asGPIO_Info[ATOM_MAX_SUPPORTED_DEVICE];
2519}ATOM_GPIO_I2C_INFO;
2520
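The GPIO_I2C_Info table is simply the common header followed by a fixed-size array of the assignment records above, so a lookup is a linear scan on sucI2cId. A sketch under that assumption (helper name ours; endianness conversion omitted for brevity):

#include <stdio.h>
#include "atombios.h"	/* assumed: ATOM_GPIO_I2C_INFO, ATOM_MAX_SUPPORTED_DEVICE */

/* Hypothetical lookup: find the GPIO/I2C assignment whose sucI2cId matches
 * a given line ID and show two of its register indices. */
static const ATOM_GPIO_I2C_ASSIGMENT *
find_gpio_i2c(const ATOM_GPIO_I2C_INFO *tbl, UCHAR line_id)
{
	int i;

	for (i = 0; i < ATOM_MAX_SUPPORTED_DEVICE; i++) {
		const ATOM_GPIO_I2C_ASSIGMENT *gpio = &tbl->asGPIO_Info[i];

		if (gpio->sucI2cId.ucAccess != line_id)
			continue;

		printf("clk mask reg 0x%04x, data mask reg 0x%04x\n",
		       (unsigned)gpio->usClkMaskRegisterIndex,
		       (unsigned)gpio->usDataMaskRegisterIndex);
		return gpio;
	}
	return NULL;
}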
2521/****************************************************************************/
2522// Common Structure used in other structures
2523/****************************************************************************/
2045 2524
2046#ifndef _H2INC 2525#ifndef _H2INC
2047 2526
2048/* Please don't add or expand this bitfield structure below, this one will retire soon! */ 2527//Please don't add or expand this bitfield structure below, this one will retire soon!
2049typedef struct _ATOM_MODE_MISC_INFO { 2528typedef struct _ATOM_MODE_MISC_INFO
2529{
2050#if ATOM_BIG_ENDIAN 2530#if ATOM_BIG_ENDIAN
2051 USHORT Reserved:6; 2531 USHORT Reserved:6;
2052 USHORT RGB888:1; 2532 USHORT RGB888:1;
2053 USHORT DoubleClock:1; 2533 USHORT DoubleClock:1;
2054 USHORT Interlace:1; 2534 USHORT Interlace:1;
2055 USHORT CompositeSync:1; 2535 USHORT CompositeSync:1;
2056 USHORT V_ReplicationBy2:1; 2536 USHORT V_ReplicationBy2:1;
2057 USHORT H_ReplicationBy2:1; 2537 USHORT H_ReplicationBy2:1;
2058 USHORT VerticalCutOff:1; 2538 USHORT VerticalCutOff:1;
2059 USHORT VSyncPolarity:1; /* 0=Active High, 1=Active Low */ 2539 USHORT VSyncPolarity:1; //0=Active High, 1=Active Low
2060 USHORT HSyncPolarity:1; /* 0=Active High, 1=Active Low */ 2540 USHORT HSyncPolarity:1; //0=Active High, 1=Active Low
2061 USHORT HorizontalCutOff:1; 2541 USHORT HorizontalCutOff:1;
2062#else 2542#else
2063 USHORT HorizontalCutOff:1; 2543 USHORT HorizontalCutOff:1;
2064 USHORT HSyncPolarity:1; /* 0=Active High, 1=Active Low */ 2544 USHORT HSyncPolarity:1; //0=Active High, 1=Active Low
2065 USHORT VSyncPolarity:1; /* 0=Active High, 1=Active Low */ 2545 USHORT VSyncPolarity:1; //0=Active High, 1=Active Low
2066 USHORT VerticalCutOff:1; 2546 USHORT VerticalCutOff:1;
2067 USHORT H_ReplicationBy2:1; 2547 USHORT H_ReplicationBy2:1;
2068 USHORT V_ReplicationBy2:1; 2548 USHORT V_ReplicationBy2:1;
2069 USHORT CompositeSync:1; 2549 USHORT CompositeSync:1;
2070 USHORT Interlace:1; 2550 USHORT Interlace:1;
2071 USHORT DoubleClock:1; 2551 USHORT DoubleClock:1;
2072 USHORT RGB888:1; 2552 USHORT RGB888:1;
2073 USHORT Reserved:6; 2553 USHORT Reserved:6;
2074#endif 2554#endif
2075} ATOM_MODE_MISC_INFO; 2555}ATOM_MODE_MISC_INFO;
2076 2556
2077typedef union _ATOM_MODE_MISC_INFO_ACCESS { 2557typedef union _ATOM_MODE_MISC_INFO_ACCESS
2078 ATOM_MODE_MISC_INFO sbfAccess; 2558{
2079 USHORT usAccess; 2559 ATOM_MODE_MISC_INFO sbfAccess;
2080} ATOM_MODE_MISC_INFO_ACCESS; 2560 USHORT usAccess;
2081 2561}ATOM_MODE_MISC_INFO_ACCESS;
2562
2082#else 2563#else
2083 2564
2084typedef union _ATOM_MODE_MISC_INFO_ACCESS { 2565typedef union _ATOM_MODE_MISC_INFO_ACCESS
2085 USHORT usAccess; 2566{
2086} ATOM_MODE_MISC_INFO_ACCESS; 2567 USHORT usAccess;
2087 2568}ATOM_MODE_MISC_INFO_ACCESS;
2569
2088#endif 2570#endif
2089 2571
2090/* usModeMiscInfo- */ 2572// usModeMiscInfo-
2091#define ATOM_H_CUTOFF 0x01 2573#define ATOM_H_CUTOFF 0x01
2092#define ATOM_HSYNC_POLARITY 0x02 /* 0=Active High, 1=Active Low */ 2574#define ATOM_HSYNC_POLARITY 0x02 //0=Active High, 1=Active Low
2093#define ATOM_VSYNC_POLARITY 0x04 /* 0=Active High, 1=Active Low */ 2575#define ATOM_VSYNC_POLARITY 0x04 //0=Active High, 1=Active Low
2094#define ATOM_V_CUTOFF 0x08 2576#define ATOM_V_CUTOFF 0x08
2095#define ATOM_H_REPLICATIONBY2 0x10 2577#define ATOM_H_REPLICATIONBY2 0x10
2096#define ATOM_V_REPLICATIONBY2 0x20 2578#define ATOM_V_REPLICATIONBY2 0x20
@@ -2099,10 +2581,10 @@ typedef union _ATOM_MODE_MISC_INFO_ACCESS {
2099#define ATOM_DOUBLE_CLOCK_MODE 0x100 2581#define ATOM_DOUBLE_CLOCK_MODE 0x100
2100#define ATOM_RGB888_MODE 0x200 2582#define ATOM_RGB888_MODE 0x200
2101 2583
2102/* usRefreshRate- */ 2584//usRefreshRate-
2103#define ATOM_REFRESH_43 43 2585#define ATOM_REFRESH_43 43
2104#define ATOM_REFRESH_47 47 2586#define ATOM_REFRESH_47 47
2105#define ATOM_REFRESH_56 56 2587#define ATOM_REFRESH_56 56
2106#define ATOM_REFRESH_60 60 2588#define ATOM_REFRESH_60 60
2107#define ATOM_REFRESH_65 65 2589#define ATOM_REFRESH_65 65
2108#define ATOM_REFRESH_70 70 2590#define ATOM_REFRESH_70 70
@@ -2110,192 +2592,233 @@ typedef union _ATOM_MODE_MISC_INFO_ACCESS {
2110#define ATOM_REFRESH_75 75 2592#define ATOM_REFRESH_75 75
2111#define ATOM_REFRESH_85 85 2593#define ATOM_REFRESH_85 85
2112 2594
2113/* ATOM_MODE_TIMING data are exactly the same as VESA timing data. */ 2595// ATOM_MODE_TIMING data are exactly the same as VESA timing data.
2114/* Translation from EDID to ATOM_MODE_TIMING, use the following formula. */ 2596// Translation from EDID to ATOM_MODE_TIMING, use the following formula.
2115/* */ 2597//
2116/* VESA_HTOTAL = VESA_ACTIVE + 2* VESA_BORDER + VESA_BLANK */ 2598// VESA_HTOTAL = VESA_ACTIVE + 2* VESA_BORDER + VESA_BLANK
2117/* = EDID_HA + EDID_HBL */ 2599// = EDID_HA + EDID_HBL
2118/* VESA_HDISP = VESA_ACTIVE = EDID_HA */ 2600// VESA_HDISP = VESA_ACTIVE = EDID_HA
2119/* VESA_HSYNC_START = VESA_ACTIVE + VESA_BORDER + VESA_FRONT_PORCH */ 2601// VESA_HSYNC_START = VESA_ACTIVE + VESA_BORDER + VESA_FRONT_PORCH
2120/* = EDID_HA + EDID_HSO */ 2602// = EDID_HA + EDID_HSO
2121/* VESA_HSYNC_WIDTH = VESA_HSYNC_TIME = EDID_HSPW */ 2603// VESA_HSYNC_WIDTH = VESA_HSYNC_TIME = EDID_HSPW
2122/* VESA_BORDER = EDID_BORDER */ 2604// VESA_BORDER = EDID_BORDER
2123 2605
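Applied literally, the EDID-to-VESA translation above is a handful of additions. A small illustrative routine for the horizontal fields (the edid_h_fields container is ours, not part of ATOM or EDID):

#include "atombios.h"	/* assumed: USHORT typedef */

struct edid_h_fields {		/* hypothetical container for the EDID bytes */
	USHORT ha;		/* EDID_HA:   horizontal active */
	USHORT hbl;		/* EDID_HBL:  horizontal blanking */
	USHORT hso;		/* EDID_HSO:  hsync offset (front porch) */
	USHORT hspw;		/* EDID_HSPW: hsync pulse width */
};

/* Apply the formulas from the comment block above:
 *   VESA_HTOTAL      = EDID_HA + EDID_HBL
 *   VESA_HDISP       = EDID_HA
 *   VESA_HSYNC_START = EDID_HA + EDID_HSO
 *   VESA_HSYNC_WIDTH = EDID_HSPW
 */
static void edid_to_vesa_h(const struct edid_h_fields *e,
			   USHORT *h_total, USHORT *h_disp,
			   USHORT *h_sync_start, USHORT *h_sync_width)
{
	*h_total      = e->ha + e->hbl;
	*h_disp       = e->ha;
	*h_sync_start = e->ha + e->hso;
	*h_sync_width = e->hspw;
}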
2124/****************************************************************************/ 2606/****************************************************************************/
2125/* Structure used in SetCRTC_UsingDTDTimingTable */ 2607// Structure used in SetCRTC_UsingDTDTimingTable
2126/****************************************************************************/ 2608/****************************************************************************/
2127typedef struct _SET_CRTC_USING_DTD_TIMING_PARAMETERS { 2609typedef struct _SET_CRTC_USING_DTD_TIMING_PARAMETERS
2128 USHORT usH_Size; 2610{
2129 USHORT usH_Blanking_Time; 2611 USHORT usH_Size;
2130 USHORT usV_Size; 2612 USHORT usH_Blanking_Time;
2131 USHORT usV_Blanking_Time; 2613 USHORT usV_Size;
2132 USHORT usH_SyncOffset; 2614 USHORT usV_Blanking_Time;
2133 USHORT usH_SyncWidth; 2615 USHORT usH_SyncOffset;
2134 USHORT usV_SyncOffset; 2616 USHORT usH_SyncWidth;
2135 USHORT usV_SyncWidth; 2617 USHORT usV_SyncOffset;
2136 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo; 2618 USHORT usV_SyncWidth;
2137 UCHAR ucH_Border; /* From DFP EDID */ 2619 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo;
2138 UCHAR ucV_Border; 2620 UCHAR ucH_Border; // From DFP EDID
2139 UCHAR ucCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 2621 UCHAR ucV_Border;
2140 UCHAR ucPadding[3]; 2622 UCHAR ucCRTC; // ATOM_CRTC1 or ATOM_CRTC2
2141} SET_CRTC_USING_DTD_TIMING_PARAMETERS; 2623 UCHAR ucPadding[3];
2142 2624}SET_CRTC_USING_DTD_TIMING_PARAMETERS;
2143/****************************************************************************/ 2625
2144/* Structure used in SetCRTC_TimingTable */ 2626/****************************************************************************/
2145/****************************************************************************/ 2627// Structure used in SetCRTC_TimingTable
2146typedef struct _SET_CRTC_TIMING_PARAMETERS { 2628/****************************************************************************/
2147 USHORT usH_Total; /* horizontal total */ 2629typedef struct _SET_CRTC_TIMING_PARAMETERS
2148 USHORT usH_Disp; /* horizontal display */ 2630{
2149 USHORT usH_SyncStart; /* horizontal Sync start */ 2631 USHORT usH_Total; // horizontal total
2150 USHORT usH_SyncWidth; /* horizontal Sync width */ 2632 USHORT usH_Disp; // horizontal display
2151 USHORT usV_Total; /* vertical total */ 2633 USHORT usH_SyncStart; // horizontal Sync start
2152 USHORT usV_Disp; /* vertical display */ 2634 USHORT usH_SyncWidth; // horizontal Sync width
2153 USHORT usV_SyncStart; /* vertical Sync start */ 2635 USHORT usV_Total; // vertical total
2154 USHORT usV_SyncWidth; /* vertical Sync width */ 2636 USHORT usV_Disp; // vertical display
2155 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo; 2637 USHORT usV_SyncStart; // vertical Sync start
2156 UCHAR ucCRTC; /* ATOM_CRTC1 or ATOM_CRTC2 */ 2638 USHORT usV_SyncWidth; // vertical Sync width
2157 UCHAR ucOverscanRight; /* right */ 2639 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo;
2158 UCHAR ucOverscanLeft; /* left */ 2640 UCHAR ucCRTC; // ATOM_CRTC1 or ATOM_CRTC2
2159 UCHAR ucOverscanBottom; /* bottom */ 2641 UCHAR ucOverscanRight; // right
2160 UCHAR ucOverscanTop; /* top */ 2642 UCHAR ucOverscanLeft; // left
2161 UCHAR ucReserved; 2643 UCHAR ucOverscanBottom; // bottom
2162} SET_CRTC_TIMING_PARAMETERS; 2644 UCHAR ucOverscanTop; // top
2645 UCHAR ucReserved;
2646}SET_CRTC_TIMING_PARAMETERS;
2163#define SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION SET_CRTC_TIMING_PARAMETERS 2647#define SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION SET_CRTC_TIMING_PARAMETERS
2164 2648
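Since the DTD-style parameters carry active size, blanking time and sync offset/width while SET_CRTC_TIMING_PARAMETERS wants totals and sync start, a caller converts with the same additions as the VESA formulas above. A hedged sketch of that conversion (our own helper; border handling and byte order are ignored):

#include <string.h>
#include "atombios.h"	/* assumed: the two parameter structures above */

/* Hypothetical conversion from the DTD-style parameters (active size,
 * blanking time, sync offset/width) to the absolute CRTC timing form
 * (totals and sync start), illustrating how the two tables relate. */
static void dtd_params_to_crtc_params(const SET_CRTC_USING_DTD_TIMING_PARAMETERS *dtd,
				      SET_CRTC_TIMING_PARAMETERS *crtc)
{
	memset(crtc, 0, sizeof(*crtc));
	crtc->usH_Disp      = dtd->usH_Size;
	crtc->usH_Total     = dtd->usH_Size + dtd->usH_Blanking_Time;
	crtc->usH_SyncStart = dtd->usH_Size + dtd->usH_SyncOffset;
	crtc->usH_SyncWidth = dtd->usH_SyncWidth;
	crtc->usV_Disp      = dtd->usV_Size;
	crtc->usV_Total     = dtd->usV_Size + dtd->usV_Blanking_Time;
	crtc->usV_SyncStart = dtd->usV_Size + dtd->usV_SyncOffset;
	crtc->usV_SyncWidth = dtd->usV_SyncWidth;
	crtc->susModeMiscInfo = dtd->susModeMiscInfo;
	crtc->ucCRTC          = dtd->ucCRTC;
}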
2165/****************************************************************************/ 2649/****************************************************************************/
2166/* Structure used in StandardVESA_TimingTable */ 2650// Structure used in StandardVESA_TimingTable
2167/* AnalogTV_InfoTable */ 2651// AnalogTV_InfoTable
2168/* ComponentVideoInfoTable */ 2652// ComponentVideoInfoTable
2169/****************************************************************************/ 2653/****************************************************************************/
2170typedef struct _ATOM_MODE_TIMING { 2654typedef struct _ATOM_MODE_TIMING
2171 USHORT usCRTC_H_Total; 2655{
2172 USHORT usCRTC_H_Disp; 2656 USHORT usCRTC_H_Total;
2173 USHORT usCRTC_H_SyncStart; 2657 USHORT usCRTC_H_Disp;
2174 USHORT usCRTC_H_SyncWidth; 2658 USHORT usCRTC_H_SyncStart;
2175 USHORT usCRTC_V_Total; 2659 USHORT usCRTC_H_SyncWidth;
2176 USHORT usCRTC_V_Disp; 2660 USHORT usCRTC_V_Total;
2177 USHORT usCRTC_V_SyncStart; 2661 USHORT usCRTC_V_Disp;
2178 USHORT usCRTC_V_SyncWidth; 2662 USHORT usCRTC_V_SyncStart;
2179 USHORT usPixelClock; /* in 10Khz unit */ 2663 USHORT usCRTC_V_SyncWidth;
2180 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo; 2664 USHORT usPixelClock; //in 10Khz unit
2181 USHORT usCRTC_OverscanRight; 2665 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo;
2182 USHORT usCRTC_OverscanLeft; 2666 USHORT usCRTC_OverscanRight;
2183 USHORT usCRTC_OverscanBottom; 2667 USHORT usCRTC_OverscanLeft;
2184 USHORT usCRTC_OverscanTop; 2668 USHORT usCRTC_OverscanBottom;
2185 USHORT usReserve; 2669 USHORT usCRTC_OverscanTop;
2186 UCHAR ucInternalModeNumber; 2670 USHORT usReserve;
2187 UCHAR ucRefreshRate; 2671 UCHAR ucInternalModeNumber;
2188} ATOM_MODE_TIMING; 2672 UCHAR ucRefreshRate;
2189 2673}ATOM_MODE_TIMING;
2190typedef struct _ATOM_DTD_FORMAT { 2674
2191 USHORT usPixClk; 2675typedef struct _ATOM_DTD_FORMAT
2192 USHORT usHActive; 2676{
2193 USHORT usHBlanking_Time; 2677 USHORT usPixClk;
2194 USHORT usVActive; 2678 USHORT usHActive;
2195 USHORT usVBlanking_Time; 2679 USHORT usHBlanking_Time;
2196 USHORT usHSyncOffset; 2680 USHORT usVActive;
2197 USHORT usHSyncWidth; 2681 USHORT usVBlanking_Time;
2198 USHORT usVSyncOffset; 2682 USHORT usHSyncOffset;
2199 USHORT usVSyncWidth; 2683 USHORT usHSyncWidth;
2200 USHORT usImageHSize; 2684 USHORT usVSyncOffset;
2201 USHORT usImageVSize; 2685 USHORT usVSyncWidth;
2202 UCHAR ucHBorder; 2686 USHORT usImageHSize;
2203 UCHAR ucVBorder; 2687 USHORT usImageVSize;
2204 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo; 2688 UCHAR ucHBorder;
2205 UCHAR ucInternalModeNumber; 2689 UCHAR ucVBorder;
2206 UCHAR ucRefreshRate; 2690 ATOM_MODE_MISC_INFO_ACCESS susModeMiscInfo;
2207} ATOM_DTD_FORMAT; 2691 UCHAR ucInternalModeNumber;
2208 2692 UCHAR ucRefreshRate;
2209/****************************************************************************/ 2693}ATOM_DTD_FORMAT;
2210/* Structure used in LVDS_InfoTable */ 2694
2211/* * Need a document to describe this table */ 2695/****************************************************************************/
2212/****************************************************************************/ 2696// Structure used in LVDS_InfoTable
2697// * Need a document to describe this table
2698/****************************************************************************/
2213#define SUPPORTED_LCD_REFRESHRATE_30Hz 0x0004 2699#define SUPPORTED_LCD_REFRESHRATE_30Hz 0x0004
2214#define SUPPORTED_LCD_REFRESHRATE_40Hz 0x0008 2700#define SUPPORTED_LCD_REFRESHRATE_40Hz 0x0008
2215#define SUPPORTED_LCD_REFRESHRATE_50Hz 0x0010 2701#define SUPPORTED_LCD_REFRESHRATE_50Hz 0x0010
2216#define SUPPORTED_LCD_REFRESHRATE_60Hz 0x0020 2702#define SUPPORTED_LCD_REFRESHRATE_60Hz 0x0020
2217 2703
2218/* Once DAL sees this CAP is set, it will read EDID from LCD on its own instead of using sLCDTiming in ATOM_LVDS_INFO_V12. */ 2704//ucTableFormatRevision=1
2219/* Other entries in ATOM_LVDS_INFO_V12 are still valid/useful to DAL */ 2705//ucTableContentRevision=1
2220#define LCDPANEL_CAP_READ_EDID 0x1 2706typedef struct _ATOM_LVDS_INFO
2221 2707{
2222/* ucTableFormatRevision=1 */ 2708 ATOM_COMMON_TABLE_HEADER sHeader;
2223/* ucTableContentRevision=1 */ 2709 ATOM_DTD_FORMAT sLCDTiming;
2224typedef struct _ATOM_LVDS_INFO { 2710 USHORT usModePatchTableOffset;
2225 ATOM_COMMON_TABLE_HEADER sHeader; 2711 USHORT usSupportedRefreshRate; //Refer to panel info table in ATOMBIOS extension Spec.
2226 ATOM_DTD_FORMAT sLCDTiming; 2712 USHORT usOffDelayInMs;
2227 USHORT usModePatchTableOffset; 2713 UCHAR ucPowerSequenceDigOntoDEin10Ms;
2228 USHORT usSupportedRefreshRate; /* Refer to panel info table in ATOMBIOS extension Spec. */ 2714 UCHAR ucPowerSequenceDEtoBLOnin10Ms;
2229 USHORT usOffDelayInMs; 2715 UCHAR ucLVDS_Misc; // Bit0:{=0:single, =1:dual},Bit1 {=0:666RGB, =1:888RGB},Bit2:3:{Grey level}
2230 UCHAR ucPowerSequenceDigOntoDEin10Ms; 2716 // Bit4:{=0:LDI format for RGB888, =1 FPDI format for RGB888}
2231 UCHAR ucPowerSequenceDEtoBLOnin10Ms; 2717 // Bit5:{=0:Spatial Dithering disabled;1 Spatial Dithering enabled}
2232 UCHAR ucLVDS_Misc; /* Bit0:{=0:single, =1:dual},Bit1 {=0:666RGB, =1:888RGB},Bit2:3:{Grey level} */ 2718 // Bit6:{=0:Temporal Dithering disabled;1 Temporal Dithering enabled}
2233 /* Bit4:{=0:LDI format for RGB888, =1 FPDI format for RGB888} */ 2719 UCHAR ucPanelDefaultRefreshRate;
2234 /* Bit5:{=0:Spatial Dithering disabled;1 Spatial Dithering enabled} */ 2720 UCHAR ucPanelIdentification;
2235 /* Bit6:{=0:Temporal Dithering disabled;1 Temporal Dithering enabled} */ 2721 UCHAR ucSS_Id;
2236 UCHAR ucPanelDefaultRefreshRate; 2722}ATOM_LVDS_INFO;
2237 UCHAR ucPanelIdentification; 2723
2238 UCHAR ucSS_Id; 2724//ucTableFormatRevision=1
2239} ATOM_LVDS_INFO; 2725//ucTableContentRevision=2
2240 2726typedef struct _ATOM_LVDS_INFO_V12
2241/* ucTableFormatRevision=1 */ 2727{
2242/* ucTableContentRevision=2 */ 2728 ATOM_COMMON_TABLE_HEADER sHeader;
2243typedef struct _ATOM_LVDS_INFO_V12 { 2729 ATOM_DTD_FORMAT sLCDTiming;
2244 ATOM_COMMON_TABLE_HEADER sHeader; 2730 USHORT usExtInfoTableOffset;
2245 ATOM_DTD_FORMAT sLCDTiming; 2731 USHORT usSupportedRefreshRate; //Refer to panel info table in ATOMBIOS extension Spec.
2246 USHORT usExtInfoTableOffset; 2732 USHORT usOffDelayInMs;
2247 USHORT usSupportedRefreshRate; /* Refer to panel info table in ATOMBIOS extension Spec. */ 2733 UCHAR ucPowerSequenceDigOntoDEin10Ms;
2248 USHORT usOffDelayInMs; 2734 UCHAR ucPowerSequenceDEtoBLOnin10Ms;
2249 UCHAR ucPowerSequenceDigOntoDEin10Ms; 2735 UCHAR ucLVDS_Misc; // Bit0:{=0:single, =1:dual},Bit1 {=0:666RGB, =1:888RGB},Bit2:3:{Grey level}
2250 UCHAR ucPowerSequenceDEtoBLOnin10Ms; 2736 // Bit4:{=0:LDI format for RGB888, =1 FPDI format for RGB888}
2251 UCHAR ucLVDS_Misc; /* Bit0:{=0:single, =1:dual},Bit1 {=0:666RGB, =1:888RGB},Bit2:3:{Grey level} */ 2737 // Bit5:{=0:Spatial Dithering disabled;1 Spatial Dithering enabled}
2252 /* Bit4:{=0:LDI format for RGB888, =1 FPDI format for RGB888} */ 2738 // Bit6:{=0:Temporal Dithering disabled;1 Temporal Dithering enabled}
2253 /* Bit5:{=0:Spatial Dithering disabled;1 Spatial Dithering enabled} */ 2739 UCHAR ucPanelDefaultRefreshRate;
2254 /* Bit6:{=0:Temporal Dithering disabled;1 Temporal Dithering enabled} */ 2740 UCHAR ucPanelIdentification;
2255 UCHAR ucPanelDefaultRefreshRate; 2741 UCHAR ucSS_Id;
2256 UCHAR ucPanelIdentification; 2742 USHORT usLCDVenderID;
2257 UCHAR ucSS_Id; 2743 USHORT usLCDProductID;
2258 USHORT usLCDVenderID; 2744 UCHAR ucLCDPanel_SpecialHandlingCap;
2259 USHORT usLCDProductID; 2745 UCHAR ucPanelInfoSize; // start from ATOM_DTD_FORMAT to end of panel info, include ExtInfoTable
2260 UCHAR ucLCDPanel_SpecialHandlingCap; 2746 UCHAR ucReserved[2];
2261 UCHAR ucPanelInfoSize; /* start from ATOM_DTD_FORMAT to end of panel info, include ExtInfoTable */ 2747}ATOM_LVDS_INFO_V12;
2262 UCHAR ucReserved[2]; 2748
2263} ATOM_LVDS_INFO_V12; 2749//Definitions for ucLCDPanel_SpecialHandlingCap:
2750
2751//Once DAL sees this CAP is set, it will read EDID from LCD on its own instead of using sLCDTiming in ATOM_LVDS_INFO_V12.
2752//Other entries in ATOM_LVDS_INFO_V12 are still valid/useful to DAL
2753#define LCDPANEL_CAP_READ_EDID 0x1
2754
2755//If a design supports DRR (dynamic refresh rate) on internal panels (LVDS or EDP), this cap is set in ucLCDPanel_SpecialHandlingCap together
2756//with multiple supported refresh rates in usSupportedRefreshRate. This cap should not be set when only a slow refresh rate is supported (static
2757//refresh rate switched by SW). This is only valid from ATOM_LVDS_INFO_V12 onward.
2758#define LCDPANEL_CAP_DRR_SUPPORTED 0x2
2759
2760//Use this cap bit for a quick reference whether an embedded panel (LCD1) is LVDS or eDP.
2761#define LCDPANEL_CAP_eDP 0x4
2762
2763
2764//Color Bit Depth definition in EDID V1.4 @BYTE 14h
2765//Bit 6 5 4
2766 // 0 0 0 - Color bit depth is undefined
2767 // 0 0 1 - 6 Bits per Primary Color
2768 // 0 1 0 - 8 Bits per Primary Color
2769 // 0 1 1 - 10 Bits per Primary Color
2770 // 1 0 0 - 12 Bits per Primary Color
2771 // 1 0 1 - 14 Bits per Primary Color
2772 // 1 1 0 - 16 Bits per Primary Color
2773 // 1 1 1 - Reserved
2774
2775#define PANEL_COLOR_BIT_DEPTH_MASK 0x70
2776
2777// Bit7:{=0:Random Dithering disabled;1 Random Dithering enabled}
2778#define PANEL_RANDOM_DITHER 0x80
2779#define PANEL_RANDOM_DITHER_MASK 0x80
2780
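Taken together, ucLCDPanel_SpecialHandlingCap and the EDID 1.4 colour-depth code decode with a few masks. The sketch below is ours; in particular, which panel byte actually carries the colour-depth code is not spelled out in this revision, so the function simply takes that raw byte as a parameter:

#include <stdio.h>
#include "atombios.h"	/* assumed: LCDPANEL_CAP_* and PANEL_COLOR_BIT_DEPTH_MASK */

/* Hypothetical decode of ucLCDPanel_SpecialHandlingCap plus the EDID 1.4
 * colour-depth code listed above (bits [6:4] of whichever panel byte
 * carries it): codes 1..6 map to 6/8/10/12/14/16 bpc, 0 is undefined,
 * 7 is reserved. */
static void decode_panel_caps(UCHAR cap, UCHAR depth_byte)
{
	unsigned int code = (depth_byte & PANEL_COLOR_BIT_DEPTH_MASK) >> 4;

	printf("EDID read by DAL: %s\n", (cap & LCDPANEL_CAP_READ_EDID) ? "yes" : "no");
	printf("DRR supported:    %s\n", (cap & LCDPANEL_CAP_DRR_SUPPORTED) ? "yes" : "no");
	printf("panel interface:  %s\n", (cap & LCDPANEL_CAP_eDP) ? "eDP" : "LVDS");

	if (code == 0)
		printf("colour depth:     undefined\n");
	else if (code == 7)
		printf("colour depth:     reserved code\n");
	else
		printf("colour depth:     %u bpc\n", 4 + 2 * code);
}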
2264 2781
2265#define ATOM_LVDS_INFO_LAST ATOM_LVDS_INFO_V12 2782#define ATOM_LVDS_INFO_LAST ATOM_LVDS_INFO_V12
2266 2783
2267typedef struct _ATOM_PATCH_RECORD_MODE { 2784typedef struct _ATOM_PATCH_RECORD_MODE
2268 UCHAR ucRecordType; 2785{
2269 USHORT usHDisp; 2786 UCHAR ucRecordType;
2270 USHORT usVDisp; 2787 USHORT usHDisp;
2271} ATOM_PATCH_RECORD_MODE; 2788 USHORT usVDisp;
2789}ATOM_PATCH_RECORD_MODE;
2272 2790
2273typedef struct _ATOM_LCD_RTS_RECORD { 2791typedef struct _ATOM_LCD_RTS_RECORD
2274 UCHAR ucRecordType; 2792{
2275 UCHAR ucRTSValue; 2793 UCHAR ucRecordType;
2276} ATOM_LCD_RTS_RECORD; 2794 UCHAR ucRTSValue;
2795}ATOM_LCD_RTS_RECORD;
2277 2796
2278/* !! If the record below exists, it should always be the first record for easy use in command table!!! */ 2797//!! If the record below exists, it should always be the first record for easy use in command table!!!
2279typedef struct _ATOM_LCD_MODE_CONTROL_CAP { 2798// The record below is only used when LVDS_Info is present. From ATOM_LVDS_INFO_V12, use ucLCDPanel_SpecialHandlingCap instead.
2280 UCHAR ucRecordType; 2799typedef struct _ATOM_LCD_MODE_CONTROL_CAP
2281 USHORT usLCDCap; 2800{
2282} ATOM_LCD_MODE_CONTROL_CAP; 2801 UCHAR ucRecordType;
2802 USHORT usLCDCap;
2803}ATOM_LCD_MODE_CONTROL_CAP;
2283 2804
2284#define LCD_MODE_CAP_BL_OFF 1 2805#define LCD_MODE_CAP_BL_OFF 1
2285#define LCD_MODE_CAP_CRTC_OFF 2 2806#define LCD_MODE_CAP_CRTC_OFF 2
2286#define LCD_MODE_CAP_PANEL_OFF 4 2807#define LCD_MODE_CAP_PANEL_OFF 4
2287 2808
2288typedef struct _ATOM_FAKE_EDID_PATCH_RECORD { 2809typedef struct _ATOM_FAKE_EDID_PATCH_RECORD
2289 UCHAR ucRecordType; 2810{
2290 UCHAR ucFakeEDIDLength; 2811 UCHAR ucRecordType;
2291 UCHAR ucFakeEDIDString[1]; /* This actually has ucFakeEdidLength elements. */ 2812 UCHAR ucFakeEDIDLength;
2813 UCHAR ucFakeEDIDString[1]; // This actually has ucFakeEdidLength elements.
2292} ATOM_FAKE_EDID_PATCH_RECORD; 2814} ATOM_FAKE_EDID_PATCH_RECORD;
2293 2815
2294typedef struct _ATOM_PANEL_RESOLUTION_PATCH_RECORD { 2816typedef struct _ATOM_PANEL_RESOLUTION_PATCH_RECORD
2295 UCHAR ucRecordType; 2817{
2296 USHORT usHSize; 2818 UCHAR ucRecordType;
2297 USHORT usVSize; 2819 USHORT usHSize;
2298} ATOM_PANEL_RESOLUTION_PATCH_RECORD; 2820 USHORT usVSize;
2821}ATOM_PANEL_RESOLUTION_PATCH_RECORD;
2299 2822
2300#define LCD_MODE_PATCH_RECORD_MODE_TYPE 1 2823#define LCD_MODE_PATCH_RECORD_MODE_TYPE 1
2301#define LCD_RTS_RECORD_TYPE 2 2824#define LCD_RTS_RECORD_TYPE 2
@@ -2306,21 +2829,25 @@ typedef struct _ATOM_PANEL_RESOLUTION_PATCH_RECORD {
2306 2829
2307/****************************Spread Spectrum Info Table Definitions **********************/ 2830/****************************Spread Spectrum Info Table Definitions **********************/
2308 2831
2309/* ucTableFormatRevision=1 */ 2832//ucTableFormatRevision=1
2310/* ucTableContentRevision=2 */ 2833//ucTableContentRevision=2
2311typedef struct _ATOM_SPREAD_SPECTRUM_ASSIGNMENT { 2834typedef struct _ATOM_SPREAD_SPECTRUM_ASSIGNMENT
2312 USHORT usSpreadSpectrumPercentage; 2835{
2313 UCHAR ucSpreadSpectrumType; /* Bit1=0 Down Spread,=1 Center Spread. Bit1=1 Ext. =0 Int. Others:TBD */ 2836 USHORT usSpreadSpectrumPercentage;
2314 UCHAR ucSS_Step; 2837 UCHAR ucSpreadSpectrumType; //Bit1=0 Down Spread,=1 Center Spread. Bit1=1 Ext. =0 Int. Bit2=1: PCIE REFCLK SS =0 iternal PPLL SS Others:TBD
2315 UCHAR ucSS_Delay; 2838 UCHAR ucSS_Step;
2316 UCHAR ucSS_Id; 2839 UCHAR ucSS_Delay;
2317 UCHAR ucRecommendedRef_Div; 2840 UCHAR ucSS_Id;
2318 UCHAR ucSS_Range; /* it was reserved for V11 */ 2841 UCHAR ucRecommendedRef_Div;
2319} ATOM_SPREAD_SPECTRUM_ASSIGNMENT; 2842 UCHAR ucSS_Range; //it was reserved for V11
2843}ATOM_SPREAD_SPECTRUM_ASSIGNMENT;
2320 2844
2321#define ATOM_MAX_SS_ENTRY 16 2845#define ATOM_MAX_SS_ENTRY 16
2322#define ATOM_DP_SS_ID1 0x0f1 /* SS modulation freq=30k */ 2846#define ATOM_DP_SS_ID1 0x0f1 // SS ID for internal DP stream at 2.7Ghz. if ATOM_DP_SS_ID2 does not exist in SS_InfoTable, it is used for internal DP stream at 1.62Ghz as well.
2323#define ATOM_DP_SS_ID2 0x0f2 /* SS modulation freq=33k */ 2847#define ATOM_DP_SS_ID2 0x0f2 // SS ID for internal DP stream at 1.62Ghz, if it exists in SS_InfoTable.
2848#define ATOM_LVLINK_2700MHz_SS_ID 0x0f3 // SS ID for LV link translator chip at 2.7Ghz
2849#define ATOM_LVLINK_1620MHz_SS_ID 0x0f4 // SS ID for LV link translator chip at 1.62Ghz
2850
2324 2851
2325#define ATOM_SS_DOWN_SPREAD_MODE_MASK 0x00000000 2852#define ATOM_SS_DOWN_SPREAD_MODE_MASK 0x00000000
2326#define ATOM_SS_DOWN_SPREAD_MODE 0x00000000 2853#define ATOM_SS_DOWN_SPREAD_MODE 0x00000000
@@ -2329,29 +2856,30 @@ typedef struct _ATOM_SPREAD_SPECTRUM_ASSIGNMENT {
2329#define ATOM_INTERNAL_SS_MASK 0x00000000 2856#define ATOM_INTERNAL_SS_MASK 0x00000000
2330#define ATOM_EXTERNAL_SS_MASK 0x00000002 2857#define ATOM_EXTERNAL_SS_MASK 0x00000002
2331#define EXEC_SS_STEP_SIZE_SHIFT 2 2858#define EXEC_SS_STEP_SIZE_SHIFT 2
2332#define EXEC_SS_DELAY_SHIFT 4 2859#define EXEC_SS_DELAY_SHIFT 4
2333#define ACTIVEDATA_TO_BLON_DELAY_SHIFT 4 2860#define ACTIVEDATA_TO_BLON_DELAY_SHIFT 4
2334 2861
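Putting the ucSpreadSpectrumType bits and the masks above together, a dump routine could look like the following sketch (ours, not the driver's; treating usSpreadSpectrumPercentage as hundredths of a percent is an assumption, and byte-order handling is omitted):

#include <stdio.h>
#include "atombios.h"	/* assumed: ATOM_SPREAD_SPECTRUM_ASSIGNMENT, ATOM_EXTERNAL_SS_MASK */

/* Hypothetical dump of one spread-spectrum entry. Bit0 of
 * ucSpreadSpectrumType selects down vs centre spread, bit1 internal vs
 * external (ATOM_EXTERNAL_SS_MASK); interpreting usSpreadSpectrumPercentage
 * as hundredths of a percent is an assumption. */
static void dump_ss_entry(const ATOM_SPREAD_SPECTRUM_ASSIGNMENT *ss)
{
	printf("SS id 0x%02x: %u.%02u%%, %s spread, %s source\n",
	       (unsigned)ss->ucSS_Id,
	       (unsigned)(ss->usSpreadSpectrumPercentage / 100),
	       (unsigned)(ss->usSpreadSpectrumPercentage % 100),
	       (ss->ucSpreadSpectrumType & 0x1) ? "centre" : "down",
	       (ss->ucSpreadSpectrumType & ATOM_EXTERNAL_SS_MASK) ?
			"external" : "internal");
}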
2335typedef struct _ATOM_SPREAD_SPECTRUM_INFO { 2862typedef struct _ATOM_SPREAD_SPECTRUM_INFO
2336 ATOM_COMMON_TABLE_HEADER sHeader; 2863{
2337 ATOM_SPREAD_SPECTRUM_ASSIGNMENT asSS_Info[ATOM_MAX_SS_ENTRY]; 2864 ATOM_COMMON_TABLE_HEADER sHeader;
2338} ATOM_SPREAD_SPECTRUM_INFO; 2865 ATOM_SPREAD_SPECTRUM_ASSIGNMENT asSS_Info[ATOM_MAX_SS_ENTRY];
2339 2866}ATOM_SPREAD_SPECTRUM_INFO;
2340/****************************************************************************/ 2867
2341/* Structure used in AnalogTV_InfoTable (Top level) */ 2868/****************************************************************************/
2342/****************************************************************************/ 2869// Structure used in AnalogTV_InfoTable (Top level)
2343/* ucTVBootUpDefaultStd definiton: */ 2870/****************************************************************************/
2344 2871//ucTVBootUpDefaultStd definiton:
2345/* ATOM_TV_NTSC 1 */ 2872
2346/* ATOM_TV_NTSCJ 2 */ 2873//ATOM_TV_NTSC 1
2347/* ATOM_TV_PAL 3 */ 2874//ATOM_TV_NTSCJ 2
2348/* ATOM_TV_PALM 4 */ 2875//ATOM_TV_PAL 3
2349/* ATOM_TV_PALCN 5 */ 2876//ATOM_TV_PALM 4
2350/* ATOM_TV_PALN 6 */ 2877//ATOM_TV_PALCN 5
2351/* ATOM_TV_PAL60 7 */ 2878//ATOM_TV_PALN 6
2352/* ATOM_TV_SECAM 8 */ 2879//ATOM_TV_PAL60 7
2353 2880//ATOM_TV_SECAM 8
2354/* ucTVSuppportedStd definition: */ 2881
2882//ucTVSupportedStd definition:
2355#define NTSC_SUPPORT 0x1 2883#define NTSC_SUPPORT 0x1
2356#define NTSCJ_SUPPORT 0x2 2884#define NTSCJ_SUPPORT 0x2
2357 2885
@@ -2364,46 +2892,58 @@ typedef struct _ATOM_SPREAD_SPECTRUM_INFO {
2364 2892
2365#define MAX_SUPPORTED_TV_TIMING 2 2893#define MAX_SUPPORTED_TV_TIMING 2
2366 2894
2367typedef struct _ATOM_ANALOG_TV_INFO { 2895typedef struct _ATOM_ANALOG_TV_INFO
2368 ATOM_COMMON_TABLE_HEADER sHeader; 2896{
2369 UCHAR ucTV_SupportedStandard; 2897 ATOM_COMMON_TABLE_HEADER sHeader;
2370 UCHAR ucTV_BootUpDefaultStandard; 2898 UCHAR ucTV_SupportedStandard;
2371 UCHAR ucExt_TV_ASIC_ID; 2899 UCHAR ucTV_BootUpDefaultStandard;
2372 UCHAR ucExt_TV_ASIC_SlaveAddr; 2900 UCHAR ucExt_TV_ASIC_ID;
2373 /*ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_TV_TIMING]; */ 2901 UCHAR ucExt_TV_ASIC_SlaveAddr;
2374 ATOM_MODE_TIMING aModeTimings[MAX_SUPPORTED_TV_TIMING]; 2902 /*ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_TV_TIMING];*/
2375} ATOM_ANALOG_TV_INFO; 2903 ATOM_MODE_TIMING aModeTimings[MAX_SUPPORTED_TV_TIMING];
2904}ATOM_ANALOG_TV_INFO;
2376 2905
2377#define MAX_SUPPORTED_TV_TIMING_V1_2 3 2906#define MAX_SUPPORTED_TV_TIMING_V1_2 3
2378 2907
2379typedef struct _ATOM_ANALOG_TV_INFO_V1_2 { 2908typedef struct _ATOM_ANALOG_TV_INFO_V1_2
2380 ATOM_COMMON_TABLE_HEADER sHeader; 2909{
2381 UCHAR ucTV_SupportedStandard; 2910 ATOM_COMMON_TABLE_HEADER sHeader;
2382 UCHAR ucTV_BootUpDefaultStandard; 2911 UCHAR ucTV_SupportedStandard;
2383 UCHAR ucExt_TV_ASIC_ID; 2912 UCHAR ucTV_BootUpDefaultStandard;
2384 UCHAR ucExt_TV_ASIC_SlaveAddr; 2913 UCHAR ucExt_TV_ASIC_ID;
2385 ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_TV_TIMING]; 2914 UCHAR ucExt_TV_ASIC_SlaveAddr;
2386} ATOM_ANALOG_TV_INFO_V1_2; 2915 ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_TV_TIMING_V1_2];
2916}ATOM_ANALOG_TV_INFO_V1_2;
2917
2918typedef struct _ATOM_DPCD_INFO
2919{
2920 UCHAR ucRevisionNumber; //10h : Revision 1.0; 11h : Revision 1.1
2921 UCHAR ucMaxLinkRate; //06h : 1.62Gbps per lane; 0Ah = 2.7Gbps per lane
2922 UCHAR ucMaxLane; //Bits 4:0 = MAX_LANE_COUNT (1/2/4). Bit 7 = ENHANCED_FRAME_CAP
2923 UCHAR ucMaxDownSpread; //Bit0 = 0: No Down spread; Bit0 = 1: 0.5% (Subject to change according to DP spec)
2924}ATOM_DPCD_INFO;
2925
2926#define ATOM_DPCD_MAX_LANE_MASK 0x1F
2387 2927
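ucMaxLinkRate uses the DPCD convention of 0.27 Gbps-per-lane units, so 06h and 0Ah decode to 1.62 and 2.7 Gbps as the comments say. A small illustrative decoder (function name ours):

#include <stdio.h>
#include "atombios.h"	/* assumed: ATOM_DPCD_INFO, ATOM_DPCD_MAX_LANE_MASK */

/* Hypothetical decode of the cached DPCD summary: the link-rate code is in
 * DPCD units of 0.27 Gbps per lane (06h = 1.62 Gbps, 0Ah = 2.7 Gbps), and
 * bit 7 of ucMaxLane is ENHANCED_FRAME_CAP. */
static void dump_dpcd(const ATOM_DPCD_INFO *dpcd)
{
	unsigned int lanes = dpcd->ucMaxLane & ATOM_DPCD_MAX_LANE_MASK;
	unsigned int mbps_per_lane = dpcd->ucMaxLinkRate * 270;

	printf("DPCD rev %u.%u, %u lane(s), %u Mbps per lane, enhanced framing: %s\n",
	       (unsigned)(dpcd->ucRevisionNumber >> 4),
	       (unsigned)(dpcd->ucRevisionNumber & 0xF),
	       lanes, mbps_per_lane,
	       (dpcd->ucMaxLane & 0x80) ? "yes" : "no");
}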
2388/**************************************************************************/ 2928/**************************************************************************/
2389/* VRAM usage and their definitions */ 2929// VRAM usage and their definitions
2390 2930
2391/* One chunk of VRAM used by the BIOS is for HWICON surfaces, EDID data, */ 2931// One chunk of VRAM used by the BIOS is for HWICON surfaces, EDID data,
2392/* current mode timing and detailed (DTD) timing and/or STD timing data for EACH device. They can be broken down as below. */ 2932// current mode timing and detailed (DTD) timing and/or STD timing data for EACH device. They can be broken down as below.
2393/* All the addresses below are the offsets from the frame buffer start. They all MUST be Dword aligned! */ 2933// All the addresses below are the offsets from the frame buffer start. They all MUST be Dword aligned!
2394/* To driver: The physical address of this memory portion=mmFB_START(4K aligned)+ATOMBIOS_VRAM_USAGE_START_ADDR+ATOM_x_ADDR */ 2934// To driver: The physical address of this memory portion=mmFB_START(4K aligned)+ATOMBIOS_VRAM_USAGE_START_ADDR+ATOM_x_ADDR
2395/* To Bios: ATOMBIOS_VRAM_USAGE_START_ADDR+ATOM_x_ADDR->MM_INDEX */ 2935// To Bios: ATOMBIOS_VRAM_USAGE_START_ADDR+ATOM_x_ADDR->MM_INDEX
2396 2936
2397#ifndef VESA_MEMORY_IN_64K_BLOCK 2937#ifndef VESA_MEMORY_IN_64K_BLOCK
2398#define VESA_MEMORY_IN_64K_BLOCK 0x100 /* 256*64K=16Mb (Max. VESA memory is 16Mb!) */ 2938#define VESA_MEMORY_IN_64K_BLOCK 0x100 //256*64K=16Mb (Max. VESA memory is 16Mb!)
2399#endif 2939#endif
2400 2940
2401#define ATOM_EDID_RAW_DATASIZE 256 /* In Bytes */ 2941#define ATOM_EDID_RAW_DATASIZE 256 //In Bytes
2402#define ATOM_HWICON_SURFACE_SIZE 4096 /* In Bytes */ 2942#define ATOM_HWICON_SURFACE_SIZE 4096 //In Bytes
2403#define ATOM_HWICON_INFOTABLE_SIZE 32 2943#define ATOM_HWICON_INFOTABLE_SIZE 32
2404#define MAX_DTD_MODE_IN_VRAM 6 2944#define MAX_DTD_MODE_IN_VRAM 6
2405#define ATOM_DTD_MODE_SUPPORT_TBL_SIZE (MAX_DTD_MODE_IN_VRAM*28) /* 28= (SIZEOF ATOM_DTD_FORMAT) */ 2945#define ATOM_DTD_MODE_SUPPORT_TBL_SIZE (MAX_DTD_MODE_IN_VRAM*28) //28= (SIZEOF ATOM_DTD_FORMAT)
2406#define ATOM_STD_MODE_SUPPORT_TBL_SIZE (32*8) /* 32 is a predefined number,8= (SIZEOF ATOM_STD_FORMAT) */ 2946#define ATOM_STD_MODE_SUPPORT_TBL_SIZE 32*8 //32 is a predefined number,8= (SIZEOF ATOM_STD_FORMAT)
2407#define DFP_ENCODER_TYPE_OFFSET 0x80 2947#define DFP_ENCODER_TYPE_OFFSET 0x80
2408#define DP_ENCODER_LANE_NUM_OFFSET 0x84 2948#define DP_ENCODER_LANE_NUM_OFFSET 0x84
2409#define DP_ENCODER_LINK_RATE_OFFSET 0x88 2949#define DP_ENCODER_LINK_RATE_OFFSET 0x88
@@ -2417,7 +2957,7 @@ typedef struct _ATOM_ANALOG_TV_INFO_V1_2 {
2417 2957
2418#define ATOM_LCD1_EDID_ADDR (ATOM_CRT1_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE) 2958#define ATOM_LCD1_EDID_ADDR (ATOM_CRT1_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE)
2419#define ATOM_LCD1_DTD_MODE_TBL_ADDR (ATOM_LCD1_EDID_ADDR + ATOM_EDID_RAW_DATASIZE) 2959#define ATOM_LCD1_DTD_MODE_TBL_ADDR (ATOM_LCD1_EDID_ADDR + ATOM_EDID_RAW_DATASIZE)
2420#define ATOM_LCD1_STD_MODE_TBL_ADDR (ATOM_LCD1_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE) 2960#define ATOM_LCD1_STD_MODE_TBL_ADDR (ATOM_LCD1_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE)
2421 2961
2422#define ATOM_TV1_DTD_MODE_TBL_ADDR (ATOM_LCD1_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE) 2962#define ATOM_TV1_DTD_MODE_TBL_ADDR (ATOM_LCD1_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE)
2423 2963
@@ -2431,13 +2971,13 @@ typedef struct _ATOM_ANALOG_TV_INFO_V1_2 {
2431 2971
2432#define ATOM_LCD2_EDID_ADDR (ATOM_CRT2_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE) 2972#define ATOM_LCD2_EDID_ADDR (ATOM_CRT2_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE)
2433#define ATOM_LCD2_DTD_MODE_TBL_ADDR (ATOM_LCD2_EDID_ADDR + ATOM_EDID_RAW_DATASIZE) 2973#define ATOM_LCD2_DTD_MODE_TBL_ADDR (ATOM_LCD2_EDID_ADDR + ATOM_EDID_RAW_DATASIZE)
2434#define ATOM_LCD2_STD_MODE_TBL_ADDR (ATOM_LCD2_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE) 2974#define ATOM_LCD2_STD_MODE_TBL_ADDR (ATOM_LCD2_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE)
2435 2975
2436#define ATOM_TV2_EDID_ADDR (ATOM_LCD2_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE) 2976#define ATOM_DFP6_EDID_ADDR (ATOM_LCD2_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE)
2437#define ATOM_TV2_DTD_MODE_TBL_ADDR (ATOM_TV2_EDID_ADDR + ATOM_EDID_RAW_DATASIZE) 2977#define ATOM_DFP6_DTD_MODE_TBL_ADDR (ATOM_DFP6_EDID_ADDR + ATOM_EDID_RAW_DATASIZE)
2438#define ATOM_TV2_STD_MODE_TBL_ADDR (ATOM_TV2_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE) 2978#define ATOM_DFP6_STD_MODE_TBL_ADDR (ATOM_DFP6_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE)
2439 2979
2440#define ATOM_DFP2_EDID_ADDR (ATOM_TV2_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE) 2980#define ATOM_DFP2_EDID_ADDR (ATOM_DFP6_STD_MODE_TBL_ADDR + ATOM_STD_MODE_SUPPORT_TBL_SIZE)
2441#define ATOM_DFP2_DTD_MODE_TBL_ADDR (ATOM_DFP2_EDID_ADDR + ATOM_EDID_RAW_DATASIZE) 2981#define ATOM_DFP2_DTD_MODE_TBL_ADDR (ATOM_DFP2_EDID_ADDR + ATOM_EDID_RAW_DATASIZE)
2442#define ATOM_DFP2_STD_MODE_TBL_ADDR (ATOM_DFP2_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE) 2982#define ATOM_DFP2_STD_MODE_TBL_ADDR (ATOM_DFP2_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE)
2443 2983
@@ -2457,533 +2997,850 @@ typedef struct _ATOM_ANALOG_TV_INFO_V1_2 {
2457#define ATOM_DFP5_DTD_MODE_TBL_ADDR (ATOM_DFP5_EDID_ADDR + ATOM_EDID_RAW_DATASIZE) 2997#define ATOM_DFP5_DTD_MODE_TBL_ADDR (ATOM_DFP5_EDID_ADDR + ATOM_EDID_RAW_DATASIZE)
2458#define ATOM_DFP5_STD_MODE_TBL_ADDR (ATOM_DFP5_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE) 2998#define ATOM_DFP5_STD_MODE_TBL_ADDR (ATOM_DFP5_DTD_MODE_TBL_ADDR + ATOM_DTD_MODE_SUPPORT_TBL_SIZE)
2459 2999
2460#define ATOM_DP_TRAINING_TBL_ADDR (ATOM_DFP5_STD_MODE_TBL_ADDR+ATOM_STD_MODE_SUPPORT_TBL_SIZE) 3000#define ATOM_DP_TRAINING_TBL_ADDR (ATOM_DFP5_STD_MODE_TBL_ADDR+ATOM_STD_MODE_SUPPORT_TBL_SIZE)
2461 3001
2462#define ATOM_STACK_STORAGE_START (ATOM_DP_TRAINING_TBL_ADDR + 256) 3002#define ATOM_STACK_STORAGE_START (ATOM_DP_TRAINING_TBL_ADDR+256)
2463#define ATOM_STACK_STORAGE_END (ATOM_STACK_STORAGE_START + 512) 3003#define ATOM_STACK_STORAGE_END ATOM_STACK_STORAGE_START+512
2464 3004
2465/* The size below is in Kb! */ 3005//The size below is in Kb!
2466#define ATOM_VRAM_RESERVE_SIZE ((((ATOM_STACK_STORAGE_END - ATOM_HWICON1_SURFACE_ADDR)>>10)+4)&0xFFFC) 3006#define ATOM_VRAM_RESERVE_SIZE ((((ATOM_STACK_STORAGE_END - ATOM_HWICON1_SURFACE_ADDR)>>10)+4)&0xFFFC)
2467 3007
2468#define ATOM_VRAM_OPERATION_FLAGS_MASK 0xC0000000L 3008#define ATOM_VRAM_OPERATION_FLAGS_MASK 0xC0000000L
2469#define ATOM_VRAM_OPERATION_FLAGS_SHIFT 30 3009#define ATOM_VRAM_OPERATION_FLAGS_SHIFT 30
2470#define ATOM_VRAM_BLOCK_NEEDS_NO_RESERVATION 0x1 3010#define ATOM_VRAM_BLOCK_NEEDS_NO_RESERVATION 0x1
2471#define ATOM_VRAM_BLOCK_NEEDS_RESERVATION 0x0 3011#define ATOM_VRAM_BLOCK_NEEDS_RESERVATION 0x0
2472 3012
2473/***********************************************************************************/ 3013/***********************************************************************************/
2474/* Structure used in VRAM_UsageByFirmwareTable */ 3014// Structure used in VRAM_UsageByFirmwareTable
2475/* Note1: This table is filled by SetBiosReservationStartInFB in CoreCommSubs.asm */ 3015// Note1: This table is filled by SetBiosReservationStartInFB in CoreCommSubs.asm
2476/* at running time. */ 3016// at running time.
2477/* note2: From RV770, the memory is more than 32bit addressable, so we will change */ 3017// note2: From RV770, the memory is more than 32bit addressable, so we will change
2478/* ucTableFormatRevision=1,ucTableContentRevision=4, the structure remains */ 3018// ucTableFormatRevision=1,ucTableContentRevision=4, the structure remains
2479/* exactly the same as 1.1 and 1.2 (1.3 is never in use), but ulStartAddrUsedByFirmware */ 3019// exactly the same as 1.1 and 1.2 (1.3 is never in use), but ulStartAddrUsedByFirmware
2480/* (as an offset from the start of the memory address) is KB aligned instead of byte aligned. */ 3020// (as an offset from the start of the memory address) is KB aligned instead of byte aligned.
2481/***********************************************************************************/ 3021/***********************************************************************************/
3022// Note3:
3023/* If we change usReserved to "usFBUsedbyDrvInKB", then to VBIOS this usFBUsedbyDrvInKB is a predefined, unchanged constant across VGA or non VGA adapter,
3024for CAIL, The size of FB access area is known, only thing missing is the Offset of FB Access area, so we can have:
3025
3026If (ulStartAddrUsedByFirmware!=0)
3027FBAccessAreaOffset= ulStartAddrUsedByFirmware - usFBUsedbyDrvInKB;
3028Reserved area has been claimed by VBIOS including this FB access area; CAIL doesn't need to reserve any extra area for this purpose
3029else //Non VGA case
3030 if (FB_Size<=2Gb)
3031 FBAccessAreaOffset= FB_Size - usFBUsedbyDrvInKB;
3032 else
3033 FBAccessAreaOffset= Aper_Size - usFBUsedbyDrvInKB
3034
3035CAIL needs to claim a reserved area defined by FBAccessAreaOffset and usFBUsedbyDrvInKB in the non-VGA case.*/
3036
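The Note3 rule above translates almost mechanically into C. A sketch, with everything expressed in KB and the 2 GB cut-over taken from the comment (the helper is ours, not VBIOS or CAIL code):

#include "atombios.h"	/* assumed: ULONG typedef */

/* Hypothetical translation of the Note3 rule above; all quantities in KB.
 * Returns the offset of the driver/CAIL FB access area from the start of
 * video memory. */
static ULONG fb_access_area_offset_kb(ULONG start_used_by_fw_kb,
				      ULONG fb_used_by_drv_kb,
				      ULONG fb_size_kb,
				      ULONG aper_size_kb)
{
	if (start_used_by_fw_kb != 0)
		/* VGA case: VBIOS has already reserved the area. */
		return start_used_by_fw_kb - fb_used_by_drv_kb;

	/* Non-VGA case: place the area at the top of FB, or of the
	 * aperture once FB exceeds 2 GB. */
	if (fb_size_kb <= 2UL * 1024 * 1024)
		return fb_size_kb - fb_used_by_drv_kb;

	return aper_size_kb - fb_used_by_drv_kb;
}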
2482#define ATOM_MAX_FIRMWARE_VRAM_USAGE_INFO 1 3037#define ATOM_MAX_FIRMWARE_VRAM_USAGE_INFO 1
2483 3038
2484typedef struct _ATOM_FIRMWARE_VRAM_RESERVE_INFO { 3039typedef struct _ATOM_FIRMWARE_VRAM_RESERVE_INFO
2485 ULONG ulStartAddrUsedByFirmware; 3040{
2486 USHORT usFirmwareUseInKb; 3041 ULONG ulStartAddrUsedByFirmware;
2487 USHORT usReserved; 3042 USHORT usFirmwareUseInKb;
2488} ATOM_FIRMWARE_VRAM_RESERVE_INFO; 3043 USHORT usReserved;
3044}ATOM_FIRMWARE_VRAM_RESERVE_INFO;
3045
3046typedef struct _ATOM_VRAM_USAGE_BY_FIRMWARE
3047{
3048 ATOM_COMMON_TABLE_HEADER sHeader;
3049 ATOM_FIRMWARE_VRAM_RESERVE_INFO asFirmwareVramReserveInfo[ATOM_MAX_FIRMWARE_VRAM_USAGE_INFO];
3050}ATOM_VRAM_USAGE_BY_FIRMWARE;
2489 3051
2490typedef struct _ATOM_VRAM_USAGE_BY_FIRMWARE { 3052// change version to 1.5, to allow the driver to allocate the vram area for command table access.
2491 ATOM_COMMON_TABLE_HEADER sHeader; 3053typedef struct _ATOM_FIRMWARE_VRAM_RESERVE_INFO_V1_5
2492 ATOM_FIRMWARE_VRAM_RESERVE_INFO 3054{
2493 asFirmwareVramReserveInfo[ATOM_MAX_FIRMWARE_VRAM_USAGE_INFO]; 3055 ULONG ulStartAddrUsedByFirmware;
2494} ATOM_VRAM_USAGE_BY_FIRMWARE; 3056 USHORT usFirmwareUseInKb;
3057 USHORT usFBUsedByDrvInKb;
3058}ATOM_FIRMWARE_VRAM_RESERVE_INFO_V1_5;
2495 3059
2496/****************************************************************************/ 3060typedef struct _ATOM_VRAM_USAGE_BY_FIRMWARE_V1_5
2497/* Structure used in GPIO_Pin_LUTTable */ 3061{
2498/****************************************************************************/ 3062 ATOM_COMMON_TABLE_HEADER sHeader;
2499typedef struct _ATOM_GPIO_PIN_ASSIGNMENT { 3063 ATOM_FIRMWARE_VRAM_RESERVE_INFO_V1_5 asFirmwareVramReserveInfo[ATOM_MAX_FIRMWARE_VRAM_USAGE_INFO];
2500 USHORT usGpioPin_AIndex; 3064}ATOM_VRAM_USAGE_BY_FIRMWARE_V1_5;
2501 UCHAR ucGpioPinBitShift; 3065
2502 UCHAR ucGPIO_ID; 3066/****************************************************************************/
2503} ATOM_GPIO_PIN_ASSIGNMENT; 3067// Structure used in GPIO_Pin_LUTTable
3068/****************************************************************************/
3069typedef struct _ATOM_GPIO_PIN_ASSIGNMENT
3070{
3071 USHORT usGpioPin_AIndex;
3072 UCHAR ucGpioPinBitShift;
3073 UCHAR ucGPIO_ID;
3074}ATOM_GPIO_PIN_ASSIGNMENT;
2504 3075
2505typedef struct _ATOM_GPIO_PIN_LUT { 3076typedef struct _ATOM_GPIO_PIN_LUT
2506 ATOM_COMMON_TABLE_HEADER sHeader; 3077{
2507 ATOM_GPIO_PIN_ASSIGNMENT asGPIO_Pin[1]; 3078 ATOM_COMMON_TABLE_HEADER sHeader;
2508} ATOM_GPIO_PIN_LUT; 3079 ATOM_GPIO_PIN_ASSIGNMENT asGPIO_Pin[1];
3080}ATOM_GPIO_PIN_LUT;
2509 3081
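Unlike the fixed-size tables earlier, GPIO_Pin_LUT declares a one-element array, so the real entry count has to come from the table size in the common header; deriving it that way is an assumption based on the usual ATOM table convention. A lookup sketch (helper name ours):

#include <stddef.h>
#include "atombios.h"	/* assumed: ATOM_GPIO_PIN_LUT, ATOM_COMMON_TABLE_HEADER */

/* Hypothetical lookup in GPIO_Pin_LUT. The entry count is not stored
 * explicitly; deriving it from usStructureSize in the common header is an
 * assumption matching the usual ATOM table convention. */
static const ATOM_GPIO_PIN_ASSIGNMENT *
find_gpio_pin(const ATOM_GPIO_PIN_LUT *lut, UCHAR gpio_id)
{
	size_t count = (lut->sHeader.usStructureSize -
			sizeof(ATOM_COMMON_TABLE_HEADER)) /
		       sizeof(ATOM_GPIO_PIN_ASSIGNMENT);
	size_t i;

	for (i = 0; i < count; i++)
		if (lut->asGPIO_Pin[i].ucGPIO_ID == gpio_id)
			return &lut->asGPIO_Pin[i];

	return NULL;
}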
2510/****************************************************************************/ 3082/****************************************************************************/
2511/* Structure used in ComponentVideoInfoTable */ 3083// Structure used in ComponentVideoInfoTable
2512/****************************************************************************/ 3084/****************************************************************************/
2513#define GPIO_PIN_ACTIVE_HIGH 0x1 3085#define GPIO_PIN_ACTIVE_HIGH 0x1
2514 3086
2515#define MAX_SUPPORTED_CV_STANDARDS 5 3087#define MAX_SUPPORTED_CV_STANDARDS 5
2516 3088
2517/* definitions for ATOM_D_INFO.ucSettings */ 3089// definitions for ATOM_D_INFO.ucSettings
2518#define ATOM_GPIO_SETTINGS_BITSHIFT_MASK 0x1F /* [4:0] */ 3090#define ATOM_GPIO_SETTINGS_BITSHIFT_MASK 0x1F // [4:0]
2519#define ATOM_GPIO_SETTINGS_RESERVED_MASK 0x60 /* [6:5] = must be zeroed out */ 3091#define ATOM_GPIO_SETTINGS_RESERVED_MASK 0x60 // [6:5] = must be zeroed out
2520#define ATOM_GPIO_SETTINGS_ACTIVE_MASK 0x80 /* [7] */ 3092#define ATOM_GPIO_SETTINGS_ACTIVE_MASK 0x80 // [7]
2521 3093
2522typedef struct _ATOM_GPIO_INFO { 3094typedef struct _ATOM_GPIO_INFO
2523 USHORT usAOffset; 3095{
2524 UCHAR ucSettings; 3096 USHORT usAOffset;
2525 UCHAR ucReserved; 3097 UCHAR ucSettings;
2526} ATOM_GPIO_INFO; 3098 UCHAR ucReserved;
3099}ATOM_GPIO_INFO;
2527 3100
2528/* definitions for ATOM_COMPONENT_VIDEO_INFO.ucMiscInfo (bit vector) */ 3101// definitions for ATOM_COMPONENT_VIDEO_INFO.ucMiscInfo (bit vector)
2529#define ATOM_CV_RESTRICT_FORMAT_SELECTION 0x2 3102#define ATOM_CV_RESTRICT_FORMAT_SELECTION 0x2
2530 3103
2531/* definitions for ATOM_COMPONENT_VIDEO_INFO.uc480i/uc480p/uc720p/uc1080i */ 3104// definitions for ATOM_COMPONENT_VIDEO_INFO.uc480i/uc480p/uc720p/uc1080i
2532#define ATOM_GPIO_DEFAULT_MODE_EN 0x80 /* [7]; */ 3105#define ATOM_GPIO_DEFAULT_MODE_EN 0x80 //[7];
2533#define ATOM_GPIO_SETTING_PERMODE_MASK 0x7F /* [6:0] */ 3106#define ATOM_GPIO_SETTING_PERMODE_MASK 0x7F //[6:0]
2534 3107
2535/* definitions for ATOM_COMPONENT_VIDEO_INFO.ucLetterBoxMode */ 3108// definitions for ATOM_COMPONENT_VIDEO_INFO.ucLetterBoxMode
2536/* Line 3 out put 5V. */ 3109//Line 3 out put 5V.
2537#define ATOM_CV_LINE3_ASPECTRATIO_16_9_GPIO_A 0x01 /* represent gpio 3 state for 16:9 */ 3110#define ATOM_CV_LINE3_ASPECTRATIO_16_9_GPIO_A 0x01 //represent gpio 3 state for 16:9
2538#define ATOM_CV_LINE3_ASPECTRATIO_16_9_GPIO_B 0x02 /* represent gpio 4 state for 16:9 */ 3111#define ATOM_CV_LINE3_ASPECTRATIO_16_9_GPIO_B 0x02 //represent gpio 4 state for 16:9
2539#define ATOM_CV_LINE3_ASPECTRATIO_16_9_GPIO_SHIFT 0x0 3112#define ATOM_CV_LINE3_ASPECTRATIO_16_9_GPIO_SHIFT 0x0
2540 3113
2541/* Line 3 out put 2.2V */ 3114//Line 3 out put 2.2V
2542#define ATOM_CV_LINE3_ASPECTRATIO_4_3_LETBOX_GPIO_A 0x04 /* represent gpio 3 state for 4:3 Letter box */ 3115#define ATOM_CV_LINE3_ASPECTRATIO_4_3_LETBOX_GPIO_A 0x04 //represent gpio 3 state for 4:3 Letter box
2543#define ATOM_CV_LINE3_ASPECTRATIO_4_3_LETBOX_GPIO_B 0x08 /* represent gpio 4 state for 4:3 Letter box */ 3116#define ATOM_CV_LINE3_ASPECTRATIO_4_3_LETBOX_GPIO_B 0x08 //represent gpio 4 state for 4:3 Letter box
2544#define ATOM_CV_LINE3_ASPECTRATIO_4_3_LETBOX_GPIO_SHIFT 0x2 3117#define ATOM_CV_LINE3_ASPECTRATIO_4_3_LETBOX_GPIO_SHIFT 0x2
2545 3118
2546/* Line 3 out put 0V */ 3119//Line 3 out put 0V
2547#define ATOM_CV_LINE3_ASPECTRATIO_4_3_GPIO_A 0x10 /* represent gpio 3 state for 4:3 */ 3120#define ATOM_CV_LINE3_ASPECTRATIO_4_3_GPIO_A 0x10 //represent gpio 3 state for 4:3
2548#define ATOM_CV_LINE3_ASPECTRATIO_4_3_GPIO_B 0x20 /* represent gpio 4 state for 4:3 */ 3121#define ATOM_CV_LINE3_ASPECTRATIO_4_3_GPIO_B 0x20 //represent gpio 4 state for 4:3
2549#define ATOM_CV_LINE3_ASPECTRATIO_4_3_GPIO_SHIFT 0x4 3122#define ATOM_CV_LINE3_ASPECTRATIO_4_3_GPIO_SHIFT 0x4
2550 3123
2551#define ATOM_CV_LINE3_ASPECTRATIO_MASK 0x3F /* bit [5:0] */ 3124#define ATOM_CV_LINE3_ASPECTRATIO_MASK 0x3F // bit [5:0]
2552 3125
2553#define ATOM_CV_LINE3_ASPECTRATIO_EXIST 0x80 /* bit 7 */ 3126#define ATOM_CV_LINE3_ASPECTRATIO_EXIST 0x80 //bit 7
2554 3127
2555/* GPIO bit index in gpio setting per mode value, also represents the block no. in gpio blocks. */ 3128//GPIO bit index in gpio setting per mode value, also represents the block no. in gpio blocks.
2556#define ATOM_GPIO_INDEX_LINE3_ASPECRATIO_GPIO_A 3 /* bit 3 in uc480i/uc480p/uc720p/uc1080i, which represents the default gpio bit setting for the mode. */ 3129#define ATOM_GPIO_INDEX_LINE3_ASPECRATIO_GPIO_A 3 //bit 3 in uc480i/uc480p/uc720p/uc1080i, which represents the default gpio bit setting for the mode.
2557#define ATOM_GPIO_INDEX_LINE3_ASPECRATIO_GPIO_B 4 /* bit 4 in uc480i/uc480p/uc720p/uc1080i, which represents the default gpio bit setting for the mode. */ 3130#define ATOM_GPIO_INDEX_LINE3_ASPECRATIO_GPIO_B 4 //bit 4 in uc480i/uc480p/uc720p/uc1080i, which represents the default gpio bit setting for the mode.
2558 3131
2559typedef struct _ATOM_COMPONENT_VIDEO_INFO { 3132
2560 ATOM_COMMON_TABLE_HEADER sHeader; 3133typedef struct _ATOM_COMPONENT_VIDEO_INFO
2561 USHORT usMask_PinRegisterIndex; 3134{
2562 USHORT usEN_PinRegisterIndex; 3135 ATOM_COMMON_TABLE_HEADER sHeader;
2563 USHORT usY_PinRegisterIndex; 3136 USHORT usMask_PinRegisterIndex;
2564 USHORT usA_PinRegisterIndex; 3137 USHORT usEN_PinRegisterIndex;
2565 UCHAR ucBitShift; 3138 USHORT usY_PinRegisterIndex;
2566 UCHAR ucPinActiveState; /* ucPinActiveState: Bit0=1 active high, =0 active low */ 3139 USHORT usA_PinRegisterIndex;
2567 ATOM_DTD_FORMAT sReserved; /* must be zeroed out */ 3140 UCHAR ucBitShift;
2568 UCHAR ucMiscInfo; 3141 UCHAR ucPinActiveState; //ucPinActiveState: Bit0=1 active high, =0 active low
2569 UCHAR uc480i; 3142 ATOM_DTD_FORMAT sReserved; // must be zeroed out
2570 UCHAR uc480p; 3143 UCHAR ucMiscInfo;
2571 UCHAR uc720p; 3144 UCHAR uc480i;
2572 UCHAR uc1080i; 3145 UCHAR uc480p;
2573 UCHAR ucLetterBoxMode; 3146 UCHAR uc720p;
2574 UCHAR ucReserved[3]; 3147 UCHAR uc1080i;
2575 UCHAR ucNumOfWbGpioBlocks; /* For Component video D-Connector support. If zero, NTSC type connector */ 3148 UCHAR ucLetterBoxMode;
2576 ATOM_GPIO_INFO aWbGpioStateBlock[MAX_SUPPORTED_CV_STANDARDS]; 3149 UCHAR ucReserved[3];
2577 ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_CV_STANDARDS]; 3150 UCHAR ucNumOfWbGpioBlocks; //For Component video D-Connector support. If zero, NTSC type connector
2578} ATOM_COMPONENT_VIDEO_INFO; 3151 ATOM_GPIO_INFO aWbGpioStateBlock[MAX_SUPPORTED_CV_STANDARDS];
2579 3152 ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_CV_STANDARDS];
2580/* ucTableFormatRevision=2 */ 3153}ATOM_COMPONENT_VIDEO_INFO;
2581/* ucTableContentRevision=1 */ 3154
2582typedef struct _ATOM_COMPONENT_VIDEO_INFO_V21 { 3155//ucTableFormatRevision=2
2583 ATOM_COMMON_TABLE_HEADER sHeader; 3156//ucTableContentRevision=1
2584 UCHAR ucMiscInfo; 3157typedef struct _ATOM_COMPONENT_VIDEO_INFO_V21
2585 UCHAR uc480i; 3158{
2586 UCHAR uc480p; 3159 ATOM_COMMON_TABLE_HEADER sHeader;
2587 UCHAR uc720p; 3160 UCHAR ucMiscInfo;
2588 UCHAR uc1080i; 3161 UCHAR uc480i;
2589 UCHAR ucReserved; 3162 UCHAR uc480p;
2590 UCHAR ucLetterBoxMode; 3163 UCHAR uc720p;
2591 UCHAR ucNumOfWbGpioBlocks; /* For Component video D-Connector support. If zero, NTSC type connector */ 3164 UCHAR uc1080i;
2592 ATOM_GPIO_INFO aWbGpioStateBlock[MAX_SUPPORTED_CV_STANDARDS]; 3165 UCHAR ucReserved;
2593 ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_CV_STANDARDS]; 3166 UCHAR ucLetterBoxMode;
2594} ATOM_COMPONENT_VIDEO_INFO_V21; 3167 UCHAR ucNumOfWbGpioBlocks; //For Component video D-Connector support. If zero, NTSC type connector
3168 ATOM_GPIO_INFO aWbGpioStateBlock[MAX_SUPPORTED_CV_STANDARDS];
3169 ATOM_DTD_FORMAT aModeTimings[MAX_SUPPORTED_CV_STANDARDS];
3170}ATOM_COMPONENT_VIDEO_INFO_V21;
2595 3171
2596#define ATOM_COMPONENT_VIDEO_INFO_LAST ATOM_COMPONENT_VIDEO_INFO_V21 3172#define ATOM_COMPONENT_VIDEO_INFO_LAST ATOM_COMPONENT_VIDEO_INFO_V21
2597 3173
2598/****************************************************************************/ 3174/****************************************************************************/
2599/* Structure used in object_InfoTable */ 3175// Structure used in object_InfoTable
2600/****************************************************************************/ 3176/****************************************************************************/
2601typedef struct _ATOM_OBJECT_HEADER { 3177typedef struct _ATOM_OBJECT_HEADER
2602 ATOM_COMMON_TABLE_HEADER sHeader; 3178{
2603 USHORT usDeviceSupport; 3179 ATOM_COMMON_TABLE_HEADER sHeader;
2604 USHORT usConnectorObjectTableOffset; 3180 USHORT usDeviceSupport;
2605 USHORT usRouterObjectTableOffset; 3181 USHORT usConnectorObjectTableOffset;
2606 USHORT usEncoderObjectTableOffset; 3182 USHORT usRouterObjectTableOffset;
2607 USHORT usProtectionObjectTableOffset; /* only available when Protection block is independent. */ 3183 USHORT usEncoderObjectTableOffset;
2608 USHORT usDisplayPathTableOffset; 3184 USHORT usProtectionObjectTableOffset; //only available when Protection block is independent.
2609} ATOM_OBJECT_HEADER; 3185 USHORT usDisplayPathTableOffset;
2610 3186}ATOM_OBJECT_HEADER;
2611typedef struct _ATOM_DISPLAY_OBJECT_PATH { 3187
2612 USHORT usDeviceTag; /* supported device */ 3188typedef struct _ATOM_OBJECT_HEADER_V3
2613 USHORT usSize; /* the size of ATOM_DISPLAY_OBJECT_PATH */ 3189{
2614 USHORT usConnObjectId; /* Connector Object ID */ 3190 ATOM_COMMON_TABLE_HEADER sHeader;
2615 USHORT usGPUObjectId; /* GPU ID */ 3191 USHORT usDeviceSupport;
2616 USHORT usGraphicObjIds[1]; /* 1st Encoder Obj source from GPU to last Graphic Obj destinate to connector. */ 3192 USHORT usConnectorObjectTableOffset;
2617} ATOM_DISPLAY_OBJECT_PATH; 3193 USHORT usRouterObjectTableOffset;
2618 3194 USHORT usEncoderObjectTableOffset;
2619typedef struct _ATOM_DISPLAY_OBJECT_PATH_TABLE { 3195 USHORT usProtectionObjectTableOffset; //only available when Protection block is independent.
2620 UCHAR ucNumOfDispPath; 3196 USHORT usDisplayPathTableOffset;
2621 UCHAR ucVersion; 3197 USHORT usMiscObjectTableOffset;
2622 UCHAR ucPadding[2]; 3198}ATOM_OBJECT_HEADER_V3;
2623 ATOM_DISPLAY_OBJECT_PATH asDispPath[1]; 3199
2624} ATOM_DISPLAY_OBJECT_PATH_TABLE; 3200typedef struct _ATOM_DISPLAY_OBJECT_PATH
2625 3201{
2626typedef struct _ATOM_OBJECT /* each object has this structure */ 3202 USHORT usDeviceTag; //supported device
2627{ 3203 USHORT usSize; //the size of ATOM_DISPLAY_OBJECT_PATH
2628 USHORT usObjectID; 3204 USHORT usConnObjectId; //Connector Object ID
2629 USHORT usSrcDstTableOffset; 3205 USHORT usGPUObjectId; //GPU ID
2630 USHORT usRecordOffset; /* this pointing to a bunch of records defined below */ 3206 USHORT usGraphicObjIds[1]; //1st Encoder Obj source from GPU to last Graphic Obj destinate to connector.
2631 USHORT usReserved; 3207}ATOM_DISPLAY_OBJECT_PATH;
2632} ATOM_OBJECT; 3208
2633 3209typedef struct _ATOM_DISPLAY_OBJECT_PATH_TABLE
2634typedef struct _ATOM_OBJECT_TABLE /* Above 4 object table offset pointing to a bunch of objects all have this structure */ 3210{
2635{ 3211 UCHAR ucNumOfDispPath;
2636 UCHAR ucNumberOfObjects; 3212 UCHAR ucVersion;
2637 UCHAR ucPadding[3]; 3213 UCHAR ucPadding[2];
2638 ATOM_OBJECT asObjects[1]; 3214 ATOM_DISPLAY_OBJECT_PATH asDispPath[1];
2639} ATOM_OBJECT_TABLE; 3215}ATOM_DISPLAY_OBJECT_PATH_TABLE;
2640 3216
2641typedef struct _ATOM_SRC_DST_TABLE_FOR_ONE_OBJECT /* usSrcDstTableOffset pointing to this structure */ 3217
2642{ 3218typedef struct _ATOM_OBJECT //each object has this structure
2643 UCHAR ucNumberOfSrc; 3219{
2644 USHORT usSrcObjectID[1]; 3220 USHORT usObjectID;
2645 UCHAR ucNumberOfDst; 3221 USHORT usSrcDstTableOffset;
2646 USHORT usDstObjectID[1]; 3222 USHORT usRecordOffset; //this pointing to a bunch of records defined below
2647} ATOM_SRC_DST_TABLE_FOR_ONE_OBJECT; 3223 USHORT usReserved;
2648 3224}ATOM_OBJECT;
2649/* Related definitions, all records are different but they have a common header */ 3225
2650typedef struct _ATOM_COMMON_RECORD_HEADER { 3226typedef struct _ATOM_OBJECT_TABLE //Above 4 object table offset pointing to a bunch of objects all have this structure
2651 UCHAR ucRecordType; /* An enum to indicate the record type */ 3227{
2652 UCHAR ucRecordSize; /* The size of the whole record in byte */ 3228 UCHAR ucNumberOfObjects;
2653} ATOM_COMMON_RECORD_HEADER; 3229 UCHAR ucPadding[3];
2654 3230 ATOM_OBJECT asObjects[1];
2655#define ATOM_I2C_RECORD_TYPE 1 3231}ATOM_OBJECT_TABLE;
3232
3233typedef struct _ATOM_SRC_DST_TABLE_FOR_ONE_OBJECT //usSrcDstTableOffset pointing to this structure
3234{
3235 UCHAR ucNumberOfSrc;
3236 USHORT usSrcObjectID[1];
3237 UCHAR ucNumberOfDst;
3238 USHORT usDstObjectID[1];
3239}ATOM_SRC_DST_TABLE_FOR_ONE_OBJECT;
3240
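The four object-table offsets in ATOM_OBJECT_HEADER are byte offsets from the start of the Object_Info table, which begins with that header, so each sub-table is reached by pointer arithmetic and then scanned by ucNumberOfObjects. A sketch for the connector table (helper name ours; byte-order handling omitted):

#include <stddef.h>
#include "atombios.h"	/* assumed: ATOM_OBJECT_HEADER, ATOM_OBJECT_TABLE */

/* Hypothetical lookup of one object in the connector object table. The
 * usXxxObjectTableOffset fields are byte offsets from the start of the
 * Object_Info table, which begins with this header. */
static const ATOM_OBJECT *
find_connector_object(const ATOM_OBJECT_HEADER *hdr, USHORT object_id)
{
	const ATOM_OBJECT_TABLE *tbl =
		(const ATOM_OBJECT_TABLE *)((const UCHAR *)hdr +
					    hdr->usConnectorObjectTableOffset);
	int i;

	for (i = 0; i < tbl->ucNumberOfObjects; i++)
		if (tbl->asObjects[i].usObjectID == object_id)
			return &tbl->asObjects[i];

	return NULL;
}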
3241
3242//Two definitions below are for OPM on MXM module designs
3243
3244#define EXT_HPDPIN_LUTINDEX_0 0
3245#define EXT_HPDPIN_LUTINDEX_1 1
3246#define EXT_HPDPIN_LUTINDEX_2 2
3247#define EXT_HPDPIN_LUTINDEX_3 3
3248#define EXT_HPDPIN_LUTINDEX_4 4
3249#define EXT_HPDPIN_LUTINDEX_5 5
3250#define EXT_HPDPIN_LUTINDEX_6 6
3251#define EXT_HPDPIN_LUTINDEX_7 7
3252#define MAX_NUMBER_OF_EXT_HPDPIN_LUT_ENTRIES (EXT_HPDPIN_LUTINDEX_7+1)
3253
3254#define EXT_AUXDDC_LUTINDEX_0 0
3255#define EXT_AUXDDC_LUTINDEX_1 1
3256#define EXT_AUXDDC_LUTINDEX_2 2
3257#define EXT_AUXDDC_LUTINDEX_3 3
3258#define EXT_AUXDDC_LUTINDEX_4 4
3259#define EXT_AUXDDC_LUTINDEX_5 5
3260#define EXT_AUXDDC_LUTINDEX_6 6
3261#define EXT_AUXDDC_LUTINDEX_7 7
3262#define MAX_NUMBER_OF_EXT_AUXDDC_LUT_ENTRIES (EXT_AUXDDC_LUTINDEX_7+1)
3263
3264typedef struct _EXT_DISPLAY_PATH
3265{
3266 USHORT usDeviceTag; //A bit vector to show what devices are supported
3267 USHORT usDeviceACPIEnum; //16bit device ACPI id.
3268 USHORT usDeviceConnector; //A physical connector for displays to plug in, using object connector definitions
3269 UCHAR ucExtAUXDDCLutIndex; //An index into external AUX/DDC channel LUT
3270 UCHAR ucExtHPDPINLutIndex; //An index into external HPD pin LUT
3271 USHORT usExtEncoderObjId; //external encoder object id
3272 USHORT usReserved[3];
3273}EXT_DISPLAY_PATH;
3274
3275#define NUMBER_OF_UCHAR_FOR_GUID 16
3276#define MAX_NUMBER_OF_EXT_DISPLAY_PATH 7
3277
3278typedef struct _ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO
3279{
3280 ATOM_COMMON_TABLE_HEADER sHeader;
3281 UCHAR ucGuid [NUMBER_OF_UCHAR_FOR_GUID]; // a GUID is a 16 byte long string
3282 EXT_DISPLAY_PATH sPath[MAX_NUMBER_OF_EXT_DISPLAY_PATH]; // total of fixed 7 entries.
3283 UCHAR ucChecksum; // a simple Checksum of the sum of whole structure equal to 0x0.
3284 UCHAR Reserved [7]; // for potential expansion
3285}ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO;
3286
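Per the ucChecksum comment, the byte-wise sum of the whole structure, checksum byte included, should come out to 0x00 modulo 256. A hedged verification sketch (ours):

#include <stddef.h>
#include "atombios.h"	/* assumed: ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO, UCHAR */

/* Hypothetical checksum check: per the comment above, the byte-wise sum of
 * the whole structure (ucChecksum included) must be 0x00 modulo 256. */
static int ext_display_info_checksum_ok(const ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO *info)
{
	const UCHAR *p = (const UCHAR *)info;
	UCHAR sum = 0;
	size_t i;

	for (i = 0; i < sizeof(*info); i++)
		sum = (UCHAR)(sum + p[i]);

	return sum == 0;
}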
3287//Related definitions, all records are different but they have a common header
3288typedef struct _ATOM_COMMON_RECORD_HEADER
3289{
3290 UCHAR ucRecordType; //An enum to indicate the record type
3291 UCHAR ucRecordSize; //The size of the whole record in byte
3292}ATOM_COMMON_RECORD_HEADER;
3293
3294
3295#define ATOM_I2C_RECORD_TYPE 1
2656#define ATOM_HPD_INT_RECORD_TYPE 2 3296#define ATOM_HPD_INT_RECORD_TYPE 2
2657#define ATOM_OUTPUT_PROTECTION_RECORD_TYPE 3 3297#define ATOM_OUTPUT_PROTECTION_RECORD_TYPE 3
2658#define ATOM_CONNECTOR_DEVICE_TAG_RECORD_TYPE 4 3298#define ATOM_CONNECTOR_DEVICE_TAG_RECORD_TYPE 4
2659#define ATOM_CONNECTOR_DVI_EXT_INPUT_RECORD_TYPE 5 /* Obsolete, switch to use GPIO_CNTL_RECORD_TYPE */ 3299#define ATOM_CONNECTOR_DVI_EXT_INPUT_RECORD_TYPE 5 //Obsolete, switch to use GPIO_CNTL_RECORD_TYPE
2660#define ATOM_ENCODER_FPGA_CONTROL_RECORD_TYPE 6 /* Obsolete, switch to use GPIO_CNTL_RECORD_TYPE */ 3300#define ATOM_ENCODER_FPGA_CONTROL_RECORD_TYPE 6 //Obsolete, switch to use GPIO_CNTL_RECORD_TYPE
2661#define ATOM_CONNECTOR_CVTV_SHARE_DIN_RECORD_TYPE 7 3301#define ATOM_CONNECTOR_CVTV_SHARE_DIN_RECORD_TYPE 7
2662#define ATOM_JTAG_RECORD_TYPE 8 /* Obsolete, switch to use GPIO_CNTL_RECORD_TYPE */ 3302#define ATOM_JTAG_RECORD_TYPE 8 //Obsolete, switch to use GPIO_CNTL_RECORD_TYPE
2663#define ATOM_OBJECT_GPIO_CNTL_RECORD_TYPE 9 3303#define ATOM_OBJECT_GPIO_CNTL_RECORD_TYPE 9
2664#define ATOM_ENCODER_DVO_CF_RECORD_TYPE 10 3304#define ATOM_ENCODER_DVO_CF_RECORD_TYPE 10
2665#define ATOM_CONNECTOR_CF_RECORD_TYPE 11 3305#define ATOM_CONNECTOR_CF_RECORD_TYPE 11
2666#define ATOM_CONNECTOR_HARDCODE_DTD_RECORD_TYPE 12 3306#define ATOM_CONNECTOR_HARDCODE_DTD_RECORD_TYPE 12
2667#define ATOM_CONNECTOR_PCIE_SUBCONNECTOR_RECORD_TYPE 13 3307#define ATOM_CONNECTOR_PCIE_SUBCONNECTOR_RECORD_TYPE 13
2668#define ATOM_ROUTER_DDC_PATH_SELECT_RECORD_TYPE 14 3308#define ATOM_ROUTER_DDC_PATH_SELECT_RECORD_TYPE 14
2669#define ATOM_ROUTER_DATA_CLOCK_PATH_SELECT_RECORD_TYPE 15 3309#define ATOM_ROUTER_DATA_CLOCK_PATH_SELECT_RECORD_TYPE 15
2670 3310#define ATOM_CONNECTOR_HPDPIN_LUT_RECORD_TYPE 16 //This is for the case when connectors are not known to object table
2671/* Must be updated when new record type is added,equal to that record definition! */ 3311#define ATOM_CONNECTOR_AUXDDC_LUT_RECORD_TYPE 17 //This is for the case when connectors are not known to object table
2672#define ATOM_MAX_OBJECT_RECORD_NUMBER ATOM_CONNECTOR_CF_RECORD_TYPE 3312#define ATOM_OBJECT_LINK_RECORD_TYPE 18 //Once this record is present under one object, it indicats the oobject is linked to another obj described by the record
2673 3313#define ATOM_CONNECTOR_REMOTE_CAP_RECORD_TYPE 19
2674typedef struct _ATOM_I2C_RECORD { 3314
2675 ATOM_COMMON_RECORD_HEADER sheader; 3315
2676 ATOM_I2C_ID_CONFIG sucI2cId; 3316//Must be updated when new record type is added,equal to that record definition!
2677 UCHAR ucI2CAddr; /* The slave address, it's 0 when the record is attached to connector for DDC */ 3317#define ATOM_MAX_OBJECT_RECORD_NUMBER ATOM_CONNECTOR_REMOTE_CAP_RECORD_TYPE
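// Illustrative sketch, not part of the original header: records attached to an object
// are walked with ucRecordType/ucRecordSize. The walk here stops on a zero-sized
// record or a type outside the known range; that stopping convention is an assumption
// of this sketch.
static const ATOM_COMMON_RECORD_HEADER *
next_object_record(const ATOM_COMMON_RECORD_HEADER *rec)
{
  if (rec->ucRecordType == 0 || rec->ucRecordType > ATOM_MAX_OBJECT_RECORD_NUMBER)
    return 0;                                    // not a known record: stop
  if (rec->ucRecordSize == 0)
    return 0;                                    // malformed record: stop
  return (const ATOM_COMMON_RECORD_HEADER *)((const UCHAR *)rec + rec->ucRecordSize);
}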

typedef struct _ATOM_I2C_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  ATOM_I2C_ID_CONFIG          sucI2cId;
  UCHAR                       ucI2CAddr;         //The slave address; it's 0 when the record is attached to a connector for DDC
}ATOM_I2C_RECORD;

typedef struct _ATOM_HPD_INT_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucHPDIntGPIOID;    //Corresponding block in the GPIO_PIN_INFO table gives the pin info
  UCHAR                       ucPlugged_PinState;
}ATOM_HPD_INT_RECORD;

typedef struct _ATOM_OUTPUT_PROTECTION_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucProtectionFlag;
  UCHAR                       ucReserved;
}ATOM_OUTPUT_PROTECTION_RECORD;

typedef struct _ATOM_CONNECTOR_DEVICE_TAG
{
  ULONG                       ulACPIDeviceEnum;  //Reserved for now
  USHORT                      usDeviceID;        //This Id is the same as "ATOM_DEVICE_XXX_SUPPORT"
  USHORT                      usPadding;
}ATOM_CONNECTOR_DEVICE_TAG;

typedef struct _ATOM_CONNECTOR_DEVICE_TAG_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucNumberOfDevice;
  UCHAR                       ucReserved;
  ATOM_CONNECTOR_DEVICE_TAG   asDeviceTag[1];    //This Id is the same as "ATOM_DEVICE_XXX_SUPPORT"; 1 is only for allocation
}ATOM_CONNECTOR_DEVICE_TAG_RECORD;
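// Illustrative sketch, not part of the original header: the record above is variable
// length -- ucNumberOfDevice ATOM_CONNECTOR_DEVICE_TAG entries follow the count byte,
// the [1] being for allocation only, and the total size should agree with
// sheader.ucRecordSize.
static USHORT device_tag_record_device_id(const ATOM_CONNECTOR_DEVICE_TAG_RECORD *rec,
                                           UCHAR index)
{
  if (index >= rec->ucNumberOfDevice)
    return 0;                                    // no such device in this record
  return rec->asDeviceTag[index].usDeviceID;     // an ATOM_DEVICE_XXX_SUPPORT value
}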

typedef struct _ATOM_CONNECTOR_DVI_EXT_INPUT_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucConfigGPIOID;
  UCHAR                       ucConfigGPIOState;  //Set to 1 when it's active high to enable external flow in
  UCHAR                       ucFlowinGPIPID;
  UCHAR                       ucExtInGPIPID;
}ATOM_CONNECTOR_DVI_EXT_INPUT_RECORD;

typedef struct _ATOM_ENCODER_FPGA_CONTROL_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucCTL1GPIO_ID;
  UCHAR                       ucCTL1GPIOState;    //Set to 1 when it's active high
  UCHAR                       ucCTL2GPIO_ID;
  UCHAR                       ucCTL2GPIOState;    //Set to 1 when it's active high
  UCHAR                       ucCTL3GPIO_ID;
  UCHAR                       ucCTL3GPIOState;    //Set to 1 when it's active high
  UCHAR                       ucCTLFPGA_IN_ID;
  UCHAR                       ucPadding[3];
}ATOM_ENCODER_FPGA_CONTROL_RECORD;

typedef struct _ATOM_CONNECTOR_CVTV_SHARE_DIN_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucGPIOID;           //Corresponding block in the GPIO_PIN_INFO table gives the pin info
  UCHAR                       ucTVActiveState;    //Indicates whether the pin reads 0 or 1 when TV is connected
}ATOM_CONNECTOR_CVTV_SHARE_DIN_RECORD;

typedef struct _ATOM_JTAG_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucTMSGPIO_ID;
  UCHAR                       ucTMSGPIOState;     //Set to 1 when it's active high
  UCHAR                       ucTCKGPIO_ID;
  UCHAR                       ucTCKGPIOState;     //Set to 1 when it's active high
  UCHAR                       ucTDOGPIO_ID;
  UCHAR                       ucTDOGPIOState;     //Set to 1 when it's active high
  UCHAR                       ucTDIGPIO_ID;
  UCHAR                       ucTDIGPIOState;     //Set to 1 when it's active high
  UCHAR                       ucPadding[2];
}ATOM_JTAG_RECORD;

//The following generic object gpio pin control record type will gradually replace JTAG_RECORD/FPGA_CONTROL_RECORD/DVI_EXT_INPUT_RECORD above
typedef struct _ATOM_GPIO_PIN_CONTROL_PAIR
{
  UCHAR                       ucGPIOID;           // GPIO_ID: find the corresponding ID in the GPIO_LUT table
  UCHAR                       ucGPIO_PinState;    // Pin state showing how to set up the pin
}ATOM_GPIO_PIN_CONTROL_PAIR;

typedef struct _ATOM_OBJECT_GPIO_CNTL_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucFlags;            // Future expandability
  UCHAR                       ucNumberOfPins;     // Number of GPIO pins used to control the object
  ATOM_GPIO_PIN_CONTROL_PAIR  asGpio[1];          // the real gpio pin pairs; the count is given by ucNumberOfPins
}ATOM_OBJECT_GPIO_CNTL_RECORD;

//Definitions for GPIO pin state
#define GPIO_PIN_TYPE_INPUT             0x00
#define GPIO_PIN_TYPE_OUTPUT            0x10
#define GPIO_PIN_TYPE_HW_CONTROL        0x20

//For GPIO_PIN_TYPE_OUTPUT the following is defined
#define GPIO_PIN_OUTPUT_STATE_MASK      0x01
#define GPIO_PIN_OUTPUT_STATE_SHIFT     0
#define GPIO_PIN_STATE_ACTIVE_LOW       0x0
#define GPIO_PIN_STATE_ACTIVE_HIGH      0x1
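// Illustrative sketch, not part of the original header: decoding ucGPIO_PinState from
// an ATOM_GPIO_PIN_CONTROL_PAIR. The pin type values above look like an upper-nibble
// encoding, so the 0xF0 mask used here is an assumption (the header defines no explicit
// type mask); for output pins, bit 0 carries the active level.
static int gpio_pair_is_active_high_output(const ATOM_GPIO_PIN_CONTROL_PAIR *pair)
{
  UCHAR state = pair->ucGPIO_PinState;
  if ((state & 0xF0) != GPIO_PIN_TYPE_OUTPUT)
    return 0;                                    // input or HW-controlled pin
  return ((state & GPIO_PIN_OUTPUT_STATE_MASK) >> GPIO_PIN_OUTPUT_STATE_SHIFT)
         == GPIO_PIN_STATE_ACTIVE_HIGH;
}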

// Indexes to GPIO array in GLSync record
#define ATOM_GPIO_INDEX_GLSYNC_REFCLK    0
#define ATOM_GPIO_INDEX_GLSYNC_HSYNC     1
#define ATOM_GPIO_INDEX_GLSYNC_VSYNC     2
#define ATOM_GPIO_INDEX_GLSYNC_SWAP_REQ  3
#define ATOM_GPIO_INDEX_GLSYNC_SWAP_GNT  4
#define ATOM_GPIO_INDEX_GLSYNC_INTERRUPT 5
#define ATOM_GPIO_INDEX_GLSYNC_V_RESET   6
#define ATOM_GPIO_INDEX_GLSYNC_MAX       7

typedef struct _ATOM_ENCODER_DVO_CF_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  ULONG                       ulStrengthControl;  // DVOA strength control for CF
  UCHAR                       ucPadding[2];
}ATOM_ENCODER_DVO_CF_RECORD;

// value for ATOM_CONNECTOR_CF_RECORD.ucConnectedDvoBundle
#define ATOM_CONNECTOR_CF_RECORD_CONNECTED_UPPER12BITBUNDLEA   1
#define ATOM_CONNECTOR_CF_RECORD_CONNECTED_LOWER12BITBUNDLEB   2

typedef struct _ATOM_CONNECTOR_CF_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  USHORT                      usMaxPixClk;
  UCHAR                       ucFlowCntlGpioId;
  UCHAR                       ucSwapCntlGpioId;
  UCHAR                       ucConnectedDvoBundle;
  UCHAR                       ucPadding;
}ATOM_CONNECTOR_CF_RECORD;

typedef struct _ATOM_CONNECTOR_HARDCODE_DTD_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  ATOM_DTD_FORMAT             asTiming;
}ATOM_CONNECTOR_HARDCODE_DTD_RECORD;

typedef struct _ATOM_CONNECTOR_PCIE_SUBCONNECTOR_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;             //ATOM_CONNECTOR_PCIE_SUBCONNECTOR_RECORD_TYPE
  UCHAR                       ucSubConnectorType;  //CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D|X_ID_DUAL_LINK_DVI_D|HDMI_TYPE_A
  UCHAR                       ucReserved;
}ATOM_CONNECTOR_PCIE_SUBCONNECTOR_RECORD;

typedef struct _ATOM_ROUTER_DDC_PATH_SELECT_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucMuxType;           //decides the number of ucMuxState: =0 no pin state, =1 single state with complement, >1 multiple states
  UCHAR                       ucMuxControlPin;
  UCHAR                       ucMuxState[2];       //for alignment purposes
}ATOM_ROUTER_DDC_PATH_SELECT_RECORD;

typedef struct _ATOM_ROUTER_DATA_CLOCK_PATH_SELECT_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucMuxType;
  UCHAR                       ucMuxControlPin;
  UCHAR                       ucMuxState[2];       //for alignment purposes
}ATOM_ROUTER_DATA_CLOCK_PATH_SELECT_RECORD;

// define ucMuxType
#define ATOM_ROUTER_MUX_PIN_STATE_MASK                0x0f
#define ATOM_ROUTER_MUX_PIN_SINGLE_STATE_COMPLEMENT   0x01

typedef struct _ATOM_CONNECTOR_HPDPIN_LUT_RECORD      //record for ATOM_CONNECTOR_HPDPIN_LUT_RECORD_TYPE
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  UCHAR                       ucHPDPINMap[MAX_NUMBER_OF_EXT_HPDPIN_LUT_ENTRIES];   //A fixed-size array which maps external pins to the internal GPIO_PIN_INFO table
}ATOM_CONNECTOR_HPDPIN_LUT_RECORD;

typedef struct _ATOM_CONNECTOR_AUXDDC_LUT_RECORD      //record for ATOM_CONNECTOR_AUXDDC_LUT_RECORD_TYPE
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  ATOM_I2C_ID_CONFIG          ucAUXDDCMap[MAX_NUMBER_OF_EXT_AUXDDC_LUT_ENTRIES];   //A fixed-size array which maps external pins to an internal DDC ID
}ATOM_CONNECTOR_AUXDDC_LUT_RECORD;

typedef struct _ATOM_OBJECT_LINK_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  USHORT                      usObjectID;          //could be a connector, encoder or other object in object.h
}ATOM_OBJECT_LINK_RECORD;

typedef struct _ATOM_CONNECTOR_REMOTE_CAP_RECORD
{
  ATOM_COMMON_RECORD_HEADER   sheader;
  USHORT                      usReserved;
}ATOM_CONNECTOR_REMOTE_CAP_RECORD;

/****************************************************************************/
// ASIC voltage data table
/****************************************************************************/
typedef struct _ATOM_VOLTAGE_INFO_HEADER
{
  USHORT   usVDDCBaseLevel;          //In units of 50mV
  USHORT   usReserved;               //For a possible extension table offset
  UCHAR    ucNumOfVoltageEntries;
  UCHAR    ucBytesPerVoltageEntry;
  UCHAR    ucVoltageStep;            //Indicates how many mV one step increments, in 0.5mV units
  UCHAR    ucDefaultVoltageEntry;
  UCHAR    ucVoltageControlI2cLine;
  UCHAR    ucVoltageControlAddress;
  UCHAR    ucVoltageControlOffset;
}ATOM_VOLTAGE_INFO_HEADER;

typedef struct _ATOM_VOLTAGE_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ATOM_VOLTAGE_INFO_HEADER viHeader;
  UCHAR    ucVoltageEntries[64];     //64 is for allocation; the actual number of bytes is ucNumOfVoltageEntries*ucBytesPerVoltageEntry
}ATOM_VOLTAGE_INFO;

typedef struct _ATOM_VOLTAGE_FORMULA
{
  USHORT   usVoltageBaseLevel;       // In units of 1mV
  USHORT   usVoltageStep;            // Indicates how many mV one step increments, in 1mV units
  UCHAR    ucNumOfVoltageEntries;    // Number of voltage entries, which indicates the max voltage
  UCHAR    ucFlag;                   // bit0=0: step is 1mV, =1: 0.5mV
  UCHAR    ucBaseVID;                // if there is no lookup table, VID = BaseVID + (Vol - BaseLevel)/VoltageStep
  UCHAR    ucReserved;
  UCHAR    ucVIDAdjustEntries[32];   // 32 is for allocation; the actual number of entries is ucNumOfVoltageEntries
}ATOM_VOLTAGE_FORMULA;
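// Illustrative sketch, not part of the original header: the VID computation spelled
// out in the ucBaseVID comment above, VID = BaseVID + (Vol - BaseLevel)/VoltageStep,
// honouring ucFlag bit0 (0 = 1mV steps, 1 = 0.5mV steps). voltage_in_mv is assumed to
// be at or above the base level.
static UCHAR voltage_formula_to_vid(const ATOM_VOLTAGE_FORMULA *f, USHORT voltage_in_mv)
{
  ULONG delta = (ULONG)(voltage_in_mv - f->usVoltageBaseLevel);  // delta in mV
  if (f->ucFlag & 1)
    delta *= 2;                                  // switch to 0.5mV units to match the step
  if (f->usVoltageStep == 0)
    return f->ucBaseVID;                         // avoid divide-by-zero on a malformed table
  return (UCHAR)(f->ucBaseVID + delta / f->usVoltageStep);
}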

typedef struct _VOLTAGE_LUT_ENTRY
{
  USHORT   usVoltageCode;            // The Voltage ID, either a GPIO or an I2C code
  USHORT   usVoltageValue;           // The corresponding voltage value, in mV
}VOLTAGE_LUT_ENTRY;

typedef struct _ATOM_VOLTAGE_FORMULA_V2
{
  UCHAR    ucNumOfVoltageEntries;            // Number of voltage entries, which indicates the max voltage
  UCHAR    ucReserved[3];
  VOLTAGE_LUT_ENTRY asVIDAdjustEntries[32];  // 32 is for allocation; the actual number of entries is in ucNumOfVoltageEntries
}ATOM_VOLTAGE_FORMULA_V2;

typedef struct _ATOM_VOLTAGE_CONTROL
{
  UCHAR    ucVoltageControlId;       //Indicates whether it is controlled by I2C, GPIO or a HW state machine
  UCHAR    ucVoltageControlI2cLine;
  UCHAR    ucVoltageControlAddress;
  UCHAR    ucVoltageControlOffset;
  USHORT   usGpioPin_AIndex;         //GPIO_PAD register index
  UCHAR    ucGpioPinBitShift[9];     //at most 8 pins supporting 255 VIDs, terminated with 0xff
  UCHAR    ucReserved;
}ATOM_VOLTAGE_CONTROL;

// Define ucVoltageControlId
#define VOLTAGE_CONTROLLED_BY_HW              0x00
#define VOLTAGE_CONTROLLED_BY_I2C_MASK        0x7F
#define VOLTAGE_CONTROLLED_BY_GPIO            0x80
#define VOLTAGE_CONTROL_ID_LM64               0x01  //I2C control, used for R5xx Core Voltage
#define VOLTAGE_CONTROL_ID_DAC                0x02  //I2C control, used for R5xx/R6xx MVDDC, MVDDQ or VDDCI
#define VOLTAGE_CONTROL_ID_VT116xM            0x03  //I2C control, used for R6xx Core Voltage
#define VOLTAGE_CONTROL_ID_DS4402             0x04
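// Illustrative sketch, not part of the original header: how a consumer might act on
// ucVoltageControlId and, for GPIO-controlled rails, turn a VID into a GPIO_PAD bit
// pattern using the 0xff-terminated ucGpioPinBitShift[] list above. Whether GPIO
// control is signalled by the 0x80 flag bit alone is an assumption of this sketch.
static ULONG voltage_vid_to_gpio_mask(const ATOM_VOLTAGE_CONTROL *ctl, UCHAR vid)
{
  ULONG mask = 0;
  int   bit;

  if (!(ctl->ucVoltageControlId & VOLTAGE_CONTROLLED_BY_GPIO))
    return 0;                                    // HW or I2C controlled: nothing to map

  for (bit = 0; bit < 8 && ctl->ucGpioPinBitShift[bit] != 0xff; bit++)
    if (vid & (1 << bit))
      mask |= 1UL << ctl->ucGpioPinBitShift[bit];

  return mask;
}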

typedef struct _ATOM_VOLTAGE_OBJECT
{
  UCHAR    ucVoltageType;            //Indicates the voltage source: VDDC, MVDDC, MVDDQ or MVDDCI
  UCHAR    ucSize;                   //Size of the object
  ATOM_VOLTAGE_CONTROL   asControl;  //describes how to control
  ATOM_VOLTAGE_FORMULA   asFormula;  //Indicates how to convert a real voltage to a VID
}ATOM_VOLTAGE_OBJECT;

typedef struct _ATOM_VOLTAGE_OBJECT_V2
{
  UCHAR    ucVoltageType;            //Indicates the voltage source: VDDC, MVDDC, MVDDQ or MVDDCI
  UCHAR    ucSize;                   //Size of the object
  ATOM_VOLTAGE_CONTROL    asControl; //describes how to control
  ATOM_VOLTAGE_FORMULA_V2 asFormula; //Indicates how to convert a real voltage to a VID
}ATOM_VOLTAGE_OBJECT_V2;

typedef struct _ATOM_VOLTAGE_OBJECT_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ATOM_VOLTAGE_OBJECT      asVoltageObj[3];   //Info for voltage control
}ATOM_VOLTAGE_OBJECT_INFO;

typedef struct _ATOM_VOLTAGE_OBJECT_INFO_V2
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ATOM_VOLTAGE_OBJECT_V2   asVoltageObj[3];   //Info for voltage control
}ATOM_VOLTAGE_OBJECT_INFO_V2;

typedef struct _ATOM_LEAKID_VOLTAGE
{
  UCHAR    ucLeakageId;
  UCHAR    ucReserved;
  USHORT   usVoltage;
}ATOM_LEAKID_VOLTAGE;

typedef struct _ATOM_ASIC_PROFILE_VOLTAGE
{
  UCHAR    ucProfileId;
  UCHAR    ucReserved;
  USHORT   usSize;
  USHORT   usEfuseSpareStartAddr;
  USHORT   usFuseIndex[8];                    //from LSB to MSB, max 8 bits each; ends with 0xffff if there are fewer than 8 efuse ids
  ATOM_LEAKID_VOLTAGE asLeakVol[2];           //Leakage id and related voltage
}ATOM_ASIC_PROFILE_VOLTAGE;

//ucProfileId
#define ATOM_ASIC_PROFILE_ID_EFUSE_VOLTAGE                     1
#define ATOM_ASIC_PROFILE_ID_EFUSE_PERFORMANCE_VOLTAGE         1
#define ATOM_ASIC_PROFILE_ID_EFUSE_THERMAL_VOLTAGE             2

typedef struct _ATOM_ASIC_PROFILING_INFO
{
  ATOM_COMMON_TABLE_HEADER  asHeader;
  ATOM_ASIC_PROFILE_VOLTAGE asVoltage;
}ATOM_ASIC_PROFILING_INFO;

typedef struct _ATOM_POWER_SOURCE_OBJECT
{
  UCHAR    ucPwrSrcId;               // Power source
  UCHAR    ucPwrSensorType;          // GPIO, I2C or none
  UCHAR    ucPwrSensId;              // if GPIO detect, it is the GPIO id; if I2C detect, it is the I2C id
  UCHAR    ucPwrSensSlaveAddr;       // Slave address if I2C detect
  UCHAR    ucPwrSensRegIndex;        // I2C register index if I2C detect
  UCHAR    ucPwrSensRegBitMask;      // which bit to check if I2C detect
  UCHAR    ucPwrSensActiveState;     // high active or low active
  UCHAR    ucReserve[3];             // reserved
  USHORT   usSensPwr;                // in units of watts
}ATOM_POWER_SOURCE_OBJECT;

typedef struct _ATOM_POWER_SOURCE_INFO
{
  ATOM_COMMON_TABLE_HEADER asHeader;
  UCHAR                    asPwrbehave[16];
  ATOM_POWER_SOURCE_OBJECT asPwrObj[1];
}ATOM_POWER_SOURCE_INFO;

//Define ucPwrSrcId
#define POWERSOURCE_PCIE_ID1                  0x00
#define POWERSOURCE_6PIN_CONNECTOR_ID1        0x01
#define POWERSOURCE_8PIN_CONNECTOR_ID1        0x02
#define POWERSOURCE_6PIN_CONNECTOR_ID2        0x04
#define POWERSOURCE_8PIN_CONNECTOR_ID2        0x08

//define ucPwrSensorId
#define POWER_SENSOR_ALWAYS                   0x00
#define POWER_SENSOR_GPIO                     0x01
#define POWER_SENSOR_I2C                      0x02

typedef struct _ATOM_INTEGRATED_SYSTEM_INFO_V6
{
  ATOM_COMMON_TABLE_HEADER   sHeader;
  ULONG  ulBootUpEngineClock;
  ULONG  ulDentistVCOFreq;
  ULONG  ulBootUpUMAClock;
  ULONG  ulReserved1[8];
  ULONG  ulBootUpReqDisplayVector;
  ULONG  ulOtherDisplayMisc;
  ULONG  ulGPUCapInfo;
  ULONG  ulReserved2[3];
  ULONG  ulSystemConfig;
  ULONG  ulCPUCapInfo;
  USHORT usMaxNBVoltage;
  USHORT usMinNBVoltage;
  USHORT usBootUpNBVoltage;
  USHORT usExtDispConnInfoOffset;
  UCHAR  ucHtcTmpLmt;
  UCHAR  ucTjOffset;
  UCHAR  ucMemoryType;
  UCHAR  ucUMAChannelNumber;
  ULONG  ulCSR_M3_ARB_CNTL_DEFAULT[10];
  ULONG  ulCSR_M3_ARB_CNTL_UVD[10];
  ULONG  ulCSR_M3_ARB_CNTL_FS3D[10];
  ULONG  ulReserved3[42];
  ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO sExtDispConnInfo;
}ATOM_INTEGRATED_SYSTEM_INFO_V6;

/**********************************************************************************************************************
// ATOM_INTEGRATED_SYSTEM_INFO_V6 Description
//ulBootUpEngineClock:              VBIOS bootup engine clock frequency, in 10kHz units.
//ulDentistVCOFreq:                 Dentist VCO clock, in 10kHz units.
//ulBootUpUMAClock:                 System memory boot-up clock frequency, in 10kHz units.
//ulReserved1[8]                    Reserved for now, must be 0x0.
//ulBootUpReqDisplayVector          VBIOS boot-up display IDs:
//                                  ATOM_DEVICE_CRT1_SUPPORT        0x0001
//                                  ATOM_DEVICE_CRT2_SUPPORT        0x0010
//                                  ATOM_DEVICE_DFP1_SUPPORT        0x0008
//                                  ATOM_DEVICE_DFP6_SUPPORT        0x0040
//                                  ATOM_DEVICE_DFP2_SUPPORT        0x0080
//                                  ATOM_DEVICE_DFP3_SUPPORT        0x0200
//                                  ATOM_DEVICE_DFP4_SUPPORT        0x0400
//                                  ATOM_DEVICE_DFP5_SUPPORT        0x0800
//                                  ATOM_DEVICE_LCD1_SUPPORT        0x0002
//ulOtherDisplayMisc                Other display-related flags, not defined yet.
//ulGPUCapInfo                      TBD
//ulReserved2[3]                    must be 0x0 for the reserved fields.
//ulSystemConfig                    TBD
//ulCPUCapInfo                      TBD
//usMaxNBVoltage                    High NB voltage, in units of mV, calculated using the current VDDNB (D24F2xDC) and the VDDNB offset fuse.
//usMinNBVoltage                    Low NB voltage, in units of mV, calculated using the current VDDNB (D24F2xDC) and the VDDNB offset fuse.
//usBootUpNBVoltage                 Boot-up NB voltage, in units of mV.
//ucHtcTmpLmt                       Bits [22:16] of the D24F3x64 Thermal Control (HTC) Register.
//ucTjOffset                        Bits [28:22] of the D24F3xE4 Thermtrip Status Register; may not be needed.
//ucMemoryType                      [3:0]=1:DDR1; =2:DDR2; =3:DDR3. [7:4] is reserved.
//ucUMAChannelNumber                Number of system memory channels.
//usExtDispConnInfoOffset           ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO offset relative to the beginning of this table.
//ulCSR_M3_ARB_CNTL_DEFAULT[10]     Array of values for the CSR M3 arbiter by default.
//ulCSR_M3_ARB_CNTL_UVD[10]         Array of values for the CSR M3 arbiter for UVD playback.
//ulCSR_M3_ARB_CNTL_FS3D[10]        Array of values for the CSR M3 arbiter for full-screen 3D applications.
**********************************************************************************************************************/
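// Illustrative sketch, not part of the original header: resolving the external display
// connection info from usExtDispConnInfoOffset, which the description above defines as
// an offset relative to the beginning of this table (the structure also embeds a copy
// in sExtDispConnInfo).
static const ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO *
get_ext_disp_conn_info(const ATOM_INTEGRATED_SYSTEM_INFO_V6 *info)
{
  if (info->usExtDispConnInfoOffset == 0)
    return 0;                                    // table does not provide one
  return (const ATOM_EXTERNAL_DISPLAY_CONNECTION_INFO *)
         ((const UCHAR *)info + info->usExtDispConnInfoOffset);
}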

/**************************************************************************/
// This portion is only used when an external thermal chip or an engine/memory clock SS chip is populated on a design
// Memory SS Info Table
// Define the Memory Clock SS chip ID
#define ICS91719  1
#define ICS91720  2

//Define one structure to inform SW of a "block of data" to write to the external SS chip via the I2C protocol
typedef struct _ATOM_I2C_DATA_RECORD
{
  UCHAR         ucNunberOfBytes;     //Indicates how many bytes SW needs to write to the external ASIC for one block, in addition to "Start" and "Stop"
  UCHAR         ucI2CData[1];        //I2C data in bytes, should usually be less than 16 bytes
}ATOM_I2C_DATA_RECORD;

//Define one structure to inform SW how many blocks of data to write to the external SS chip via the I2C protocol, in addition to other information
typedef struct _ATOM_I2C_DEVICE_SETUP_INFO
{
  ATOM_I2C_ID_CONFIG_ACCESS    sucI2cId;               //I2C line and HW/SW assisted cap.
  UCHAR                        ucSSChipID;             //SS chip being used
  UCHAR                        ucSSChipSlaveAddr;      //Slave address to set up this SS chip
  UCHAR                        ucNumOfI2CDataRecords;  //number of data blocks
  ATOM_I2C_DATA_RECORD         asI2CData[1];
}ATOM_I2C_DEVICE_SETUP_INFO;
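// Illustrative sketch, not part of the original header: stepping to the next
// variable-length I2C data block. Each ATOM_I2C_DATA_RECORD occupies
// 1 + ucNunberOfBytes bytes (the [1] array is for allocation only), so the next
// record starts immediately after the current record's data bytes.
static const ATOM_I2C_DATA_RECORD *
next_i2c_data_record(const ATOM_I2C_DATA_RECORD *rec)
{
  return (const ATOM_I2C_DATA_RECORD *)
         ((const UCHAR *)rec + 1 + rec->ucNunberOfBytes);
}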

//==========================================================================================
typedef struct _ATOM_ASIC_MVDD_INFO
{
  ATOM_COMMON_TABLE_HEADER   sHeader;
  ATOM_I2C_DEVICE_SETUP_INFO asI2CSetup[1];
}ATOM_ASIC_MVDD_INFO;

//==========================================================================================
#define ATOM_MCLK_SS_INFO         ATOM_ASIC_MVDD_INFO

//==========================================================================================
/**************************************************************************/

typedef struct _ATOM_ASIC_SS_ASSIGNMENT
{
  ULONG    ulTargetClockRange;            //Clock out frequency (VCO), in units of 10kHz
  USHORT   usSpreadSpectrumPercentage;    //in units of 0.01%
  USHORT   usSpreadRateInKhz;             //in units of kHz, modulation freq
  UCHAR    ucClockIndication;             //Indicates which clock source needs SS
  UCHAR    ucSpreadSpectrumMode;          //Bit1=0 Down Spread, =1 Center Spread
  UCHAR    ucReserved[2];
}ATOM_ASIC_SS_ASSIGNMENT;

//Define ucClockIndication. SW uses the IDs below to search whether SS is required/enabled on a clock branch/signal type.
//SS is not required or enabled if a match is not found.
#define ASIC_INTERNAL_MEMORY_SS          1
#define ASIC_INTERNAL_ENGINE_SS          2
#define ASIC_INTERNAL_UVD_SS             3
#define ASIC_INTERNAL_SS_ON_TMDS         4
#define ASIC_INTERNAL_SS_ON_HDMI         5
#define ASIC_INTERNAL_SS_ON_LVDS         6
#define ASIC_INTERNAL_SS_ON_DP           7
#define ASIC_INTERNAL_SS_ON_DCPLL        8

typedef struct _ATOM_ASIC_SS_ASSIGNMENT_V2
{
  ULONG    ulTargetClockRange;            //For mem/engine/uvd: clock out frequency (VCO), in units of 10kHz
                                          //For TMDS/HDMI/LVDS: pixel clock; for DP: link clock (27000 or 16200)
  USHORT   usSpreadSpectrumPercentage;    //in units of 0.01%
  USHORT   usSpreadRateIn10Hz;            //in units of 10Hz, modulation freq
  UCHAR    ucClockIndication;             //Indicates which clock source needs SS
  UCHAR    ucSpreadSpectrumMode;          //Bit0=0 Down Spread, =1 Center Spread; bit1=0: internal SS, bit1=1: external SS
  UCHAR    ucReserved[2];
}ATOM_ASIC_SS_ASSIGNMENT_V2;

//ucSpreadSpectrumMode
//#define ATOM_SS_DOWN_SPREAD_MODE_MASK          0x00000000
//#define ATOM_SS_DOWN_SPREAD_MODE               0x00000000
//#define ATOM_SS_CENTRE_SPREAD_MODE_MASK        0x00000001
//#define ATOM_SS_CENTRE_SPREAD_MODE             0x00000001
//#define ATOM_INTERNAL_SS_MASK                  0x00000000
//#define ATOM_EXTERNAL_SS_MASK                  0x00000002

typedef struct _ATOM_ASIC_INTERNAL_SS_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  ATOM_ASIC_SS_ASSIGNMENT  asSpreadSpectrum[4];
}ATOM_ASIC_INTERNAL_SS_INFO;

typedef struct _ATOM_ASIC_INTERNAL_SS_INFO_V2
{
  ATOM_COMMON_TABLE_HEADER   sHeader;
  ATOM_ASIC_SS_ASSIGNMENT_V2 asSpreadSpectrum[1];    //this is pointer only; [1] is for allocation
}ATOM_ASIC_INTERNAL_SS_INFO_V2;
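// Illustrative sketch, not part of the original header: the search described by the
// ucClockIndication comment above -- scan the assignment entries for a matching
// clock/signal ID and treat "no match" as "spread spectrum not required/enabled".
// The entry count is assumed to be derived by the caller from the table size, since
// asSpreadSpectrum is declared with [1] for allocation only; a real consumer would
// typically also compare the target clock against ulTargetClockRange.
static const ATOM_ASIC_SS_ASSIGNMENT_V2 *
find_ss_assignment(const ATOM_ASIC_SS_ASSIGNMENT_V2 *entries, int count, UCHAR clock_id)
{
  int i;
  for (i = 0; i < count; i++)
    if (entries[i].ucClockIndication == clock_id)
      return &entries[i];                        // SS parameters for this clock source
  return 0;                                      // no match: SS not required or enabled
}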

typedef struct _ATOM_ASIC_SS_ASSIGNMENT_V3
{
  ULONG    ulTargetClockRange;            //For mem/engine/uvd: clock out frequency (VCO), in units of 10kHz
                                          //For TMDS/HDMI/LVDS: pixel clock; for DP: link clock (27000 or 16200)
  USHORT   usSpreadSpectrumPercentage;    //in units of 0.01%
  USHORT   usSpreadRateIn10Hz;            //in units of 10Hz, modulation freq
  UCHAR    ucClockIndication;             //Indicates which clock source needs SS
  UCHAR    ucSpreadSpectrumMode;          //Bit0=0 Down Spread, =1 Center Spread; bit1=0: internal SS, bit1=1: external SS
  UCHAR    ucReserved[2];
}ATOM_ASIC_SS_ASSIGNMENT_V3;

typedef struct _ATOM_ASIC_INTERNAL_SS_INFO_V3
{
  ATOM_COMMON_TABLE_HEADER   sHeader;
  ATOM_ASIC_SS_ASSIGNMENT_V3 asSpreadSpectrum[1];    //this is pointer only; [1] is for allocation
}ATOM_ASIC_INTERNAL_SS_INFO_V3;

//==============================Scratch Pad Definition Portion===============================
#define ATOM_DEVICE_CONNECT_INFO_DEF  0
#define ATOM_ROM_LOCATION_DEF         1
#define ATOM_TV_STANDARD_DEF          2
@@ -2995,7 +3852,8 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_I2C_CHANNEL_STATUS_DEF     8
 #define ATOM_I2C_CHANNEL_STATUS1_DEF    9
 
-/* BIOS_0_SCRATCH Definition */
+
+// BIOS_0_SCRATCH Definition
 #define ATOM_S0_CRT1_MONO               0x00000001L
 #define ATOM_S0_CRT1_COLOR              0x00000002L
 #define ATOM_S0_CRT1_MASK               (ATOM_S0_CRT1_MONO+ATOM_S0_CRT1_COLOR)
@@ -3008,6 +3866,7 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S0_CV_DIN_A                0x00000020L
 #define ATOM_S0_CV_MASK_A               (ATOM_S0_CV_A+ATOM_S0_CV_DIN_A)
 
+
 #define ATOM_S0_CRT2_MONO               0x00000100L
 #define ATOM_S0_CRT2_COLOR              0x00000200L
 #define ATOM_S0_CRT2_MASK               (ATOM_S0_CRT2_MONO+ATOM_S0_CRT2_COLOR)
@@ -3025,28 +3884,27 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S0_DFP2                    0x00020000L
 #define ATOM_S0_LCD1                    0x00040000L
 #define ATOM_S0_LCD2                    0x00080000L
-#define ATOM_S0_TV2                     0x00100000L
+#define ATOM_S0_DFP6                    0x00100000L
 #define ATOM_S0_DFP3                    0x00200000L
 #define ATOM_S0_DFP4                    0x00400000L
 #define ATOM_S0_DFP5                    0x00800000L
 
-#define ATOM_S0_DFP_MASK \
-        (ATOM_S0_DFP1 | ATOM_S0_DFP2 | ATOM_S0_DFP3 | ATOM_S0_DFP4 | ATOM_S0_DFP5)
+#define ATOM_S0_DFP_MASK ATOM_S0_DFP1 | ATOM_S0_DFP2 | ATOM_S0_DFP3 | ATOM_S0_DFP4 | ATOM_S0_DFP5 | ATOM_S0_DFP6
 
-#define ATOM_S0_FAD_REGISTER_BUG        0x02000000L /* If set, indicates we are running a PCIE asic with */
-                                                    /* the FAD/HDP reg access bug. Bit is read by DAL */
+#define ATOM_S0_FAD_REGISTER_BUG        0x02000000L // If set, indicates we are running a PCIE asic with
+                                                    // the FAD/HDP reg access bug. Bit is read by DAL, this is obsolete from RV5xx
 
 #define ATOM_S0_THERMAL_STATE_MASK      0x1C000000L
 #define ATOM_S0_THERMAL_STATE_SHIFT     26
 
 #define ATOM_S0_SYSTEM_POWER_STATE_MASK  0xE0000000L
 #define ATOM_S0_SYSTEM_POWER_STATE_SHIFT 29
 
 #define ATOM_S0_SYSTEM_POWER_STATE_VALUE_AC     1
 #define ATOM_S0_SYSTEM_POWER_STATE_VALUE_DC     2
 #define ATOM_S0_SYSTEM_POWER_STATE_VALUE_LITEAC 3
 
-/* Byte aligned defintion for BIOS usage */
+//Byte aligned defintion for BIOS usage
 #define ATOM_S0_CRT1_MONOb0             0x01
 #define ATOM_S0_CRT1_COLORb0            0x02
 #define ATOM_S0_CRT1_MASKb0             (ATOM_S0_CRT1_MONOb0+ATOM_S0_CRT1_COLORb0)
@@ -3076,8 +3934,11 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S0_DFP2b2                  0x02
 #define ATOM_S0_LCD1b2                  0x04
 #define ATOM_S0_LCD2b2                  0x08
-#define ATOM_S0_TV2b2                   0x10
+#define ATOM_S0_DFP6b2                  0x10
 #define ATOM_S0_DFP3b2                  0x20
+#define ATOM_S0_DFP4b2                  0x40
+#define ATOM_S0_DFP5b2                  0x80
+
 
 #define ATOM_S0_THERMAL_STATE_MASKb3    0x1C
 #define ATOM_S0_THERMAL_STATE_SHIFTb3   2
@@ -3085,43 +3946,20 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S0_SYSTEM_POWER_STATE_MASKb3 0xE0
 #define ATOM_S0_LCD1_SHIFT              18
 
-/* BIOS_1_SCRATCH Definition */
+// BIOS_1_SCRATCH Definition
 #define ATOM_S1_ROM_LOCATION_MASK       0x0000FFFFL
 #define ATOM_S1_PCI_BUS_DEV_MASK        0xFFFF0000L
 
-/* BIOS_2_SCRATCH Definition */
+// BIOS_2_SCRATCH Definition
 #define ATOM_S2_TV1_STANDARD_MASK       0x0000000FL
 #define ATOM_S2_CURRENT_BL_LEVEL_MASK   0x0000FF00L
 #define ATOM_S2_CURRENT_BL_LEVEL_SHIFT  8
 
-#define ATOM_S2_CRT1_DPMS_STATE         0x00010000L
-#define ATOM_S2_LCD1_DPMS_STATE         0x00020000L
-#define ATOM_S2_TV1_DPMS_STATE          0x00040000L
-#define ATOM_S2_DFP1_DPMS_STATE         0x00080000L
-#define ATOM_S2_CRT2_DPMS_STATE         0x00100000L
-#define ATOM_S2_LCD2_DPMS_STATE         0x00200000L
-#define ATOM_S2_TV2_DPMS_STATE          0x00400000L
-#define ATOM_S2_DFP2_DPMS_STATE         0x00800000L
-#define ATOM_S2_CV_DPMS_STATE           0x01000000L
-#define ATOM_S2_DFP3_DPMS_STATE         0x02000000L
-#define ATOM_S2_DFP4_DPMS_STATE         0x04000000L
-#define ATOM_S2_DFP5_DPMS_STATE         0x08000000L
-
-#define ATOM_S2_DFP_DPM_STATE \
-        (ATOM_S2_DFP1_DPMS_STATE | ATOM_S2_DFP2_DPMS_STATE | \
-         ATOM_S2_DFP3_DPMS_STATE | ATOM_S2_DFP4_DPMS_STATE | \
-         ATOM_S2_DFP5_DPMS_STATE)
-
-#define ATOM_S2_DEVICE_DPMS_STATE \
-        (ATOM_S2_CRT1_DPMS_STATE + ATOM_S2_LCD1_DPMS_STATE + \
-         ATOM_S2_TV1_DPMS_STATE + ATOM_S2_DFP_DPMS_STATE + \
-         ATOM_S2_CRT2_DPMS_STATE + ATOM_S2_LCD2_DPMS_STATE + \
-         ATOM_S2_TV2_DPMS_STATE + ATOM_S2_CV_DPMS_STATE)
-
 #define ATOM_S2_FORCEDLOWPWRMODE_STATE_MASK       0x0C000000L
 #define ATOM_S2_FORCEDLOWPWRMODE_STATE_MASK_SHIFT 26
 #define ATOM_S2_FORCEDLOWPWRMODE_STATE_CHANGE     0x10000000L
 
+#define ATOM_S2_DEVICE_DPMS_STATE       0x00010000L
 #define ATOM_S2_VRI_BRIGHT_ENABLE       0x20000000L
 
 #define ATOM_S2_DISPLAY_ROTATION_0_DEGREE     0x0
@@ -3131,21 +3969,11 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S2_DISPLAY_ROTATION_DEGREE_SHIFT 30
 #define ATOM_S2_DISPLAY_ROTATION_ANGLE_MASK   0xC0000000L
 
-/* Byte aligned defintion for BIOS usage */
+
+//Byte aligned defintion for BIOS usage
 #define ATOM_S2_TV1_STANDARD_MASKb0     0x0F
 #define ATOM_S2_CURRENT_BL_LEVEL_MASKb1 0xFF
-#define ATOM_S2_CRT1_DPMS_STATEb2       0x01
-#define ATOM_S2_LCD1_DPMS_STATEb2       0x02
-#define ATOM_S2_TV1_DPMS_STATEb2        0x04
-#define ATOM_S2_DFP1_DPMS_STATEb2       0x08
-#define ATOM_S2_CRT2_DPMS_STATEb2       0x10
-#define ATOM_S2_LCD2_DPMS_STATEb2       0x20
-#define ATOM_S2_TV2_DPMS_STATEb2        0x40
-#define ATOM_S2_DFP2_DPMS_STATEb2       0x80
-#define ATOM_S2_CV_DPMS_STATEb3         0x01
-#define ATOM_S2_DFP3_DPMS_STATEb3       0x02
-#define ATOM_S2_DFP4_DPMS_STATEb3       0x04
-#define ATOM_S2_DFP5_DPMS_STATEb3       0x08
+#define ATOM_S2_DEVICE_DPMS_STATEb2     0x01
 
 #define ATOM_S2_DEVICE_DPMS_MASKw1      0x3FF
 #define ATOM_S2_FORCEDLOWPWRMODE_STATE_MASKb3     0x0C
@@ -3153,21 +3981,22 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S2_VRI_BRIGHT_ENABLEb3     0x20
 #define ATOM_S2_ROTATION_STATE_MASKb3   0xC0
 
-/* BIOS_3_SCRATCH Definition */
+
+// BIOS_3_SCRATCH Definition
 #define ATOM_S3_CRT1_ACTIVE             0x00000001L
 #define ATOM_S3_LCD1_ACTIVE             0x00000002L
 #define ATOM_S3_TV1_ACTIVE              0x00000004L
 #define ATOM_S3_DFP1_ACTIVE             0x00000008L
 #define ATOM_S3_CRT2_ACTIVE             0x00000010L
 #define ATOM_S3_LCD2_ACTIVE             0x00000020L
-#define ATOM_S3_TV2_ACTIVE              0x00000040L
+#define ATOM_S3_DFP6_ACTIVE             0x00000040L
 #define ATOM_S3_DFP2_ACTIVE             0x00000080L
 #define ATOM_S3_CV_ACTIVE               0x00000100L
 #define ATOM_S3_DFP3_ACTIVE             0x00000200L
 #define ATOM_S3_DFP4_ACTIVE             0x00000400L
 #define ATOM_S3_DFP5_ACTIVE             0x00000800L
 
-#define ATOM_S3_DEVICE_ACTIVE_MASK      0x000003FFL
+#define ATOM_S3_DEVICE_ACTIVE_MASK      0x00000FFFL
 
 #define ATOM_S3_LCD_FULLEXPANSION_ACTIVE         0x00001000L
 #define ATOM_S3_LCD_EXPANSION_ASPEC_RATIO_ACTIVE 0x00002000L
@@ -3178,7 +4007,7 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S3_DFP1_CRTC_ACTIVE        0x00080000L
 #define ATOM_S3_CRT2_CRTC_ACTIVE        0x00100000L
 #define ATOM_S3_LCD2_CRTC_ACTIVE        0x00200000L
-#define ATOM_S3_TV2_CRTC_ACTIVE         0x00400000L
+#define ATOM_S3_DFP6_CRTC_ACTIVE        0x00400000L
 #define ATOM_S3_DFP2_CRTC_ACTIVE        0x00800000L
 #define ATOM_S3_CV_CRTC_ACTIVE          0x01000000L
 #define ATOM_S3_DFP3_CRTC_ACTIVE        0x02000000L
@@ -3187,17 +4016,18 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 
 #define ATOM_S3_DEVICE_CRTC_ACTIVE_MASK 0x0FFF0000L
 #define ATOM_S3_ASIC_GUI_ENGINE_HUNG    0x20000000L
+//Below two definitions are not supported in pplib, but in the old powerplay in DAL
 #define ATOM_S3_ALLOW_FAST_PWR_SWITCH   0x40000000L
 #define ATOM_S3_RQST_GPU_USE_MIN_PWR    0x80000000L
 
-/* Byte aligned defintion for BIOS usage */
+//Byte aligned defintion for BIOS usage
 #define ATOM_S3_CRT1_ACTIVEb0           0x01
 #define ATOM_S3_LCD1_ACTIVEb0           0x02
 #define ATOM_S3_TV1_ACTIVEb0            0x04
 #define ATOM_S3_DFP1_ACTIVEb0           0x08
 #define ATOM_S3_CRT2_ACTIVEb0           0x10
 #define ATOM_S3_LCD2_ACTIVEb0           0x20
-#define ATOM_S3_TV2_ACTIVEb0            0x40
+#define ATOM_S3_DFP6_ACTIVEb0           0x40
 #define ATOM_S3_DFP2_ACTIVEb0           0x80
 #define ATOM_S3_CV_ACTIVEb1             0x01
 #define ATOM_S3_DFP3_ACTIVEb1           0x02
@@ -3212,7 +4042,7 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 #define ATOM_S3_DFP1_CRTC_ACTIVEb2      0x08
 #define ATOM_S3_CRT2_CRTC_ACTIVEb2      0x10
 #define ATOM_S3_LCD2_CRTC_ACTIVEb2      0x20
-#define ATOM_S3_TV2_CRTC_ACTIVEb2       0x40
+#define ATOM_S3_DFP6_CRTC_ACTIVEb2      0x40
 #define ATOM_S3_DFP2_CRTC_ACTIVEb2      0x80
 #define ATOM_S3_CV_CRTC_ACTIVEb3        0x01
 #define ATOM_S3_DFP3_CRTC_ACTIVEb3      0x02
@@ -3221,35 +4051,31 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
 
 #define ATOM_S3_ACTIVE_CRTC2w1          0xFFF
 
-#define ATOM_S3_ASIC_GUI_ENGINE_HUNGb3  0x20
-#define ATOM_S3_ALLOW_FAST_PWR_SWITCHb3 0x40
-#define ATOM_S3_RQST_GPU_USE_MIN_PWRb3  0x80
-
-/* BIOS_4_SCRATCH Definition */
+// BIOS_4_SCRATCH Definition
 #define ATOM_S4_LCD1_PANEL_ID_MASK      0x000000FFL
 #define ATOM_S4_LCD1_REFRESH_MASK       0x0000FF00L
 #define ATOM_S4_LCD1_REFRESH_SHIFT      8
 
-/* Byte aligned defintion for BIOS usage */
+//Byte aligned defintion for BIOS usage
 #define ATOM_S4_LCD1_PANEL_ID_MASKb0    0x0FF
 #define ATOM_S4_LCD1_REFRESH_MASKb1     ATOM_S4_LCD1_PANEL_ID_MASKb0
 #define ATOM_S4_VRAM_INFO_MASKb2        ATOM_S4_LCD1_PANEL_ID_MASKb0
 
-/* BIOS_5_SCRATCH Definition, BIOS_5_SCRATCH is used by Firmware only !!!! */
+// BIOS_5_SCRATCH Definition, BIOS_5_SCRATCH is used by Firmware only !!!!
 #define ATOM_S5_DOS_REQ_CRT1b0          0x01
 #define ATOM_S5_DOS_REQ_LCD1b0          0x02
 #define ATOM_S5_DOS_REQ_TV1b0           0x04
 #define ATOM_S5_DOS_REQ_DFP1b0          0x08
 #define ATOM_S5_DOS_REQ_CRT2b0          0x10
 #define ATOM_S5_DOS_REQ_LCD2b0          0x20
-#define ATOM_S5_DOS_REQ_TV2b0           0x40
+#define ATOM_S5_DOS_REQ_DFP6b0          0x40
 #define ATOM_S5_DOS_REQ_DFP2b0          0x80
 #define ATOM_S5_DOS_REQ_CVb1            0x01
 #define ATOM_S5_DOS_REQ_DFP3b1          0x02
 #define ATOM_S5_DOS_REQ_DFP4b1          0x04
 #define ATOM_S5_DOS_REQ_DFP5b1          0x08
 
-#define ATOM_S5_DOS_REQ_DEVICEw0        0x03FF
+#define ATOM_S5_DOS_REQ_DEVICEw0        0x0FFF
 
 #define ATOM_S5_DOS_REQ_CRT1            0x0001
 #define ATOM_S5_DOS_REQ_LCD1            0x0002
@@ -3257,22 +4083,21 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3257#define ATOM_S5_DOS_REQ_DFP1 0x0008 4083#define ATOM_S5_DOS_REQ_DFP1 0x0008
3258#define ATOM_S5_DOS_REQ_CRT2 0x0010 4084#define ATOM_S5_DOS_REQ_CRT2 0x0010
3259#define ATOM_S5_DOS_REQ_LCD2 0x0020 4085#define ATOM_S5_DOS_REQ_LCD2 0x0020
3260#define ATOM_S5_DOS_REQ_TV2 0x0040 4086#define ATOM_S5_DOS_REQ_DFP6 0x0040
3261#define ATOM_S5_DOS_REQ_DFP2 0x0080 4087#define ATOM_S5_DOS_REQ_DFP2 0x0080
3262#define ATOM_S5_DOS_REQ_CV 0x0100 4088#define ATOM_S5_DOS_REQ_CV 0x0100
3263#define ATOM_S5_DOS_REQ_DFP3 0x0200 4089#define ATOM_S5_DOS_REQ_DFP3 0x0200
3264#define ATOM_S5_DOS_REQ_DFP4 0x0400 4090#define ATOM_S5_DOS_REQ_DFP4 0x0400
3265#define ATOM_S5_DOS_REQ_DFP5 0x0800 4091#define ATOM_S5_DOS_REQ_DFP5 0x0800
3266 4092
3267#define ATOM_S5_DOS_FORCE_CRT1b2 ATOM_S5_DOS_REQ_CRT1b0 4093#define ATOM_S5_DOS_FORCE_CRT1b2 ATOM_S5_DOS_REQ_CRT1b0
3268#define ATOM_S5_DOS_FORCE_TV1b2 ATOM_S5_DOS_REQ_TV1b0 4094#define ATOM_S5_DOS_FORCE_TV1b2 ATOM_S5_DOS_REQ_TV1b0
3269#define ATOM_S5_DOS_FORCE_CRT2b2 ATOM_S5_DOS_REQ_CRT2b0 4095#define ATOM_S5_DOS_FORCE_CRT2b2 ATOM_S5_DOS_REQ_CRT2b0
3270#define ATOM_S5_DOS_FORCE_CVb3 ATOM_S5_DOS_REQ_CVb1 4096#define ATOM_S5_DOS_FORCE_CVb3 ATOM_S5_DOS_REQ_CVb1
3271#define ATOM_S5_DOS_FORCE_DEVICEw1 \ 4097#define ATOM_S5_DOS_FORCE_DEVICEw1 (ATOM_S5_DOS_FORCE_CRT1b2+ATOM_S5_DOS_FORCE_TV1b2+ATOM_S5_DOS_FORCE_CRT2b2+\
3272 (ATOM_S5_DOS_FORCE_CRT1b2 + ATOM_S5_DOS_FORCE_TV1b2 + \ 4098 (ATOM_S5_DOS_FORCE_CVb3<<8))
3273 ATOM_S5_DOS_FORCE_CRT2b2 + (ATOM_S5_DOS_FORCE_CVb3 << 8))
3274 4099
3275/* BIOS_6_SCRATCH Definition */ 4100// BIOS_6_SCRATCH Definition
3276#define ATOM_S6_DEVICE_CHANGE 0x00000001L 4101#define ATOM_S6_DEVICE_CHANGE 0x00000001L
3277#define ATOM_S6_SCALER_CHANGE 0x00000002L 4102#define ATOM_S6_SCALER_CHANGE 0x00000002L
3278#define ATOM_S6_LID_CHANGE 0x00000004L 4103#define ATOM_S6_LID_CHANGE 0x00000004L
@@ -3285,11 +4110,11 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3285#define ATOM_S6_HW_I2C_BUSY_STATE 0x00000200L 4110#define ATOM_S6_HW_I2C_BUSY_STATE 0x00000200L
3286#define ATOM_S6_THERMAL_STATE_CHANGE 0x00000400L 4111#define ATOM_S6_THERMAL_STATE_CHANGE 0x00000400L
3287#define ATOM_S6_INTERRUPT_SET_BY_BIOS 0x00000800L 4112#define ATOM_S6_INTERRUPT_SET_BY_BIOS 0x00000800L
3288#define ATOM_S6_REQ_LCD_EXPANSION_FULL 0x00001000L /* Normal expansion Request bit for LCD */ 4113#define ATOM_S6_REQ_LCD_EXPANSION_FULL 0x00001000L //Normal expansion Request bit for LCD
3289#define ATOM_S6_REQ_LCD_EXPANSION_ASPEC_RATIO 0x00002000L /* Aspect ratio expansion Request bit for LCD */ 4114#define ATOM_S6_REQ_LCD_EXPANSION_ASPEC_RATIO 0x00002000L //Aspect ratio expansion Request bit for LCD
3290 4115
3291#define ATOM_S6_DISPLAY_STATE_CHANGE 0x00004000L /* This bit is recycled when ATOM_BIOS_INFO_BIOS_SCRATCH6_SCL2_REDEFINE is set,previously it's SCL2_H_expansion */ 4116#define ATOM_S6_DISPLAY_STATE_CHANGE 0x00004000L //This bit is recycled when ATOM_BIOS_INFO_BIOS_SCRATCH6_SCL2_REDEFINE is set,previously it's SCL2_H_expansion
3292#define ATOM_S6_I2C_STATE_CHANGE 0x00008000L /* This bit is recycled,when ATOM_BIOS_INFO_BIOS_SCRATCH6_SCL2_REDEFINE is set,previously it's SCL2_V_expansion */ 4117#define ATOM_S6_I2C_STATE_CHANGE 0x00008000L //This bit is recycled,when ATOM_BIOS_INFO_BIOS_SCRATCH6_SCL2_REDEFINE is set,previously it's SCL2_V_expansion
3293 4118
3294#define ATOM_S6_ACC_REQ_CRT1 0x00010000L 4119#define ATOM_S6_ACC_REQ_CRT1 0x00010000L
3295#define ATOM_S6_ACC_REQ_LCD1 0x00020000L 4120#define ATOM_S6_ACC_REQ_LCD1 0x00020000L
@@ -3297,7 +4122,7 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3297#define ATOM_S6_ACC_REQ_DFP1 0x00080000L 4122#define ATOM_S6_ACC_REQ_DFP1 0x00080000L
3298#define ATOM_S6_ACC_REQ_CRT2 0x00100000L 4123#define ATOM_S6_ACC_REQ_CRT2 0x00100000L
3299#define ATOM_S6_ACC_REQ_LCD2 0x00200000L 4124#define ATOM_S6_ACC_REQ_LCD2 0x00200000L
3300#define ATOM_S6_ACC_REQ_TV2 0x00400000L 4125#define ATOM_S6_ACC_REQ_DFP6 0x00400000L
3301#define ATOM_S6_ACC_REQ_DFP2 0x00800000L 4126#define ATOM_S6_ACC_REQ_DFP2 0x00800000L
3302#define ATOM_S6_ACC_REQ_CV 0x01000000L 4127#define ATOM_S6_ACC_REQ_CV 0x01000000L
3303#define ATOM_S6_ACC_REQ_DFP3 0x02000000L 4128#define ATOM_S6_ACC_REQ_DFP3 0x02000000L
@@ -3310,7 +4135,7 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3310#define ATOM_S6_VRI_BRIGHTNESS_CHANGE 0x40000000L 4135#define ATOM_S6_VRI_BRIGHTNESS_CHANGE 0x40000000L
3311#define ATOM_S6_CONFIG_DISPLAY_CHANGE_MASK 0x80000000L 4136#define ATOM_S6_CONFIG_DISPLAY_CHANGE_MASK 0x80000000L
3312 4137
3313/* Byte aligned defintion for BIOS usage */ 4138//Byte aligned defintion for BIOS usage
3314#define ATOM_S6_DEVICE_CHANGEb0 0x01 4139#define ATOM_S6_DEVICE_CHANGEb0 0x01
3315#define ATOM_S6_SCALER_CHANGEb0 0x02 4140#define ATOM_S6_SCALER_CHANGEb0 0x02
3316#define ATOM_S6_LID_CHANGEb0 0x04 4141#define ATOM_S6_LID_CHANGEb0 0x04
@@ -3320,11 +4145,11 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3320#define ATOM_S6_LID_STATEb0 0x40 4145#define ATOM_S6_LID_STATEb0 0x40
3321#define ATOM_S6_DOCK_STATEb0 0x80 4146#define ATOM_S6_DOCK_STATEb0 0x80
3322#define ATOM_S6_CRITICAL_STATEb1 0x01 4147#define ATOM_S6_CRITICAL_STATEb1 0x01
3323#define ATOM_S6_HW_I2C_BUSY_STATEb1 0x02 4148#define ATOM_S6_HW_I2C_BUSY_STATEb1 0x02
3324#define ATOM_S6_THERMAL_STATE_CHANGEb1 0x04 4149#define ATOM_S6_THERMAL_STATE_CHANGEb1 0x04
3325#define ATOM_S6_INTERRUPT_SET_BY_BIOSb1 0x08 4150#define ATOM_S6_INTERRUPT_SET_BY_BIOSb1 0x08
3326#define ATOM_S6_REQ_LCD_EXPANSION_FULLb1 0x10 4151#define ATOM_S6_REQ_LCD_EXPANSION_FULLb1 0x10
3327#define ATOM_S6_REQ_LCD_EXPANSION_ASPEC_RATIOb1 0x20 4152#define ATOM_S6_REQ_LCD_EXPANSION_ASPEC_RATIOb1 0x20
3328 4153
3329#define ATOM_S6_ACC_REQ_CRT1b2 0x01 4154#define ATOM_S6_ACC_REQ_CRT1b2 0x01
3330#define ATOM_S6_ACC_REQ_LCD1b2 0x02 4155#define ATOM_S6_ACC_REQ_LCD1b2 0x02
@@ -3332,12 +4157,12 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3332#define ATOM_S6_ACC_REQ_DFP1b2 0x08 4157#define ATOM_S6_ACC_REQ_DFP1b2 0x08
3333#define ATOM_S6_ACC_REQ_CRT2b2 0x10 4158#define ATOM_S6_ACC_REQ_CRT2b2 0x10
3334#define ATOM_S6_ACC_REQ_LCD2b2 0x20 4159#define ATOM_S6_ACC_REQ_LCD2b2 0x20
3335#define ATOM_S6_ACC_REQ_TV2b2 0x40 4160#define ATOM_S6_ACC_REQ_DFP6b2 0x40
3336#define ATOM_S6_ACC_REQ_DFP2b2 0x80 4161#define ATOM_S6_ACC_REQ_DFP2b2 0x80
3337#define ATOM_S6_ACC_REQ_CVb3 0x01 4162#define ATOM_S6_ACC_REQ_CVb3 0x01
3338#define ATOM_S6_ACC_REQ_DFP3b3 0x02 4163#define ATOM_S6_ACC_REQ_DFP3b3 0x02
3339#define ATOM_S6_ACC_REQ_DFP4b3 0x04 4164#define ATOM_S6_ACC_REQ_DFP4b3 0x04
3340#define ATOM_S6_ACC_REQ_DFP5b3 0x08 4165#define ATOM_S6_ACC_REQ_DFP5b3 0x08
3341 4166
3342#define ATOM_S6_ACC_REQ_DEVICEw1 ATOM_S5_DOS_REQ_DEVICEw0 4167#define ATOM_S6_ACC_REQ_DEVICEw1 ATOM_S5_DOS_REQ_DEVICEw0
3343#define ATOM_S6_SYSTEM_POWER_MODE_CHANGEb3 0x10 4168#define ATOM_S6_SYSTEM_POWER_MODE_CHANGEb3 0x10
@@ -3366,7 +4191,7 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3366#define ATOM_S6_VRI_BRIGHTNESS_CHANGE_SHIFT 30 4191#define ATOM_S6_VRI_BRIGHTNESS_CHANGE_SHIFT 30
3367#define ATOM_S6_CONFIG_DISPLAY_CHANGE_SHIFT 31 4192#define ATOM_S6_CONFIG_DISPLAY_CHANGE_SHIFT 31
3368 4193
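For readers following the driver side, the ATOM_S6_* values above are plain bit masks over the BIOS_6_SCRATCH register, so a value read from that register can be tested for pending events with ordinary bitwise AND. A minimal sketch, assuming the caller has already read the scratch register (the function is illustrative, not radeon driver code):

#include <stdint.h>

/* Minimal sketch (not driver code): test a BIOS_6_SCRATCH value, already
 * read by the caller, against some of the event bits defined above. */
static int example_atom_s6_has_event(uint32_t bios_6_scratch)
{
        const uint32_t watched = ATOM_S6_DEVICE_CHANGE |
                                 ATOM_S6_LID_CHANGE |
                                 ATOM_S6_THERMAL_STATE_CHANGE |
                                 ATOM_S6_DISPLAY_STATE_CHANGE;

        return (bios_6_scratch & watched) != 0;
}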
3369/* BIOS_7_SCRATCH Definition, BIOS_7_SCRATCH is used by Firmware only !!!! */ 4194// BIOS_7_SCRATCH Definition, BIOS_7_SCRATCH is used by Firmware only !!!!
3370#define ATOM_S7_DOS_MODE_TYPEb0 0x03 4195#define ATOM_S7_DOS_MODE_TYPEb0 0x03
3371#define ATOM_S7_DOS_MODE_VGAb0 0x00 4196#define ATOM_S7_DOS_MODE_VGAb0 0x00
3372#define ATOM_S7_DOS_MODE_VESAb0 0x01 4197#define ATOM_S7_DOS_MODE_VESAb0 0x01
@@ -3378,220 +4203,194 @@ typedef struct _ATOM_ASIC_INTERNAL_SS_INFO {
3378 4203
3379#define ATOM_S7_DOS_8BIT_DAC_EN_SHIFT 8 4204#define ATOM_S7_DOS_8BIT_DAC_EN_SHIFT 8
3380 4205
3381/* BIOS_8_SCRATCH Definition */ 4206// BIOS_8_SCRATCH Definition
3382#define ATOM_S8_I2C_CHANNEL_BUSY_MASK 0x00000FFFF 4207#define ATOM_S8_I2C_CHANNEL_BUSY_MASK 0x00000FFFF
3383#define ATOM_S8_I2C_HW_ENGINE_BUSY_MASK 0x0FFFF0000 4208#define ATOM_S8_I2C_HW_ENGINE_BUSY_MASK 0x0FFFF0000
3384 4209
3385#define ATOM_S8_I2C_CHANNEL_BUSY_SHIFT 0 4210#define ATOM_S8_I2C_CHANNEL_BUSY_SHIFT 0
3386#define ATOM_S8_I2C_ENGINE_BUSY_SHIFT 16 4211#define ATOM_S8_I2C_ENGINE_BUSY_SHIFT 16
3387 4212
3388/* BIOS_9_SCRATCH Definition */ 4213// BIOS_9_SCRATCH Definition
3389#ifndef ATOM_S9_I2C_CHANNEL_COMPLETED_MASK 4214#ifndef ATOM_S9_I2C_CHANNEL_COMPLETED_MASK
3390#define ATOM_S9_I2C_CHANNEL_COMPLETED_MASK 0x0000FFFF 4215#define ATOM_S9_I2C_CHANNEL_COMPLETED_MASK 0x0000FFFF
3391#endif 4216#endif
3392#ifndef ATOM_S9_I2C_CHANNEL_ABORTED_MASK 4217#ifndef ATOM_S9_I2C_CHANNEL_ABORTED_MASK
3393#define ATOM_S9_I2C_CHANNEL_ABORTED_MASK 0xFFFF0000 4218#define ATOM_S9_I2C_CHANNEL_ABORTED_MASK 0xFFFF0000
3394#endif 4219#endif
3395#ifndef ATOM_S9_I2C_CHANNEL_COMPLETED_SHIFT 4220#ifndef ATOM_S9_I2C_CHANNEL_COMPLETED_SHIFT
3396#define ATOM_S9_I2C_CHANNEL_COMPLETED_SHIFT 0 4221#define ATOM_S9_I2C_CHANNEL_COMPLETED_SHIFT 0
3397#endif 4222#endif
3398#ifndef ATOM_S9_I2C_CHANNEL_ABORTED_SHIFT 4223#ifndef ATOM_S9_I2C_CHANNEL_ABORTED_SHIFT
3399#define ATOM_S9_I2C_CHANNEL_ABORTED_SHIFT 16 4224#define ATOM_S9_I2C_CHANNEL_ABORTED_SHIFT 16
3400#endif 4225#endif
3401 4226
4227
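The S8/S9 words are split the same way: 16-bit channel fields extracted with the masks and shifts just defined. A small extraction sketch (illustrative helpers, not driver code):

#include <stdint.h>

/* Minimal sketch (not driver code): pull the per-channel fields out of
 * BIOS_8_SCRATCH / BIOS_9_SCRATCH words using the masks and shifts above. */
static uint16_t example_s8_busy_channels(uint32_t bios_8_scratch)
{
        return (uint16_t)((bios_8_scratch & ATOM_S8_I2C_CHANNEL_BUSY_MASK) >>
                          ATOM_S8_I2C_CHANNEL_BUSY_SHIFT);
}

static uint16_t example_s9_aborted_channels(uint32_t bios_9_scratch)
{
        return (uint16_t)((bios_9_scratch & ATOM_S9_I2C_CHANNEL_ABORTED_MASK) >>
                          ATOM_S9_I2C_CHANNEL_ABORTED_SHIFT);
}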
3402#define ATOM_FLAG_SET 0x20 4228#define ATOM_FLAG_SET 0x20
3403#define ATOM_FLAG_CLEAR 0 4229#define ATOM_FLAG_CLEAR 0
3404#define CLEAR_ATOM_S6_ACC_MODE \ 4230#define CLEAR_ATOM_S6_ACC_MODE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_ACC_MODE_SHIFT | ATOM_FLAG_CLEAR)
3405 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \ 4231#define SET_ATOM_S6_DEVICE_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_DEVICE_CHANGE_SHIFT | ATOM_FLAG_SET)
3406 ATOM_S6_ACC_MODE_SHIFT | ATOM_FLAG_CLEAR) 4232#define SET_ATOM_S6_VRI_BRIGHTNESS_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_VRI_BRIGHTNESS_CHANGE_SHIFT | ATOM_FLAG_SET)
3407#define SET_ATOM_S6_DEVICE_CHANGE \ 4233#define SET_ATOM_S6_SCALER_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_SCALER_CHANGE_SHIFT | ATOM_FLAG_SET)
3408 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \ 4234#define SET_ATOM_S6_LID_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_LID_CHANGE_SHIFT | ATOM_FLAG_SET)
3409 ATOM_S6_DEVICE_CHANGE_SHIFT | ATOM_FLAG_SET)
3410#define SET_ATOM_S6_VRI_BRIGHTNESS_CHANGE \
3411 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3412 ATOM_S6_VRI_BRIGHTNESS_CHANGE_SHIFT | ATOM_FLAG_SET)
3413#define SET_ATOM_S6_SCALER_CHANGE \
3414 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3415 ATOM_S6_SCALER_CHANGE_SHIFT | ATOM_FLAG_SET)
3416#define SET_ATOM_S6_LID_CHANGE \
3417 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3418 ATOM_S6_LID_CHANGE_SHIFT | ATOM_FLAG_SET)
3419
3420#define SET_ATOM_S6_LID_STATE \
3421 ((ATOM_ACC_CHANGE_INFO_DEF << 8) |\
3422 ATOM_S6_LID_STATE_SHIFT | ATOM_FLAG_SET)
3423#define CLEAR_ATOM_S6_LID_STATE \
3424 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3425 ATOM_S6_LID_STATE_SHIFT | ATOM_FLAG_CLEAR)
3426
3427#define SET_ATOM_S6_DOCK_CHANGE \
3428 ((ATOM_ACC_CHANGE_INFO_DEF << 8)| \
3429 ATOM_S6_DOCKING_CHANGE_SHIFT | ATOM_FLAG_SET)
3430#define SET_ATOM_S6_DOCK_STATE \
3431 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3432 ATOM_S6_DOCK_STATE_SHIFT | ATOM_FLAG_SET)
3433#define CLEAR_ATOM_S6_DOCK_STATE \
3434 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3435 ATOM_S6_DOCK_STATE_SHIFT | ATOM_FLAG_CLEAR)
3436
3437#define SET_ATOM_S6_THERMAL_STATE_CHANGE \
3438 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3439 ATOM_S6_THERMAL_STATE_CHANGE_SHIFT | ATOM_FLAG_SET)
3440#define SET_ATOM_S6_SYSTEM_POWER_MODE_CHANGE \
3441 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3442 ATOM_S6_SYSTEM_POWER_MODE_CHANGE_SHIFT | ATOM_FLAG_SET)
3443#define SET_ATOM_S6_INTERRUPT_SET_BY_BIOS \
3444 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3445 ATOM_S6_INTERRUPT_SET_BY_BIOS_SHIFT | ATOM_FLAG_SET)
3446
3447#define SET_ATOM_S6_CRITICAL_STATE \
3448 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3449 ATOM_S6_CRITICAL_STATE_SHIFT | ATOM_FLAG_SET)
3450#define CLEAR_ATOM_S6_CRITICAL_STATE \
3451 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3452 ATOM_S6_CRITICAL_STATE_SHIFT | ATOM_FLAG_CLEAR)
3453
3454#define SET_ATOM_S6_REQ_SCALER \
3455 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3456 ATOM_S6_REQ_SCALER_SHIFT | ATOM_FLAG_SET)
3457#define CLEAR_ATOM_S6_REQ_SCALER \
3458 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3459 ATOM_S6_REQ_SCALER_SHIFT | ATOM_FLAG_CLEAR )
3460
3461#define SET_ATOM_S6_REQ_SCALER_ARATIO \
3462 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3463 ATOM_S6_REQ_SCALER_ARATIO_SHIFT | ATOM_FLAG_SET )
3464#define CLEAR_ATOM_S6_REQ_SCALER_ARATIO \
3465 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3466 ATOM_S6_REQ_SCALER_ARATIO_SHIFT | ATOM_FLAG_CLEAR )
3467
3468#define SET_ATOM_S6_I2C_STATE_CHANGE \
3469 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3470 ATOM_S6_I2C_STATE_CHANGE_SHIFT | ATOM_FLAG_SET )
3471
3472#define SET_ATOM_S6_DISPLAY_STATE_CHANGE \
3473 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3474 ATOM_S6_DISPLAY_STATE_CHANGE_SHIFT | ATOM_FLAG_SET )
3475
3476#define SET_ATOM_S6_DEVICE_RECONFIG \
3477 ((ATOM_ACC_CHANGE_INFO_DEF << 8) | \
3478 ATOM_S6_CONFIG_DISPLAY_CHANGE_SHIFT | ATOM_FLAG_SET)
3479#define CLEAR_ATOM_S0_LCD1 \
3480 ((ATOM_DEVICE_CONNECT_INFO_DEF << 8 ) | \
3481 ATOM_S0_LCD1_SHIFT | ATOM_FLAG_CLEAR )
3482#define SET_ATOM_S7_DOS_8BIT_DAC_EN \
3483 ((ATOM_DOS_MODE_INFO_DEF << 8) | \
3484 ATOM_S7_DOS_8BIT_DAC_EN_SHIFT | ATOM_FLAG_SET )
3485#define CLEAR_ATOM_S7_DOS_8BIT_DAC_EN \
3486 ((ATOM_DOS_MODE_INFO_DEF << 8) | \
3487 ATOM_S7_DOS_8BIT_DAC_EN_SHIFT | ATOM_FLAG_CLEAR )
3488 4235
3489/****************************************************************************/ 4236#define SET_ATOM_S6_LID_STATE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_LID_STATE_SHIFT | ATOM_FLAG_SET)
3490/* Portion II: Definitinos only used in Driver */ 4237#define CLEAR_ATOM_S6_LID_STATE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_LID_STATE_SHIFT | ATOM_FLAG_CLEAR)
4238
4239#define SET_ATOM_S6_DOCK_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_DOCKING_CHANGE_SHIFT | ATOM_FLAG_SET)
4240#define SET_ATOM_S6_DOCK_STATE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_DOCK_STATE_SHIFT | ATOM_FLAG_SET)
4241#define CLEAR_ATOM_S6_DOCK_STATE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_DOCK_STATE_SHIFT | ATOM_FLAG_CLEAR)
4242
4243#define SET_ATOM_S6_THERMAL_STATE_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_THERMAL_STATE_CHANGE_SHIFT | ATOM_FLAG_SET)
4244#define SET_ATOM_S6_SYSTEM_POWER_MODE_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_SYSTEM_POWER_MODE_CHANGE_SHIFT | ATOM_FLAG_SET)
4245#define SET_ATOM_S6_INTERRUPT_SET_BY_BIOS ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_INTERRUPT_SET_BY_BIOS_SHIFT | ATOM_FLAG_SET)
4246
4247#define SET_ATOM_S6_CRITICAL_STATE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_CRITICAL_STATE_SHIFT | ATOM_FLAG_SET)
4248#define CLEAR_ATOM_S6_CRITICAL_STATE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_CRITICAL_STATE_SHIFT | ATOM_FLAG_CLEAR)
4249
4250#define SET_ATOM_S6_REQ_SCALER ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_REQ_SCALER_SHIFT | ATOM_FLAG_SET)
4251#define CLEAR_ATOM_S6_REQ_SCALER ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_REQ_SCALER_SHIFT | ATOM_FLAG_CLEAR )
4252
4253#define SET_ATOM_S6_REQ_SCALER_ARATIO ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_REQ_SCALER_ARATIO_SHIFT | ATOM_FLAG_SET )
4254#define CLEAR_ATOM_S6_REQ_SCALER_ARATIO ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_REQ_SCALER_ARATIO_SHIFT | ATOM_FLAG_CLEAR )
4255
4256#define SET_ATOM_S6_I2C_STATE_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_I2C_STATE_CHANGE_SHIFT | ATOM_FLAG_SET )
4257
4258#define SET_ATOM_S6_DISPLAY_STATE_CHANGE ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_DISPLAY_STATE_CHANGE_SHIFT | ATOM_FLAG_SET )
4259
4260#define SET_ATOM_S6_DEVICE_RECONFIG ((ATOM_ACC_CHANGE_INFO_DEF << 8 )|ATOM_S6_CONFIG_DISPLAY_CHANGE_SHIFT | ATOM_FLAG_SET)
4261#define CLEAR_ATOM_S0_LCD1 ((ATOM_DEVICE_CONNECT_INFO_DEF << 8 )| ATOM_S0_LCD1_SHIFT | ATOM_FLAG_CLEAR )
4262#define SET_ATOM_S7_DOS_8BIT_DAC_EN ((ATOM_DOS_MODE_INFO_DEF << 8 )|ATOM_S7_DOS_8BIT_DAC_EN_SHIFT | ATOM_FLAG_SET )
4263#define CLEAR_ATOM_S7_DOS_8BIT_DAC_EN ((ATOM_DOS_MODE_INFO_DEF << 8 )|ATOM_S7_DOS_8BIT_DAC_EN_SHIFT | ATOM_FLAG_CLEAR )
4264
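All of the SET_/CLEAR_ATOM_* helpers above share one encoding: the upper byte carries the scratch-table selector (for example ATOM_ACC_CHANGE_INFO_DEF), the low five bits carry the bit position inside that scratch register, and ATOM_FLAG_SET / ATOM_FLAG_CLEAR (bit 5) says whether the bit is to be set or cleared. A decoding sketch under that reading (the struct and function below are illustrative, not part of this header):

#include <stdint.h>

/* Illustrative decode of a SET_/CLEAR_ATOM_* command word such as
 * SET_ATOM_S6_LID_CHANGE; the struct is an assumption made for clarity. */
struct example_scratch_cmd {
        uint8_t table_id;   /* upper byte, e.g. ATOM_ACC_CHANGE_INFO_DEF   */
        uint8_t bit_shift;  /* bit position inside the scratch register    */
        uint8_t set;        /* non-zero when ATOM_FLAG_SET (0x20) is given */
};

static struct example_scratch_cmd example_decode_scratch_cmd(uint16_t cmd)
{
        struct example_scratch_cmd out;

        out.table_id  = (uint8_t)(cmd >> 8);
        out.set       = (cmd & ATOM_FLAG_SET) != 0;
        out.bit_shift = cmd & 0x1f;  /* shifts 0..31 sit below the flag bit */

        return out;
}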
4265/****************************************************************************/
4266//Portion II: Definitinos only used in Driver
3491/****************************************************************************/ 4267/****************************************************************************/
3492 4268
3493/* Macros used by driver */ 4269// Macros used by driver
4270#ifdef __cplusplus
4271#define GetIndexIntoMasterTable(MasterOrData, FieldName) ((reinterpret_cast<char*>(&(static_cast<ATOM_MASTER_LIST_OF_##MasterOrData##_TABLES*>(0))->FieldName)-static_cast<char*>(0))/sizeof(USHORT))
3494 4272
3495#define GetIndexIntoMasterTable(MasterOrData, FieldName) (((char *)(&((ATOM_MASTER_LIST_OF_##MasterOrData##_TABLES *)0)->FieldName)-(char *)0)/sizeof(USHORT)) 4273#define GET_COMMAND_TABLE_COMMANDSET_REVISION(TABLE_HEADER_OFFSET) (((static_cast<ATOM_COMMON_TABLE_HEADER*>(TABLE_HEADER_OFFSET))->ucTableFormatRevision )&0x3F)
4274#define GET_COMMAND_TABLE_PARAMETER_REVISION(TABLE_HEADER_OFFSET) (((static_cast<ATOM_COMMON_TABLE_HEADER*>(TABLE_HEADER_OFFSET))->ucTableContentRevision)&0x3F)
4275#else // not __cplusplus
4276#define GetIndexIntoMasterTable(MasterOrData, FieldName) (((char*)(&((ATOM_MASTER_LIST_OF_##MasterOrData##_TABLES*)0)->FieldName)-(char*)0)/sizeof(USHORT))
3496 4277
3497#define GET_COMMAND_TABLE_COMMANDSET_REVISION(TABLE_HEADER_OFFSET) ((((ATOM_COMMON_TABLE_HEADER*)TABLE_HEADER_OFFSET)->ucTableFormatRevision)&0x3F) 4278#define GET_COMMAND_TABLE_COMMANDSET_REVISION(TABLE_HEADER_OFFSET) ((((ATOM_COMMON_TABLE_HEADER*)TABLE_HEADER_OFFSET)->ucTableFormatRevision)&0x3F)
3498#define GET_COMMAND_TABLE_PARAMETER_REVISION(TABLE_HEADER_OFFSET) ((((ATOM_COMMON_TABLE_HEADER*)TABLE_HEADER_OFFSET)->ucTableContentRevision)&0x3F) 4279#define GET_COMMAND_TABLE_PARAMETER_REVISION(TABLE_HEADER_OFFSET) ((((ATOM_COMMON_TABLE_HEADER*)TABLE_HEADER_OFFSET)->ucTableContentRevision)&0x3F)
4280#endif // __cplusplus
3499 4281
3500#define GET_DATA_TABLE_MAJOR_REVISION GET_COMMAND_TABLE_COMMANDSET_REVISION 4282#define GET_DATA_TABLE_MAJOR_REVISION GET_COMMAND_TABLE_COMMANDSET_REVISION
3501#define GET_DATA_TABLE_MINOR_REVISION GET_COMMAND_TABLE_PARAMETER_REVISION 4283#define GET_DATA_TABLE_MINOR_REVISION GET_COMMAND_TABLE_PARAMETER_REVISION
3502 4284
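GetIndexIntoMasterTable is an offsetof-style computation: it takes the byte offset of the requested USHORT entry inside the ATOM_MASTER_LIST_OF_..._TABLES structure and divides by sizeof(USHORT) to get a word index into the master table. A hedged equivalent using the standard offsetof macro (the structure below is a stand-in for the pattern, not the real master table layout):

#include <stddef.h>

/* Sketch of what GetIndexIntoMasterTable(MasterOrData, FieldName) computes:
 * the word index of a USHORT member inside the master table list.  The
 * struct here is an example layout, not the real ATOM master table. */
typedef unsigned short USHORT_EXAMPLE;

struct example_master_list {
        USHORT_EXAMPLE FirstTable;
        USHORT_EXAMPLE SecondTable;
        USHORT_EXAMPLE ThirdTable;
};

enum { EXAMPLE_THIRD_TABLE_INDEX =
        offsetof(struct example_master_list, ThirdTable) /
        sizeof(USHORT_EXAMPLE) };                  /* evaluates to 2 */

The GET_COMMAND_TABLE_*_REVISION macros are simpler: they just mask the format/content revision bytes of an ATOM_COMMON_TABLE_HEADER down to their low six bits.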
3503/****************************************************************************/ 4285/****************************************************************************/
3504/* Portion III: Definitinos only used in VBIOS */ 4286//Portion III: Definitinos only used in VBIOS
3505/****************************************************************************/ 4287/****************************************************************************/
3506#define ATOM_DAC_SRC 0x80 4288#define ATOM_DAC_SRC 0x80
3507#define ATOM_SRC_DAC1 0 4289#define ATOM_SRC_DAC1 0
3508#define ATOM_SRC_DAC2 0x80 4290#define ATOM_SRC_DAC2 0x80
3509 4291
3510#ifdef UEFI_BUILD 4292typedef struct _MEMORY_PLLINIT_PARAMETERS
3511#define USHORT UTEMP 4293{
3512#endif 4294 ULONG ulTargetMemoryClock; //In 10Khz unit
3513 4295 UCHAR ucAction; //not define yet
3514typedef struct _MEMORY_PLLINIT_PARAMETERS { 4296 UCHAR ucFbDiv_Hi; //Fbdiv Hi byte
3515 ULONG ulTargetMemoryClock; /* In 10Khz unit */ 4297 UCHAR ucFbDiv; //FB value
3516 UCHAR ucAction; /* not define yet */ 4298 UCHAR ucPostDiv; //Post div
3517 UCHAR ucFbDiv_Hi; /* Fbdiv Hi byte */ 4299}MEMORY_PLLINIT_PARAMETERS;
3518 UCHAR ucFbDiv; /* FB value */
3519 UCHAR ucPostDiv; /* Post div */
3520} MEMORY_PLLINIT_PARAMETERS;
3521 4300
3522#define MEMORY_PLLINIT_PS_ALLOCATION MEMORY_PLLINIT_PARAMETERS 4301#define MEMORY_PLLINIT_PS_ALLOCATION MEMORY_PLLINIT_PARAMETERS
3523 4302
3524#define GPIO_PIN_WRITE 0x01 4303
4304#define GPIO_PIN_WRITE 0x01
3525#define GPIO_PIN_READ 0x00 4305#define GPIO_PIN_READ 0x00
3526 4306
3527typedef struct _GPIO_PIN_CONTROL_PARAMETERS { 4307typedef struct _GPIO_PIN_CONTROL_PARAMETERS
3528 UCHAR ucGPIO_ID; /* return value, read from GPIO pins */ 4308{
3529 UCHAR ucGPIOBitShift; /* define which bit in uGPIOBitVal need to be update */ 4309 UCHAR ucGPIO_ID; //return value, read from GPIO pins
3530 UCHAR ucGPIOBitVal; /* Set/Reset corresponding bit defined in ucGPIOBitMask */ 4310 UCHAR ucGPIOBitShift; //define which bit in uGPIOBitVal need to be update
3531 UCHAR ucAction; /* =GPIO_PIN_WRITE: Read; =GPIO_PIN_READ: Write */ 4311 UCHAR ucGPIOBitVal; //Set/Reset corresponding bit defined in ucGPIOBitMask
3532} GPIO_PIN_CONTROL_PARAMETERS; 4312 UCHAR ucAction; //=GPIO_PIN_WRITE: Read; =GPIO_PIN_READ: Write
3533 4313}GPIO_PIN_CONTROL_PARAMETERS;
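To see the parameter block in use: a caller fills in the GPIO id, bit position and value, then hands the structure to the matching command table. The sketch below is illustrative only; the dispatch routine is hypothetical, and because the header's own comment on ucAction reads ambiguously, the read/write direction shown here should not be taken as authoritative.

/* Hypothetical dispatch routine standing in for whatever executes the GPIO
 * command table; not a function defined by this header. */
void example_exec_atom_table(void *params);

/* Sketch only: request that one GPIO bit (0..7) be driven high. */
static void example_gpio_set_bit(unsigned char gpio_id, unsigned char bit)
{
        GPIO_PIN_CONTROL_PARAMETERS args;

        args.ucGPIO_ID      = gpio_id;
        args.ucGPIOBitShift = bit;
        args.ucGPIOBitVal   = (unsigned char)(1u << bit);
        args.ucAction       = GPIO_PIN_WRITE;

        example_exec_atom_table(&args);
}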
3534typedef struct _ENABLE_SCALER_PARAMETERS { 4314
3535 UCHAR ucScaler; /* ATOM_SCALER1, ATOM_SCALER2 */ 4315typedef struct _ENABLE_SCALER_PARAMETERS
3536 UCHAR ucEnable; /* ATOM_SCALER_DISABLE or ATOM_SCALER_CENTER or ATOM_SCALER_EXPANSION */ 4316{
3537 UCHAR ucTVStandard; /* */ 4317 UCHAR ucScaler; // ATOM_SCALER1, ATOM_SCALER2
3538 UCHAR ucPadding[1]; 4318 UCHAR ucEnable; // ATOM_SCALER_DISABLE or ATOM_SCALER_CENTER or ATOM_SCALER_EXPANSION
3539} ENABLE_SCALER_PARAMETERS; 4319 UCHAR ucTVStandard; //
3540#define ENABLE_SCALER_PS_ALLOCATION ENABLE_SCALER_PARAMETERS 4320 UCHAR ucPadding[1];
3541 4321}ENABLE_SCALER_PARAMETERS;
3542/* ucEnable: */ 4322#define ENABLE_SCALER_PS_ALLOCATION ENABLE_SCALER_PARAMETERS
4323
4324//ucEnable:
3543#define SCALER_BYPASS_AUTO_CENTER_NO_REPLICATION 0 4325#define SCALER_BYPASS_AUTO_CENTER_NO_REPLICATION 0
3544#define SCALER_BYPASS_AUTO_CENTER_AUTO_REPLICATION 1 4326#define SCALER_BYPASS_AUTO_CENTER_AUTO_REPLICATION 1
3545#define SCALER_ENABLE_2TAP_ALPHA_MODE 2 4327#define SCALER_ENABLE_2TAP_ALPHA_MODE 2
3546#define SCALER_ENABLE_MULTITAP_MODE 3 4328#define SCALER_ENABLE_MULTITAP_MODE 3
3547 4329
3548typedef struct _ENABLE_HARDWARE_ICON_CURSOR_PARAMETERS { 4330typedef struct _ENABLE_HARDWARE_ICON_CURSOR_PARAMETERS
3549 ULONG usHWIconHorzVertPosn; /* Hardware Icon Vertical position */ 4331{
3550 UCHAR ucHWIconVertOffset; /* Hardware Icon Vertical offset */ 4332 ULONG usHWIconHorzVertPosn; // Hardware Icon Vertical position
3551 UCHAR ucHWIconHorzOffset; /* Hardware Icon Horizontal offset */ 4333 UCHAR ucHWIconVertOffset; // Hardware Icon Vertical offset
3552 UCHAR ucSelection; /* ATOM_CURSOR1 or ATOM_ICON1 or ATOM_CURSOR2 or ATOM_ICON2 */ 4334 UCHAR ucHWIconHorzOffset; // Hardware Icon Horizontal offset
3553 UCHAR ucEnable; /* ATOM_ENABLE or ATOM_DISABLE */ 4335 UCHAR ucSelection; // ATOM_CURSOR1 or ATOM_ICON1 or ATOM_CURSOR2 or ATOM_ICON2
3554} ENABLE_HARDWARE_ICON_CURSOR_PARAMETERS; 4336 UCHAR ucEnable; // ATOM_ENABLE or ATOM_DISABLE
3555 4337}ENABLE_HARDWARE_ICON_CURSOR_PARAMETERS;
3556typedef struct _ENABLE_HARDWARE_ICON_CURSOR_PS_ALLOCATION { 4338
3557 ENABLE_HARDWARE_ICON_CURSOR_PARAMETERS sEnableIcon; 4339typedef struct _ENABLE_HARDWARE_ICON_CURSOR_PS_ALLOCATION
3558 ENABLE_CRTC_PARAMETERS sReserved; 4340{
3559} ENABLE_HARDWARE_ICON_CURSOR_PS_ALLOCATION; 4341 ENABLE_HARDWARE_ICON_CURSOR_PARAMETERS sEnableIcon;
3560 4342 ENABLE_CRTC_PARAMETERS sReserved;
3561typedef struct _ENABLE_GRAPH_SURFACE_PARAMETERS { 4343}ENABLE_HARDWARE_ICON_CURSOR_PS_ALLOCATION;
3562 USHORT usHight; /* Image Hight */ 4344
3563 USHORT usWidth; /* Image Width */ 4345typedef struct _ENABLE_GRAPH_SURFACE_PARAMETERS
3564 UCHAR ucSurface; /* Surface 1 or 2 */ 4346{
3565 UCHAR ucPadding[3]; 4347 USHORT usHight; // Image Hight
3566} ENABLE_GRAPH_SURFACE_PARAMETERS; 4348 USHORT usWidth; // Image Width
3567 4349 UCHAR ucSurface; // Surface 1 or 2
3568typedef struct _ENABLE_GRAPH_SURFACE_PARAMETERS_V1_2 { 4350 UCHAR ucPadding[3];
3569 USHORT usHight; /* Image Hight */ 4351}ENABLE_GRAPH_SURFACE_PARAMETERS;
3570 USHORT usWidth; /* Image Width */ 4352
3571 UCHAR ucSurface; /* Surface 1 or 2 */ 4353typedef struct _ENABLE_GRAPH_SURFACE_PARAMETERS_V1_2
3572 UCHAR ucEnable; /* ATOM_ENABLE or ATOM_DISABLE */ 4354{
3573 UCHAR ucPadding[2]; 4355 USHORT usHight; // Image Hight
3574} ENABLE_GRAPH_SURFACE_PARAMETERS_V1_2; 4356 USHORT usWidth; // Image Width
3575 4357 UCHAR ucSurface; // Surface 1 or 2
3576typedef struct _ENABLE_GRAPH_SURFACE_PS_ALLOCATION { 4358 UCHAR ucEnable; // ATOM_ENABLE or ATOM_DISABLE
3577 ENABLE_GRAPH_SURFACE_PARAMETERS sSetSurface; 4359 UCHAR ucPadding[2];
3578 ENABLE_YUV_PS_ALLOCATION sReserved; /* Don't set this one */ 4360}ENABLE_GRAPH_SURFACE_PARAMETERS_V1_2;
3579} ENABLE_GRAPH_SURFACE_PS_ALLOCATION; 4361
3580 4362typedef struct _ENABLE_GRAPH_SURFACE_PARAMETERS_V1_3
3581typedef struct _MEMORY_CLEAN_UP_PARAMETERS { 4363{
3582 USHORT usMemoryStart; /* in 8Kb boundry, offset from memory base address */ 4364 USHORT usHight; // Image Hight
3583 USHORT usMemorySize; /* 8Kb blocks aligned */ 4365 USHORT usWidth; // Image Width
3584} MEMORY_CLEAN_UP_PARAMETERS; 4366 UCHAR ucSurface; // Surface 1 or 2
4367 UCHAR ucEnable; // ATOM_ENABLE or ATOM_DISABLE
4368 USHORT usDeviceId; // Active Device Id for this surface. If no device, set to 0.
4369}ENABLE_GRAPH_SURFACE_PARAMETERS_V1_3;
4370
4371typedef struct _ENABLE_GRAPH_SURFACE_PS_ALLOCATION
4372{
4373 ENABLE_GRAPH_SURFACE_PARAMETERS sSetSurface;
4374 ENABLE_YUV_PS_ALLOCATION sReserved; // Don't set this one
4375}ENABLE_GRAPH_SURFACE_PS_ALLOCATION;
4376
4377typedef struct _MEMORY_CLEAN_UP_PARAMETERS
4378{
4379 USHORT usMemoryStart; //in 8Kb boundry, offset from memory base address
4380 USHORT usMemorySize; //8Kb blocks aligned
4381}MEMORY_CLEAN_UP_PARAMETERS;
3585#define MEMORY_CLEAN_UP_PS_ALLOCATION MEMORY_CLEAN_UP_PARAMETERS 4382#define MEMORY_CLEAN_UP_PS_ALLOCATION MEMORY_CLEAN_UP_PARAMETERS
3586 4383
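The two USHORT fields of MEMORY_CLEAN_UP_PARAMETERS are expressed in 8 KB blocks rather than bytes; that reading is an assumption based on the comments above, since a byte-valued USHORT could not describe a useful range. A conversion sketch under that assumption (helper name is hypothetical):

#include <stdint.h>

/* Sketch only, assuming 8 KB units: convert a byte range into the block
 * counts expected by MEMORY_CLEAN_UP_PARAMETERS. */
static void example_fill_memory_clean_up(MEMORY_CLEAN_UP_PARAMETERS *args,
                                         uint32_t start_bytes,
                                         uint32_t size_bytes)
{
        args->usMemoryStart = (unsigned short)(start_bytes >> 13); /* /8192 */
        args->usMemorySize  = (unsigned short)(size_bytes  >> 13);
}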
3587typedef struct _GET_DISPLAY_SURFACE_SIZE_PARAMETERS { 4384typedef struct _GET_DISPLAY_SURFACE_SIZE_PARAMETERS
3588 USHORT usX_Size; /* When use as input parameter, usX_Size indicates which CRTC */ 4385{
3589 USHORT usY_Size; 4386 USHORT usX_Size; //When use as input parameter, usX_Size indicates which CRTC
3590} GET_DISPLAY_SURFACE_SIZE_PARAMETERS; 4387 USHORT usY_Size;
4388}GET_DISPLAY_SURFACE_SIZE_PARAMETERS;
3591 4389
3592typedef struct _INDIRECT_IO_ACCESS { 4390typedef struct _INDIRECT_IO_ACCESS
3593 ATOM_COMMON_TABLE_HEADER sHeader; 4391{
3594 UCHAR IOAccessSequence[256]; 4392 ATOM_COMMON_TABLE_HEADER sHeader;
4393 UCHAR IOAccessSequence[256];
3595} INDIRECT_IO_ACCESS; 4394} INDIRECT_IO_ACCESS;
3596 4395
3597#define INDIRECT_READ 0x00 4396#define INDIRECT_READ 0x00
@@ -3615,93 +4414,108 @@ typedef struct _INDIRECT_IO_ACCESS {
3615#define INDIRECT_IO_NBMISC_READ INDIRECT_IO_NBMISC | INDIRECT_READ 4414#define INDIRECT_IO_NBMISC_READ INDIRECT_IO_NBMISC | INDIRECT_READ
3616#define INDIRECT_IO_NBMISC_WRITE INDIRECT_IO_NBMISC | INDIRECT_WRITE 4415#define INDIRECT_IO_NBMISC_WRITE INDIRECT_IO_NBMISC | INDIRECT_WRITE
3617 4416
3618typedef struct _ATOM_OEM_INFO { 4417typedef struct _ATOM_OEM_INFO
3619 ATOM_COMMON_TABLE_HEADER sHeader; 4418{
3620 ATOM_I2C_ID_CONFIG_ACCESS sucI2cId; 4419 ATOM_COMMON_TABLE_HEADER sHeader;
3621} ATOM_OEM_INFO; 4420 ATOM_I2C_ID_CONFIG_ACCESS sucI2cId;
3622 4421}ATOM_OEM_INFO;
3623typedef struct _ATOM_TV_MODE { 4422
3624 UCHAR ucVMode_Num; /* Video mode number */ 4423typedef struct _ATOM_TV_MODE
3625 UCHAR ucTV_Mode_Num; /* Internal TV mode number */ 4424{
3626} ATOM_TV_MODE; 4425 UCHAR ucVMode_Num; //Video mode number
3627 4426 UCHAR ucTV_Mode_Num; //Internal TV mode number
3628typedef struct _ATOM_BIOS_INT_TVSTD_MODE { 4427}ATOM_TV_MODE;
3629 ATOM_COMMON_TABLE_HEADER sHeader; 4428
3630 USHORT usTV_Mode_LUT_Offset; /* Pointer to standard to internal number conversion table */ 4429typedef struct _ATOM_BIOS_INT_TVSTD_MODE
3631 USHORT usTV_FIFO_Offset; /* Pointer to FIFO entry table */ 4430{
3632 USHORT usNTSC_Tbl_Offset; /* Pointer to SDTV_Mode_NTSC table */ 4431 ATOM_COMMON_TABLE_HEADER sHeader;
3633 USHORT usPAL_Tbl_Offset; /* Pointer to SDTV_Mode_PAL table */ 4432 USHORT usTV_Mode_LUT_Offset; // Pointer to standard to internal number conversion table
3634 USHORT usCV_Tbl_Offset; /* Pointer to SDTV_Mode_PAL table */ 4433 USHORT usTV_FIFO_Offset; // Pointer to FIFO entry table
3635} ATOM_BIOS_INT_TVSTD_MODE; 4434 USHORT usNTSC_Tbl_Offset; // Pointer to SDTV_Mode_NTSC table
3636 4435 USHORT usPAL_Tbl_Offset; // Pointer to SDTV_Mode_PAL table
3637typedef struct _ATOM_TV_MODE_SCALER_PTR { 4436 USHORT usCV_Tbl_Offset; // Pointer to SDTV_Mode_PAL table
3638 USHORT ucFilter0_Offset; /* Pointer to filter format 0 coefficients */ 4437}ATOM_BIOS_INT_TVSTD_MODE;
3639 USHORT usFilter1_Offset; /* Pointer to filter format 0 coefficients */ 4438
3640 UCHAR ucTV_Mode_Num; 4439
3641} ATOM_TV_MODE_SCALER_PTR; 4440typedef struct _ATOM_TV_MODE_SCALER_PTR
3642 4441{
3643typedef struct _ATOM_STANDARD_VESA_TIMING { 4442 USHORT ucFilter0_Offset; //Pointer to filter format 0 coefficients
3644 ATOM_COMMON_TABLE_HEADER sHeader; 4443 USHORT usFilter1_Offset; //Pointer to filter format 0 coefficients
3645 ATOM_DTD_FORMAT aModeTimings[16]; /* 16 is not the real array number, just for initial allocation */ 4444 UCHAR ucTV_Mode_Num;
3646} ATOM_STANDARD_VESA_TIMING; 4445}ATOM_TV_MODE_SCALER_PTR;
3647 4446
3648typedef struct _ATOM_STD_FORMAT { 4447typedef struct _ATOM_STANDARD_VESA_TIMING
3649 USHORT usSTD_HDisp; 4448{
3650 USHORT usSTD_VDisp; 4449 ATOM_COMMON_TABLE_HEADER sHeader;
3651 USHORT usSTD_RefreshRate; 4450 ATOM_DTD_FORMAT aModeTimings[16]; // 16 is not the real array number, just for initial allocation
3652 USHORT usReserved; 4451}ATOM_STANDARD_VESA_TIMING;
3653} ATOM_STD_FORMAT; 4452
3654 4453
3655typedef struct _ATOM_VESA_TO_EXTENDED_MODE { 4454typedef struct _ATOM_STD_FORMAT
3656 USHORT usVESA_ModeNumber; 4455{
3657 USHORT usExtendedModeNumber; 4456 USHORT usSTD_HDisp;
3658} ATOM_VESA_TO_EXTENDED_MODE; 4457 USHORT usSTD_VDisp;
3659 4458 USHORT usSTD_RefreshRate;
3660typedef struct _ATOM_VESA_TO_INTENAL_MODE_LUT { 4459 USHORT usReserved;
3661 ATOM_COMMON_TABLE_HEADER sHeader; 4460}ATOM_STD_FORMAT;
3662 ATOM_VESA_TO_EXTENDED_MODE asVESA_ToExtendedModeInfo[76]; 4461
3663} ATOM_VESA_TO_INTENAL_MODE_LUT; 4462typedef struct _ATOM_VESA_TO_EXTENDED_MODE
4463{
4464 USHORT usVESA_ModeNumber;
4465 USHORT usExtendedModeNumber;
4466}ATOM_VESA_TO_EXTENDED_MODE;
4467
4468typedef struct _ATOM_VESA_TO_INTENAL_MODE_LUT
4469{
4470 ATOM_COMMON_TABLE_HEADER sHeader;
4471 ATOM_VESA_TO_EXTENDED_MODE asVESA_ToExtendedModeInfo[76];
4472}ATOM_VESA_TO_INTENAL_MODE_LUT;
3664 4473
3665/*************** ATOM Memory Related Data Structure ***********************/ 4474/*************** ATOM Memory Related Data Structure ***********************/
3666typedef struct _ATOM_MEMORY_VENDOR_BLOCK { 4475typedef struct _ATOM_MEMORY_VENDOR_BLOCK{
3667 UCHAR ucMemoryType; 4476 UCHAR ucMemoryType;
3668 UCHAR ucMemoryVendor; 4477 UCHAR ucMemoryVendor;
3669 UCHAR ucAdjMCId; 4478 UCHAR ucAdjMCId;
3670 UCHAR ucDynClkId; 4479 UCHAR ucDynClkId;
3671 ULONG ulDllResetClkRange; 4480 ULONG ulDllResetClkRange;
3672} ATOM_MEMORY_VENDOR_BLOCK; 4481}ATOM_MEMORY_VENDOR_BLOCK;
3673 4482
3674typedef struct _ATOM_MEMORY_SETTING_ID_CONFIG { 4483
4484typedef struct _ATOM_MEMORY_SETTING_ID_CONFIG{
3675#if ATOM_BIG_ENDIAN 4485#if ATOM_BIG_ENDIAN
3676 ULONG ucMemBlkId:8; 4486 ULONG ucMemBlkId:8;
3677 ULONG ulMemClockRange:24; 4487 ULONG ulMemClockRange:24;
3678#else 4488#else
3679 ULONG ulMemClockRange:24; 4489 ULONG ulMemClockRange:24;
3680 ULONG ucMemBlkId:8; 4490 ULONG ucMemBlkId:8;
3681#endif 4491#endif
3682} ATOM_MEMORY_SETTING_ID_CONFIG; 4492}ATOM_MEMORY_SETTING_ID_CONFIG;
3683 4493
3684typedef union _ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS { 4494typedef union _ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS
3685 ATOM_MEMORY_SETTING_ID_CONFIG slAccess; 4495{
3686 ULONG ulAccess; 4496 ATOM_MEMORY_SETTING_ID_CONFIG slAccess;
3687} ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS; 4497 ULONG ulAccess;
3688 4498}ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS;
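The _ACCESS union lets the same word be handled either as the raw ULONG stored in a data block or through the 24-bit clock range / 8-bit block ID bit fields. A matching sketch, under the assumption that ulMemClockRange is an upper bound in 10 kHz units (mirroring the wording used for the timing tables later in this file):

#include <stdint.h>

/* Sketch only: test whether a target memory clock (10 kHz units) falls
 * under the range packed into an ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS word
 * for a given block ID.  The upper-bound reading is an assumption. */
static int example_mem_id_matches(ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS id,
                                  uint32_t target_clk_10khz,
                                  uint8_t wanted_blk_id)
{
        return id.slAccess.ucMemBlkId == wanted_blk_id &&
               target_clk_10khz <= id.slAccess.ulMemClockRange;
}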
3689typedef struct _ATOM_MEMORY_SETTING_DATA_BLOCK { 4499
3690 ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS ulMemoryID; 4500
3691 ULONG aulMemData[1]; 4501typedef struct _ATOM_MEMORY_SETTING_DATA_BLOCK{
3692} ATOM_MEMORY_SETTING_DATA_BLOCK; 4502 ATOM_MEMORY_SETTING_ID_CONFIG_ACCESS ulMemoryID;
3693 4503 ULONG aulMemData[1];
3694typedef struct _ATOM_INIT_REG_INDEX_FORMAT { 4504}ATOM_MEMORY_SETTING_DATA_BLOCK;
3695 USHORT usRegIndex; /* MC register index */ 4505
3696 UCHAR ucPreRegDataLength; /* offset in ATOM_INIT_REG_DATA_BLOCK.saRegDataBuf */ 4506
3697} ATOM_INIT_REG_INDEX_FORMAT; 4507typedef struct _ATOM_INIT_REG_INDEX_FORMAT{
3698 4508 USHORT usRegIndex; // MC register index
3699typedef struct _ATOM_INIT_REG_BLOCK { 4509 UCHAR ucPreRegDataLength; // offset in ATOM_INIT_REG_DATA_BLOCK.saRegDataBuf
3700 USHORT usRegIndexTblSize; /* size of asRegIndexBuf */ 4510}ATOM_INIT_REG_INDEX_FORMAT;
3701 USHORT usRegDataBlkSize; /* size of ATOM_MEMORY_SETTING_DATA_BLOCK */ 4511
3702 ATOM_INIT_REG_INDEX_FORMAT asRegIndexBuf[1]; 4512
3703 ATOM_MEMORY_SETTING_DATA_BLOCK asRegDataBuf[1]; 4513typedef struct _ATOM_INIT_REG_BLOCK{
3704} ATOM_INIT_REG_BLOCK; 4514 USHORT usRegIndexTblSize; //size of asRegIndexBuf
4515 USHORT usRegDataBlkSize; //size of ATOM_MEMORY_SETTING_DATA_BLOCK
4516 ATOM_INIT_REG_INDEX_FORMAT asRegIndexBuf[1];
4517 ATOM_MEMORY_SETTING_DATA_BLOCK asRegDataBuf[1];
4518}ATOM_INIT_REG_BLOCK;
3705 4519
3706#define END_OF_REG_INDEX_BLOCK 0x0ffff 4520#define END_OF_REG_INDEX_BLOCK 0x0ffff
3707#define END_OF_REG_DATA_BLOCK 0x00000000 4521#define END_OF_REG_DATA_BLOCK 0x00000000
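ATOM_INIT_REG_BLOCK is a variable-length layout: asRegIndexBuf[] is declared with one element but actually runs until an entry whose usRegIndex equals END_OF_REG_INDEX_BLOCK, and each following data block is usRegDataBlkSize bytes, terminated by END_OF_REG_DATA_BLOCK. A traversal sketch for the index table, with bounds checks omitted (this follows the layout described here, not any particular driver routine):

/* Sketch only: count the register-index entries in an ATOM_INIT_REG_BLOCK,
 * stopping at the END_OF_REG_INDEX_BLOCK terminator.  Indexing past the
 * declared [1] element is the variable-length idiom this header relies on. */
static unsigned int example_count_reg_indices(const ATOM_INIT_REG_BLOCK *blk)
{
        unsigned int n = 0;

        while (blk->asRegIndexBuf[n].usRegIndex != END_OF_REG_INDEX_BLOCK)
                n++;

        return n;
}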
@@ -3716,16 +4530,19 @@ typedef struct _ATOM_INIT_REG_BLOCK {
3716#define INDEX_ACCESS_RANGE_END (INDEX_ACCESS_RANGE_BEGIN + 1) 4530#define INDEX_ACCESS_RANGE_END (INDEX_ACCESS_RANGE_BEGIN + 1)
3717#define VALUE_INDEX_ACCESS_SINGLE (INDEX_ACCESS_RANGE_END + 1) 4531#define VALUE_INDEX_ACCESS_SINGLE (INDEX_ACCESS_RANGE_END + 1)
3718 4532
3719typedef struct _ATOM_MC_INIT_PARAM_TABLE { 4533
3720 ATOM_COMMON_TABLE_HEADER sHeader; 4534typedef struct _ATOM_MC_INIT_PARAM_TABLE
3721 USHORT usAdjustARB_SEQDataOffset; 4535{
3722 USHORT usMCInitMemTypeTblOffset; 4536 ATOM_COMMON_TABLE_HEADER sHeader;
3723 USHORT usMCInitCommonTblOffset; 4537 USHORT usAdjustARB_SEQDataOffset;
3724 USHORT usMCInitPowerDownTblOffset; 4538 USHORT usMCInitMemTypeTblOffset;
3725 ULONG ulARB_SEQDataBuf[32]; 4539 USHORT usMCInitCommonTblOffset;
3726 ATOM_INIT_REG_BLOCK asMCInitMemType; 4540 USHORT usMCInitPowerDownTblOffset;
3727 ATOM_INIT_REG_BLOCK asMCInitCommon; 4541 ULONG ulARB_SEQDataBuf[32];
3728} ATOM_MC_INIT_PARAM_TABLE; 4542 ATOM_INIT_REG_BLOCK asMCInitMemType;
4543 ATOM_INIT_REG_BLOCK asMCInitCommon;
4544}ATOM_MC_INIT_PARAM_TABLE;
4545
3729 4546
3730#define _4Mx16 0x2 4547#define _4Mx16 0x2
3731#define _4Mx32 0x3 4548#define _4Mx32 0x3
@@ -3751,221 +4568,272 @@ typedef struct _ATOM_MC_INIT_PARAM_TABLE {
3751 4568
3752#define QIMONDA INFINEON 4569#define QIMONDA INFINEON
3753#define PROMOS MOSEL 4570#define PROMOS MOSEL
4571#define KRETON INFINEON
3754 4572
3755/* ///////////Support for GDDR5 MC uCode to reside in upper 64K of ROM///////////// */ 4573/////////////Support for GDDR5 MC uCode to reside in upper 64K of ROM/////////////
3756 4574
3757#define UCODE_ROM_START_ADDRESS 0x1c000 4575#define UCODE_ROM_START_ADDRESS 0x1c000
3758#define UCODE_SIGNATURE 0x4375434d /* 'MCuC' - MC uCode */ 4576#define UCODE_SIGNATURE 0x4375434d // 'MCuC' - MC uCode
3759 4577
3760/* uCode block header for reference */ 4578//uCode block header for reference
3761 4579
3762typedef struct _MCuCodeHeader { 4580typedef struct _MCuCodeHeader
3763 ULONG ulSignature; 4581{
3764 UCHAR ucRevision; 4582 ULONG ulSignature;
3765 UCHAR ucChecksum; 4583 UCHAR ucRevision;
3766 UCHAR ucReserved1; 4584 UCHAR ucChecksum;
3767 UCHAR ucReserved2; 4585 UCHAR ucReserved1;
3768 USHORT usParametersLength; 4586 UCHAR ucReserved2;
3769 USHORT usUCodeLength; 4587 USHORT usParametersLength;
3770 USHORT usReserved1; 4588 USHORT usUCodeLength;
3771 USHORT usReserved2; 4589 USHORT usReserved1;
4590 USHORT usReserved2;
3772} MCuCodeHeader; 4591} MCuCodeHeader;
3773 4592
3774/* //////////////////////////////////////////////////////////////////////////////// */ 4593//////////////////////////////////////////////////////////////////////////////////
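To make the uCode layout concrete: the MC uCode header lives at UCODE_ROM_START_ADDRESS inside the ROM image and begins with the 'MCuC' signature. A minimal presence check, assuming the ROM has already been copied into memory and uses the same little-endian byte order as the ULONG fields (illustrative, not the VBIOS or driver loader itself):

#include <stdint.h>
#include <string.h>

/* Sketch only: check the MC uCode signature at its fixed ROM offset. */
static int example_mc_ucode_present(const uint8_t *rom, size_t rom_len)
{
        MCuCodeHeader hdr;

        if (rom_len < UCODE_ROM_START_ADDRESS + sizeof(hdr))
                return 0;

        memcpy(&hdr, rom + UCODE_ROM_START_ADDRESS, sizeof(hdr));
        return hdr.ulSignature == UCODE_SIGNATURE;   /* 'MCuC' */
}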
3775 4594
3776#define ATOM_MAX_NUMBER_OF_VRAM_MODULE 16 4595#define ATOM_MAX_NUMBER_OF_VRAM_MODULE 16
3777 4596
3778#define ATOM_VRAM_MODULE_MEMORY_VENDOR_ID_MASK 0xF 4597#define ATOM_VRAM_MODULE_MEMORY_VENDOR_ID_MASK 0xF
3779typedef struct _ATOM_VRAM_MODULE_V1 { 4598typedef struct _ATOM_VRAM_MODULE_V1
3780 ULONG ulReserved; 4599{
3781 USHORT usEMRSValue; 4600 ULONG ulReserved;
3782 USHORT usMRSValue; 4601 USHORT usEMRSValue;
3783 USHORT usReserved; 4602 USHORT usMRSValue;
3784 UCHAR ucExtMemoryID; /* An external indicator (by hardcode, callback or pin) to tell what is the current memory module */ 4603 USHORT usReserved;
3785 UCHAR ucMemoryType; /* [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4;[3:0] reserved; */ 4604 UCHAR ucExtMemoryID; // An external indicator (by hardcode, callback or pin) to tell what is the current memory module
3786 UCHAR ucMemoryVenderID; /* Predefined,never change across designs or memory type/vender */ 4605 UCHAR ucMemoryType; // [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4;[3:0] reserved;
3787 UCHAR ucMemoryDeviceCfg; /* [7:4]=0x0:4M;=0x1:8M;=0x2:16M;0x3:32M....[3:0]=0x0:x4;=0x1:x8;=0x2:x16;=0x3:x32... */ 4606 UCHAR ucMemoryVenderID; // Predefined,never change across designs or memory type/vender
3788 UCHAR ucRow; /* Number of Row,in power of 2; */ 4607 UCHAR ucMemoryDeviceCfg; // [7:4]=0x0:4M;=0x1:8M;=0x2:16M;0x3:32M....[3:0]=0x0:x4;=0x1:x8;=0x2:x16;=0x3:x32...
3789 UCHAR ucColumn; /* Number of Column,in power of 2; */ 4608 UCHAR ucRow; // Number of Row,in power of 2;
3790 UCHAR ucBank; /* Nunber of Bank; */ 4609 UCHAR ucColumn; // Number of Column,in power of 2;
3791 UCHAR ucRank; /* Number of Rank, in power of 2 */ 4610 UCHAR ucBank; // Nunber of Bank;
3792 UCHAR ucChannelNum; /* Number of channel; */ 4611 UCHAR ucRank; // Number of Rank, in power of 2
3793 UCHAR ucChannelConfig; /* [3:0]=Indication of what channel combination;[4:7]=Channel bit width, in number of 2 */ 4612 UCHAR ucChannelNum; // Number of channel;
3794 UCHAR ucDefaultMVDDQ_ID; /* Default MVDDQ setting for this memory block, ID linking to MVDDQ info table to find real set-up data; */ 4613 UCHAR ucChannelConfig; // [3:0]=Indication of what channel combination;[4:7]=Channel bit width, in number of 2
3795 UCHAR ucDefaultMVDDC_ID; /* Default MVDDC setting for this memory block, ID linking to MVDDC info table to find real set-up data; */ 4614 UCHAR ucDefaultMVDDQ_ID; // Default MVDDQ setting for this memory block, ID linking to MVDDQ info table to find real set-up data;
3796 UCHAR ucReserved[2]; 4615 UCHAR ucDefaultMVDDC_ID; // Default MVDDC setting for this memory block, ID linking to MVDDC info table to find real set-up data;
3797} ATOM_VRAM_MODULE_V1; 4616 UCHAR ucReserved[2];
3798 4617}ATOM_VRAM_MODULE_V1;
3799typedef struct _ATOM_VRAM_MODULE_V2 { 4618
3800 ULONG ulReserved; 4619
3801 ULONG ulFlags; /* To enable/disable functionalities based on memory type */ 4620typedef struct _ATOM_VRAM_MODULE_V2
3802 ULONG ulEngineClock; /* Override of default engine clock for particular memory type */ 4621{
3803 ULONG ulMemoryClock; /* Override of default memory clock for particular memory type */ 4622 ULONG ulReserved;
3804 USHORT usEMRS2Value; /* EMRS2 Value is used for GDDR2 and GDDR4 memory type */ 4623 ULONG ulFlags; // To enable/disable functionalities based on memory type
3805 USHORT usEMRS3Value; /* EMRS3 Value is used for GDDR2 and GDDR4 memory type */ 4624 ULONG ulEngineClock; // Override of default engine clock for particular memory type
3806 USHORT usEMRSValue; 4625 ULONG ulMemoryClock; // Override of default memory clock for particular memory type
3807 USHORT usMRSValue; 4626 USHORT usEMRS2Value; // EMRS2 Value is used for GDDR2 and GDDR4 memory type
3808 USHORT usReserved; 4627 USHORT usEMRS3Value; // EMRS3 Value is used for GDDR2 and GDDR4 memory type
3809 UCHAR ucExtMemoryID; /* An external indicator (by hardcode, callback or pin) to tell what is the current memory module */ 4628 USHORT usEMRSValue;
3810 UCHAR ucMemoryType; /* [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4;[3:0] - must not be used for now; */ 4629 USHORT usMRSValue;
3811 UCHAR ucMemoryVenderID; /* Predefined,never change across designs or memory type/vender. If not predefined, vendor detection table gets executed */ 4630 USHORT usReserved;
3812 UCHAR ucMemoryDeviceCfg; /* [7:4]=0x0:4M;=0x1:8M;=0x2:16M;0x3:32M....[3:0]=0x0:x4;=0x1:x8;=0x2:x16;=0x3:x32... */ 4631 UCHAR ucExtMemoryID; // An external indicator (by hardcode, callback or pin) to tell what is the current memory module
3813 UCHAR ucRow; /* Number of Row,in power of 2; */ 4632 UCHAR ucMemoryType; // [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4;[3:0] - must not be used for now;
3814 UCHAR ucColumn; /* Number of Column,in power of 2; */ 4633 UCHAR ucMemoryVenderID; // Predefined,never change across designs or memory type/vender. If not predefined, vendor detection table gets executed
3815 UCHAR ucBank; /* Nunber of Bank; */ 4634 UCHAR ucMemoryDeviceCfg; // [7:4]=0x0:4M;=0x1:8M;=0x2:16M;0x3:32M....[3:0]=0x0:x4;=0x1:x8;=0x2:x16;=0x3:x32...
3816 UCHAR ucRank; /* Number of Rank, in power of 2 */ 4635 UCHAR ucRow; // Number of Row,in power of 2;
3817 UCHAR ucChannelNum; /* Number of channel; */ 4636 UCHAR ucColumn; // Number of Column,in power of 2;
3818 UCHAR ucChannelConfig; /* [3:0]=Indication of what channel combination;[4:7]=Channel bit width, in number of 2 */ 4637 UCHAR ucBank; // Nunber of Bank;
3819 UCHAR ucDefaultMVDDQ_ID; /* Default MVDDQ setting for this memory block, ID linking to MVDDQ info table to find real set-up data; */ 4638 UCHAR ucRank; // Number of Rank, in power of 2
3820 UCHAR ucDefaultMVDDC_ID; /* Default MVDDC setting for this memory block, ID linking to MVDDC info table to find real set-up data; */ 4639 UCHAR ucChannelNum; // Number of channel;
3821 UCHAR ucRefreshRateFactor; 4640 UCHAR ucChannelConfig; // [3:0]=Indication of what channel combination;[4:7]=Channel bit width, in number of 2
3822 UCHAR ucReserved[3]; 4641 UCHAR ucDefaultMVDDQ_ID; // Default MVDDQ setting for this memory block, ID linking to MVDDQ info table to find real set-up data;
3823} ATOM_VRAM_MODULE_V2; 4642 UCHAR ucDefaultMVDDC_ID; // Default MVDDC setting for this memory block, ID linking to MVDDC info table to find real set-up data;
3824 4643 UCHAR ucRefreshRateFactor;
3825typedef struct _ATOM_MEMORY_TIMING_FORMAT { 4644 UCHAR ucReserved[3];
3826 ULONG ulClkRange; /* memory clock in 10kHz unit, when target memory clock is below this clock, use this memory timing */ 4645}ATOM_VRAM_MODULE_V2;
3827 union { 4646
3828 USHORT usMRS; /* mode register */ 4647
3829 USHORT usDDR3_MR0; 4648typedef struct _ATOM_MEMORY_TIMING_FORMAT
3830 }; 4649{
3831 union { 4650 ULONG ulClkRange; // memory clock in 10kHz unit, when target memory clock is below this clock, use this memory timing
3832 USHORT usEMRS; /* extended mode register */ 4651 union{
3833 USHORT usDDR3_MR1; 4652 USHORT usMRS; // mode register
3834 }; 4653 USHORT usDDR3_MR0;
3835 UCHAR ucCL; /* CAS latency */ 4654 };
3836 UCHAR ucWL; /* WRITE Latency */ 4655 union{
3837 UCHAR uctRAS; /* tRAS */ 4656 USHORT usEMRS; // extended mode register
3838 UCHAR uctRC; /* tRC */ 4657 USHORT usDDR3_MR1;
3839 UCHAR uctRFC; /* tRFC */ 4658 };
3840 UCHAR uctRCDR; /* tRCDR */ 4659 UCHAR ucCL; // CAS latency
3841 UCHAR uctRCDW; /* tRCDW */ 4660 UCHAR ucWL; // WRITE Latency
3842 UCHAR uctRP; /* tRP */ 4661 UCHAR uctRAS; // tRAS
3843 UCHAR uctRRD; /* tRRD */ 4662 UCHAR uctRC; // tRC
3844 UCHAR uctWR; /* tWR */ 4663 UCHAR uctRFC; // tRFC
3845 UCHAR uctWTR; /* tWTR */ 4664 UCHAR uctRCDR; // tRCDR
3846 UCHAR uctPDIX; /* tPDIX */ 4665 UCHAR uctRCDW; // tRCDW
3847 UCHAR uctFAW; /* tFAW */ 4666 UCHAR uctRP; // tRP
3848 UCHAR uctAOND; /* tAOND */ 4667 UCHAR uctRRD; // tRRD
3849 union { 4668 UCHAR uctWR; // tWR
3850 struct { 4669 UCHAR uctWTR; // tWTR
3851 UCHAR ucflag; /* flag to control memory timing calculation. bit0= control EMRS2 Infineon */ 4670 UCHAR uctPDIX; // tPDIX
3852 UCHAR ucReserved; 4671 UCHAR uctFAW; // tFAW
3853 }; 4672 UCHAR uctAOND; // tAOND
3854 USHORT usDDR3_MR2; 4673 union
3855 }; 4674 {
3856} ATOM_MEMORY_TIMING_FORMAT; 4675 struct {
3857 4676 UCHAR ucflag; // flag to control memory timing calculation. bit0= control EMRS2 Infineon
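Each asMemTiming[] array later in this file is sorted from lower to higher clock, and ulClkRange is the memory clock (in 10 kHz units) below which that timing set applies. A selection sketch under that reading (illustrative only, not a driver routine):

#include <stddef.h>

/* Sketch only: pick the first timing entry whose ulClkRange covers the
 * target memory clock (10 kHz units); entries are sorted low to high. */
static const ATOM_MEMORY_TIMING_FORMAT *
example_pick_timing(const ATOM_MEMORY_TIMING_FORMAT *timings,
                    int count, unsigned long target_clk_10khz)
{
        int i;

        for (i = 0; i < count; i++)
                if (target_clk_10khz <= timings[i].ulClkRange)
                        return &timings[i];

        return NULL;   /* no entry covers the requested clock */
}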
3858typedef struct _ATOM_MEMORY_TIMING_FORMAT_V1 { 4677 UCHAR ucReserved;
3859 ULONG ulClkRange; /* memory clock in 10kHz unit, when target memory clock is below this clock, use this memory timing */ 4678 };
3860 USHORT usMRS; /* mode register */ 4679 USHORT usDDR3_MR2;
3861 USHORT usEMRS; /* extended mode register */ 4680 };
3862 UCHAR ucCL; /* CAS latency */ 4681}ATOM_MEMORY_TIMING_FORMAT;
3863 UCHAR ucWL; /* WRITE Latency */ 4682
3864 UCHAR uctRAS; /* tRAS */ 4683
3865 UCHAR uctRC; /* tRC */ 4684typedef struct _ATOM_MEMORY_TIMING_FORMAT_V1
3866 UCHAR uctRFC; /* tRFC */ 4685{
3867 UCHAR uctRCDR; /* tRCDR */ 4686 ULONG ulClkRange; // memory clock in 10kHz unit, when target memory clock is below this clock, use this memory timing
3868 UCHAR uctRCDW; /* tRCDW */ 4687 USHORT usMRS; // mode register
3869 UCHAR uctRP; /* tRP */ 4688 USHORT usEMRS; // extended mode register
3870 UCHAR uctRRD; /* tRRD */ 4689 UCHAR ucCL; // CAS latency
3871 UCHAR uctWR; /* tWR */ 4690 UCHAR ucWL; // WRITE Latency
3872 UCHAR uctWTR; /* tWTR */ 4691 UCHAR uctRAS; // tRAS
3873 UCHAR uctPDIX; /* tPDIX */ 4692 UCHAR uctRC; // tRC
3874 UCHAR uctFAW; /* tFAW */ 4693 UCHAR uctRFC; // tRFC
3875 UCHAR uctAOND; /* tAOND */ 4694 UCHAR uctRCDR; // tRCDR
3876 UCHAR ucflag; /* flag to control memory timing calculation. bit0= control EMRS2 Infineon */ 4695 UCHAR uctRCDW; // tRCDW
3877/* ///////////////////////GDDR parameters/////////////////////////////////// */ 4696 UCHAR uctRP; // tRP
3878 UCHAR uctCCDL; /* */ 4697 UCHAR uctRRD; // tRRD
3879 UCHAR uctCRCRL; /* */ 4698 UCHAR uctWR; // tWR
3880 UCHAR uctCRCWL; /* */ 4699 UCHAR uctWTR; // tWTR
3881 UCHAR uctCKE; /* */ 4700 UCHAR uctPDIX; // tPDIX
3882 UCHAR uctCKRSE; /* */ 4701 UCHAR uctFAW; // tFAW
3883 UCHAR uctCKRSX; /* */ 4702 UCHAR uctAOND; // tAOND
3884 UCHAR uctFAW32; /* */ 4703 UCHAR ucflag; // flag to control memory timing calculation. bit0= control EMRS2 Infineon
3885 UCHAR ucReserved1; /* */ 4704////////////////////////////////////GDDR parameters///////////////////////////////////
3886 UCHAR ucReserved2; /* */ 4705 UCHAR uctCCDL; //
3887 UCHAR ucTerminator; 4706 UCHAR uctCRCRL; //
3888} ATOM_MEMORY_TIMING_FORMAT_V1; 4707 UCHAR uctCRCWL; //
3889 4708 UCHAR uctCKE; //
3890typedef struct _ATOM_MEMORY_FORMAT { 4709 UCHAR uctCKRSE; //
3891 ULONG ulDllDisClock; /* memory DLL will be disable when target memory clock is below this clock */ 4710 UCHAR uctCKRSX; //
3892 union { 4711 UCHAR uctFAW32; //
3893 USHORT usEMRS2Value; /* EMRS2 Value is used for GDDR2 and GDDR4 memory type */ 4712 UCHAR ucMR5lo; //
3894 USHORT usDDR3_Reserved; /* Not used for DDR3 memory */ 4713 UCHAR ucMR5hi; //
3895 }; 4714 UCHAR ucTerminator;
3896 union { 4715}ATOM_MEMORY_TIMING_FORMAT_V1;
3897 USHORT usEMRS3Value; /* EMRS3 Value is used for GDDR2 and GDDR4 memory type */ 4716
3898 USHORT usDDR3_MR3; /* Used for DDR3 memory */ 4717typedef struct _ATOM_MEMORY_TIMING_FORMAT_V2
3899 }; 4718{
3900 UCHAR ucMemoryType; /* [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4;[3:0] - must not be used for now; */ 4719 ULONG ulClkRange; // memory clock in 10kHz unit, when target memory clock is below this clock, use this memory timing
3901 UCHAR ucMemoryVenderID; /* Predefined,never change across designs or memory type/vender. If not predefined, vendor detection table gets executed */ 4720 USHORT usMRS; // mode register
3902 UCHAR ucRow; /* Number of Row,in power of 2; */ 4721 USHORT usEMRS; // extended mode register
3903 UCHAR ucColumn; /* Number of Column,in power of 2; */ 4722 UCHAR ucCL; // CAS latency
3904 UCHAR ucBank; /* Nunber of Bank; */ 4723 UCHAR ucWL; // WRITE Latency
3905 UCHAR ucRank; /* Number of Rank, in power of 2 */ 4724 UCHAR uctRAS; // tRAS
3906 UCHAR ucBurstSize; /* burst size, 0= burst size=4 1= burst size=8 */ 4725 UCHAR uctRC; // tRC
3907 UCHAR ucDllDisBit; /* position of DLL Enable/Disable bit in EMRS ( Extended Mode Register ) */ 4726 UCHAR uctRFC; // tRFC
3908 UCHAR ucRefreshRateFactor; /* memory refresh rate in unit of ms */ 4727 UCHAR uctRCDR; // tRCDR
3909 UCHAR ucDensity; /* _8Mx32, _16Mx32, _16Mx16, _32Mx16 */ 4728 UCHAR uctRCDW; // tRCDW
3910 UCHAR ucPreamble; /* [7:4] Write Preamble, [3:0] Read Preamble */ 4729 UCHAR uctRP; // tRP
3911 UCHAR ucMemAttrib; /* Memory Device Addribute, like RDBI/WDBI etc */ 4730 UCHAR uctRRD; // tRRD
3912 ATOM_MEMORY_TIMING_FORMAT asMemTiming[5]; /* Memory Timing block sort from lower clock to higher clock */ 4731 UCHAR uctWR; // tWR
3913} ATOM_MEMORY_FORMAT; 4732 UCHAR uctWTR; // tWTR
3914 4733 UCHAR uctPDIX; // tPDIX
3915typedef struct _ATOM_VRAM_MODULE_V3 { 4734 UCHAR uctFAW; // tFAW
3916 ULONG ulChannelMapCfg; /* board dependent paramenter:Channel combination */ 4735 UCHAR uctAOND; // tAOND
3917 USHORT usSize; /* size of ATOM_VRAM_MODULE_V3 */ 4736 UCHAR ucflag; // flag to control memory timing calculation. bit0= control EMRS2 Infineon
3918 USHORT usDefaultMVDDQ; /* board dependent parameter:Default Memory Core Voltage */ 4737////////////////////////////////////GDDR parameters///////////////////////////////////
3919 USHORT usDefaultMVDDC; /* board dependent parameter:Default Memory IO Voltage */ 4738 UCHAR uctCCDL; //
3920 UCHAR ucExtMemoryID; /* An external indicator (by hardcode, callback or pin) to tell what is the current memory module */ 4739 UCHAR uctCRCRL; //
3921 UCHAR ucChannelNum; /* board dependent parameter:Number of channel; */ 4740 UCHAR uctCRCWL; //
3922 UCHAR ucChannelSize; /* board dependent parameter:32bit or 64bit */ 4741 UCHAR uctCKE; //
3923 UCHAR ucVREFI; /* board dependnt parameter: EXT or INT +160mv to -140mv */ 4742 UCHAR uctCKRSE; //
3924 UCHAR ucNPL_RT; /* board dependent parameter:NPL round trip delay, used for calculate memory timing parameters */ 4743 UCHAR uctCKRSX; //
3925 UCHAR ucFlag; /* To enable/disable functionalities based on memory type */ 4744 UCHAR uctFAW32; //
3926 ATOM_MEMORY_FORMAT asMemory; /* describ all of video memory parameters from memory spec */ 4745 UCHAR ucMR4lo; //
3927} ATOM_VRAM_MODULE_V3; 4746 UCHAR ucMR4hi; //
3928 4747 UCHAR ucMR5lo; //
3929/* ATOM_VRAM_MODULE_V3.ucNPL_RT */ 4748 UCHAR ucMR5hi; //
4749 UCHAR ucTerminator;
4750 UCHAR ucReserved;
4751}ATOM_MEMORY_TIMING_FORMAT_V2;
4752
4753typedef struct _ATOM_MEMORY_FORMAT
4754{
4755 ULONG ulDllDisClock; // memory DLL will be disable when target memory clock is below this clock
4756 union{
4757 USHORT usEMRS2Value; // EMRS2 Value is used for GDDR2 and GDDR4 memory type
4758 USHORT usDDR3_Reserved; // Not used for DDR3 memory
4759 };
4760 union{
4761 USHORT usEMRS3Value; // EMRS3 Value is used for GDDR2 and GDDR4 memory type
4762 USHORT usDDR3_MR3; // Used for DDR3 memory
4763 };
4764 UCHAR ucMemoryType; // [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4;[3:0] - must not be used for now;
4765 UCHAR ucMemoryVenderID; // Predefined,never change across designs or memory type/vender. If not predefined, vendor detection table gets executed
4766 UCHAR ucRow; // Number of Row,in power of 2;
4767 UCHAR ucColumn; // Number of Column,in power of 2;
4768 UCHAR ucBank; // Nunber of Bank;
4769 UCHAR ucRank; // Number of Rank, in power of 2
4770 UCHAR ucBurstSize; // burst size, 0= burst size=4 1= burst size=8
4771 UCHAR ucDllDisBit; // position of DLL Enable/Disable bit in EMRS ( Extended Mode Register )
4772 UCHAR ucRefreshRateFactor; // memory refresh rate in unit of ms
4773 UCHAR ucDensity; // _8Mx32, _16Mx32, _16Mx16, _32Mx16
4774 UCHAR ucPreamble; //[7:4] Write Preamble, [3:0] Read Preamble
4775 UCHAR ucMemAttrib; // Memory Device Addribute, like RDBI/WDBI etc
4776 ATOM_MEMORY_TIMING_FORMAT asMemTiming[5]; //Memory Timing block sort from lower clock to higher clock
4777}ATOM_MEMORY_FORMAT;
4778
4779
4780typedef struct _ATOM_VRAM_MODULE_V3
4781{
4782 ULONG ulChannelMapCfg; // board dependent paramenter:Channel combination
4783 USHORT usSize; // size of ATOM_VRAM_MODULE_V3
4784 USHORT usDefaultMVDDQ; // board dependent parameter:Default Memory Core Voltage
4785 USHORT usDefaultMVDDC; // board dependent parameter:Default Memory IO Voltage
4786 UCHAR ucExtMemoryID; // An external indicator (by hardcode, callback or pin) to tell what is the current memory module
4787 UCHAR ucChannelNum; // board dependent parameter:Number of channel;
4788 UCHAR ucChannelSize; // board dependent parameter:32bit or 64bit
4789 UCHAR ucVREFI; // board dependnt parameter: EXT or INT +160mv to -140mv
4790 UCHAR ucNPL_RT; // board dependent parameter:NPL round trip delay, used for calculate memory timing parameters
4791 UCHAR ucFlag; // To enable/disable functionalities based on memory type
4792 ATOM_MEMORY_FORMAT asMemory; // describ all of video memory parameters from memory spec
4793}ATOM_VRAM_MODULE_V3;
4794
4795
4796//ATOM_VRAM_MODULE_V3.ucNPL_RT
3930#define NPL_RT_MASK 0x0f 4797#define NPL_RT_MASK 0x0f
3931#define BATTERY_ODT_MASK 0xc0 4798#define BATTERY_ODT_MASK 0xc0
3932 4799
3933#define ATOM_VRAM_MODULE ATOM_VRAM_MODULE_V3 4800#define ATOM_VRAM_MODULE ATOM_VRAM_MODULE_V3
3934 4801
3935typedef struct _ATOM_VRAM_MODULE_V4 { 4802typedef struct _ATOM_VRAM_MODULE_V4
3936 ULONG ulChannelMapCfg; /* board dependent parameter: Channel combination */ 4803{
3937 USHORT usModuleSize; /* size of ATOM_VRAM_MODULE_V4, make it easy for VBIOS to look for next entry of VRAM_MODULE */ 4804 ULONG ulChannelMapCfg; // board dependent parameter: Channel combination
3938 USHORT usPrivateReserved; /* BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!! */ 4805 USHORT usModuleSize; // size of ATOM_VRAM_MODULE_V4, make it easy for VBIOS to look for next entry of VRAM_MODULE
3939 /* MC_ARB_RAMCFG (includes NOOFBANK,NOOFRANKS,NOOFROWS,NOOFCOLS) */ 4806 USHORT usPrivateReserved; // BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!!
3940 USHORT usReserved; 4807 // MC_ARB_RAMCFG (includes NOOFBANK,NOOFRANKS,NOOFROWS,NOOFCOLS)
3941 UCHAR ucExtMemoryID; /* An external indicator (by hardcode, callback or pin) to tell what is the current memory module */ 4808 USHORT usReserved;
3942 UCHAR ucMemoryType; /* [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4; 0x5:DDR5 [3:0] - Must be 0x0 for now; */ 4809 UCHAR ucExtMemoryID; // An external indicator (by hardcode, callback or pin) to tell what is the current memory module
3943 UCHAR ucChannelNum; /* Number of channels present in this module config */ 4810 UCHAR ucMemoryType; // [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4; 0x5:DDR5 [3:0] - Must be 0x0 for now;
3944 UCHAR ucChannelWidth; /* 0 - 32 bits; 1 - 64 bits */ 4811 UCHAR ucChannelNum; // Number of channels present in this module config
3945 UCHAR ucDensity; /* _8Mx32, _16Mx32, _16Mx16, _32Mx16 */ 4812 UCHAR ucChannelWidth; // 0 - 32 bits; 1 - 64 bits
3946 UCHAR ucFlag; /* To enable/disable functionalities based on memory type */ 4813 UCHAR ucDensity; // _8Mx32, _16Mx32, _16Mx16, _32Mx16
3947 UCHAR ucMisc; /* bit0: 0 - single rank; 1 - dual rank; bit2: 0 - burstlength 4, 1 - burstlength 8 */ 4814 UCHAR ucFlag; // To enable/disable functionalities based on memory type
3948 UCHAR ucVREFI; /* board dependent parameter */ 4815 UCHAR ucMisc; // bit0: 0 - single rank; 1 - dual rank; bit2: 0 - burstlength 4, 1 - burstlength 8
3949 UCHAR ucNPL_RT; /* board dependent parameter:NPL round trip delay, used for calculate memory timing parameters */ 4816 UCHAR ucVREFI; // board dependent parameter
3950 UCHAR ucPreamble; /* [7:4] Write Preamble, [3:0] Read Preamble */ 4817 UCHAR ucNPL_RT; // board dependent parameter:NPL round trip delay, used for calculate memory timing parameters
3951 UCHAR ucMemorySize; /* BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!! */ 4818 UCHAR ucPreamble; // [7:4] Write Preamble, [3:0] Read Preamble
3952 /* Total memory size in unit of 16MB for CONFIG_MEMSIZE - bit[23:0] zeros */ 4819 UCHAR ucMemorySize; // BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!!
3953 UCHAR ucReserved[3]; 4820 // Total memory size in unit of 16MB for CONFIG_MEMSIZE - bit[23:0] zeros
3954 4821 UCHAR ucReserved[3];
3955/* compare with V3, we flat the struct by merging ATOM_MEMORY_FORMAT (as is) into V4 as the same level */ 4822
3956 union { 4823//compare with V3, we flat the struct by merging ATOM_MEMORY_FORMAT (as is) into V4 as the same level
3957 USHORT usEMRS2Value; /* EMRS2 Value is used for GDDR2 and GDDR4 memory type */ 4824 union{
3958 USHORT usDDR3_Reserved; 4825 USHORT usEMRS2Value; // EMRS2 Value is used for GDDR2 and GDDR4 memory type
3959 }; 4826 USHORT usDDR3_Reserved;
3960 union { 4827 };
3961 USHORT usEMRS3Value; /* EMRS3 Value is used for GDDR2 and GDDR4 memory type */ 4828 union{
3962 USHORT usDDR3_MR3; /* Used for DDR3 memory */ 4829 USHORT usEMRS3Value; // EMRS3 Value is used for GDDR2 and GDDR4 memory type
3963 }; 4830 USHORT usDDR3_MR3; // Used for DDR3 memory
3964 UCHAR ucMemoryVenderID; /* Predefined, If not predefined, vendor detection table gets executed */ 4831 };
3965 UCHAR ucRefreshRateFactor; /* [1:0]=RefreshFactor (00=8ms, 01=16ms, 10=32ms,11=64ms) */ 4832 UCHAR ucMemoryVenderID; // Predefined, If not predefined, vendor detection table gets executed
3966 UCHAR ucReserved2[2]; 4833 UCHAR ucRefreshRateFactor; // [1:0]=RefreshFactor (00=8ms, 01=16ms, 10=32ms,11=64ms)
3967 ATOM_MEMORY_TIMING_FORMAT asMemTiming[5]; /* Memory Timing block sort from lower clock to higher clock */ 4834 UCHAR ucReserved2[2];
3968} ATOM_VRAM_MODULE_V4; 4835 ATOM_MEMORY_TIMING_FORMAT asMemTiming[5];//Memory Timing block sort from lower clock to higher clock
4836}ATOM_VRAM_MODULE_V4;
3969 4837
3970#define VRAM_MODULE_V4_MISC_RANK_MASK 0x3 4838#define VRAM_MODULE_V4_MISC_RANK_MASK 0x3
3971#define VRAM_MODULE_V4_MISC_DUAL_RANK 0x1 4839#define VRAM_MODULE_V4_MISC_DUAL_RANK 0x1
@@ -3973,96 +4841,139 @@ typedef struct _ATOM_VRAM_MODULE_V4 {
3973#define VRAM_MODULE_V4_MISC_BL8 0x4 4841#define VRAM_MODULE_V4_MISC_BL8 0x4
3974#define VRAM_MODULE_V4_MISC_DUAL_CS 0x10 4842#define VRAM_MODULE_V4_MISC_DUAL_CS 0x10
3975 4843
typedef struct _ATOM_VRAM_MODULE_V5
{
  ULONG   ulChannelMapCfg;               // board dependent parameter: Channel combination
  USHORT  usModuleSize;                  // size of ATOM_VRAM_MODULE_V4, make it easy for VBIOS to look for next entry of VRAM_MODULE
  USHORT  usPrivateReserved;             // BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!!
                                         // MC_ARB_RAMCFG (includes NOOFBANK,NOOFRANKS,NOOFROWS,NOOFCOLS)
  USHORT  usReserved;
  UCHAR   ucExtMemoryID;                 // An external indicator (by hardcode, callback or pin) to tell what is the current memory module
  UCHAR   ucMemoryType;                  // [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4; 0x5:DDR5 [3:0] - Must be 0x0 for now;
  UCHAR   ucChannelNum;                  // Number of channels present in this module config
  UCHAR   ucChannelWidth;                // 0 - 32 bits; 1 - 64 bits
  UCHAR   ucDensity;                     // _8Mx32, _16Mx32, _16Mx16, _32Mx16
  UCHAR   ucFlag;                        // To enable/disable functionalities based on memory type
  UCHAR   ucMisc;                        // bit0: 0 - single rank; 1 - dual rank;  bit2: 0 - burstlength 4, 1 - burstlength 8
  UCHAR   ucVREFI;                       // board dependent parameter
  UCHAR   ucNPL_RT;                      // board dependent parameter: NPL round trip delay, used to calculate memory timing parameters
  UCHAR   ucPreamble;                    // [7:4] Write Preamble, [3:0] Read Preamble
  UCHAR   ucMemorySize;                  // BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!!
                                         // Total memory size in unit of 16MB for CONFIG_MEMSIZE - bit[23:0] zeros
  UCHAR   ucReserved[3];

  // Compared with V3, the struct is flattened by merging ATOM_MEMORY_FORMAT (as is) into V4 at the same level
  USHORT  usEMRS2Value;                  // EMRS2 Value is used for GDDR2 and GDDR4 memory type
  USHORT  usEMRS3Value;                  // EMRS3 Value is used for GDDR2 and GDDR4 memory type
  UCHAR   ucMemoryVenderID;              // Predefined; if not predefined, the vendor detection table gets executed
  UCHAR   ucRefreshRateFactor;           // [1:0]=RefreshFactor (00=8ms, 01=16ms, 10=32ms, 11=64ms)
  UCHAR   ucFIFODepth;                   // FIFO depth is supposed to be detected during vendor detection, but if we don't do vendor detection we have to hardcode the FIFO depth
  UCHAR   ucCDR_Bandwidth;               // [0:3]=Read CDR bandwidth, [4:7] - Write CDR Bandwidth
  ATOM_MEMORY_TIMING_FORMAT_V1 asMemTiming[5]; // Memory Timing block, sorted from lower clock to higher clock
}ATOM_VRAM_MODULE_V5;
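// --- Editor's illustrative sketch (not part of the original header) ---------
// How a driver might decode the packed ATOM_VRAM_MODULE_V5 fields, using only
// the bit layouts documented in the comments above and the
// VRAM_MODULE_V4_MISC_* masks; the helper name is hypothetical.
static void example_decode_vram_module_v5(const ATOM_VRAM_MODULE_V5 *mod)
{
  UCHAR mem_type  = (mod->ucMemoryType >> 4) & 0xF;                  // [7:4]: 1=DDR1 ... 5=DDR5
  int   dual_rank = (mod->ucMisc & VRAM_MODULE_V4_MISC_RANK_MASK)
                    == VRAM_MODULE_V4_MISC_DUAL_RANK;                // bit0: rank count
  int   burst_len = (mod->ucMisc & VRAM_MODULE_V4_MISC_BL8) ? 8 : 4; // bit2: burst length
  int   bus_width = mod->ucChannelNum * (mod->ucChannelWidth ? 64 : 32);
  (void)mem_type; (void)dual_rank; (void)burst_len; (void)bus_width;
}
// ----------------------------------------------------------------------------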

typedef struct _ATOM_VRAM_MODULE_V6
{
  ULONG   ulChannelMapCfg;               // board dependent parameter: Channel combination
  USHORT  usModuleSize;                  // size of ATOM_VRAM_MODULE_V4, make it easy for VBIOS to look for next entry of VRAM_MODULE
  USHORT  usPrivateReserved;             // BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!!
                                         // MC_ARB_RAMCFG (includes NOOFBANK,NOOFRANKS,NOOFROWS,NOOFCOLS)
  USHORT  usReserved;
  UCHAR   ucExtMemoryID;                 // An external indicator (by hardcode, callback or pin) to tell what is the current memory module
  UCHAR   ucMemoryType;                  // [7:4]=0x1:DDR1;=0x2:DDR2;=0x3:DDR3;=0x4:DDR4; 0x5:DDR5 [3:0] - Must be 0x0 for now;
  UCHAR   ucChannelNum;                  // Number of channels present in this module config
  UCHAR   ucChannelWidth;                // 0 - 32 bits; 1 - 64 bits
  UCHAR   ucDensity;                     // _8Mx32, _16Mx32, _16Mx16, _32Mx16
  UCHAR   ucFlag;                        // To enable/disable functionalities based on memory type
  UCHAR   ucMisc;                        // bit0: 0 - single rank; 1 - dual rank;  bit2: 0 - burstlength 4, 1 - burstlength 8
  UCHAR   ucVREFI;                       // board dependent parameter
  UCHAR   ucNPL_RT;                      // board dependent parameter: NPL round trip delay, used to calculate memory timing parameters
  UCHAR   ucPreamble;                    // [7:4] Write Preamble, [3:0] Read Preamble
  UCHAR   ucMemorySize;                  // BIOS internal reserved space to optimize code size, updated by the compiler, shouldn't be modified manually!!
                                         // Total memory size in unit of 16MB for CONFIG_MEMSIZE - bit[23:0] zeros
  UCHAR   ucReserved[3];

  // Compared with V3, the struct is flattened by merging ATOM_MEMORY_FORMAT (as is) into V4 at the same level
  USHORT  usEMRS2Value;                  // EMRS2 Value is used for GDDR2 and GDDR4 memory type
  USHORT  usEMRS3Value;                  // EMRS3 Value is used for GDDR2 and GDDR4 memory type
  UCHAR   ucMemoryVenderID;              // Predefined; if not predefined, the vendor detection table gets executed
  UCHAR   ucRefreshRateFactor;           // [1:0]=RefreshFactor (00=8ms, 01=16ms, 10=32ms, 11=64ms)
  UCHAR   ucFIFODepth;                   // FIFO depth is supposed to be detected during vendor detection, but if we don't do vendor detection we have to hardcode the FIFO depth
  UCHAR   ucCDR_Bandwidth;               // [0:3]=Read CDR bandwidth, [4:7] - Write CDR Bandwidth
  ATOM_MEMORY_TIMING_FORMAT_V2 asMemTiming[5]; // Memory Timing block, sorted from lower clock to higher clock
}ATOM_VRAM_MODULE_V6;


typedef struct _ATOM_VRAM_INFO_V2
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  UCHAR                    ucNumOfVRAMModule;
  ATOM_VRAM_MODULE         aVramInfo[ATOM_MAX_NUMBER_OF_VRAM_MODULE]; // just for allocation, real number of blocks is in ucNumOfVRAMModule;
}ATOM_VRAM_INFO_V2;

typedef struct _ATOM_VRAM_INFO_V3
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  USHORT                   usMemAdjustTblOffset;   // offset of ATOM_INIT_REG_BLOCK structure for memory vendor specific MC adjust setting
  USHORT                   usMemClkPatchTblOffset; // offset of ATOM_INIT_REG_BLOCK structure for memory clock specific MC setting
  USHORT                   usRerseved;
  UCHAR                    aVID_PinsShift[9];      // 8 bit strap maximum+terminator
  UCHAR                    ucNumOfVRAMModule;
  ATOM_VRAM_MODULE         aVramInfo[ATOM_MAX_NUMBER_OF_VRAM_MODULE]; // just for allocation, real number of blocks is in ucNumOfVRAMModule;
  ATOM_INIT_REG_BLOCK      asMemPatch;             // for allocation
                                                   // ATOM_INIT_REG_BLOCK aMemAdjust;
}ATOM_VRAM_INFO_V3;

#define ATOM_VRAM_INFO_LAST          ATOM_VRAM_INFO_V3

typedef struct _ATOM_VRAM_INFO_V4
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  USHORT                   usMemAdjustTblOffset;   // offset of ATOM_INIT_REG_BLOCK structure for memory vendor specific MC adjust setting
  USHORT                   usMemClkPatchTblOffset; // offset of ATOM_INIT_REG_BLOCK structure for memory clock specific MC setting
  USHORT                   usRerseved;
  UCHAR                    ucMemDQ7_0ByteRemap;    // DQ line byte remap, =0: Memory Data line BYTE0, =1: BYTE1, =2: BYTE2, =3: BYTE3
  ULONG                    ulMemDQ7_0BitRemap;     // each DQ line ( 7~0) use 3bits, like: DQ0=Bit[2:0], DQ1:[5:3], ... DQ7:[23:21]
  UCHAR                    ucReservde[4];
  UCHAR                    ucNumOfVRAMModule;
  ATOM_VRAM_MODULE_V4      aVramInfo[ATOM_MAX_NUMBER_OF_VRAM_MODULE]; // just for allocation, real number of blocks is in ucNumOfVRAMModule;
  ATOM_INIT_REG_BLOCK      asMemPatch;             // for allocation
                                                   // ATOM_INIT_REG_BLOCK aMemAdjust;
}ATOM_VRAM_INFO_V4;
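// --- Editor's illustrative sketch (not part of the original header) ---------
// aVramInfo[] is declared with a fixed bound "just for allocation"; the real
// entries are variable length, so a parser is expected to advance by
// usModuleSize, as the member comments note. Hypothetical helper; assumes the
// table pointer has already been validated against the table header size.
static const ATOM_VRAM_MODULE_V4 *
example_vram_module_at(const ATOM_VRAM_INFO_V4 *info, UCHAR index)
{
  const UCHAR *p = (const UCHAR *)&info->aVramInfo[0];
  UCHAR i;

  if (index >= info->ucNumOfVRAMModule)
    return 0;                                   // no such module entry
  for (i = 0; i < index; i++)
    p += ((const ATOM_VRAM_MODULE_V4 *)p)->usModuleSize;
  return (const ATOM_VRAM_MODULE_V4 *)p;
}
// ----------------------------------------------------------------------------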

typedef struct _ATOM_VRAM_GPIO_DETECTION_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  UCHAR                    aVID_PinsShift[9];      // 8 bit strap maximum+terminator
}ATOM_VRAM_GPIO_DETECTION_INFO;


typedef struct _ATOM_MEMORY_TRAINING_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  UCHAR                    ucTrainingLoop;
  UCHAR                    ucReserved[3];
  ATOM_INIT_REG_BLOCK      asMemTrainingSetting;
}ATOM_MEMORY_TRAINING_INFO;


typedef struct SW_I2C_CNTL_DATA_PARAMETERS
{
  UCHAR   ucControl;
  UCHAR   ucData;
  UCHAR   ucSatus;
  UCHAR   ucTemp;
} SW_I2C_CNTL_DATA_PARAMETERS;

#define SW_I2C_CNTL_DATA_PS_ALLOCATION  SW_I2C_CNTL_DATA_PARAMETERS

typedef struct _SW_I2C_IO_DATA_PARAMETERS
{
  USHORT  GPIO_Info;
  UCHAR   ucAct;
  UCHAR   ucData;
} SW_I2C_IO_DATA_PARAMETERS;

#define SW_I2C_IO_DATA_PS_ALLOCATION  SW_I2C_IO_DATA_PARAMETERS

@@ -4087,127 +4998,136 @@ typedef struct _SW_I2C_IO_DATA_PARAMETERS {
#define SW_I2C_CNTL_CLOSE     5
#define SW_I2C_CNTL_WRITE1BIT 6

//==============================VESA definition Portion===============================
#define VESA_OEM_PRODUCT_REV                     '01.00'
#define VESA_MODE_ATTRIBUTE_MODE_SUPPORT         0xBB       // refer to VBE spec p.32, no TTY support
#define VESA_MODE_WIN_ATTRIBUTE                  7
#define VESA_WIN_SIZE                            64

typedef struct _PTR_32_BIT_STRUCTURE
{
  USHORT  Offset16;
  USHORT  Segment16;
} PTR_32_BIT_STRUCTURE;

typedef union _PTR_32_BIT_UNION
{
  PTR_32_BIT_STRUCTURE  SegmentOffset;
  ULONG                 Ptr32_Bit;
} PTR_32_BIT_UNION;
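// --- Editor's illustrative sketch (not part of the original header) ---------
// A VBE real-mode pointer stored as segment:offset translates to a linear
// address as (segment << 4) + offset; the helper name is hypothetical.
static ULONG example_vbe_ptr_to_linear(PTR_32_BIT_UNION p)
{
  return ((ULONG)p.SegmentOffset.Segment16 << 4) + p.SegmentOffset.Offset16;
}
// ----------------------------------------------------------------------------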

typedef struct _VBE_1_2_INFO_BLOCK_UPDATABLE
{
  UCHAR             VbeSignature[4];
  USHORT            VbeVersion;
  PTR_32_BIT_UNION  OemStringPtr;
  UCHAR             Capabilities[4];
  PTR_32_BIT_UNION  VideoModePtr;
  USHORT            TotalMemory;
} VBE_1_2_INFO_BLOCK_UPDATABLE;


typedef struct _VBE_2_0_INFO_BLOCK_UPDATABLE
{
  VBE_1_2_INFO_BLOCK_UPDATABLE  CommonBlock;
  USHORT                        OemSoftRev;
  PTR_32_BIT_UNION              OemVendorNamePtr;
  PTR_32_BIT_UNION              OemProductNamePtr;
  PTR_32_BIT_UNION              OemProductRevPtr;
} VBE_2_0_INFO_BLOCK_UPDATABLE;

typedef union _VBE_VERSION_UNION
{
  VBE_2_0_INFO_BLOCK_UPDATABLE  VBE_2_0_InfoBlock;
  VBE_1_2_INFO_BLOCK_UPDATABLE  VBE_1_2_InfoBlock;
} VBE_VERSION_UNION;

typedef struct _VBE_INFO_BLOCK
{
  VBE_VERSION_UNION  UpdatableVBE_Info;
  UCHAR              Reserved[222];
  UCHAR              OemData[256];
} VBE_INFO_BLOCK;

typedef struct _VBE_FP_INFO
{
  USHORT  HSize;
  USHORT  VSize;
  USHORT  FPType;
  UCHAR   RedBPP;
  UCHAR   GreenBPP;
  UCHAR   BlueBPP;
  UCHAR   ReservedBPP;
  ULONG   RsvdOffScrnMemSize;
  ULONG   RsvdOffScrnMEmPtr;
  UCHAR   Reserved[14];
} VBE_FP_INFO;

typedef struct _VESA_MODE_INFO_BLOCK
{
// Mandatory information for all VBE revisions
  USHORT  ModeAttributes;          // dw ?   ; mode attributes
  UCHAR   WinAAttributes;          // db ?   ; window A attributes
  UCHAR   WinBAttributes;          // db ?   ; window B attributes
  USHORT  WinGranularity;          // dw ?   ; window granularity
  USHORT  WinSize;                 // dw ?   ; window size
  USHORT  WinASegment;             // dw ?   ; window A start segment
  USHORT  WinBSegment;             // dw ?   ; window B start segment
  ULONG   WinFuncPtr;              // dd ?   ; real mode pointer to window function
  USHORT  BytesPerScanLine;        // dw ?   ; bytes per scan line

// Mandatory information for VBE 1.2 and above
  USHORT  XResolution;             // dw ?   ; horizontal resolution in pixels or characters
  USHORT  YResolution;             // dw ?   ; vertical resolution in pixels or characters
  UCHAR   XCharSize;               // db ?   ; character cell width in pixels
  UCHAR   YCharSize;               // db ?   ; character cell height in pixels
  UCHAR   NumberOfPlanes;          // db ?   ; number of memory planes
  UCHAR   BitsPerPixel;            // db ?   ; bits per pixel
  UCHAR   NumberOfBanks;           // db ?   ; number of banks
  UCHAR   MemoryModel;             // db ?   ; memory model type
  UCHAR   BankSize;                // db ?   ; bank size in KB
  UCHAR   NumberOfImagePages;      // db ?   ; number of images
  UCHAR   ReservedForPageFunction; // db 1   ; reserved for page function

// Direct Color fields (required for direct/6 and YUV/7 memory models)
  UCHAR   RedMaskSize;             // db ?   ; size of direct color red mask in bits
  UCHAR   RedFieldPosition;        // db ?   ; bit position of lsb of red mask
  UCHAR   GreenMaskSize;           // db ?   ; size of direct color green mask in bits
  UCHAR   GreenFieldPosition;      // db ?   ; bit position of lsb of green mask
  UCHAR   BlueMaskSize;            // db ?   ; size of direct color blue mask in bits
  UCHAR   BlueFieldPosition;       // db ?   ; bit position of lsb of blue mask
  UCHAR   RsvdMaskSize;            // db ?   ; size of direct color reserved mask in bits
  UCHAR   RsvdFieldPosition;       // db ?   ; bit position of lsb of reserved mask
  UCHAR   DirectColorModeInfo;     // db ?   ; direct color mode attributes

// Mandatory information for VBE 2.0 and above
  ULONG   PhysBasePtr;             // dd ?   ; physical address for flat memory frame buffer
  ULONG   Reserved_1;              // dd 0   ; reserved - always set to 0
  USHORT  Reserved_2;              // dw 0   ; reserved - always set to 0

// Mandatory information for VBE 3.0 and above
  USHORT  LinBytesPerScanLine;     // dw ?   ; bytes per scan line for linear modes
  UCHAR   BnkNumberOfImagePages;   // db ?   ; number of images for banked modes
  UCHAR   LinNumberOfImagPages;    // db ?   ; number of images for linear modes
  UCHAR   LinRedMaskSize;          // db ?   ; size of direct color red mask (linear modes)
  UCHAR   LinRedFieldPosition;     // db ?   ; bit position of lsb of red mask (linear modes)
  UCHAR   LinGreenMaskSize;        // db ?   ; size of direct color green mask (linear modes)
  UCHAR   LinGreenFieldPosition;   // db ?   ; bit position of lsb of green mask (linear modes)
  UCHAR   LinBlueMaskSize;         // db ?   ; size of direct color blue mask (linear modes)
  UCHAR   LinBlueFieldPosition;    // db ?   ; bit position of lsb of blue mask (linear modes)
  UCHAR   LinRsvdMaskSize;         // db ?   ; size of direct color reserved mask (linear modes)
  UCHAR   LinRsvdFieldPosition;    // db ?   ; bit position of lsb of reserved mask (linear modes)
  ULONG   MaxPixelClock;           // dd ?   ; maximum pixel clock (in Hz) for graphics mode
  UCHAR   Reserved;                // db 190 dup (0)
} VESA_MODE_INFO_BLOCK;
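// --- Editor's illustrative sketch (not part of the original header) ---------
// Rough size of the visible scanout for a linear framebuffer mode: pitch times
// vertical resolution. VBE 3.0 reports a separate linear pitch; falling back to
// BytesPerScanLine for older mode blocks is an assumption of this sketch.
static ULONG example_vesa_linear_fb_bytes(const VESA_MODE_INFO_BLOCK *m)
{
  ULONG pitch = m->LinBytesPerScanLine ? m->LinBytesPerScanLine
                                       : m->BytesPerScanLine;
  return pitch * (ULONG)m->YResolution;
}
// ----------------------------------------------------------------------------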

// BIOS function CALLS
#define ATOM_BIOS_EXTENDED_FUNCTION_CODE        0xA0     // ATI Extended Function code
#define ATOM_BIOS_FUNCTION_COP_MODE             0x00
#define ATOM_BIOS_FUNCTION_SHORT_QUERY1         0x04
#define ATOM_BIOS_FUNCTION_SHORT_QUERY2         0x05
#define ATOM_BIOS_FUNCTION_SHORT_QUERY3         0x06
#define ATOM_BIOS_FUNCTION_GET_DDC              0x0B
#define ATOM_BIOS_FUNCTION_ASIC_DSTATE          0x0E
#define ATOM_BIOS_FUNCTION_DEBUG_PLAY           0x0F
#define ATOM_BIOS_FUNCTION_STV_STD              0x16
@@ -4217,100 +5137,135 @@ typedef struct _VESA_MODE_INFO_BLOCK {
#define ATOM_BIOS_FUNCTION_PANEL_CONTROL        0x82
#define ATOM_BIOS_FUNCTION_OLD_DEVICE_DET       0x83
#define ATOM_BIOS_FUNCTION_OLD_DEVICE_SWITCH    0x84
#define ATOM_BIOS_FUNCTION_HW_ICON              0x8A
#define ATOM_BIOS_FUNCTION_SET_CMOS             0x8B
#define SUB_FUNCTION_UPDATE_DISPLAY_INFO        0x8000   // Sub function 80
#define SUB_FUNCTION_UPDATE_EXPANSION_INFO      0x8100   // Sub function 80

#define ATOM_BIOS_FUNCTION_DISPLAY_INFO         0x8D
#define ATOM_BIOS_FUNCTION_DEVICE_ON_OFF        0x8E
#define ATOM_BIOS_FUNCTION_VIDEO_STATE          0x8F
#define ATOM_SUB_FUNCTION_GET_CRITICAL_STATE    0x0300   // Sub function 03
#define ATOM_SUB_FUNCTION_GET_LIDSTATE          0x0700   // Sub function 7
#define ATOM_SUB_FUNCTION_THERMAL_STATE_NOTICE  0x1400   // Notify caller the current thermal state
#define ATOM_SUB_FUNCTION_CRITICAL_STATE_NOTICE 0x8300   // Notify caller the current critical state
#define ATOM_SUB_FUNCTION_SET_LIDSTATE          0x8500   // Sub function 85
#define ATOM_SUB_FUNCTION_GET_REQ_DISPLAY_FROM_SBIOS_MODE 0x8900  // Sub function 89
#define ATOM_SUB_FUNCTION_INFORM_ADC_SUPPORT    0x9400   // Notify caller that ADC is supported

#define ATOM_BIOS_FUNCTION_VESA_DPMS            0x4F10   // Set DPMS
#define ATOM_SUB_FUNCTION_SET_DPMS              0x0001   // BL: Sub function 01
#define ATOM_SUB_FUNCTION_GET_DPMS              0x0002   // BL: Sub function 02
#define ATOM_PARAMETER_VESA_DPMS_ON             0x0000   // BH Parameter for DPMS ON.
#define ATOM_PARAMETER_VESA_DPMS_STANDBY        0x0100   // BH Parameter for DPMS STANDBY
#define ATOM_PARAMETER_VESA_DPMS_SUSPEND        0x0200   // BH Parameter for DPMS SUSPEND
#define ATOM_PARAMETER_VESA_DPMS_OFF            0x0400   // BH Parameter for DPMS OFF
#define ATOM_PARAMETER_VESA_DPMS_REDUCE_ON      0x0800   // BH Parameter for DPMS REDUCE ON (NOT SUPPORTED)

#define ATOM_BIOS_RETURN_CODE_MASK              0x0000FF00L
#define ATOM_BIOS_REG_HIGH_MASK                 0x0000FF00L
#define ATOM_BIOS_REG_LOW_MASK                  0x000000FFL

// structure used for VBIOS only

//DispOutInfoTable
typedef struct _ASIC_TRANSMITTER_INFO
{
  USHORT  usTransmitterObjId;
  USHORT  usSupportDevice;
  UCHAR   ucTransmitterCmdTblId;
  UCHAR   ucConfig;
  UCHAR   ucEncoderID;          // available 1st encoder ( default )
  UCHAR   ucOptionEncoderID;    // available 2nd encoder ( optional )
  UCHAR   uc2ndEncoderID;
  UCHAR   ucReserved;
}ASIC_TRANSMITTER_INFO;

typedef struct _ASIC_ENCODER_INFO
{
  UCHAR   ucEncoderID;
  UCHAR   ucEncoderConfig;
  USHORT  usEncoderCmdTblId;
}ASIC_ENCODER_INFO;

typedef struct _ATOM_DISP_OUT_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  USHORT                   ptrTransmitterInfo;
  USHORT                   ptrEncoderInfo;
  ASIC_TRANSMITTER_INFO    asTransmitterInfo[1];
  ASIC_ENCODER_INFO        asEncoderInfo[1];
}ATOM_DISP_OUT_INFO;

typedef struct _ATOM_DISP_OUT_INFO_V2
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  USHORT                   ptrTransmitterInfo;
  USHORT                   ptrEncoderInfo;
  USHORT                   ptrMainCallParserFar;  // direct address of main parser call in VBIOS binary.
  ASIC_TRANSMITTER_INFO    asTransmitterInfo[1];
  ASIC_ENCODER_INFO        asEncoderInfo[1];
}ATOM_DISP_OUT_INFO_V2;

// DispDevicePriorityInfo
typedef struct _ATOM_DISPLAY_DEVICE_PRIORITY_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  USHORT                   asDevicePriority[16];
}ATOM_DISPLAY_DEVICE_PRIORITY_INFO;

//ProcessAuxChannelTransactionTable
typedef struct _PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS
{
  USHORT  lpAuxRequest;
  USHORT  lpDataOut;
  UCHAR   ucChannelID;
  union
  {
    UCHAR ucReplyStatus;
    UCHAR ucDelay;
  };
  UCHAR   ucDataOutLen;
  UCHAR   ucReserved;
}PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS;

//ProcessAuxChannelTransactionTable
typedef struct _PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2
{
  USHORT  lpAuxRequest;
  USHORT  lpDataOut;
  UCHAR   ucChannelID;
  union
  {
    UCHAR ucReplyStatus;
    UCHAR ucDelay;
  };
  UCHAR   ucDataOutLen;
  UCHAR   ucHPD_ID;                           // =0: HPD1, =1: HPD2, =2: HPD3, =3: HPD4, =4: HPD5, =5: HPD6
}PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2;

#define PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION  PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS

//GetSinkType

typedef struct _DP_ENCODER_SERVICE_PARAMETERS
{
  USHORT  ucLinkClock;
  union
  {
    UCHAR ucConfig;                           // for DP training command
    UCHAR ucI2cId;                            // use for GET_SINK_TYPE command
  };
  UCHAR   ucAction;
  UCHAR   ucStatus;
  UCHAR   ucLaneNum;
  UCHAR   ucReserved[2];
}DP_ENCODER_SERVICE_PARAMETERS;

// ucAction
#define ATOM_DP_ACTION_GET_SINK_TYPE            0x01
/* obsolete */
#define ATOM_DP_ACTION_TRAINING_START           0x02
#define ATOM_DP_ACTION_TRAINING_COMPLETE        0x03
#define ATOM_DP_ACTION_TRAINING_PATTERN_SEL     0x04
@@ -4318,7 +5273,7 @@ typedef struct _DP_ENCODER_SERVICE_PARAMETERS {
#define ATOM_DP_ACTION_GET_VSWING_PREEMP        0x06
#define ATOM_DP_ACTION_BLANKING                 0x07

// ucConfig
#define ATOM_DP_CONFIG_ENCODER_SEL_MASK         0x03
#define ATOM_DP_CONFIG_DIG1_ENCODER             0x00
#define ATOM_DP_CONFIG_DIG2_ENCODER             0x01
@@ -4326,14 +5281,14 @@ typedef struct _DP_ENCODER_SERVICE_PARAMETERS {
#define ATOM_DP_CONFIG_LINK_SEL_MASK            0x04
#define ATOM_DP_CONFIG_LINK_A                   0x00
#define ATOM_DP_CONFIG_LINK_B                   0x04
/* /obsolete */
#define DP_ENCODER_SERVICE_PS_ALLOCATION        WRITE_ONE_BYTE_HW_I2C_DATA_PARAMETERS

// DP_TRAINING_TABLE
#define DPCD_SET_LINKRATE_LANENUM_PATTERN1_TBL_ADDR   ATOM_DP_TRAINING_TBL_ADDR
#define DPCD_SET_SS_CNTL_TBL_ADDR                     (ATOM_DP_TRAINING_TBL_ADDR + 8)
#define DPCD_SET_LANE_VSWING_PREEMP_TBL_ADDR          (ATOM_DP_TRAINING_TBL_ADDR + 16)
#define DPCD_SET_TRAINING_PATTERN0_TBL_ADDR           (ATOM_DP_TRAINING_TBL_ADDR + 24)
#define DPCD_SET_TRAINING_PATTERN2_TBL_ADDR           (ATOM_DP_TRAINING_TBL_ADDR + 32)
#define DPCD_GET_LINKRATE_LANENUM_SS_TBL_ADDR         (ATOM_DP_TRAINING_TBL_ADDR + 40)
#define DPCD_GET_LANE_STATUS_ADJUST_TBL_ADDR          (ATOM_DP_TRAINING_TBL_ADDR + 48)
@@ -4341,183 +5296,241 @@ typedef struct _DP_ENCODER_SERVICE_PARAMETERS {
#define DP_I2C_AUX_DDC_WRITE_TBL_ADDR                 (ATOM_DP_TRAINING_TBL_ADDR + 64)
#define DP_I2C_AUX_DDC_READ_START_TBL_ADDR            (ATOM_DP_TRAINING_TBL_ADDR + 72)
#define DP_I2C_AUX_DDC_READ_TBL_ADDR                  (ATOM_DP_TRAINING_TBL_ADDR + 76)
#define DP_I2C_AUX_DDC_WRITE_END_TBL_ADDR             (ATOM_DP_TRAINING_TBL_ADDR + 80)
#define DP_I2C_AUX_DDC_READ_END_TBL_ADDR              (ATOM_DP_TRAINING_TBL_ADDR + 84)

typedef struct _PROCESS_I2C_CHANNEL_TRANSACTION_PARAMETERS
{
  UCHAR   ucI2CSpeed;
  union
  {
    UCHAR ucRegIndex;
    UCHAR ucStatus;
  };
  USHORT  lpI2CDataOut;
  UCHAR   ucFlag;
  UCHAR   ucTransBytes;
  UCHAR   ucSlaveAddr;
  UCHAR   ucLineNumber;
}PROCESS_I2C_CHANNEL_TRANSACTION_PARAMETERS;

#define PROCESS_I2C_CHANNEL_TRANSACTION_PS_ALLOCATION  PROCESS_I2C_CHANNEL_TRANSACTION_PARAMETERS

//ucFlag
#define HW_I2C_WRITE        1
#define HW_I2C_READ         0
#define I2C_2BYTE_ADDR      0x02

typedef struct _SET_HWBLOCK_INSTANCE_PARAMETER_V2
{
  UCHAR ucHWBlkInst;                // HW block instance, 0, 1, 2, ...
  UCHAR ucReserved[3];
}SET_HWBLOCK_INSTANCE_PARAMETER_V2;

#define HWBLKINST_INSTANCE_MASK     0x07
#define HWBLKINST_HWBLK_MASK        0xF0
#define HWBLKINST_HWBLK_SHIFT       0x04

//ucHWBlock
#define SELECT_DISP_ENGINE          0
#define SELECT_DISP_PLL             1
#define SELECT_DCIO_UNIPHY_LINK0    2
#define SELECT_DCIO_UNIPHY_LINK1    3
#define SELECT_DCIO_IMPCAL          4
#define SELECT_DCIO_DIG             6
#define SELECT_CRTC_PIXEL_RATE      7
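// --- Editor's illustrative sketch (not part of the original header) ---------
// ucHWBlkInst packs the HW block selector in [7:4] and the instance in [2:0],
// per the HWBLKINST_* masks above; the helper name is hypothetical.
static UCHAR example_pack_hwblk_inst(UCHAR hw_block, UCHAR instance)
{
  return (UCHAR)(((hw_block << HWBLKINST_HWBLK_SHIFT) & HWBLKINST_HWBLK_MASK) |
                 (instance & HWBLKINST_INSTANCE_MASK));
}
// e.g. example_pack_hwblk_inst(SELECT_CRTC_PIXEL_RATE, 0)
// ----------------------------------------------------------------------------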

/****************************************************************************/
// Portion VI: Definitions for vbios MC scratch registers that the driver uses
/****************************************************************************/

#define MC_MISC0__MEMORY_TYPE_MASK    0xF0000000
#define MC_MISC0__MEMORY_TYPE__GDDR1  0x10000000
#define MC_MISC0__MEMORY_TYPE__DDR2   0x20000000
#define MC_MISC0__MEMORY_TYPE__GDDR3  0x30000000
#define MC_MISC0__MEMORY_TYPE__GDDR4  0x40000000
#define MC_MISC0__MEMORY_TYPE__GDDR5  0x50000000
#define MC_MISC0__MEMORY_TYPE__DDR3   0xB0000000
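// --- Editor's illustrative sketch (not part of the original header) ---------
// Turning the MC_MISC0 memory-type field into a printable name; the MC_MISC0
// value itself would come from a register read elsewhere in the driver.
static const char *example_mc_misc0_mem_type(ULONG mc_misc0)
{
  switch (mc_misc0 & MC_MISC0__MEMORY_TYPE_MASK) {
  case MC_MISC0__MEMORY_TYPE__GDDR1: return "GDDR1";
  case MC_MISC0__MEMORY_TYPE__DDR2:  return "DDR2";
  case MC_MISC0__MEMORY_TYPE__GDDR3: return "GDDR3";
  case MC_MISC0__MEMORY_TYPE__GDDR4: return "GDDR4";
  case MC_MISC0__MEMORY_TYPE__GDDR5: return "GDDR5";
  case MC_MISC0__MEMORY_TYPE__DDR3:  return "DDR3";
  default:                           return "unknown";
  }
}
// ----------------------------------------------------------------------------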

/****************************************************************************/
// Portion VI: Definitions being obsolete
/****************************************************************************/

//==========================================================================================
// Remove the definitions below when the driver is ready!
typedef struct _ATOM_DAC_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  USHORT                   usMaxFrequency;      // in 10kHz unit
  USHORT                   usReserved;
}ATOM_DAC_INFO;


typedef struct _COMPASSIONATE_DATA
{
  ATOM_COMMON_TABLE_HEADER sHeader;

  //============================== DAC1 portion
  UCHAR   ucDAC1_BG_Adjustment;
  UCHAR   ucDAC1_DAC_Adjustment;
  USHORT  usDAC1_FORCE_Data;
  //============================== DAC2 portion
  UCHAR   ucDAC2_CRT2_BG_Adjustment;
  UCHAR   ucDAC2_CRT2_DAC_Adjustment;
  USHORT  usDAC2_CRT2_FORCE_Data;
  USHORT  usDAC2_CRT2_MUX_RegisterIndex;
  UCHAR   ucDAC2_CRT2_MUX_RegisterInfo;     // Bit[4:0]=Bit position, Bit[7]=1:Active High; =0 Active Low
  UCHAR   ucDAC2_NTSC_BG_Adjustment;
  UCHAR   ucDAC2_NTSC_DAC_Adjustment;
  USHORT  usDAC2_TV1_FORCE_Data;
  USHORT  usDAC2_TV1_MUX_RegisterIndex;
  UCHAR   ucDAC2_TV1_MUX_RegisterInfo;      // Bit[4:0]=Bit position, Bit[7]=1:Active High; =0 Active Low
  UCHAR   ucDAC2_CV_BG_Adjustment;
  UCHAR   ucDAC2_CV_DAC_Adjustment;
  USHORT  usDAC2_CV_FORCE_Data;
  USHORT  usDAC2_CV_MUX_RegisterIndex;
  UCHAR   ucDAC2_CV_MUX_RegisterInfo;       // Bit[4:0]=Bit position, Bit[7]=1:Active High; =0 Active Low
  UCHAR   ucDAC2_PAL_BG_Adjustment;
  UCHAR   ucDAC2_PAL_DAC_Adjustment;
  USHORT  usDAC2_TV2_FORCE_Data;
}COMPASSIONATE_DATA;

/****************************Supported Device Info Table Definitions**********************/
//  ucConnectInfo:
//    [7:4] - connector type
//      = 1   - VGA connector
//      = 2   - DVI-I
//      = 3   - DVI-D
//      = 4   - DVI-A
//      = 5   - SVIDEO
//      = 6   - COMPOSITE
//      = 7   - LVDS
//      = 8   - DIGITAL LINK
//      = 9   - SCART
//      = 0xA - HDMI_type A
//      = 0xB - HDMI_type B
//      = 0xE - Special case1 (DVI+DIN)
//      Others=TBD
//    [3:0] - DAC Associated
//      = 0   - no DAC
//      = 1   - DACA
//      = 2   - DACB
//      = 3   - External DAC
//      Others=TBD
//

typedef struct _ATOM_CONNECTOR_INFO
{
#if ATOM_BIG_ENDIAN
  UCHAR   bfConnectorType:4;
  UCHAR   bfAssociatedDAC:4;
#else
  UCHAR   bfAssociatedDAC:4;
  UCHAR   bfConnectorType:4;
#endif
}ATOM_CONNECTOR_INFO;

typedef union _ATOM_CONNECTOR_INFO_ACCESS
{
  ATOM_CONNECTOR_INFO sbfAccess;
  UCHAR               ucAccess;
}ATOM_CONNECTOR_INFO_ACCESS;

typedef struct _ATOM_CONNECTOR_INFO_I2C
{
  ATOM_CONNECTOR_INFO_ACCESS sucConnectorInfo;
  ATOM_I2C_ID_CONFIG_ACCESS  sucI2cId;
}ATOM_CONNECTOR_INFO_I2C;

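// --- Editor's illustrative sketch (not part of the original header) ---------
// The ATOM_CONNECTOR_INFO_ACCESS union lets callers read the packed connector
// byte without depending on host bitfield layout; the shifts follow the
// [7:4]=connector type, [3:0]=associated DAC encoding documented above.
// The helper name is hypothetical.
static void example_decode_connector(ATOM_CONNECTOR_INFO_ACCESS info,
                                     UCHAR *connector_type, UCHAR *dac)
{
  *connector_type = (info.ucAccess >> 4) & 0xF;   // e.g. 1 = VGA, 7 = LVDS
  *dac            = info.ucAccess & 0xF;          // 0 = none, 1 = DACA, 2 = DACB
}
// ----------------------------------------------------------------------------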
typedef struct _ATOM_SUPPORTED_DEVICES_INFO
{
  ATOM_COMMON_TABLE_HEADER      sHeader;
  USHORT                        usDeviceSupport;
  ATOM_CONNECTOR_INFO_I2C       asConnInfo[ATOM_MAX_SUPPORTED_DEVICE_INFO];
}ATOM_SUPPORTED_DEVICES_INFO;

#define NO_INT_SRC_MAPPED       0xFF

typedef struct _ATOM_CONNECTOR_INC_SRC_BITMAP
{
  UCHAR   ucIntSrcBitmap;
}ATOM_CONNECTOR_INC_SRC_BITMAP;

typedef struct _ATOM_SUPPORTED_DEVICES_INFO_2
{
  ATOM_COMMON_TABLE_HEADER      sHeader;
  USHORT                        usDeviceSupport;
  ATOM_CONNECTOR_INFO_I2C       asConnInfo[ATOM_MAX_SUPPORTED_DEVICE_INFO_2];
  ATOM_CONNECTOR_INC_SRC_BITMAP asIntSrcInfo[ATOM_MAX_SUPPORTED_DEVICE_INFO_2];
}ATOM_SUPPORTED_DEVICES_INFO_2;

typedef struct _ATOM_SUPPORTED_DEVICES_INFO_2d1
{
  ATOM_COMMON_TABLE_HEADER      sHeader;
  USHORT                        usDeviceSupport;
  ATOM_CONNECTOR_INFO_I2C       asConnInfo[ATOM_MAX_SUPPORTED_DEVICE];
  ATOM_CONNECTOR_INC_SRC_BITMAP asIntSrcInfo[ATOM_MAX_SUPPORTED_DEVICE];
}ATOM_SUPPORTED_DEVICES_INFO_2d1;

#define ATOM_SUPPORTED_DEVICES_INFO_LAST ATOM_SUPPORTED_DEVICES_INFO_2d1


typedef struct _ATOM_MISC_CONTROL_INFO
{
  USHORT  usFrequency;
  UCHAR   ucPLL_ChargePump;                 // PLL charge-pump gain control
  UCHAR   ucPLL_DutyCycle;                  // PLL duty cycle control
  UCHAR   ucPLL_VCO_Gain;                   // PLL VCO gain control
  UCHAR   ucPLL_VoltageSwing;               // PLL driver voltage swing control
}ATOM_MISC_CONTROL_INFO;

#define ATOM_MAX_MISC_INFO       4

typedef struct _ATOM_TMDS_INFO
{
  ATOM_COMMON_TABLE_HEADER sHeader;
  USHORT                   usMaxFrequency;  // in 10Khz
  ATOM_MISC_CONTROL_INFO   asMiscInfo[ATOM_MAX_MISC_INFO];
}ATOM_TMDS_INFO;

typedef struct _ATOM_ENCODER_ANALOG_ATTRIBUTE
{
  UCHAR ucTVStandard;                       // Same as TV standards defined above,
  UCHAR ucPadding[1];
}ATOM_ENCODER_ANALOG_ATTRIBUTE;

typedef struct _ATOM_ENCODER_DIGITAL_ATTRIBUTE
{
  UCHAR ucAttribute;                        // Same as other digital encoder attributes defined above
  UCHAR ucPadding[1];
}ATOM_ENCODER_DIGITAL_ATTRIBUTE;

typedef union _ATOM_ENCODER_ATTRIBUTE
{
  ATOM_ENCODER_ANALOG_ATTRIBUTE  sAlgAttrib;
  ATOM_ENCODER_DIGITAL_ATTRIBUTE sDigAttrib;
}ATOM_ENCODER_ATTRIBUTE;

typedef struct _DVO_ENCODER_CONTROL_PARAMETERS
{
  USHORT  usPixelClock;
  USHORT  usEncoderID;
  UCHAR   ucDeviceType;                     // Use ATOM_DEVICE_xxx1_Index to indicate device type only.
  UCHAR   ucAction;                         // ATOM_ENABLE/ATOM_DISABLE/ATOM_HPD_INIT
  ATOM_ENCODER_ATTRIBUTE usDevAttr;
}DVO_ENCODER_CONTROL_PARAMETERS;

typedef struct _DVO_ENCODER_CONTROL_PS_ALLOCATION
{
  DVO_ENCODER_CONTROL_PARAMETERS            sDVOEncoder;
  WRITE_ONE_BYTE_HW_I2C_DATA_PS_ALLOCATION  sReserved;    // Caller doesn't need to init this portion
}DVO_ENCODER_CONTROL_PS_ALLOCATION;


#define ATOM_XTMDS_ASIC_SI164_ID        1
#define ATOM_XTMDS_ASIC_SI178_ID        2
@@ -4526,27 +5539,30 @@ typedef struct _DVO_ENCODER_CONTROL_PS_ALLOCATION {
#define ATOM_XTMDS_SUPPORTED_DUALLINK   0x00000002
#define ATOM_XTMDS_MVPU_FPGA            0x00000004


typedef struct _ATOM_XTMDS_INFO
{
  ATOM_COMMON_TABLE_HEADER   sHeader;
  USHORT                     usSingleLinkMaxFrequency;
  ATOM_I2C_ID_CONFIG_ACCESS  sucI2cId;           // Point the ID on which I2C is used to control external chip
  UCHAR                      ucXtransimitterID;
  UCHAR                      ucSupportedLink;    // Bit field, bit0=1, single link supported; bit1=1, dual link supported
  UCHAR                      ucSequnceAlterID;   // Even with the same external TMDS asic, it's possible that the program sequence alters
                                                 // due to design. This ID is used to alert driver that the sequence is not "standard"!
  UCHAR                      ucMasterAddress;    // Address to control Master xTMDS Chip
  UCHAR                      ucSlaveAddress;     // Address to control Slave xTMDS Chip
}ATOM_XTMDS_INFO;

typedef struct _DFP_DPMS_STATUS_CHANGE_PARAMETERS
{
  UCHAR ucEnable;                     // ATOM_ENABLE=On or ATOM_DISABLE=Off
  UCHAR ucDevice;                     // ATOM_DEVICE_DFP1_INDEX....
  UCHAR ucPadding[2];
}DFP_DPMS_STATUS_CHANGE_PARAMETERS;

/****************************Legacy Power Play Table Definitions **********************/

//Definitions for ulPowerPlayMiscInfo
#define ATOM_PM_MISCINFO_SPLIT_CLOCK                     0x00000000L
#define ATOM_PM_MISCINFO_USING_MCLK_SRC                  0x00000001L
#define ATOM_PM_MISCINFO_USING_SCLK_SRC                  0x00000002L
@@ -4558,8 +5574,8 @@ typedef struct _DFP_DPMS_STATUS_CHANGE_PARAMETERS {

#define ATOM_PM_MISCINFO_ENGINE_CLOCK_CONTRL_EN          0x00000020L
#define ATOM_PM_MISCINFO_MEMORY_CLOCK_CONTRL_EN          0x00000040L
#define ATOM_PM_MISCINFO_PROGRAM_VOLTAGE                 0x00000080L   // When this bit is set, ucVoltageDropIndex is not an index for a GPIO pin, but a voltage ID that SW needs to program

#define ATOM_PM_MISCINFO_ASIC_REDUCED_SPEED_SCLK_EN      0x00000100L
#define ATOM_PM_MISCINFO_ASIC_DYNAMIC_VOLTAGE_EN         0x00000200L
#define ATOM_PM_MISCINFO_ASIC_SLEEP_MODE_EN              0x00000400L
@@ -4569,22 +5585,22 @@ typedef struct _DFP_DPMS_STATUS_CHANGE_PARAMETERS {
#define ATOM_PM_MISCINFO_LOW_LCD_REFRESH_RATE            0x00004000L

#define ATOM_PM_MISCINFO_DRIVER_DEFAULT_MODE             0x00008000L
#define ATOM_PM_MISCINFO_OVER_CLOCK_MODE                 0x00010000L
#define ATOM_PM_MISCINFO_OVER_DRIVE_MODE                 0x00020000L
#define ATOM_PM_MISCINFO_POWER_SAVING_MODE               0x00040000L
#define ATOM_PM_MISCINFO_THERMAL_DIODE_MODE              0x00080000L

#define ATOM_PM_MISCINFO_FRAME_MODULATION_MASK           0x00300000L   // 0-FM Disable, 1-2 level FM, 2-4 level FM, 3-Reserved
#define ATOM_PM_MISCINFO_FRAME_MODULATION_SHIFT          20

#define ATOM_PM_MISCINFO_DYN_CLK_3D_IDLE                 0x00400000L
#define ATOM_PM_MISCINFO_DYNAMIC_CLOCK_DIVIDER_BY_2      0x00800000L
#define ATOM_PM_MISCINFO_DYNAMIC_CLOCK_DIVIDER_BY_4      0x01000000L
#define ATOM_PM_MISCINFO_DYNAMIC_HDP_BLOCK_EN            0x02000000L   // When set, Dynamic
#define ATOM_PM_MISCINFO_DYNAMIC_MC_HOST_BLOCK_EN        0x04000000L   // When set, Dynamic
#define ATOM_PM_MISCINFO_3D_ACCELERATION_EN              0x08000000L   // When set, this mode is for accelerated 3D mode

#define ATOM_PM_MISCINFO_POWERPLAY_SETTINGS_GROUP_MASK   0x70000000L   // 1-Optimal Battery Life Group, 2-High Battery, 3-Balanced, 4-High Performance, 5-Optimal Performance (Default state with Default clocks)
#define ATOM_PM_MISCINFO_POWERPLAY_SETTINGS_GROUP_SHIFT  28
#define ATOM_PM_MISCINFO_ENABLE_BACK_BIAS                0x80000000L

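// --- Editor's illustrative sketch (not part of the original header) ---------
// Extracting the PowerPlay settings group (1=Optimal Battery Life ...
// 5=Optimal Performance) from ulMiscInfo with the mask/shift defined above;
// the helper name is hypothetical.
static ULONG example_pp_settings_group(ULONG ulMiscInfo)
{
  return (ulMiscInfo & ATOM_PM_MISCINFO_POWERPLAY_SETTINGS_GROUP_MASK) >>
         ATOM_PM_MISCINFO_POWERPLAY_SETTINGS_GROUP_SHIFT;
}
// ----------------------------------------------------------------------------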
@@ -4594,55 +5610,59 @@ typedef struct _DFP_DPMS_STATUS_CHANGE_PARAMETERS {
4594#define ATOM_PM_MISCINFO2_FS3D_OVERDRIVE_INFO 0x00000008L 5610#define ATOM_PM_MISCINFO2_FS3D_OVERDRIVE_INFO 0x00000008L
4595#define ATOM_PM_MISCINFO2_FORCEDLOWPWR_MODE 0x00000010L 5611#define ATOM_PM_MISCINFO2_FORCEDLOWPWR_MODE 0x00000010L
4596#define ATOM_PM_MISCINFO2_VDDCI_DYNAMIC_VOLTAGE_EN 0x00000020L 5612#define ATOM_PM_MISCINFO2_VDDCI_DYNAMIC_VOLTAGE_EN 0x00000020L
4597#define ATOM_PM_MISCINFO2_VIDEO_PLAYBACK_CAPABLE 0x00000040L /* If this bit is set in multi-pp mode, then driver will pack up one with the minior power consumption. */ 5613#define ATOM_PM_MISCINFO2_VIDEO_PLAYBACK_CAPABLE 0x00000040L //If this bit is set in multi-pp mode, then driver will pack up one with the minior power consumption.
4598 /* If it's not set in any pp mode, driver will use its default logic to pick a pp mode in video playback */ 5614 //If it's not set in any pp mode, driver will use its default logic to pick a pp mode in video playback
4599#define ATOM_PM_MISCINFO2_NOT_VALID_ON_DC 0x00000080L 5615#define ATOM_PM_MISCINFO2_NOT_VALID_ON_DC 0x00000080L
4600#define ATOM_PM_MISCINFO2_STUTTER_MODE_EN 0x00000100L 5616#define ATOM_PM_MISCINFO2_STUTTER_MODE_EN 0x00000100L
4601#define ATOM_PM_MISCINFO2_UVD_SUPPORT_MODE 0x00000200L 5617#define ATOM_PM_MISCINFO2_UVD_SUPPORT_MODE 0x00000200L
4602 5618
4603/* ucTableFormatRevision=1 */ 5619//ucTableFormatRevision=1
4604/* ucTableContentRevision=1 */ 5620//ucTableContentRevision=1
4605typedef struct _ATOM_POWERMODE_INFO { 5621typedef struct _ATOM_POWERMODE_INFO
4606 ULONG ulMiscInfo; /* The power level should be arranged in ascending order */ 5622{
4607 ULONG ulReserved1; /* must set to 0 */ 5623 ULONG ulMiscInfo; //The power level should be arranged in ascending order
4608 ULONG ulReserved2; /* must set to 0 */ 5624 ULONG ulReserved1; // must set to 0
4609 USHORT usEngineClock; 5625 ULONG ulReserved2; // must set to 0
4610 USHORT usMemoryClock; 5626 USHORT usEngineClock;
4611 UCHAR ucVoltageDropIndex; /* index to GPIO table */ 5627 USHORT usMemoryClock;
4612 UCHAR ucSelectedPanel_RefreshRate; /* panel refresh rate */ 5628 UCHAR ucVoltageDropIndex; // index to GPIO table
4613 UCHAR ucMinTemperature; 5629 UCHAR ucSelectedPanel_RefreshRate;// panel refresh rate
4614 UCHAR ucMaxTemperature; 5630 UCHAR ucMinTemperature;
4615 UCHAR ucNumPciELanes; /* number of PCIE lanes */ 5631 UCHAR ucMaxTemperature;
4616} ATOM_POWERMODE_INFO; 5632 UCHAR ucNumPciELanes; // number of PCIE lanes
4617 5633}ATOM_POWERMODE_INFO;
4618/* ucTableFormatRevision=2 */ 5634
4619/* ucTableContentRevision=1 */ 5635//ucTableFormatRevision=2
4620typedef struct _ATOM_POWERMODE_INFO_V2 { 5636//ucTableContentRevision=1
4621 ULONG ulMiscInfo; /* The power level should be arranged in ascending order */ 5637typedef struct _ATOM_POWERMODE_INFO_V2
4622 ULONG ulMiscInfo2; 5638{
4623 ULONG ulEngineClock; 5639 ULONG ulMiscInfo; //The power level should be arranged in ascending order
4624 ULONG ulMemoryClock; 5640 ULONG ulMiscInfo2;
4625 UCHAR ucVoltageDropIndex; /* index to GPIO table */ 5641 ULONG ulEngineClock;
4626 UCHAR ucSelectedPanel_RefreshRate; /* panel refresh rate */ 5642 ULONG ulMemoryClock;
4627 UCHAR ucMinTemperature; 5643 UCHAR ucVoltageDropIndex; // index to GPIO table
4628 UCHAR ucMaxTemperature; 5644 UCHAR ucSelectedPanel_RefreshRate;// panel refresh rate
4629 UCHAR ucNumPciELanes; /* number of PCIE lanes */ 5645 UCHAR ucMinTemperature;
4630} ATOM_POWERMODE_INFO_V2; 5646 UCHAR ucMaxTemperature;
4631 5647 UCHAR ucNumPciELanes; // number of PCIE lanes
4632/* ucTableFormatRevision=2 */ 5648}ATOM_POWERMODE_INFO_V2;
4633/* ucTableContentRevision=2 */ 5649
4634typedef struct _ATOM_POWERMODE_INFO_V3 { 5650//ucTableFormatRevision=2
4635 ULONG ulMiscInfo; /* The power level should be arranged in ascending order */ 5651//ucTableContentRevision=2
4636 ULONG ulMiscInfo2; 5652typedef struct _ATOM_POWERMODE_INFO_V3
4637 ULONG ulEngineClock; 5653{
4638 ULONG ulMemoryClock; 5654 ULONG ulMiscInfo; //The power level should be arranged in ascending order
4639 UCHAR ucVoltageDropIndex; /* index to Core (VDDC) voltage table */ 5655 ULONG ulMiscInfo2;
4640 UCHAR ucSelectedPanel_RefreshRate; /* panel refresh rate */ 5656 ULONG ulEngineClock;
4641 UCHAR ucMinTemperature; 5657 ULONG ulMemoryClock;
4642 UCHAR ucMaxTemperature; 5658 UCHAR ucVoltageDropIndex; // index to Core (VDDC) votage table
4643 UCHAR ucNumPciELanes; /* number of PCIE lanes */ 5658 UCHAR ucVoltageDropIndex; // index to Core (VDDC) voltage table
4644 UCHAR ucVDDCI_VoltageDropIndex; /* index to VDDCI voltage table */ 5659 UCHAR ucSelectedPanel_RefreshRate;// panel refresh rate
4645} ATOM_POWERMODE_INFO_V3; 5661 UCHAR ucMaxTemperature;
5662 UCHAR ucNumPciELanes; // number of PCIE lanes
 5663 UCHAR ucVDDCI_VoltageDropIndex; // index to VDDCI voltage table
5664}ATOM_POWERMODE_INFO_V3;
5665
4646 5666
4647#define ATOM_MAX_NUMBEROF_POWER_BLOCK 8 5667#define ATOM_MAX_NUMBEROF_POWER_BLOCK 8
4648 5668
@@ -4655,59 +5675,264 @@ typedef struct _ATOM_POWERMODE_INFO_V3 {
4655#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_MUA6649 0x04 5675#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_MUA6649 0x04
4656#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_LM64 0x05 5676#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_LM64 0x05
4657#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_F75375 0x06 5677#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_F75375 0x06
4658#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_ASC7512 0x07 /* Andigilog */ 5678#define ATOM_PP_OVERDRIVE_THERMALCONTROLLER_ASC7512 0x07 // Andigilog
4659 5679
4660typedef struct _ATOM_POWERPLAY_INFO { 5680
4661 ATOM_COMMON_TABLE_HEADER sHeader; 5681typedef struct _ATOM_POWERPLAY_INFO
4662 UCHAR ucOverdriveThermalController; 5682{
4663 UCHAR ucOverdriveI2cLine; 5683 ATOM_COMMON_TABLE_HEADER sHeader;
4664 UCHAR ucOverdriveIntBitmap; 5684 UCHAR ucOverdriveThermalController;
4665 UCHAR ucOverdriveControllerAddress; 5685 UCHAR ucOverdriveI2cLine;
4666 UCHAR ucSizeOfPowerModeEntry; 5686 UCHAR ucOverdriveIntBitmap;
4667 UCHAR ucNumOfPowerModeEntries; 5687 UCHAR ucOverdriveControllerAddress;
4668 ATOM_POWERMODE_INFO asPowerPlayInfo[ATOM_MAX_NUMBEROF_POWER_BLOCK]; 5688 UCHAR ucSizeOfPowerModeEntry;
4669} ATOM_POWERPLAY_INFO; 5689 UCHAR ucNumOfPowerModeEntries;
4670 5690 ATOM_POWERMODE_INFO asPowerPlayInfo[ATOM_MAX_NUMBEROF_POWER_BLOCK];
4671typedef struct _ATOM_POWERPLAY_INFO_V2 { 5691}ATOM_POWERPLAY_INFO;
4672 ATOM_COMMON_TABLE_HEADER sHeader; 5692
4673 UCHAR ucOverdriveThermalController; 5693typedef struct _ATOM_POWERPLAY_INFO_V2
4674 UCHAR ucOverdriveI2cLine; 5694{
4675 UCHAR ucOverdriveIntBitmap; 5695 ATOM_COMMON_TABLE_HEADER sHeader;
4676 UCHAR ucOverdriveControllerAddress; 5696 UCHAR ucOverdriveThermalController;
4677 UCHAR ucSizeOfPowerModeEntry; 5697 UCHAR ucOverdriveI2cLine;
4678 UCHAR ucNumOfPowerModeEntries; 5698 UCHAR ucOverdriveIntBitmap;
4679 ATOM_POWERMODE_INFO_V2 asPowerPlayInfo[ATOM_MAX_NUMBEROF_POWER_BLOCK]; 5699 UCHAR ucOverdriveControllerAddress;
4680} ATOM_POWERPLAY_INFO_V2; 5700 UCHAR ucSizeOfPowerModeEntry;
4681 5701 UCHAR ucNumOfPowerModeEntries;
4682typedef struct _ATOM_POWERPLAY_INFO_V3 { 5702 ATOM_POWERMODE_INFO_V2 asPowerPlayInfo[ATOM_MAX_NUMBEROF_POWER_BLOCK];
4683 ATOM_COMMON_TABLE_HEADER sHeader; 5703}ATOM_POWERPLAY_INFO_V2;
4684 UCHAR ucOverdriveThermalController; 5704
4685 UCHAR ucOverdriveI2cLine; 5705typedef struct _ATOM_POWERPLAY_INFO_V3
4686 UCHAR ucOverdriveIntBitmap; 5706{
4687 UCHAR ucOverdriveControllerAddress; 5707 ATOM_COMMON_TABLE_HEADER sHeader;
4688 UCHAR ucSizeOfPowerModeEntry; 5708 UCHAR ucOverdriveThermalController;
4689 UCHAR ucNumOfPowerModeEntries; 5709 UCHAR ucOverdriveI2cLine;
4690 ATOM_POWERMODE_INFO_V3 asPowerPlayInfo[ATOM_MAX_NUMBEROF_POWER_BLOCK]; 5710 UCHAR ucOverdriveIntBitmap;
4691} ATOM_POWERPLAY_INFO_V3; 5711 UCHAR ucOverdriveControllerAddress;
5712 UCHAR ucSizeOfPowerModeEntry;
5713 UCHAR ucNumOfPowerModeEntries;
5714 ATOM_POWERMODE_INFO_V3 asPowerPlayInfo[ATOM_MAX_NUMBEROF_POWER_BLOCK];
5715}ATOM_POWERPLAY_INFO_V3;
5716
5717/* New PPlib */
5718/**************************************************************************/
 5719typedef struct _ATOM_PPLIB_THERMALCONTROLLER
 5721{
5722 UCHAR ucType; // one of ATOM_PP_THERMALCONTROLLER_*
5723 UCHAR ucI2cLine; // as interpreted by DAL I2C
5724 UCHAR ucI2cAddress;
5725 UCHAR ucFanParameters; // Fan Control Parameters.
5726 UCHAR ucFanMinRPM; // Fan Minimum RPM (hundreds) -- for display purposes only.
5727 UCHAR ucFanMaxRPM; // Fan Maximum RPM (hundreds) -- for display purposes only.
5728 UCHAR ucReserved; // ----
5729 UCHAR ucFlags; // to be defined
5730} ATOM_PPLIB_THERMALCONTROLLER;
5731
5732#define ATOM_PP_FANPARAMETERS_TACHOMETER_PULSES_PER_REVOLUTION_MASK 0x0f
5733#define ATOM_PP_FANPARAMETERS_NOFAN 0x80 // No fan is connected to this controller.
5734
5735#define ATOM_PP_THERMALCONTROLLER_NONE 0
5736#define ATOM_PP_THERMALCONTROLLER_LM63 1 // Not used by PPLib
5737#define ATOM_PP_THERMALCONTROLLER_ADM1032 2 // Not used by PPLib
5738#define ATOM_PP_THERMALCONTROLLER_ADM1030 3 // Not used by PPLib
5739#define ATOM_PP_THERMALCONTROLLER_MUA6649 4 // Not used by PPLib
5740#define ATOM_PP_THERMALCONTROLLER_LM64 5
5741#define ATOM_PP_THERMALCONTROLLER_F75375 6 // Not used by PPLib
5742#define ATOM_PP_THERMALCONTROLLER_RV6xx 7
5743#define ATOM_PP_THERMALCONTROLLER_RV770 8
5744#define ATOM_PP_THERMALCONTROLLER_ADT7473 9
5745
5746typedef struct _ATOM_PPLIB_STATE
5747{
5748 UCHAR ucNonClockStateIndex;
5749 UCHAR ucClockStateIndices[1]; // variable-sized
5750} ATOM_PPLIB_STATE;
5751
5752//// ATOM_PPLIB_POWERPLAYTABLE::ulPlatformCaps
5753#define ATOM_PP_PLATFORM_CAP_BACKBIAS 1
5754#define ATOM_PP_PLATFORM_CAP_POWERPLAY 2
5755#define ATOM_PP_PLATFORM_CAP_SBIOSPOWERSOURCE 4
5756#define ATOM_PP_PLATFORM_CAP_ASPM_L0s 8
5757#define ATOM_PP_PLATFORM_CAP_ASPM_L1 16
5758#define ATOM_PP_PLATFORM_CAP_HARDWAREDC 32
5759#define ATOM_PP_PLATFORM_CAP_GEMINIPRIMARY 64
5760#define ATOM_PP_PLATFORM_CAP_STEPVDDC 128
5761#define ATOM_PP_PLATFORM_CAP_VOLTAGECONTROL 256
5762#define ATOM_PP_PLATFORM_CAP_SIDEPORTCONTROL 512
5763#define ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1 1024
5764#define ATOM_PP_PLATFORM_CAP_HTLINKCONTROL 2048
5765
5766typedef struct _ATOM_PPLIB_POWERPLAYTABLE
5767{
5768 ATOM_COMMON_TABLE_HEADER sHeader;
5769
5770 UCHAR ucDataRevision;
5771
5772 UCHAR ucNumStates;
5773 UCHAR ucStateEntrySize;
5774 UCHAR ucClockInfoSize;
5775 UCHAR ucNonClockSize;
5776
5777 // offset from start of this table to array of ucNumStates ATOM_PPLIB_STATE structures
5778 USHORT usStateArrayOffset;
5779
5780 // offset from start of this table to array of ASIC-specific structures,
5781 // currently ATOM_PPLIB_CLOCK_INFO.
5782 USHORT usClockInfoArrayOffset;
5783
5784 // offset from start of this table to array of ATOM_PPLIB_NONCLOCK_INFO
5785 USHORT usNonClockInfoArrayOffset;
5786
5787 USHORT usBackbiasTime; // in microseconds
5788 USHORT usVoltageTime; // in microseconds
 5789 USHORT usTableSize; // the size of this structure, or of the extended structure
 5790
 5791 ULONG ulPlatformCaps; // See the ATOM_PP_PLATFORM_CAP_* flags above
5792
5793 ATOM_PPLIB_THERMALCONTROLLER sThermalController;
5794
5795 USHORT usBootClockInfoOffset;
5796 USHORT usBootNonClockInfoOffset;
5797
5798} ATOM_PPLIB_POWERPLAYTABLE;
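
Because ATOM_PPLIB_STATE is variable-sized (one ucClockStateIndices byte per clock level), the offsets above are normally walked with byte arithmetic rather than plain array indexing. A minimal sketch, assuming the structures defined above and the kernel's le16_to_cpu byte-order helper; the function name pplib_state_entry is hypothetical and not part of this header:

/* Hypothetical helper: index the variable-sized ATOM_PPLIB_STATE entries
 * that start at usStateArrayOffset. Each entry is ucStateEntrySize bytes
 * because ucClockStateIndices[] grows with the number of clock levels. */
static const ATOM_PPLIB_STATE *
pplib_state_entry(const ATOM_PPLIB_POWERPLAYTABLE *table, int i)
{
        const unsigned char *base = (const unsigned char *)table;

        return (const ATOM_PPLIB_STATE *)
                (base + le16_to_cpu(table->usStateArrayOffset) +
                 i * table->ucStateEntrySize);
}

ucNonClockStateIndex and ucClockStateIndices[] then select entries of ucNonClockSize and ucClockInfoSize bytes from the non-clock and clock-info arrays in the same byte-stride fashion.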
5799
5800//// ATOM_PPLIB_NONCLOCK_INFO::usClassification
5801#define ATOM_PPLIB_CLASSIFICATION_UI_MASK 0x0007
5802#define ATOM_PPLIB_CLASSIFICATION_UI_SHIFT 0
5803#define ATOM_PPLIB_CLASSIFICATION_UI_NONE 0
5804#define ATOM_PPLIB_CLASSIFICATION_UI_BATTERY 1
5805#define ATOM_PPLIB_CLASSIFICATION_UI_BALANCED 3
5806#define ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE 5
5807// 2, 4, 6, 7 are reserved
5808
5809#define ATOM_PPLIB_CLASSIFICATION_BOOT 0x0008
5810#define ATOM_PPLIB_CLASSIFICATION_THERMAL 0x0010
5811#define ATOM_PPLIB_CLASSIFICATION_LIMITEDPOWERSOURCE 0x0020
5812#define ATOM_PPLIB_CLASSIFICATION_REST 0x0040
5813#define ATOM_PPLIB_CLASSIFICATION_FORCED 0x0080
5814#define ATOM_PPLIB_CLASSIFICATION_3DPERFORMANCE 0x0100
5815#define ATOM_PPLIB_CLASSIFICATION_OVERDRIVETEMPLATE 0x0200
5816#define ATOM_PPLIB_CLASSIFICATION_UVDSTATE 0x0400
5817#define ATOM_PPLIB_CLASSIFICATION_3DLOW 0x0800
5818#define ATOM_PPLIB_CLASSIFICATION_ACPI 0x1000
5819// remaining 3 bits are reserved
5820
5821//// ATOM_PPLIB_NONCLOCK_INFO::ulCapsAndSettings
5822#define ATOM_PPLIB_SINGLE_DISPLAY_ONLY 0x00000001
5823#define ATOM_PPLIB_SUPPORTS_VIDEO_PLAYBACK 0x00000002
5824
5825// 0 is 2.5Gb/s, 1 is 5Gb/s
5826#define ATOM_PPLIB_PCIE_LINK_SPEED_MASK 0x00000004
5827#define ATOM_PPLIB_PCIE_LINK_SPEED_SHIFT 2
5828
 5829// stored as (lanes - 1); 1, 2, 4, 8, 12, 16 lanes permitted by the PCIe spec
5830#define ATOM_PPLIB_PCIE_LINK_WIDTH_MASK 0x000000F8
5831#define ATOM_PPLIB_PCIE_LINK_WIDTH_SHIFT 3
5832
5833// lookup into reduced refresh-rate table
5834#define ATOM_PPLIB_LIMITED_REFRESHRATE_VALUE_MASK 0x00000F00
5835#define ATOM_PPLIB_LIMITED_REFRESHRATE_VALUE_SHIFT 8
5836
5837#define ATOM_PPLIB_LIMITED_REFRESHRATE_UNLIMITED 0
5838#define ATOM_PPLIB_LIMITED_REFRESHRATE_50HZ 1
5839// 2-15 TBD as needed.
5840
5841#define ATOM_PPLIB_SOFTWARE_DISABLE_LOADBALANCING 0x00001000
5842#define ATOM_PPLIB_SOFTWARE_ENABLE_SLEEP_FOR_TIMESTAMPS 0x00002000
5843#define ATOM_PPLIB_ENABLE_VARIBRIGHT 0x00008000
5844
5845#define ATOM_PPLIB_DISALLOW_ON_DC 0x00004000
5846
5847// Contained in an array starting at the offset
5848// in ATOM_PPLIB_POWERPLAYTABLE::usNonClockInfoArrayOffset.
 5849// referenced from ATOM_PPLIB_STATE::ucNonClockStateIndex
5850typedef struct _ATOM_PPLIB_NONCLOCK_INFO
5851{
5852 USHORT usClassification;
5853 UCHAR ucMinTemperature;
5854 UCHAR ucMaxTemperature;
5855 ULONG ulCapsAndSettings;
5856 UCHAR ucRequiredPower;
5857 UCHAR ucUnused1[3];
5858} ATOM_PPLIB_NONCLOCK_INFO;
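
A small sketch of unpacking the ulCapsAndSettings bit fields defined above; the helper names are hypothetical and the value is assumed to already be in CPU byte order:

/* Hypothetical helpers: decode the ATOM_PPLIB_* fields packed into
 * ulCapsAndSettings. Link width is stored as (lanes - 1); link speed is
 * 0 = 2.5 Gb/s, 1 = 5 Gb/s. */
static unsigned int pplib_pcie_lanes(unsigned long caps)
{
        return ((caps & ATOM_PPLIB_PCIE_LINK_WIDTH_MASK) >>
                ATOM_PPLIB_PCIE_LINK_WIDTH_SHIFT) + 1;
}

static unsigned int pplib_pcie_gen2(unsigned long caps)
{
        return (caps & ATOM_PPLIB_PCIE_LINK_SPEED_MASK) >>
               ATOM_PPLIB_PCIE_LINK_SPEED_SHIFT;
}

static unsigned int pplib_refresh_index(unsigned long caps)
{
        return (caps & ATOM_PPLIB_LIMITED_REFRESHRATE_VALUE_MASK) >>
               ATOM_PPLIB_LIMITED_REFRESHRATE_VALUE_SHIFT;
}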
5859
5860// Contained in an array starting at the offset
5861// in ATOM_PPLIB_POWERPLAYTABLE::usClockInfoArrayOffset.
5862// referenced from ATOM_PPLIB_STATE::ucClockStateIndices
5863typedef struct _ATOM_PPLIB_R600_CLOCK_INFO
5864{
5865 USHORT usEngineClockLow;
5866 UCHAR ucEngineClockHigh;
5867
5868 USHORT usMemoryClockLow;
5869 UCHAR ucMemoryClockHigh;
5870
5871 USHORT usVDDC;
5872 USHORT usUnused1;
5873 USHORT usUnused2;
5874
5875 ULONG ulFlags; // ATOM_PPLIB_R600_FLAGS_*
5876
5877} ATOM_PPLIB_R600_CLOCK_INFO;
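
The engine and memory clocks above (and in the RS780 variant below) are carried as a 16-bit low word plus an 8-bit high byte. A minimal sketch of recombining them, with a hypothetical helper name and the fields assumed to already be in CPU byte order:

/* Hypothetical helper: rebuild a clock value from the split
 * usXxxClockLow / ucXxxClockHigh fields of the clock-info structures. */
static unsigned long pplib_clock(unsigned short low, unsigned char high)
{
        return ((unsigned long)high << 16) | low;
}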
5878
5879// ulFlags in ATOM_PPLIB_R600_CLOCK_INFO
5880#define ATOM_PPLIB_R600_FLAGS_PCIEGEN2 1
5881#define ATOM_PPLIB_R600_FLAGS_UVDSAFE 2
5882#define ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE 4
5883#define ATOM_PPLIB_R600_FLAGS_MEMORY_ODT_OFF 8
5884#define ATOM_PPLIB_R600_FLAGS_MEMORY_DLL_OFF 16
5885
 5886typedef struct _ATOM_PPLIB_RS780_CLOCK_INFO
 5888{
5889 USHORT usLowEngineClockLow; // Low Engine clock in MHz (the same way as on the R600).
5890 UCHAR ucLowEngineClockHigh;
5891 USHORT usHighEngineClockLow; // High Engine clock in MHz.
5892 UCHAR ucHighEngineClockHigh;
5893 USHORT usMemoryClockLow; // For now one of the ATOM_PPLIB_RS780_SPMCLK_XXXX constants.
 5894 UCHAR ucMemoryClockHigh; // Currently unused.
5895 UCHAR ucPadding; // For proper alignment and size.
5896 USHORT usVDDC; // For the 780, use: None, Low, High, Variable
5897 UCHAR ucMaxHTLinkWidth; // From SBIOS - {2, 4, 8, 16}
 5898 UCHAR ucMinHTLinkWidth; // From SBIOS - {2, 4, 8, 16}. Effective only if CDLW is enabled. The minimum downstream width may be larger due to display BW requirements.
 5899 USHORT usHTLinkFreq; // One of the ATOM_PPLIB_RS780_HTLINKFREQ_xxx selectors, or a frequency in MHz (>= 200).
5900 ULONG ulFlags;
5901} ATOM_PPLIB_RS780_CLOCK_INFO;
5902
5903#define ATOM_PPLIB_RS780_VOLTAGE_NONE 0
5904#define ATOM_PPLIB_RS780_VOLTAGE_LOW 1
5905#define ATOM_PPLIB_RS780_VOLTAGE_HIGH 2
5906#define ATOM_PPLIB_RS780_VOLTAGE_VARIABLE 3
5907
5908#define ATOM_PPLIB_RS780_SPMCLK_NONE 0 // We cannot change the side port memory clock, leave it as it is.
5909#define ATOM_PPLIB_RS780_SPMCLK_LOW 1
5910#define ATOM_PPLIB_RS780_SPMCLK_HIGH 2
5911
5912#define ATOM_PPLIB_RS780_HTLINKFREQ_NONE 0
5913#define ATOM_PPLIB_RS780_HTLINKFREQ_LOW 1
5914#define ATOM_PPLIB_RS780_HTLINKFREQ_HIGH 2
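
usHTLinkFreq in ATOM_PPLIB_RS780_CLOCK_INFO is overloaded: values below 200 are the HTLINKFREQ selectors above, while 200 or more is a frequency in MHz. A hedged sketch of the disambiguation, with a hypothetical helper name:

/* Hypothetical helper: return the HT link frequency in MHz, or 0 when
 * only an ATOM_PPLIB_RS780_HTLINKFREQ_* selector was supplied and the
 * actual frequency must be looked up elsewhere. */
static unsigned int rs780_htlink_freq_mhz(unsigned short ht_link_freq)
{
        if (ht_link_freq >= 200)
                return ht_link_freq;    /* explicit frequency in MHz */
        return 0;                       /* LOW/HIGH/NONE selector */
}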
4692 5915
4693/**************************************************************************/ 5916/**************************************************************************/
4694 5917
4695/* The following definitions are for compatibility issues across different SW components. */ 5918
 5919// The following definitions are for compatibility issues across different SW components.
4696#define ATOM_MASTER_DATA_TABLE_REVISION 0x01 5920#define ATOM_MASTER_DATA_TABLE_REVISION 0x01
4697#define Object_Info Object_Header 5921#define Object_Info Object_Header
4698#define AdjustARB_SEQ MC_InitParameter 5922#define AdjustARB_SEQ MC_InitParameter
4699#define VRAM_GPIO_DetectionInfo VoltageObjectInfo 5923#define VRAM_GPIO_DetectionInfo VoltageObjectInfo
4700#define ASIC_VDDCI_Info ASIC_ProfilingInfo 5924#define ASIC_VDDCI_Info ASIC_ProfilingInfo
4701#define ASIC_MVDDQ_Info MemoryTrainingInfo 5925#define ASIC_MVDDQ_Info MemoryTrainingInfo
4702#define SS_Info PPLL_SS_Info 5926#define SS_Info PPLL_SS_Info
4703#define ASIC_MVDDC_Info ASIC_InternalSS_Info 5927#define ASIC_MVDDC_Info ASIC_InternalSS_Info
4704#define DispDevicePriorityInfo SaveRestoreInfo 5928#define DispDevicePriorityInfo SaveRestoreInfo
4705#define DispOutInfo TV_VideoMode 5929#define DispOutInfo TV_VideoMode
4706 5930
5931
4707#define ATOM_ENCODER_OBJECT_TABLE ATOM_OBJECT_TABLE 5932#define ATOM_ENCODER_OBJECT_TABLE ATOM_OBJECT_TABLE
4708#define ATOM_CONNECTOR_OBJECT_TABLE ATOM_OBJECT_TABLE 5933#define ATOM_CONNECTOR_OBJECT_TABLE ATOM_OBJECT_TABLE
4709 5934
4710/* New device naming, remove them when both DAL/VBIOS is ready */ 5935//New device naming, remove them when both DAL/VBIOS is ready
4711#define DFP2I_OUTPUT_CONTROL_PARAMETERS CRT1_OUTPUT_CONTROL_PARAMETERS 5936#define DFP2I_OUTPUT_CONTROL_PARAMETERS CRT1_OUTPUT_CONTROL_PARAMETERS
4712#define DFP2I_OUTPUT_CONTROL_PS_ALLOCATION DFP2I_OUTPUT_CONTROL_PARAMETERS 5937#define DFP2I_OUTPUT_CONTROL_PS_ALLOCATION DFP2I_OUTPUT_CONTROL_PARAMETERS
4713 5938
@@ -4722,7 +5947,7 @@ typedef struct _ATOM_POWERPLAY_INFO_V3 {
4722 5947
4723#define ATOM_DEVICE_DFP1I_INDEX ATOM_DEVICE_DFP1_INDEX 5948#define ATOM_DEVICE_DFP1I_INDEX ATOM_DEVICE_DFP1_INDEX
4724#define ATOM_DEVICE_DFP1X_INDEX ATOM_DEVICE_DFP2_INDEX 5949#define ATOM_DEVICE_DFP1X_INDEX ATOM_DEVICE_DFP2_INDEX
4725 5950
4726#define ATOM_DEVICE_DFP2I_INDEX 0x00000009 5951#define ATOM_DEVICE_DFP2I_INDEX 0x00000009
4727#define ATOM_DEVICE_DFP2I_SUPPORT (0x1L << ATOM_DEVICE_DFP2I_INDEX) 5952#define ATOM_DEVICE_DFP2I_SUPPORT (0x1L << ATOM_DEVICE_DFP2I_INDEX)
4728 5953
@@ -4740,7 +5965,7 @@ typedef struct _ATOM_POWERPLAY_INFO_V3 {
4740 5965
4741#define ATOM_S3_DFP2I_ACTIVEb1 0x02 5966#define ATOM_S3_DFP2I_ACTIVEb1 0x02
4742 5967
4743#define ATOM_S3_DFP1I_ACTIVE ATOM_S3_DFP1_ACTIVE 5968#define ATOM_S3_DFP1I_ACTIVE ATOM_S3_DFP1_ACTIVE
4744#define ATOM_S3_DFP1X_ACTIVE ATOM_S3_DFP2_ACTIVE 5969#define ATOM_S3_DFP1X_ACTIVE ATOM_S3_DFP2_ACTIVE
4745 5970
4746#define ATOM_S3_DFP2I_ACTIVE 0x00000200L 5971#define ATOM_S3_DFP2I_ACTIVE 0x00000200L
@@ -4759,14 +5984,14 @@ typedef struct _ATOM_POWERPLAY_INFO_V3 {
4759#define ATOM_S6_ACC_REQ_DFP2Ib3 0x02 5984#define ATOM_S6_ACC_REQ_DFP2Ib3 0x02
4760#define ATOM_S6_ACC_REQ_DFP2I 0x02000000L 5985#define ATOM_S6_ACC_REQ_DFP2I 0x02000000L
4761 5986
4762#define TMDS1XEncoderControl DVOEncoderControl 5987#define TMDS1XEncoderControl DVOEncoderControl
4763#define DFP1XOutputControl DVOOutputControl 5988#define DFP1XOutputControl DVOOutputControl
4764 5989
4765#define ExternalDFPOutputControl DFP1XOutputControl 5990#define ExternalDFPOutputControl DFP1XOutputControl
4766#define EnableExternalTMDS_Encoder TMDS1XEncoderControl 5991#define EnableExternalTMDS_Encoder TMDS1XEncoderControl
4767 5992
4768#define DFP1IOutputControl TMDSAOutputControl 5993#define DFP1IOutputControl TMDSAOutputControl
4769#define DFP2IOutputControl LVTMAOutputControl 5994#define DFP2IOutputControl LVTMAOutputControl
4770 5995
4771#define DAC1_ENCODER_CONTROL_PARAMETERS DAC_ENCODER_CONTROL_PARAMETERS 5996#define DAC1_ENCODER_CONTROL_PARAMETERS DAC_ENCODER_CONTROL_PARAMETERS
4772#define DAC1_ENCODER_CONTROL_PS_ALLOCATION DAC_ENCODER_CONTROL_PS_ALLOCATION 5997#define DAC1_ENCODER_CONTROL_PS_ALLOCATION DAC_ENCODER_CONTROL_PS_ALLOCATION
@@ -4775,7 +6000,7 @@ typedef struct _ATOM_POWERPLAY_INFO_V3 {
4775#define DAC2_ENCODER_CONTROL_PS_ALLOCATION DAC_ENCODER_CONTROL_PS_ALLOCATION 6000#define DAC2_ENCODER_CONTROL_PS_ALLOCATION DAC_ENCODER_CONTROL_PS_ALLOCATION
4776 6001
4777#define ucDac1Standard ucDacStandard 6002#define ucDac1Standard ucDacStandard
4778#define ucDac2Standard ucDacStandard 6003#define ucDac2Standard ucDacStandard
4779 6004
4780#define TMDS1EncoderControl TMDSAEncoderControl 6005#define TMDS1EncoderControl TMDSAEncoderControl
4781#define TMDS2EncoderControl LVTMAEncoderControl 6006#define TMDS2EncoderControl LVTMAEncoderControl
@@ -4785,12 +6010,56 @@ typedef struct _ATOM_POWERPLAY_INFO_V3 {
4785#define CRT1OutputControl DAC1OutputControl 6010#define CRT1OutputControl DAC1OutputControl
4786#define CRT2OutputControl DAC2OutputControl 6011#define CRT2OutputControl DAC2OutputControl
4787 6012
4788/* These two lines will be removed for sure in a few days, will follow up with Michael V. */ 6013//These two lines will be removed for sure in a few days, will follow up with Michael V.
4789#define EnableLVDS_SS EnableSpreadSpectrumOnPPLL 6014#define EnableLVDS_SS EnableSpreadSpectrumOnPPLL
4790#define ENABLE_LVDS_SS_PARAMETERS_V3 ENABLE_SPREAD_SPECTRUM_ON_PPLL 6015#define ENABLE_LVDS_SS_PARAMETERS_V3 ENABLE_SPREAD_SPECTRUM_ON_PPLL
6016
6017//#define ATOM_S2_CRT1_DPMS_STATE 0x00010000L
6018//#define ATOM_S2_LCD1_DPMS_STATE ATOM_S2_CRT1_DPMS_STATE
6019//#define ATOM_S2_TV1_DPMS_STATE ATOM_S2_CRT1_DPMS_STATE
6020//#define ATOM_S2_DFP1_DPMS_STATE ATOM_S2_CRT1_DPMS_STATE
6021//#define ATOM_S2_CRT2_DPMS_STATE ATOM_S2_CRT1_DPMS_STATE
6022
6023#define ATOM_S6_ACC_REQ_TV2 0x00400000L
6024#define ATOM_DEVICE_TV2_INDEX 0x00000006
6025#define ATOM_DEVICE_TV2_SUPPORT (0x1L << ATOM_DEVICE_TV2_INDEX)
6026#define ATOM_S0_TV2 0x00100000L
6027#define ATOM_S3_TV2_ACTIVE ATOM_S3_DFP6_ACTIVE
6028#define ATOM_S3_TV2_CRTC_ACTIVE ATOM_S3_DFP6_CRTC_ACTIVE
6029
6030//
6031#define ATOM_S2_CRT1_DPMS_STATE 0x00010000L
6032#define ATOM_S2_LCD1_DPMS_STATE 0x00020000L
6033#define ATOM_S2_TV1_DPMS_STATE 0x00040000L
6034#define ATOM_S2_DFP1_DPMS_STATE 0x00080000L
6035#define ATOM_S2_CRT2_DPMS_STATE 0x00100000L
6036#define ATOM_S2_LCD2_DPMS_STATE 0x00200000L
6037#define ATOM_S2_TV2_DPMS_STATE 0x00400000L
6038#define ATOM_S2_DFP2_DPMS_STATE 0x00800000L
6039#define ATOM_S2_CV_DPMS_STATE 0x01000000L
6040#define ATOM_S2_DFP3_DPMS_STATE 0x02000000L
6041#define ATOM_S2_DFP4_DPMS_STATE 0x04000000L
6042#define ATOM_S2_DFP5_DPMS_STATE 0x08000000L
6043
6044#define ATOM_S2_CRT1_DPMS_STATEb2 0x01
6045#define ATOM_S2_LCD1_DPMS_STATEb2 0x02
6046#define ATOM_S2_TV1_DPMS_STATEb2 0x04
6047#define ATOM_S2_DFP1_DPMS_STATEb2 0x08
6048#define ATOM_S2_CRT2_DPMS_STATEb2 0x10
6049#define ATOM_S2_LCD2_DPMS_STATEb2 0x20
6050#define ATOM_S2_TV2_DPMS_STATEb2 0x40
6051#define ATOM_S2_DFP2_DPMS_STATEb2 0x80
6052#define ATOM_S2_CV_DPMS_STATEb3 0x01
6053#define ATOM_S2_DFP3_DPMS_STATEb3 0x02
6054#define ATOM_S2_DFP4_DPMS_STATEb3 0x04
6055#define ATOM_S2_DFP5_DPMS_STATEb3 0x08
6056
6057#define ATOM_S3_ASIC_GUI_ENGINE_HUNGb3 0x20
6058#define ATOM_S3_ALLOW_FAST_PWR_SWITCHb3 0x40
6059#define ATOM_S3_RQST_GPU_USE_MIN_PWRb3 0x80
4791 6060
4792/*********************************************************************************/ 6061/*********************************************************************************/
4793 6062
4794#pragma pack() /* BIOS data must use byte alignment */ 6063#pragma pack() // BIOS data must use byte alignment
4795 6064
4796#endif /* _ATOMBIOS_H */ 6065#endif /* _ATOMBIOS_H */
diff --git a/drivers/gpu/drm/radeon/atombios_crtc.c b/drivers/gpu/drm/radeon/atombios_crtc.c
index c15287a590ff..a87990b3ae84 100644
--- a/drivers/gpu/drm/radeon/atombios_crtc.c
+++ b/drivers/gpu/drm/radeon/atombios_crtc.c
@@ -241,27 +241,31 @@ void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
241{ 241{
242 struct drm_device *dev = crtc->dev; 242 struct drm_device *dev = crtc->dev;
243 struct radeon_device *rdev = dev->dev_private; 243 struct radeon_device *rdev = dev->dev_private;
244 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
244 245
245 switch (mode) { 246 switch (mode) {
246 case DRM_MODE_DPMS_ON: 247 case DRM_MODE_DPMS_ON:
247 atombios_enable_crtc(crtc, 1); 248 atombios_enable_crtc(crtc, ATOM_ENABLE);
248 if (ASIC_IS_DCE3(rdev)) 249 if (ASIC_IS_DCE3(rdev))
249 atombios_enable_crtc_memreq(crtc, 1); 250 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
250 atombios_blank_crtc(crtc, 0); 251 atombios_blank_crtc(crtc, ATOM_DISABLE);
252 /* XXX re-enable when interrupt support is added */
253 if (!ASIC_IS_DCE4(rdev))
254 drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
255 radeon_crtc_load_lut(crtc);
251 break; 256 break;
252 case DRM_MODE_DPMS_STANDBY: 257 case DRM_MODE_DPMS_STANDBY:
253 case DRM_MODE_DPMS_SUSPEND: 258 case DRM_MODE_DPMS_SUSPEND:
254 case DRM_MODE_DPMS_OFF: 259 case DRM_MODE_DPMS_OFF:
255 atombios_blank_crtc(crtc, 1); 260 /* XXX re-enable when interrupt support is added */
261 if (!ASIC_IS_DCE4(rdev))
262 drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
263 atombios_blank_crtc(crtc, ATOM_ENABLE);
256 if (ASIC_IS_DCE3(rdev)) 264 if (ASIC_IS_DCE3(rdev))
257 atombios_enable_crtc_memreq(crtc, 0); 265 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
258 atombios_enable_crtc(crtc, 0); 266 atombios_enable_crtc(crtc, ATOM_DISABLE);
259 break; 267 break;
260 } 268 }
261
262 if (mode != DRM_MODE_DPMS_OFF) {
263 radeon_crtc_load_lut(crtc);
264 }
265} 269}
266 270
267static void 271static void
@@ -307,7 +311,6 @@ atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
307 args.susModeMiscInfo.usAccess = cpu_to_le16(misc); 311 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
308 args.ucCRTC = radeon_crtc->crtc_id; 312 args.ucCRTC = radeon_crtc->crtc_id;
309 313
310 printk("executing set crtc dtd timing\n");
311 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 314 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
312} 315}
313 316
@@ -347,11 +350,58 @@ static void atombios_crtc_set_timing(struct drm_crtc *crtc,
347 args.susModeMiscInfo.usAccess = cpu_to_le16(misc); 350 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
348 args.ucCRTC = radeon_crtc->crtc_id; 351 args.ucCRTC = radeon_crtc->crtc_id;
349 352
350 printk("executing set crtc timing\n");
351 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 353 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
352} 354}
353 355
354static void atombios_set_ss(struct drm_crtc *crtc, int enable) 356static void atombios_disable_ss(struct drm_crtc *crtc)
357{
358 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
359 struct drm_device *dev = crtc->dev;
360 struct radeon_device *rdev = dev->dev_private;
361 u32 ss_cntl;
362
363 if (ASIC_IS_DCE4(rdev)) {
364 switch (radeon_crtc->pll_id) {
365 case ATOM_PPLL1:
366 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
367 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
368 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
369 break;
370 case ATOM_PPLL2:
371 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
372 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
373 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
374 break;
375 case ATOM_DCPLL:
376 case ATOM_PPLL_INVALID:
377 return;
378 }
379 } else if (ASIC_IS_AVIVO(rdev)) {
380 switch (radeon_crtc->pll_id) {
381 case ATOM_PPLL1:
382 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
383 ss_cntl &= ~1;
384 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
385 break;
386 case ATOM_PPLL2:
387 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
388 ss_cntl &= ~1;
389 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
390 break;
391 case ATOM_DCPLL:
392 case ATOM_PPLL_INVALID:
393 return;
394 }
395 }
396}
397
398
399union atom_enable_ss {
400 ENABLE_LVDS_SS_PARAMETERS legacy;
401 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
402};
403
404static void atombios_enable_ss(struct drm_crtc *crtc)
355{ 405{
356 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 406 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
357 struct drm_device *dev = crtc->dev; 407 struct drm_device *dev = crtc->dev;
@@ -360,11 +410,14 @@ static void atombios_set_ss(struct drm_crtc *crtc, int enable)
360 struct radeon_encoder *radeon_encoder = NULL; 410 struct radeon_encoder *radeon_encoder = NULL;
361 struct radeon_encoder_atom_dig *dig = NULL; 411 struct radeon_encoder_atom_dig *dig = NULL;
362 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL); 412 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
363 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION args; 413 union atom_enable_ss args;
364 ENABLE_LVDS_SS_PARAMETERS legacy_args;
365 uint16_t percentage = 0; 414 uint16_t percentage = 0;
366 uint8_t type = 0, step = 0, delay = 0, range = 0; 415 uint8_t type = 0, step = 0, delay = 0, range = 0;
367 416
417 /* XXX add ss support for DCE4 */
418 if (ASIC_IS_DCE4(rdev))
419 return;
420
368 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 421 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
369 if (encoder->crtc == crtc) { 422 if (encoder->crtc == crtc) {
370 radeon_encoder = to_radeon_encoder(encoder); 423 radeon_encoder = to_radeon_encoder(encoder);
@@ -377,9 +430,9 @@ static void atombios_set_ss(struct drm_crtc *crtc, int enable)
377 step = dig->ss->step; 430 step = dig->ss->step;
378 delay = dig->ss->delay; 431 delay = dig->ss->delay;
379 range = dig->ss->range; 432 range = dig->ss->range;
380 } else if (enable) 433 } else
381 return; 434 return;
382 } else if (enable) 435 } else
383 return; 436 return;
384 break; 437 break;
385 } 438 }
@@ -388,81 +441,96 @@ static void atombios_set_ss(struct drm_crtc *crtc, int enable)
388 if (!radeon_encoder) 441 if (!radeon_encoder)
389 return; 442 return;
390 443
444 memset(&args, 0, sizeof(args));
391 if (ASIC_IS_AVIVO(rdev)) { 445 if (ASIC_IS_AVIVO(rdev)) {
392 memset(&args, 0, sizeof(args)); 446 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(percentage);
393 args.usSpreadSpectrumPercentage = cpu_to_le16(percentage); 447 args.v1.ucSpreadSpectrumType = type;
394 args.ucSpreadSpectrumType = type; 448 args.v1.ucSpreadSpectrumStep = step;
395 args.ucSpreadSpectrumStep = step; 449 args.v1.ucSpreadSpectrumDelay = delay;
396 args.ucSpreadSpectrumDelay = delay; 450 args.v1.ucSpreadSpectrumRange = range;
397 args.ucSpreadSpectrumRange = range; 451 args.v1.ucPpll = radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
398 args.ucPpll = radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1; 452 args.v1.ucEnable = ATOM_ENABLE;
399 args.ucEnable = enable;
400 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
401 } else { 453 } else {
402 memset(&legacy_args, 0, sizeof(legacy_args)); 454 args.legacy.usSpreadSpectrumPercentage = cpu_to_le16(percentage);
403 legacy_args.usSpreadSpectrumPercentage = cpu_to_le16(percentage); 455 args.legacy.ucSpreadSpectrumType = type;
404 legacy_args.ucSpreadSpectrumType = type; 456 args.legacy.ucSpreadSpectrumStepSize_Delay = (step & 3) << 2;
405 legacy_args.ucSpreadSpectrumStepSize_Delay = (step & 3) << 2; 457 args.legacy.ucSpreadSpectrumStepSize_Delay |= (delay & 7) << 4;
406 legacy_args.ucSpreadSpectrumStepSize_Delay |= (delay & 7) << 4; 458 args.legacy.ucEnable = ATOM_ENABLE;
407 legacy_args.ucEnable = enable;
408 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&legacy_args);
409 } 459 }
460 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
410} 461}
411 462
412void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode) 463union adjust_pixel_clock {
464 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
465 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
466};
467
468static u32 atombios_adjust_pll(struct drm_crtc *crtc,
469 struct drm_display_mode *mode,
470 struct radeon_pll *pll)
413{ 471{
414 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
415 struct drm_device *dev = crtc->dev; 472 struct drm_device *dev = crtc->dev;
416 struct radeon_device *rdev = dev->dev_private; 473 struct radeon_device *rdev = dev->dev_private;
417 struct drm_encoder *encoder = NULL; 474 struct drm_encoder *encoder = NULL;
418 struct radeon_encoder *radeon_encoder = NULL; 475 struct radeon_encoder *radeon_encoder = NULL;
419 uint8_t frev, crev; 476 u32 adjusted_clock = mode->clock;
420 int index; 477 int encoder_mode = 0;
421 SET_PIXEL_CLOCK_PS_ALLOCATION args;
422 PIXEL_CLOCK_PARAMETERS *spc1_ptr;
423 PIXEL_CLOCK_PARAMETERS_V2 *spc2_ptr;
424 PIXEL_CLOCK_PARAMETERS_V3 *spc3_ptr;
425 uint32_t pll_clock = mode->clock;
426 uint32_t adjusted_clock;
427 uint32_t ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
428 struct radeon_pll *pll;
429 int pll_flags = 0;
430 478
431 memset(&args, 0, sizeof(args)); 479 /* reset the pll flags */
480 pll->flags = 0;
481
482 /* select the PLL algo */
483 if (ASIC_IS_AVIVO(rdev)) {
484 if (radeon_new_pll == 0)
485 pll->algo = PLL_ALGO_LEGACY;
486 else
487 pll->algo = PLL_ALGO_NEW;
488 } else {
489 if (radeon_new_pll == 1)
490 pll->algo = PLL_ALGO_NEW;
491 else
492 pll->algo = PLL_ALGO_LEGACY;
493 }
432 494
433 if (ASIC_IS_AVIVO(rdev)) { 495 if (ASIC_IS_AVIVO(rdev)) {
434 if ((rdev->family == CHIP_RS600) || 496 if ((rdev->family == CHIP_RS600) ||
435 (rdev->family == CHIP_RS690) || 497 (rdev->family == CHIP_RS690) ||
436 (rdev->family == CHIP_RS740)) 498 (rdev->family == CHIP_RS740))
437 pll_flags |= (RADEON_PLL_USE_FRAC_FB_DIV | 499 pll->flags |= (RADEON_PLL_USE_FRAC_FB_DIV |
438 RADEON_PLL_PREFER_CLOSEST_LOWER); 500 RADEON_PLL_PREFER_CLOSEST_LOWER);
439 501
440 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */ 502 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */
441 pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV; 503 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
442 else 504 else
443 pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV; 505 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
444 } else { 506 } else {
445 pll_flags |= RADEON_PLL_LEGACY; 507 pll->flags |= RADEON_PLL_LEGACY;
446 508
447 if (mode->clock > 200000) /* range limits??? */ 509 if (mode->clock > 200000) /* range limits??? */
448 pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV; 510 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
449 else 511 else
450 pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV; 512 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
451 513
452 } 514 }
453 515
454 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 516 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
455 if (encoder->crtc == crtc) { 517 if (encoder->crtc == crtc) {
456 if (!ASIC_IS_AVIVO(rdev)) {
457 if (encoder->encoder_type !=
458 DRM_MODE_ENCODER_DAC)
459 pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
460 if (!ASIC_IS_AVIVO(rdev)
461 && (encoder->encoder_type ==
462 DRM_MODE_ENCODER_LVDS))
463 pll_flags |= RADEON_PLL_USE_REF_DIV;
464 }
465 radeon_encoder = to_radeon_encoder(encoder); 518 radeon_encoder = to_radeon_encoder(encoder);
519 encoder_mode = atombios_get_encoder_mode(encoder);
520 if (ASIC_IS_AVIVO(rdev)) {
521 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
522 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
523 adjusted_clock = mode->clock * 2;
524 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) {
525 pll->algo = PLL_ALGO_LEGACY;
526 pll->flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
527 }
528 } else {
529 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
530 pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
531 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
532 pll->flags |= RADEON_PLL_USE_REF_DIV;
533 }
466 break; 534 break;
467 } 535 }
468 } 536 }
@@ -472,85 +540,231 @@ void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
472 * special hw requirements. 540 * special hw requirements.
473 */ 541 */
474 if (ASIC_IS_DCE3(rdev)) { 542 if (ASIC_IS_DCE3(rdev)) {
475 ADJUST_DISPLAY_PLL_PS_ALLOCATION adjust_pll_args; 543 union adjust_pixel_clock args;
544 u8 frev, crev;
545 int index;
546
547 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
548 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
549 &crev))
550 return adjusted_clock;
551
552 memset(&args, 0, sizeof(args));
553
554 switch (frev) {
555 case 1:
556 switch (crev) {
557 case 1:
558 case 2:
559 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
560 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
561 args.v1.ucEncodeMode = encoder_mode;
562
563 atom_execute_table(rdev->mode_info.atom_context,
564 index, (uint32_t *)&args);
565 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
566 break;
567 case 3:
568 args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
569 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
570 args.v3.sInput.ucEncodeMode = encoder_mode;
571 args.v3.sInput.ucDispPllConfig = 0;
572 if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
573 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
574
575 if (encoder_mode == ATOM_ENCODER_MODE_DP)
576 args.v3.sInput.ucDispPllConfig |=
577 DISPPLL_CONFIG_COHERENT_MODE;
578 else {
579 if (dig->coherent_mode)
580 args.v3.sInput.ucDispPllConfig |=
581 DISPPLL_CONFIG_COHERENT_MODE;
582 if (mode->clock > 165000)
583 args.v3.sInput.ucDispPllConfig |=
584 DISPPLL_CONFIG_DUAL_LINK;
585 }
586 } else if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
587 /* may want to enable SS on DP/eDP eventually */
588 /*args.v3.sInput.ucDispPllConfig |=
589 DISPPLL_CONFIG_SS_ENABLE;*/
590 if (encoder_mode == ATOM_ENCODER_MODE_DP)
591 args.v3.sInput.ucDispPllConfig |=
592 DISPPLL_CONFIG_COHERENT_MODE;
593 else {
594 if (mode->clock > 165000)
595 args.v3.sInput.ucDispPllConfig |=
596 DISPPLL_CONFIG_DUAL_LINK;
597 }
598 }
599 atom_execute_table(rdev->mode_info.atom_context,
600 index, (uint32_t *)&args);
601 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
602 if (args.v3.sOutput.ucRefDiv) {
603 pll->flags |= RADEON_PLL_USE_REF_DIV;
604 pll->reference_div = args.v3.sOutput.ucRefDiv;
605 }
606 if (args.v3.sOutput.ucPostDiv) {
607 pll->flags |= RADEON_PLL_USE_POST_DIV;
608 pll->post_div = args.v3.sOutput.ucPostDiv;
609 }
610 break;
611 default:
612 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
613 return adjusted_clock;
614 }
615 break;
616 default:
617 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
618 return adjusted_clock;
619 }
620 }
621 return adjusted_clock;
622}
623
624union set_pixel_clock {
625 SET_PIXEL_CLOCK_PS_ALLOCATION base;
626 PIXEL_CLOCK_PARAMETERS v1;
627 PIXEL_CLOCK_PARAMETERS_V2 v2;
628 PIXEL_CLOCK_PARAMETERS_V3 v3;
629 PIXEL_CLOCK_PARAMETERS_V5 v5;
630};
631
632static void atombios_crtc_set_dcpll(struct drm_crtc *crtc)
633{
634 struct drm_device *dev = crtc->dev;
635 struct radeon_device *rdev = dev->dev_private;
636 u8 frev, crev;
637 int index;
638 union set_pixel_clock args;
476 639
477 if (!encoder) 640 memset(&args, 0, sizeof(args));
641
642 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
643 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
644 &crev))
645 return;
646
647 switch (frev) {
648 case 1:
649 switch (crev) {
650 case 5:
651 /* if the default dcpll clock is specified,
652 * SetPixelClock provides the dividers
653 */
654 args.v5.ucCRTC = ATOM_CRTC_INVALID;
655 args.v5.usPixelClock = rdev->clock.default_dispclk;
656 args.v5.ucPpll = ATOM_DCPLL;
657 break;
658 default:
659 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
478 return; 660 return;
661 }
662 break;
663 default:
664 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
665 return;
666 }
667 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
668}
479 669
480 memset(&adjust_pll_args, 0, sizeof(adjust_pll_args)); 670static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
481 adjust_pll_args.usPixelClock = cpu_to_le16(mode->clock / 10); 671{
482 adjust_pll_args.ucTransmitterID = radeon_encoder->encoder_id; 672 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
483 adjust_pll_args.ucEncodeMode = atombios_get_encoder_mode(encoder); 673 struct drm_device *dev = crtc->dev;
674 struct radeon_device *rdev = dev->dev_private;
675 struct drm_encoder *encoder = NULL;
676 struct radeon_encoder *radeon_encoder = NULL;
677 u8 frev, crev;
678 int index;
679 union set_pixel_clock args;
680 u32 pll_clock = mode->clock;
681 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
682 struct radeon_pll *pll;
683 u32 adjusted_clock;
684 int encoder_mode = 0;
484 685
485 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll); 686 memset(&args, 0, sizeof(args));
486 atom_execute_table(rdev->mode_info.atom_context, 687
487 index, (uint32_t *)&adjust_pll_args); 688 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
488 adjusted_clock = le16_to_cpu(adjust_pll_args.usPixelClock) * 10; 689 if (encoder->crtc == crtc) {
489 } else { 690 radeon_encoder = to_radeon_encoder(encoder);
490 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */ 691 encoder_mode = atombios_get_encoder_mode(encoder);
491 if (ASIC_IS_AVIVO(rdev) && 692 break;
492 (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)) 693 }
493 adjusted_clock = mode->clock * 2;
494 else
495 adjusted_clock = mode->clock;
496 } 694 }
497 695
498 if (radeon_crtc->crtc_id == 0) 696 if (!radeon_encoder)
697 return;
698
699 switch (radeon_crtc->pll_id) {
700 case ATOM_PPLL1:
499 pll = &rdev->clock.p1pll; 701 pll = &rdev->clock.p1pll;
500 else 702 break;
703 case ATOM_PPLL2:
501 pll = &rdev->clock.p2pll; 704 pll = &rdev->clock.p2pll;
705 break;
706 case ATOM_DCPLL:
707 case ATOM_PPLL_INVALID:
708 pll = &rdev->clock.dcpll;
709 break;
710 }
711
712 /* adjust pixel clock as needed */
713 adjusted_clock = atombios_adjust_pll(crtc, mode, pll);
502 714
503 radeon_compute_pll(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div, 715 radeon_compute_pll(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
504 &ref_div, &post_div, pll_flags); 716 &ref_div, &post_div);
505 717
506 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock); 718 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
507 atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, 719 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
508 &crev); 720 &crev))
721 return;
509 722
510 switch (frev) { 723 switch (frev) {
511 case 1: 724 case 1:
512 switch (crev) { 725 switch (crev) {
513 case 1: 726 case 1:
514 spc1_ptr = (PIXEL_CLOCK_PARAMETERS *) & args.sPCLKInput; 727 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
515 spc1_ptr->usPixelClock = cpu_to_le16(mode->clock / 10); 728 args.v1.usRefDiv = cpu_to_le16(ref_div);
516 spc1_ptr->usRefDiv = cpu_to_le16(ref_div); 729 args.v1.usFbDiv = cpu_to_le16(fb_div);
517 spc1_ptr->usFbDiv = cpu_to_le16(fb_div); 730 args.v1.ucFracFbDiv = frac_fb_div;
518 spc1_ptr->ucFracFbDiv = frac_fb_div; 731 args.v1.ucPostDiv = post_div;
519 spc1_ptr->ucPostDiv = post_div; 732 args.v1.ucPpll = radeon_crtc->pll_id;
520 spc1_ptr->ucPpll = 733 args.v1.ucCRTC = radeon_crtc->crtc_id;
521 radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1; 734 args.v1.ucRefDivSrc = 1;
522 spc1_ptr->ucCRTC = radeon_crtc->crtc_id;
523 spc1_ptr->ucRefDivSrc = 1;
524 break; 735 break;
525 case 2: 736 case 2:
526 spc2_ptr = 737 args.v2.usPixelClock = cpu_to_le16(mode->clock / 10);
527 (PIXEL_CLOCK_PARAMETERS_V2 *) & args.sPCLKInput; 738 args.v2.usRefDiv = cpu_to_le16(ref_div);
528 spc2_ptr->usPixelClock = cpu_to_le16(mode->clock / 10); 739 args.v2.usFbDiv = cpu_to_le16(fb_div);
529 spc2_ptr->usRefDiv = cpu_to_le16(ref_div); 740 args.v2.ucFracFbDiv = frac_fb_div;
530 spc2_ptr->usFbDiv = cpu_to_le16(fb_div); 741 args.v2.ucPostDiv = post_div;
531 spc2_ptr->ucFracFbDiv = frac_fb_div; 742 args.v2.ucPpll = radeon_crtc->pll_id;
532 spc2_ptr->ucPostDiv = post_div; 743 args.v2.ucCRTC = radeon_crtc->crtc_id;
533 spc2_ptr->ucPpll = 744 args.v2.ucRefDivSrc = 1;
534 radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
535 spc2_ptr->ucCRTC = radeon_crtc->crtc_id;
536 spc2_ptr->ucRefDivSrc = 1;
537 break; 745 break;
538 case 3: 746 case 3:
539 if (!encoder) 747 args.v3.usPixelClock = cpu_to_le16(mode->clock / 10);
540 return; 748 args.v3.usRefDiv = cpu_to_le16(ref_div);
541 spc3_ptr = 749 args.v3.usFbDiv = cpu_to_le16(fb_div);
542 (PIXEL_CLOCK_PARAMETERS_V3 *) & args.sPCLKInput; 750 args.v3.ucFracFbDiv = frac_fb_div;
543 spc3_ptr->usPixelClock = cpu_to_le16(mode->clock / 10); 751 args.v3.ucPostDiv = post_div;
544 spc3_ptr->usRefDiv = cpu_to_le16(ref_div); 752 args.v3.ucPpll = radeon_crtc->pll_id;
545 spc3_ptr->usFbDiv = cpu_to_le16(fb_div); 753 args.v3.ucMiscInfo = (radeon_crtc->pll_id << 2);
546 spc3_ptr->ucFracFbDiv = frac_fb_div; 754 args.v3.ucTransmitterId = radeon_encoder->encoder_id;
547 spc3_ptr->ucPostDiv = post_div; 755 args.v3.ucEncoderMode = encoder_mode;
548 spc3_ptr->ucPpll = 756 break;
549 radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1; 757 case 5:
550 spc3_ptr->ucMiscInfo = (radeon_crtc->crtc_id << 2); 758 args.v5.ucCRTC = radeon_crtc->crtc_id;
551 spc3_ptr->ucTransmitterId = radeon_encoder->encoder_id; 759 args.v5.usPixelClock = cpu_to_le16(mode->clock / 10);
552 spc3_ptr->ucEncoderMode = 760 args.v5.ucRefDiv = ref_div;
553 atombios_get_encoder_mode(encoder); 761 args.v5.usFbDiv = cpu_to_le16(fb_div);
762 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
763 args.v5.ucPostDiv = post_div;
764 args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
765 args.v5.ucTransmitterID = radeon_encoder->encoder_id;
766 args.v5.ucEncoderMode = encoder_mode;
767 args.v5.ucPpll = radeon_crtc->pll_id;
554 break; 768 break;
555 default: 769 default:
556 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 770 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
@@ -562,33 +776,177 @@ void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
562 return; 776 return;
563 } 777 }
564 778
565 printk("executing set pll\n");
566 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 779 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
567} 780}
568 781
569int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y, 782static int evergreen_crtc_set_base(struct drm_crtc *crtc, int x, int y,
570 struct drm_framebuffer *old_fb) 783 struct drm_framebuffer *old_fb)
571{ 784{
572 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 785 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
573 struct drm_device *dev = crtc->dev; 786 struct drm_device *dev = crtc->dev;
574 struct radeon_device *rdev = dev->dev_private; 787 struct radeon_device *rdev = dev->dev_private;
575 struct radeon_framebuffer *radeon_fb; 788 struct radeon_framebuffer *radeon_fb;
576 struct drm_gem_object *obj; 789 struct drm_gem_object *obj;
577 struct drm_radeon_gem_object *obj_priv; 790 struct radeon_bo *rbo;
578 uint64_t fb_location; 791 uint64_t fb_location;
579 uint32_t fb_format, fb_pitch_pixels, tiling_flags; 792 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
793 int r;
580 794
581 if (!crtc->fb) 795 /* no fb bound */
582 return -EINVAL; 796 if (!crtc->fb) {
797 DRM_DEBUG("No FB bound\n");
798 return 0;
799 }
583 800
584 radeon_fb = to_radeon_framebuffer(crtc->fb); 801 radeon_fb = to_radeon_framebuffer(crtc->fb);
585 802
 803 /* Pin framebuffer & get tiling information */
586 obj = radeon_fb->obj; 804 obj = radeon_fb->obj;
587 obj_priv = obj->driver_private; 805 rbo = obj->driver_private;
806 r = radeon_bo_reserve(rbo, false);
807 if (unlikely(r != 0))
808 return r;
809 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
810 if (unlikely(r != 0)) {
811 radeon_bo_unreserve(rbo);
812 return -EINVAL;
813 }
814 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
815 radeon_bo_unreserve(rbo);
816
817 switch (crtc->fb->bits_per_pixel) {
818 case 8:
819 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
820 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
821 break;
822 case 15:
823 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
824 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
825 break;
826 case 16:
827 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
828 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
829 break;
830 case 24:
831 case 32:
832 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
833 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
834 break;
835 default:
836 DRM_ERROR("Unsupported screen depth %d\n",
837 crtc->fb->bits_per_pixel);
838 return -EINVAL;
839 }
840
841 switch (radeon_crtc->crtc_id) {
842 case 0:
843 WREG32(AVIVO_D1VGA_CONTROL, 0);
844 break;
845 case 1:
846 WREG32(AVIVO_D2VGA_CONTROL, 0);
847 break;
848 case 2:
849 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
850 break;
851 case 3:
852 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
853 break;
854 case 4:
855 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
856 break;
857 case 5:
858 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
859 break;
860 default:
861 break;
862 }
863
864 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
865 upper_32_bits(fb_location));
866 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
867 upper_32_bits(fb_location));
868 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
869 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
870 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
871 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
872 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
873
874 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
875 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
876 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
877 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
878 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
879 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);
880
881 fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
882 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
883 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
884
885 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
886 crtc->mode.vdisplay);
887 x &= ~3;
888 y &= ~1;
889 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
890 (x << 16) | y);
891 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
892 (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);
893
894 if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
895 WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset,
896 EVERGREEN_INTERLEAVE_EN);
897 else
898 WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset, 0);
899
900 if (old_fb && old_fb != crtc->fb) {
901 radeon_fb = to_radeon_framebuffer(old_fb);
902 rbo = radeon_fb->obj->driver_private;
903 r = radeon_bo_reserve(rbo, false);
904 if (unlikely(r != 0))
905 return r;
906 radeon_bo_unpin(rbo);
907 radeon_bo_unreserve(rbo);
908 }
909
910 /* Bytes per pixel may have changed */
911 radeon_bandwidth_update(rdev);
912
913 return 0;
914}
915
916static int avivo_crtc_set_base(struct drm_crtc *crtc, int x, int y,
917 struct drm_framebuffer *old_fb)
918{
919 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
920 struct drm_device *dev = crtc->dev;
921 struct radeon_device *rdev = dev->dev_private;
922 struct radeon_framebuffer *radeon_fb;
923 struct drm_gem_object *obj;
924 struct radeon_bo *rbo;
925 uint64_t fb_location;
926 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
927 int r;
928
929 /* no fb bound */
930 if (!crtc->fb) {
931 DRM_DEBUG("No FB bound\n");
932 return 0;
933 }
588 934
589 if (radeon_gem_object_pin(obj, RADEON_GEM_DOMAIN_VRAM, &fb_location)) { 935 radeon_fb = to_radeon_framebuffer(crtc->fb);
936
 937 /* Pin framebuffer & get tiling information */
938 obj = radeon_fb->obj;
939 rbo = obj->driver_private;
940 r = radeon_bo_reserve(rbo, false);
941 if (unlikely(r != 0))
942 return r;
943 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
944 if (unlikely(r != 0)) {
945 radeon_bo_unreserve(rbo);
590 return -EINVAL; 946 return -EINVAL;
591 } 947 }
948 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
949 radeon_bo_unreserve(rbo);
592 950
593 switch (crtc->fb->bits_per_pixel) { 951 switch (crtc->fb->bits_per_pixel) {
594 case 8: 952 case 8:
@@ -618,8 +976,6 @@ int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
618 return -EINVAL; 976 return -EINVAL;
619 } 977 }
620 978
621 radeon_object_get_tiling_flags(obj->driver_private,
622 &tiling_flags, NULL);
623 if (tiling_flags & RADEON_TILING_MACRO) 979 if (tiling_flags & RADEON_TILING_MACRO)
624 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE; 980 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
625 981
@@ -674,7 +1030,12 @@ int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
674 1030
675 if (old_fb && old_fb != crtc->fb) { 1031 if (old_fb && old_fb != crtc->fb) {
676 radeon_fb = to_radeon_framebuffer(old_fb); 1032 radeon_fb = to_radeon_framebuffer(old_fb);
677 radeon_gem_object_unpin(radeon_fb->obj); 1033 rbo = radeon_fb->obj->driver_private;
1034 r = radeon_bo_reserve(rbo, false);
1035 if (unlikely(r != 0))
1036 return r;
1037 radeon_bo_unpin(rbo);
1038 radeon_bo_unreserve(rbo);
678 } 1039 }
679 1040
680 /* Bytes per pixel may have changed */ 1041 /* Bytes per pixel may have changed */
@@ -683,6 +1044,84 @@ int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
683 return 0; 1044 return 0;
684} 1045}
685 1046
1047int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1048 struct drm_framebuffer *old_fb)
1049{
1050 struct drm_device *dev = crtc->dev;
1051 struct radeon_device *rdev = dev->dev_private;
1052
1053 if (ASIC_IS_DCE4(rdev))
1054 return evergreen_crtc_set_base(crtc, x, y, old_fb);
1055 else if (ASIC_IS_AVIVO(rdev))
1056 return avivo_crtc_set_base(crtc, x, y, old_fb);
1057 else
1058 return radeon_crtc_set_base(crtc, x, y, old_fb);
1059}
1060
1061/* properly set additional regs when using atombios */
1062static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1063{
1064 struct drm_device *dev = crtc->dev;
1065 struct radeon_device *rdev = dev->dev_private;
1066 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1067 u32 disp_merge_cntl;
1068
1069 switch (radeon_crtc->crtc_id) {
1070 case 0:
1071 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1072 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1073 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1074 break;
1075 case 1:
1076 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1077 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1078 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1079 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1080 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1081 break;
1082 }
1083}
1084
1085static int radeon_atom_pick_pll(struct drm_crtc *crtc)
1086{
1087 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1088 struct drm_device *dev = crtc->dev;
1089 struct radeon_device *rdev = dev->dev_private;
1090 struct drm_encoder *test_encoder;
1091 struct drm_crtc *test_crtc;
1092 uint32_t pll_in_use = 0;
1093
1094 if (ASIC_IS_DCE4(rdev)) {
1095 /* if crtc is driving DP and we have an ext clock, use that */
1096 list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
1097 if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
1098 if (atombios_get_encoder_mode(test_encoder) == ATOM_ENCODER_MODE_DP) {
1099 if (rdev->clock.dp_extclk)
1100 return ATOM_PPLL_INVALID;
1101 }
1102 }
1103 }
1104
1105 /* otherwise, pick one of the plls */
1106 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1107 struct radeon_crtc *radeon_test_crtc;
1108
1109 if (crtc == test_crtc)
1110 continue;
1111
1112 radeon_test_crtc = to_radeon_crtc(test_crtc);
1113 if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
1114 (radeon_test_crtc->pll_id <= ATOM_PPLL2))
1115 pll_in_use |= (1 << radeon_test_crtc->pll_id);
1116 }
1117 if (!(pll_in_use & 1))
1118 return ATOM_PPLL1;
1119 return ATOM_PPLL2;
1120 } else
1121 return radeon_crtc->crtc_id;
1122
1123}
1124
686int atombios_crtc_mode_set(struct drm_crtc *crtc, 1125int atombios_crtc_mode_set(struct drm_crtc *crtc,
687 struct drm_display_mode *mode, 1126 struct drm_display_mode *mode,
688 struct drm_display_mode *adjusted_mode, 1127 struct drm_display_mode *adjusted_mode,
@@ -694,19 +1133,24 @@ int atombios_crtc_mode_set(struct drm_crtc *crtc,
694 1133
695 /* TODO color tiling */ 1134 /* TODO color tiling */
696 1135
697 atombios_set_ss(crtc, 0); 1136 atombios_disable_ss(crtc);
1137 /* always set DCPLL */
1138 if (ASIC_IS_DCE4(rdev))
1139 atombios_crtc_set_dcpll(crtc);
698 atombios_crtc_set_pll(crtc, adjusted_mode); 1140 atombios_crtc_set_pll(crtc, adjusted_mode);
699 atombios_set_ss(crtc, 1); 1141 atombios_enable_ss(crtc);
700 atombios_crtc_set_timing(crtc, adjusted_mode);
701 1142
702 if (ASIC_IS_AVIVO(rdev)) 1143 if (ASIC_IS_DCE4(rdev))
703 atombios_crtc_set_base(crtc, x, y, old_fb); 1144 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1145 else if (ASIC_IS_AVIVO(rdev))
1146 atombios_crtc_set_timing(crtc, adjusted_mode);
704 else { 1147 else {
1148 atombios_crtc_set_timing(crtc, adjusted_mode);
705 if (radeon_crtc->crtc_id == 0) 1149 if (radeon_crtc->crtc_id == 0)
706 atombios_set_crtc_dtd_timing(crtc, adjusted_mode); 1150 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
707 radeon_crtc_set_base(crtc, x, y, old_fb); 1151 radeon_legacy_atom_fixup(crtc);
708 radeon_legacy_atom_set_surface(crtc);
709 } 1152 }
1153 atombios_crtc_set_base(crtc, x, y, old_fb);
710 atombios_overscan_setup(crtc, mode, adjusted_mode); 1154 atombios_overscan_setup(crtc, mode, adjusted_mode);
711 atombios_scaler_setup(crtc); 1155 atombios_scaler_setup(crtc);
712 return 0; 1156 return 0;
@@ -723,14 +1167,19 @@ static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
723 1167
724static void atombios_crtc_prepare(struct drm_crtc *crtc) 1168static void atombios_crtc_prepare(struct drm_crtc *crtc)
725{ 1169{
1170 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1171
1172 /* pick pll */
1173 radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
1174
1175 atombios_lock_crtc(crtc, ATOM_ENABLE);
726 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF); 1176 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
727 atombios_lock_crtc(crtc, 1);
728} 1177}
729 1178
730static void atombios_crtc_commit(struct drm_crtc *crtc) 1179static void atombios_crtc_commit(struct drm_crtc *crtc)
731{ 1180{
732 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON); 1181 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
733 atombios_lock_crtc(crtc, 0); 1182 atombios_lock_crtc(crtc, ATOM_DISABLE);
734} 1183}
735 1184
736static const struct drm_crtc_helper_funcs atombios_helper_funcs = { 1185static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
@@ -746,8 +1195,37 @@ static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
746void radeon_atombios_init_crtc(struct drm_device *dev, 1195void radeon_atombios_init_crtc(struct drm_device *dev,
747 struct radeon_crtc *radeon_crtc) 1196 struct radeon_crtc *radeon_crtc)
748{ 1197{
749 if (radeon_crtc->crtc_id == 1) 1198 struct radeon_device *rdev = dev->dev_private;
750 radeon_crtc->crtc_offset = 1199
751 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL; 1200 if (ASIC_IS_DCE4(rdev)) {
1201 switch (radeon_crtc->crtc_id) {
1202 case 0:
1203 default:
1204 radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
1205 break;
1206 case 1:
1207 radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
1208 break;
1209 case 2:
1210 radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
1211 break;
1212 case 3:
1213 radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
1214 break;
1215 case 4:
1216 radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
1217 break;
1218 case 5:
1219 radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
1220 break;
1221 }
1222 } else {
1223 if (radeon_crtc->crtc_id == 1)
1224 radeon_crtc->crtc_offset =
1225 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
1226 else
1227 radeon_crtc->crtc_offset = 0;
1228 }
1229 radeon_crtc->pll_id = -1;
752 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs); 1230 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
753} 1231}
diff --git a/drivers/gpu/drm/radeon/atombios_dp.c b/drivers/gpu/drm/radeon/atombios_dp.c
new file mode 100644
index 000000000000..28b31c64f48d
--- /dev/null
+++ b/drivers/gpu/drm/radeon/atombios_dp.c
@@ -0,0 +1,809 @@
1/*
2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 */
26#include "drmP.h"
27#include "radeon_drm.h"
28#include "radeon.h"
29
30#include "atom.h"
31#include "atom-bits.h"
32#include "drm_dp_helper.h"
33
34/* move these to drm_dp_helper.c/h */
35#define DP_LINK_CONFIGURATION_SIZE 9
36#define DP_LINK_STATUS_SIZE 6
37#define DP_DPCD_SIZE 8
38
39static char *voltage_names[] = {
40 "0.4V", "0.6V", "0.8V", "1.2V"
41};
42static char *pre_emph_names[] = {
43 "0dB", "3.5dB", "6dB", "9.5dB"
44};
45
46static const int dp_clocks[] = {
  47	54000, /* 1 lane, 1.62 GHz */
  48	90000, /* 1 lane, 2.70 GHz */
  49	108000, /* 2 lane, 1.62 GHz */
  50	180000, /* 2 lane, 2.70 GHz */
  51	216000, /* 4 lane, 1.62 GHz */
  52	360000, /* 4 lane, 2.70 GHz */
53};
54
55static const int num_dp_clocks = sizeof(dp_clocks) / sizeof(int);
56
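/* The dp_clocks[] table above lists the highest pixel clock (kHz) each
 * lane-count/link-rate pair can carry, assuming a 24 bpp stream: the
 * per-lane payload after 8b/10b coding is 162 MB/s at 1.62 GHz and
 * 270 MB/s at 2.70 GHz, and each pixel needs 3 bytes.  For example,
 * 4 lanes at 2.70 GHz give 4 * 270000 / 3 = 360000 kHz, the last entry.
 * Even-indexed entries are the 1.62 GHz rates and odd-indexed entries the
 * 2.70 GHz rates, which is why the 1.62 GHz paths below skip odd indices.
 */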
57/* common helper functions */
58static int dp_lanes_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock)
59{
60 int i;
61 u8 max_link_bw;
62 u8 max_lane_count;
63
64 if (!dpcd)
65 return 0;
66
67 max_link_bw = dpcd[DP_MAX_LINK_RATE];
68 max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
69
70 switch (max_link_bw) {
71 case DP_LINK_BW_1_62:
72 default:
73 for (i = 0; i < num_dp_clocks; i++) {
74 if (i % 2)
75 continue;
76 switch (max_lane_count) {
77 case 1:
78 if (i > 1)
79 return 0;
80 break;
81 case 2:
82 if (i > 3)
83 return 0;
84 break;
85 case 4:
86 default:
87 break;
88 }
89 if (dp_clocks[i] > mode_clock) {
90 if (i < 2)
91 return 1;
92 else if (i < 4)
93 return 2;
94 else
95 return 4;
96 }
97 }
98 break;
99 case DP_LINK_BW_2_7:
100 for (i = 0; i < num_dp_clocks; i++) {
101 switch (max_lane_count) {
102 case 1:
103 if (i > 1)
104 return 0;
105 break;
106 case 2:
107 if (i > 3)
108 return 0;
109 break;
110 case 4:
111 default:
112 break;
113 }
114 if (dp_clocks[i] > mode_clock) {
115 if (i < 2)
116 return 1;
117 else if (i < 4)
118 return 2;
119 else
120 return 4;
121 }
122 }
123 break;
124 }
125
126 return 0;
127}
128
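/* dp_lanes_for_mode_clock() above walks dp_clocks[] and returns the
 * smallest lane count (1, 2 or 4) whose bandwidth covers mode_clock,
 * limited by the sink's DP_MAX_LANE_COUNT and DP_MAX_LINK_RATE from the
 * DPCD; it returns 0 when the mode cannot be carried at all.
 * dp_link_clock_for_mode_clock() below does the same walk but returns
 * the link symbol clock instead: 162000 kHz for the 1.62 GHz rate or
 * 270000 kHz for the 2.70 GHz rate.
 */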
129static int dp_link_clock_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock)
130{
131 int i;
132 u8 max_link_bw;
133 u8 max_lane_count;
134
135 if (!dpcd)
136 return 0;
137
138 max_link_bw = dpcd[DP_MAX_LINK_RATE];
139 max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
140
141 switch (max_link_bw) {
142 case DP_LINK_BW_1_62:
143 default:
144 for (i = 0; i < num_dp_clocks; i++) {
145 if (i % 2)
146 continue;
147 switch (max_lane_count) {
148 case 1:
149 if (i > 1)
150 return 0;
151 break;
152 case 2:
153 if (i > 3)
154 return 0;
155 break;
156 case 4:
157 default:
158 break;
159 }
160 if (dp_clocks[i] > mode_clock)
161 return 162000;
162 }
163 break;
164 case DP_LINK_BW_2_7:
165 for (i = 0; i < num_dp_clocks; i++) {
166 switch (max_lane_count) {
167 case 1:
168 if (i > 1)
169 return 0;
170 break;
171 case 2:
172 if (i > 3)
173 return 0;
174 break;
175 case 4:
176 default:
177 break;
178 }
179 if (dp_clocks[i] > mode_clock)
180 return (i % 2) ? 270000 : 162000;
181 }
182 }
183
184 return 0;
185}
186
187int dp_mode_valid(u8 dpcd[DP_DPCD_SIZE], int mode_clock)
188{
189 int lanes = dp_lanes_for_mode_clock(dpcd, mode_clock);
 190	int bw = dp_link_clock_for_mode_clock(dpcd, mode_clock);
191
192 if ((lanes == 0) || (bw == 0))
193 return MODE_CLOCK_HIGH;
194
195 return MODE_OK;
196}
197
198static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
199{
200 return link_status[r - DP_LANE0_1_STATUS];
201}
202
203static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
204 int lane)
205{
206 int i = DP_LANE0_1_STATUS + (lane >> 1);
207 int s = (lane & 1) * 4;
208 u8 l = dp_link_status(link_status, i);
209 return (l >> s) & 0xf;
210}
211
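/* The DPCD packs two lanes of status into each byte starting at
 * DP_LANE0_1_STATUS: an even lane uses bits [3:0] of byte lane/2, an odd
 * lane bits [7:4], which is what dp_get_lane_status() above extracts.
 * The checks below require CR_DONE on every lane for clock recovery, and
 * the full DP_CHANNEL_EQ_BITS mask on every lane plus the
 * INTERLANE_ALIGN_DONE bit for channel equalization, matching the usual
 * DP training requirements.
 */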
212static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
213 int lane_count)
214{
215 int lane;
216 u8 lane_status;
217
218 for (lane = 0; lane < lane_count; lane++) {
219 lane_status = dp_get_lane_status(link_status, lane);
220 if ((lane_status & DP_LANE_CR_DONE) == 0)
221 return false;
222 }
223 return true;
224}
225
226static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
227 int lane_count)
228{
229 u8 lane_align;
230 u8 lane_status;
231 int lane;
232
233 lane_align = dp_link_status(link_status,
234 DP_LANE_ALIGN_STATUS_UPDATED);
235 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
236 return false;
237 for (lane = 0; lane < lane_count; lane++) {
238 lane_status = dp_get_lane_status(link_status, lane);
239 if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
240 return false;
241 }
242 return true;
243}
244
245static u8 dp_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
246 int lane)
247
248{
249 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
250 int s = ((lane & 1) ?
251 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
252 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
253 u8 l = dp_link_status(link_status, i);
254
255 return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
256}
257
258static u8 dp_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
259 int lane)
260{
261 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
262 int s = ((lane & 1) ?
263 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
264 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
265 u8 l = dp_link_status(link_status, i);
266
267 return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
268}
269
270/* XXX fix me -- chip specific */
271#define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
272static u8 dp_pre_emphasis_max(u8 voltage_swing)
273{
274 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
275 case DP_TRAIN_VOLTAGE_SWING_400:
276 return DP_TRAIN_PRE_EMPHASIS_6;
277 case DP_TRAIN_VOLTAGE_SWING_600:
278 return DP_TRAIN_PRE_EMPHASIS_6;
279 case DP_TRAIN_VOLTAGE_SWING_800:
280 return DP_TRAIN_PRE_EMPHASIS_3_5;
281 case DP_TRAIN_VOLTAGE_SWING_1200:
282 default:
283 return DP_TRAIN_PRE_EMPHASIS_0;
284 }
285}
286
287static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
288 int lane_count,
289 u8 train_set[4])
290{
291 u8 v = 0;
292 u8 p = 0;
293 int lane;
294
295 for (lane = 0; lane < lane_count; lane++) {
296 u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
297 u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);
298
299 DRM_DEBUG("requested signal parameters: lane %d voltage %s pre_emph %s\n",
300 lane,
301 voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
302 pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
303
304 if (this_v > v)
305 v = this_v;
306 if (this_p > p)
307 p = this_p;
308 }
309
310 if (v >= DP_VOLTAGE_MAX)
311 v = DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
312
313 if (p >= dp_pre_emphasis_max(v))
314 p = dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
315
316 DRM_DEBUG("using signal parameters: voltage %s pre_emph %s\n",
317 voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
318 pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
319
320 for (lane = 0; lane < 4; lane++)
321 train_set[lane] = v | p;
322}
323
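/* dp_get_adjust_train() above follows the common DP training policy:
 * take the largest voltage swing and pre-emphasis requested by any lane
 * in the ADJUST_REQUEST registers, clamp them to the source's limits
 * (setting MAX_SWING_REACHED / MAX_PRE_EMPHASIS_REACHED when the clamp
 * kicks in), and program the same value on all four lanes.
 */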
324union aux_channel_transaction {
325 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
326 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
327};
328
329/* radeon aux chan functions */
330bool radeon_process_aux_ch(struct radeon_i2c_chan *chan, u8 *req_bytes,
331 int num_bytes, u8 *read_byte,
332 u8 read_buf_len, u8 delay)
333{
334 struct drm_device *dev = chan->dev;
335 struct radeon_device *rdev = dev->dev_private;
336 union aux_channel_transaction args;
337 int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
338 unsigned char *base;
339 int retry_count = 0;
340
341 memset(&args, 0, sizeof(args));
342
343 base = (unsigned char *)rdev->mode_info.atom_context->scratch;
344
345retry:
346 memcpy(base, req_bytes, num_bytes);
347
348 args.v1.lpAuxRequest = 0;
349 args.v1.lpDataOut = 16;
350 args.v1.ucDataOutLen = 0;
351 args.v1.ucChannelID = chan->rec.i2c_id;
352 args.v1.ucDelay = delay / 10;
353 if (ASIC_IS_DCE4(rdev))
354 args.v2.ucHPD_ID = chan->rec.hpd_id;
355
356 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
357
358 if (args.v1.ucReplyStatus && !args.v1.ucDataOutLen) {
359 if (args.v1.ucReplyStatus == 0x20 && retry_count++ < 10)
360 goto retry;
361 DRM_DEBUG("failed to get auxch %02x%02x %02x %02x 0x%02x %02x after %d retries\n",
362 req_bytes[1], req_bytes[0], req_bytes[2], req_bytes[3],
363 chan->rec.i2c_id, args.v1.ucReplyStatus, retry_count);
364 return false;
365 }
366
367 if (args.v1.ucDataOutLen && read_byte && read_buf_len) {
368 if (read_buf_len < args.v1.ucDataOutLen) {
 369			DRM_ERROR("Buffer too small for return answer %d %d\n",
370 read_buf_len, args.v1.ucDataOutLen);
371 return false;
372 }
373 {
374 int len = min(read_buf_len, args.v1.ucDataOutLen);
375 memcpy(read_byte, base + 16, len);
376 }
377 }
378 return true;
379}
380
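/* radeon_process_aux_ch() above drives one AUX transaction through the
 * ProcessAuxChannelTransaction ATOM table: the raw request bytes are
 * copied into the BIOS scratch area, any reply data is read back from
 * scratch offset 16, and a reply status of 0x20 (likely an AUX defer)
 * is retried up to 10 times.  The native read/write helpers below only
 * build the 4-byte AUX header -- address low, address high, the AUX
 * command in the high nibble, and a final byte packing the message
 * length with (payload size - 1) -- in front of the payload before
 * handing it to that routine.
 */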
381bool radeon_dp_aux_native_write(struct radeon_connector *radeon_connector, uint16_t address,
382 uint8_t send_bytes, uint8_t *send)
383{
384 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
385 u8 msg[20];
386 u8 msg_len, dp_msg_len;
387 bool ret;
388
389 dp_msg_len = 4;
390 msg[0] = address;
391 msg[1] = address >> 8;
392 msg[2] = AUX_NATIVE_WRITE << 4;
393 dp_msg_len += send_bytes;
394 msg[3] = (dp_msg_len << 4) | (send_bytes - 1);
395
396 if (send_bytes > 16)
397 return false;
398
399 memcpy(&msg[4], send, send_bytes);
400 msg_len = 4 + send_bytes;
401 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, msg, msg_len, NULL, 0, 0);
402 return ret;
403}
404
405bool radeon_dp_aux_native_read(struct radeon_connector *radeon_connector, uint16_t address,
406 uint8_t delay, uint8_t expected_bytes,
407 uint8_t *read_p)
408{
409 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
410 u8 msg[20];
411 u8 msg_len, dp_msg_len;
412 bool ret = false;
413 msg_len = 4;
414 dp_msg_len = 4;
415 msg[0] = address;
416 msg[1] = address >> 8;
417 msg[2] = AUX_NATIVE_READ << 4;
418 msg[3] = (dp_msg_len) << 4;
419 msg[3] |= expected_bytes - 1;
420
421 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, msg, msg_len, read_p, expected_bytes, delay);
422 return ret;
423}
424
425/* radeon dp functions */
426static u8 radeon_dp_encoder_service(struct radeon_device *rdev, int action, int dp_clock,
427 uint8_t ucconfig, uint8_t lane_num)
428{
429 DP_ENCODER_SERVICE_PARAMETERS args;
430 int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
431
432 memset(&args, 0, sizeof(args));
433 args.ucLinkClock = dp_clock / 10;
434 args.ucConfig = ucconfig;
435 args.ucAction = action;
436 args.ucLaneNum = lane_num;
437 args.ucStatus = 0;
438
439 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
440 return args.ucStatus;
441}
442
443u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
444{
445 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
446 struct drm_device *dev = radeon_connector->base.dev;
447 struct radeon_device *rdev = dev->dev_private;
448
449 return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
450 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
451}
452
453bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
454{
455 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
456 u8 msg[25];
457 int ret;
458
459 ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, 0, 8, msg);
460 if (ret) {
461 memcpy(dig_connector->dpcd, msg, 8);
462 {
463 int i;
464 DRM_DEBUG("DPCD: ");
465 for (i = 0; i < 8; i++)
466 DRM_DEBUG("%02x ", msg[i]);
467 DRM_DEBUG("\n");
468 }
469 return true;
470 }
471 dig_connector->dpcd[0] = 0;
472 return false;
473}
474
475void radeon_dp_set_link_config(struct drm_connector *connector,
476 struct drm_display_mode *mode)
477{
478 struct radeon_connector *radeon_connector;
479 struct radeon_connector_atom_dig *dig_connector;
480
481 if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) &&
482 (connector->connector_type != DRM_MODE_CONNECTOR_eDP))
483 return;
484
485 radeon_connector = to_radeon_connector(connector);
486 if (!radeon_connector->con_priv)
487 return;
488 dig_connector = radeon_connector->con_priv;
489
490 dig_connector->dp_clock =
491 dp_link_clock_for_mode_clock(dig_connector->dpcd, mode->clock);
492 dig_connector->dp_lane_count =
493 dp_lanes_for_mode_clock(dig_connector->dpcd, mode->clock);
494}
495
496int radeon_dp_mode_valid_helper(struct radeon_connector *radeon_connector,
497 struct drm_display_mode *mode)
498{
499 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
500
501 return dp_mode_valid(dig_connector->dpcd, mode->clock);
502}
503
504static bool atom_dp_get_link_status(struct radeon_connector *radeon_connector,
505 u8 link_status[DP_LINK_STATUS_SIZE])
506{
507 int ret;
508 ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS, 100,
509 DP_LINK_STATUS_SIZE, link_status);
510 if (!ret) {
511 DRM_ERROR("displayport link status failed\n");
512 return false;
513 }
514
515 DRM_DEBUG("link status %02x %02x %02x %02x %02x %02x\n",
516 link_status[0], link_status[1], link_status[2],
517 link_status[3], link_status[4], link_status[5]);
518 return true;
519}
520
521bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
522{
523 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
524 u8 link_status[DP_LINK_STATUS_SIZE];
525
526 if (!atom_dp_get_link_status(radeon_connector, link_status))
527 return false;
528 if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count))
529 return false;
530 return true;
531}
532
533static void dp_set_power(struct radeon_connector *radeon_connector, u8 power_state)
534{
535 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
536
537 if (dig_connector->dpcd[0] >= 0x11) {
538 radeon_dp_aux_native_write(radeon_connector, DP_SET_POWER, 1,
539 &power_state);
540 }
541}
542
543static void dp_set_downspread(struct radeon_connector *radeon_connector, u8 downspread)
544{
545 radeon_dp_aux_native_write(radeon_connector, DP_DOWNSPREAD_CTRL, 1,
546 &downspread);
547}
548
549static void dp_set_link_bw_lanes(struct radeon_connector *radeon_connector,
550 u8 link_configuration[DP_LINK_CONFIGURATION_SIZE])
551{
552 radeon_dp_aux_native_write(radeon_connector, DP_LINK_BW_SET, 2,
553 link_configuration);
554}
555
556static void dp_update_dpvs_emph(struct radeon_connector *radeon_connector,
557 struct drm_encoder *encoder,
558 u8 train_set[4])
559{
560 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
561 int i;
562
563 for (i = 0; i < dig_connector->dp_lane_count; i++)
564 atombios_dig_transmitter_setup(encoder,
565 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
566 i, train_set[i]);
567
568 radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_LANE0_SET,
569 dig_connector->dp_lane_count, train_set);
570}
571
572static void dp_set_training(struct radeon_connector *radeon_connector,
573 u8 training)
574{
575 radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_PATTERN_SET,
576 1, &training);
577}
578
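/* dp_link_train() below runs the standard DP full-training sequence
 * from the source side: power up the sink, program link rate and lane
 * count, loop on training pattern 1 until every lane reports CR_DONE
 * (raising swing/pre-emphasis as the sink requests, giving up after 5
 * tries at the same voltage or once max swing is reached), then switch
 * to training pattern 2 and loop until channel equalization and
 * inter-lane alignment succeed, and finally clear the training pattern
 * on both the sink and the source.
 */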
579void dp_link_train(struct drm_encoder *encoder,
580 struct drm_connector *connector)
581{
582 struct drm_device *dev = encoder->dev;
583 struct radeon_device *rdev = dev->dev_private;
584 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
585 struct radeon_encoder_atom_dig *dig;
586 struct radeon_connector *radeon_connector;
587 struct radeon_connector_atom_dig *dig_connector;
588 int enc_id = 0;
589 bool clock_recovery, channel_eq;
590 u8 link_status[DP_LINK_STATUS_SIZE];
591 u8 link_configuration[DP_LINK_CONFIGURATION_SIZE];
592 u8 tries, voltage;
593 u8 train_set[4];
594 int i;
595
596 if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) &&
597 (connector->connector_type != DRM_MODE_CONNECTOR_eDP))
598 return;
599
600 if (!radeon_encoder->enc_priv)
601 return;
602 dig = radeon_encoder->enc_priv;
603
604 radeon_connector = to_radeon_connector(connector);
605 if (!radeon_connector->con_priv)
606 return;
607 dig_connector = radeon_connector->con_priv;
608
609 if (dig->dig_encoder)
610 enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
611 else
612 enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
613 if (dig_connector->linkb)
614 enc_id |= ATOM_DP_CONFIG_LINK_B;
615 else
616 enc_id |= ATOM_DP_CONFIG_LINK_A;
617
618 memset(link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
619 if (dig_connector->dp_clock == 270000)
620 link_configuration[0] = DP_LINK_BW_2_7;
621 else
622 link_configuration[0] = DP_LINK_BW_1_62;
623 link_configuration[1] = dig_connector->dp_lane_count;
624 if (dig_connector->dpcd[0] >= 0x11)
625 link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
626
627 /* power up the sink */
628 dp_set_power(radeon_connector, DP_SET_POWER_D0);
629 /* disable the training pattern on the sink */
630 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE);
631 /* set link bw and lanes on the sink */
632 dp_set_link_bw_lanes(radeon_connector, link_configuration);
633 /* disable downspread on the sink */
634 dp_set_downspread(radeon_connector, 0);
635 if (ASIC_IS_DCE4(rdev)) {
636 /* start training on the source */
637 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_START);
638 /* set training pattern 1 on the source */
639 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1);
640 } else {
641 /* start training on the source */
642 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_START,
643 dig_connector->dp_clock, enc_id, 0);
644 /* set training pattern 1 on the source */
645 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
646 dig_connector->dp_clock, enc_id, 0);
647 }
648
649 /* set initial vs/emph */
650 memset(train_set, 0, 4);
651 udelay(400);
652 /* set training pattern 1 on the sink */
653 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_1);
654
655 dp_update_dpvs_emph(radeon_connector, encoder, train_set);
656
657 /* clock recovery loop */
658 clock_recovery = false;
659 tries = 0;
660 voltage = 0xff;
661 for (;;) {
662 udelay(100);
663 if (!atom_dp_get_link_status(radeon_connector, link_status))
664 break;
665
666 if (dp_clock_recovery_ok(link_status, dig_connector->dp_lane_count)) {
667 clock_recovery = true;
668 break;
669 }
670
671 for (i = 0; i < dig_connector->dp_lane_count; i++) {
672 if ((train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
673 break;
674 }
675 if (i == dig_connector->dp_lane_count) {
676 DRM_ERROR("clock recovery reached max voltage\n");
677 break;
678 }
679
680 if ((train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
681 ++tries;
682 if (tries == 5) {
683 DRM_ERROR("clock recovery tried 5 times\n");
684 break;
685 }
686 } else
687 tries = 0;
688
689 voltage = train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
690
691 /* Compute new train_set as requested by sink */
692 dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set);
693 dp_update_dpvs_emph(radeon_connector, encoder, train_set);
694 }
695 if (!clock_recovery)
696 DRM_ERROR("clock recovery failed\n");
697 else
698 DRM_DEBUG("clock recovery at voltage %d pre-emphasis %d\n",
699 train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
700 (train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
701 DP_TRAIN_PRE_EMPHASIS_SHIFT);
702
703
704 /* set training pattern 2 on the sink */
705 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_2);
706 /* set training pattern 2 on the source */
707 if (ASIC_IS_DCE4(rdev))
708 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2);
709 else
710 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
711 dig_connector->dp_clock, enc_id, 1);
712
713 /* channel equalization loop */
714 tries = 0;
715 channel_eq = false;
716 for (;;) {
717 udelay(400);
718 if (!atom_dp_get_link_status(radeon_connector, link_status))
719 break;
720
721 if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count)) {
722 channel_eq = true;
723 break;
724 }
725
726 /* Try 5 times */
727 if (tries > 5) {
728 DRM_ERROR("channel eq failed: 5 tries\n");
729 break;
730 }
731
732 /* Compute new train_set as requested by sink */
733 dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set);
734 dp_update_dpvs_emph(radeon_connector, encoder, train_set);
735
736 tries++;
737 }
738
739 if (!channel_eq)
740 DRM_ERROR("channel eq failed\n");
741 else
742 DRM_DEBUG("channel eq at voltage %d pre-emphasis %d\n",
743 train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
744 (train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
745 >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
746
747 /* disable the training pattern on the sink */
748 dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE);
749
750 /* disable the training pattern on the source */
751 if (ASIC_IS_DCE4(rdev))
752 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE);
753 else
754 radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
755 dig_connector->dp_clock, enc_id, 0);
756}
757
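/* radeon_dp_i2c_aux_ch() implements I2C-over-AUX for the DP i2c
 * algorithm: each call moves at most one data byte, the command nibble
 * selects AUX_I2C_READ or AUX_I2C_WRITE, and the MOT (middle of
 * transaction) bit stays set until the caller signals an I2C STOP, so a
 * multi-byte EDID read is carried out as a chain of these single-byte
 * transactions.  On success it returns the number of reply bytes,
 * otherwise -EREMOTEIO.
 */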
758int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
759 uint8_t write_byte, uint8_t *read_byte)
760{
761 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
762 struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
763 int ret = 0;
764 uint16_t address = algo_data->address;
765 uint8_t msg[5];
766 uint8_t reply[2];
767 int msg_len, dp_msg_len;
768 int reply_bytes;
769
770 /* Set up the command byte */
771 if (mode & MODE_I2C_READ)
772 msg[2] = AUX_I2C_READ << 4;
773 else
774 msg[2] = AUX_I2C_WRITE << 4;
775
776 if (!(mode & MODE_I2C_STOP))
777 msg[2] |= AUX_I2C_MOT << 4;
778
779 msg[0] = address;
780 msg[1] = address >> 8;
781
782 reply_bytes = 1;
783
784 msg_len = 4;
785 dp_msg_len = 3;
786 switch (mode) {
787 case MODE_I2C_WRITE:
788 msg[4] = write_byte;
789 msg_len++;
790 dp_msg_len += 2;
791 break;
792 case MODE_I2C_READ:
793 dp_msg_len += 1;
794 break;
795 default:
796 break;
797 }
798
799 msg[3] = (dp_msg_len) << 4;
800 ret = radeon_process_aux_ch(auxch, msg, msg_len, reply, reply_bytes, 0);
801
802 if (ret) {
803 if (read_byte)
804 *read_byte = reply[0];
805 return reply_bytes;
806 }
807 return -EREMOTEIO;
808}
809
diff --git a/drivers/gpu/drm/radeon/avivod.h b/drivers/gpu/drm/radeon/avivod.h
index d4e6e6e4a938..3c391e7e9fd4 100644
--- a/drivers/gpu/drm/radeon/avivod.h
+++ b/drivers/gpu/drm/radeon/avivod.h
@@ -30,11 +30,13 @@
30 30
31#define D1CRTC_CONTROL 0x6080 31#define D1CRTC_CONTROL 0x6080
32#define CRTC_EN (1 << 0) 32#define CRTC_EN (1 << 0)
33#define D1CRTC_STATUS 0x609c
33#define D1CRTC_UPDATE_LOCK 0x60E8 34#define D1CRTC_UPDATE_LOCK 0x60E8
34#define D1GRPH_PRIMARY_SURFACE_ADDRESS 0x6110 35#define D1GRPH_PRIMARY_SURFACE_ADDRESS 0x6110
35#define D1GRPH_SECONDARY_SURFACE_ADDRESS 0x6118 36#define D1GRPH_SECONDARY_SURFACE_ADDRESS 0x6118
36 37
37#define D2CRTC_CONTROL 0x6880 38#define D2CRTC_CONTROL 0x6880
39#define D2CRTC_STATUS 0x689c
38#define D2CRTC_UPDATE_LOCK 0x68E8 40#define D2CRTC_UPDATE_LOCK 0x68E8
39#define D2GRPH_PRIMARY_SURFACE_ADDRESS 0x6910 41#define D2GRPH_PRIMARY_SURFACE_ADDRESS 0x6910
40#define D2GRPH_SECONDARY_SURFACE_ADDRESS 0x6918 42#define D2GRPH_SECONDARY_SURFACE_ADDRESS 0x6918
diff --git a/drivers/gpu/drm/radeon/evergreen.c b/drivers/gpu/drm/radeon/evergreen.c
new file mode 100644
index 000000000000..e8f447e20507
--- /dev/null
+++ b/drivers/gpu/drm/radeon/evergreen.c
@@ -0,0 +1,765 @@
1/*
2 * Copyright 2010 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24#include <linux/firmware.h>
25#include <linux/platform_device.h>
26#include <linux/slab.h>
27#include "drmP.h"
28#include "radeon.h"
29#include "radeon_asic.h"
30#include "radeon_drm.h"
31#include "rv770d.h"
32#include "atom.h"
33#include "avivod.h"
34#include "evergreen_reg.h"
35
36static void evergreen_gpu_init(struct radeon_device *rdev);
37void evergreen_fini(struct radeon_device *rdev);
38
39bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
40{
41 bool connected = false;
42 /* XXX */
43 return connected;
44}
45
46void evergreen_hpd_set_polarity(struct radeon_device *rdev,
47 enum radeon_hpd_id hpd)
48{
49 /* XXX */
50}
51
52void evergreen_hpd_init(struct radeon_device *rdev)
53{
54 /* XXX */
55}
56
57
58void evergreen_bandwidth_update(struct radeon_device *rdev)
59{
60 /* XXX */
61}
62
63void evergreen_hpd_fini(struct radeon_device *rdev)
64{
65 /* XXX */
66}
67
68static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
69{
70 unsigned i;
71 u32 tmp;
72
73 for (i = 0; i < rdev->usec_timeout; i++) {
74 /* read MC_STATUS */
75 tmp = RREG32(SRBM_STATUS) & 0x1F00;
76 if (!tmp)
77 return 0;
78 udelay(1);
79 }
80 return -1;
81}
82
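/* evergreen_mc_wait_for_idle() above polls SRBM_STATUS once per
 * microsecond for up to rdev->usec_timeout iterations; the 0x1F00 mask
 * appears to cover the memory-controller busy bits.  It returns 0 when
 * the MC goes idle and -1 on timeout.
 */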
83/*
84 * GART
85 */
86int evergreen_pcie_gart_enable(struct radeon_device *rdev)
87{
88 u32 tmp;
89 int r, i;
90
91 if (rdev->gart.table.vram.robj == NULL) {
92 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
93 return -EINVAL;
94 }
95 r = radeon_gart_table_vram_pin(rdev);
96 if (r)
97 return r;
98 radeon_gart_restore(rdev);
99 /* Setup L2 cache */
100 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
101 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
102 EFFECTIVE_L2_QUEUE_SIZE(7));
103 WREG32(VM_L2_CNTL2, 0);
104 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
105 /* Setup TLB control */
106 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
107 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
108 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
109 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
110 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
111 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
112 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
113 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
114 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
115 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
116 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
117 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
118 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
119 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
120 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
121 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
122 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
123 (u32)(rdev->dummy_page.addr >> 12));
124 for (i = 1; i < 7; i++)
125 WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
126
127 r600_pcie_gart_tlb_flush(rdev);
128 rdev->gart.ready = true;
129 return 0;
130}
131
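/* The GART bring-up above mirrors the r600/r700 flow: pin the page
 * table that lives in VRAM, restore its entries, configure the VM L2
 * cache and L1 TLBs, point VM context 0 at the GTT range and the page
 * table base, route faults to the dummy page, and flush the TLB before
 * marking the GART ready.
 */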
132void evergreen_pcie_gart_disable(struct radeon_device *rdev)
133{
134 u32 tmp;
135 int i, r;
136
137 /* Disable all tables */
138 for (i = 0; i < 7; i++)
139 WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
140
141 /* Setup L2 cache */
142 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
143 EFFECTIVE_L2_QUEUE_SIZE(7));
144 WREG32(VM_L2_CNTL2, 0);
145 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
146 /* Setup TLB control */
147 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
148 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
149 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
150 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
151 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
152 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
153 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
154 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
155 if (rdev->gart.table.vram.robj) {
156 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
157 if (likely(r == 0)) {
158 radeon_bo_kunmap(rdev->gart.table.vram.robj);
159 radeon_bo_unpin(rdev->gart.table.vram.robj);
160 radeon_bo_unreserve(rdev->gart.table.vram.robj);
161 }
162 }
163}
164
165void evergreen_pcie_gart_fini(struct radeon_device *rdev)
166{
167 evergreen_pcie_gart_disable(rdev);
168 radeon_gart_table_vram_free(rdev);
169 radeon_gart_fini(rdev);
170}
171
172
173void evergreen_agp_enable(struct radeon_device *rdev)
174{
175 u32 tmp;
176 int i;
177
178 /* Setup L2 cache */
179 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
180 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
181 EFFECTIVE_L2_QUEUE_SIZE(7));
182 WREG32(VM_L2_CNTL2, 0);
183 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
184 /* Setup TLB control */
185 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
186 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
187 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
188 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
189 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
190 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
191 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
192 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
193 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
194 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
195 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
196 for (i = 0; i < 7; i++)
197 WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
198}
199
200static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
201{
202 save->vga_control[0] = RREG32(D1VGA_CONTROL);
203 save->vga_control[1] = RREG32(D2VGA_CONTROL);
204 save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
205 save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
206 save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
207 save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
208 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
209 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
210 save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
211 save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
212 save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
213 save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
214 save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
215 save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
216
217 /* Stop all video */
218 WREG32(VGA_RENDER_CONTROL, 0);
219 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
220 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
221 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
222 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
223 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
224 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
225 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
226 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
227 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
228 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
229 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
230 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
231 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
232 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
233 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
234 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
235 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
236 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
237
238 WREG32(D1VGA_CONTROL, 0);
239 WREG32(D2VGA_CONTROL, 0);
240 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
241 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
242 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
243 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
244}
245
246static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
247{
248 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
249 upper_32_bits(rdev->mc.vram_start));
250 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
251 upper_32_bits(rdev->mc.vram_start));
252 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
253 (u32)rdev->mc.vram_start);
254 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
255 (u32)rdev->mc.vram_start);
256
257 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
258 upper_32_bits(rdev->mc.vram_start));
259 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
260 upper_32_bits(rdev->mc.vram_start));
261 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
262 (u32)rdev->mc.vram_start);
263 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
264 (u32)rdev->mc.vram_start);
265
266 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
267 upper_32_bits(rdev->mc.vram_start));
268 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
269 upper_32_bits(rdev->mc.vram_start));
270 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
271 (u32)rdev->mc.vram_start);
272 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
273 (u32)rdev->mc.vram_start);
274
275 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
276 upper_32_bits(rdev->mc.vram_start));
277 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
278 upper_32_bits(rdev->mc.vram_start));
279 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
280 (u32)rdev->mc.vram_start);
281 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
282 (u32)rdev->mc.vram_start);
283
284 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
285 upper_32_bits(rdev->mc.vram_start));
286 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
287 upper_32_bits(rdev->mc.vram_start));
288 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
289 (u32)rdev->mc.vram_start);
290 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
291 (u32)rdev->mc.vram_start);
292
293 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
294 upper_32_bits(rdev->mc.vram_start));
295 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
296 upper_32_bits(rdev->mc.vram_start));
297 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
298 (u32)rdev->mc.vram_start);
299 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
300 (u32)rdev->mc.vram_start);
301
302 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
303 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
304 /* Unlock host access */
305 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
306 mdelay(1);
307 /* Restore video state */
308 WREG32(D1VGA_CONTROL, save->vga_control[0]);
309 WREG32(D2VGA_CONTROL, save->vga_control[1]);
310 WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
311 WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
312 WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
313 WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
314 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
315 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
316 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
317 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
318 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
319 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
320 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
321 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
322 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
323 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
324 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
325 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
326 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
327 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
328 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
329 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
330 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
331 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
332 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
333}
334
335static void evergreen_mc_program(struct radeon_device *rdev)
336{
337 struct evergreen_mc_save save;
338 u32 tmp;
339 int i, j;
340
341 /* Initialize HDP */
342 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
343 WREG32((0x2c14 + j), 0x00000000);
344 WREG32((0x2c18 + j), 0x00000000);
345 WREG32((0x2c1c + j), 0x00000000);
346 WREG32((0x2c20 + j), 0x00000000);
347 WREG32((0x2c24 + j), 0x00000000);
348 }
349 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
350
351 evergreen_mc_stop(rdev, &save);
352 if (evergreen_mc_wait_for_idle(rdev)) {
 353		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
354 }
355 /* Lockout access through VGA aperture*/
356 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
357 /* Update configuration */
358 if (rdev->flags & RADEON_IS_AGP) {
359 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
360 /* VRAM before AGP */
361 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
362 rdev->mc.vram_start >> 12);
363 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
364 rdev->mc.gtt_end >> 12);
365 } else {
366 /* VRAM after AGP */
367 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
368 rdev->mc.gtt_start >> 12);
369 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
370 rdev->mc.vram_end >> 12);
371 }
372 } else {
373 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
374 rdev->mc.vram_start >> 12);
375 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
376 rdev->mc.vram_end >> 12);
377 }
378 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
379 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
380 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
381 WREG32(MC_VM_FB_LOCATION, tmp);
382 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
383 WREG32(HDP_NONSURFACE_INFO, (2 << 7));
384 WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
385 if (rdev->flags & RADEON_IS_AGP) {
386 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
387 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
388 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
389 } else {
390 WREG32(MC_VM_AGP_BASE, 0);
391 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
392 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
393 }
394 if (evergreen_mc_wait_for_idle(rdev)) {
 395		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
396 }
397 evergreen_mc_resume(rdev, &save);
398 /* we need to own VRAM, so turn off the VGA renderer here
399 * to stop it overwriting our objects */
400 rv515_vga_render_disable(rdev);
401}
402
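/* The MC programming above follows the usual AVIVO-style sequence:
 * blank the CRTCs and VGA so nothing scans out of VRAM, wait for the
 * memory controller to go idle, rewrite the system aperture, FB
 * location and AGP apertures, wait for idle again, and only then
 * restore the saved display state.  Reordering these steps risks the
 * display engine fetching through a half-programmed aperture.
 */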
403#if 0
404/*
405 * CP.
406 */
407static void evergreen_cp_stop(struct radeon_device *rdev)
408{
409 /* XXX */
410}
411
412
413static int evergreen_cp_load_microcode(struct radeon_device *rdev)
414{
415 /* XXX */
416
417 return 0;
418}
419
420
421/*
422 * Core functions
423 */
424static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
425 u32 num_backends,
426 u32 backend_disable_mask)
427{
428 u32 backend_map = 0;
429
430 return backend_map;
431}
432#endif
433
434static void evergreen_gpu_init(struct radeon_device *rdev)
435{
436 /* XXX */
437}
438
439int evergreen_mc_init(struct radeon_device *rdev)
440{
441 u32 tmp;
442 int chansize, numchan;
443
 444	/* Get VRAM information */
445 rdev->mc.vram_is_ddr = true;
446 tmp = RREG32(MC_ARB_RAMCFG);
447 if (tmp & CHANSIZE_OVERRIDE) {
448 chansize = 16;
449 } else if (tmp & CHANSIZE_MASK) {
450 chansize = 64;
451 } else {
452 chansize = 32;
453 }
454 tmp = RREG32(MC_SHARED_CHMAP);
455 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
456 case 0:
457 default:
458 numchan = 1;
459 break;
460 case 1:
461 numchan = 2;
462 break;
463 case 2:
464 numchan = 4;
465 break;
466 case 3:
467 numchan = 8;
468 break;
469 }
470 rdev->mc.vram_width = numchan * chansize;
 471	/* Could the aperture size be reported as 0? */
472 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
473 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
474 /* Setup GPU memory space */
475 /* size in MB on evergreen */
476 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
477 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
478 rdev->mc.visible_vram_size = rdev->mc.aper_size;
479 /* FIXME remove this once we support unmappable VRAM */
480 if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
481 rdev->mc.mc_vram_size = rdev->mc.aper_size;
482 rdev->mc.real_vram_size = rdev->mc.aper_size;
483 }
484 r600_vram_gtt_location(rdev, &rdev->mc);
485 radeon_update_bandwidth_info(rdev);
486
487 return 0;
488}
489
490int evergreen_gpu_reset(struct radeon_device *rdev)
491{
492 /* FIXME: implement for evergreen */
493 return 0;
494}
495
496static int evergreen_startup(struct radeon_device *rdev)
497{
498#if 0
499 int r;
500
501 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
502 r = r600_init_microcode(rdev);
503 if (r) {
504 DRM_ERROR("Failed to load firmware!\n");
505 return r;
506 }
507 }
508#endif
509 evergreen_mc_program(rdev);
510#if 0
511 if (rdev->flags & RADEON_IS_AGP) {
 512		evergreen_agp_enable(rdev);
513 } else {
514 r = evergreen_pcie_gart_enable(rdev);
515 if (r)
516 return r;
517 }
518#endif
519 evergreen_gpu_init(rdev);
520#if 0
521 if (!rdev->r600_blit.shader_obj) {
522 r = r600_blit_init(rdev);
523 if (r) {
524 DRM_ERROR("radeon: failed blitter (%d).\n", r);
525 return r;
526 }
527 }
528
529 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
530 if (unlikely(r != 0))
531 return r;
532 r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
533 &rdev->r600_blit.shader_gpu_addr);
534 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
535 if (r) {
536 DRM_ERROR("failed to pin blit object %d\n", r);
537 return r;
538 }
539
540 /* Enable IRQ */
541 r = r600_irq_init(rdev);
542 if (r) {
543 DRM_ERROR("radeon: IH init failed (%d).\n", r);
544 radeon_irq_kms_fini(rdev);
545 return r;
546 }
547 r600_irq_set(rdev);
548
549 r = radeon_ring_init(rdev, rdev->cp.ring_size);
550 if (r)
551 return r;
552 r = evergreen_cp_load_microcode(rdev);
553 if (r)
554 return r;
555 r = r600_cp_resume(rdev);
556 if (r)
557 return r;
 558	/* write back buffers are not vital so don't worry about failure */
559 r600_wb_enable(rdev);
560#endif
561 return 0;
562}
563
564int evergreen_resume(struct radeon_device *rdev)
565{
566 int r;
567
 568	/* Do not reset the GPU before posting; on rv770 hardware, unlike
 569	 * r500 hardware, posting performs the tasks needed to bring the
 570	 * GPU back into good shape.
 571	 */
572 /* post card */
573 atom_asic_init(rdev->mode_info.atom_context);
574 /* Initialize clocks */
575 r = radeon_clocks_init(rdev);
576 if (r) {
577 return r;
578 }
579
580 r = evergreen_startup(rdev);
581 if (r) {
 582		DRM_ERROR("evergreen startup failed on resume\n");
583 return r;
584 }
585#if 0
586 r = r600_ib_test(rdev);
587 if (r) {
 588		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
589 return r;
590 }
591#endif
592 return r;
593
594}
595
596int evergreen_suspend(struct radeon_device *rdev)
597{
598#if 0
599 int r;
600
601 /* FIXME: we should wait for ring to be empty */
602 r700_cp_stop(rdev);
603 rdev->cp.ready = false;
604 r600_wb_disable(rdev);
605 evergreen_pcie_gart_disable(rdev);
606 /* unpin shaders bo */
607 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
608 if (likely(r == 0)) {
609 radeon_bo_unpin(rdev->r600_blit.shader_obj);
610 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
611 }
612#endif
613 return 0;
614}
615
616static bool evergreen_card_posted(struct radeon_device *rdev)
617{
618 u32 reg;
619
620 /* first check CRTCs */
621 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
622 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
623 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
624 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
625 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
626 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
627 if (reg & EVERGREEN_CRTC_MASTER_EN)
628 return true;
629
630 /* then check MEM_SIZE, in case the crtcs are off */
631 if (RREG32(CONFIG_MEMSIZE))
632 return true;
633
634 return false;
635}
636
 637/* The plan is to move initialization into this function and use
 638 * helper functions so that radeon_device_init does little more
 639 * than call the ASIC-specific functions. This should also
 640 * allow removing a bunch of callback functions
 641 * like vram_info.
 642 */
643int evergreen_init(struct radeon_device *rdev)
644{
645 int r;
646
647 r = radeon_dummy_page_init(rdev);
648 if (r)
649 return r;
 650	/* This doesn't do much */
651 r = radeon_gem_init(rdev);
652 if (r)
653 return r;
654 /* Read BIOS */
655 if (!radeon_get_bios(rdev)) {
656 if (ASIC_IS_AVIVO(rdev))
657 return -EINVAL;
658 }
659 /* Must be an ATOMBIOS */
660 if (!rdev->is_atom_bios) {
661 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
662 return -EINVAL;
663 }
664 r = radeon_atombios_init(rdev);
665 if (r)
666 return r;
667 /* Post card if necessary */
668 if (!evergreen_card_posted(rdev)) {
669 if (!rdev->bios) {
670 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
671 return -EINVAL;
672 }
673 DRM_INFO("GPU not posted. posting now...\n");
674 atom_asic_init(rdev->mode_info.atom_context);
675 }
676 /* Initialize scratch registers */
677 r600_scratch_init(rdev);
678 /* Initialize surface registers */
679 radeon_surface_init(rdev);
680 /* Initialize clocks */
681 radeon_get_clock_info(rdev->ddev);
682 r = radeon_clocks_init(rdev);
683 if (r)
684 return r;
685 /* Initialize power management */
686 radeon_pm_init(rdev);
687 /* Fence driver */
688 r = radeon_fence_driver_init(rdev);
689 if (r)
690 return r;
691 /* initialize AGP */
692 if (rdev->flags & RADEON_IS_AGP) {
693 r = radeon_agp_init(rdev);
694 if (r)
695 radeon_agp_disable(rdev);
696 }
697 /* initialize memory controller */
698 r = evergreen_mc_init(rdev);
699 if (r)
700 return r;
701 /* Memory manager */
702 r = radeon_bo_init(rdev);
703 if (r)
704 return r;
705#if 0
706 r = radeon_irq_kms_init(rdev);
707 if (r)
708 return r;
709
710 rdev->cp.ring_obj = NULL;
711 r600_ring_init(rdev, 1024 * 1024);
712
713 rdev->ih.ring_obj = NULL;
714 r600_ih_ring_init(rdev, 64 * 1024);
715
716 r = r600_pcie_gart_init(rdev);
717 if (r)
718 return r;
719#endif
720 rdev->accel_working = false;
721 r = evergreen_startup(rdev);
722 if (r) {
723 evergreen_suspend(rdev);
724 /*r600_wb_fini(rdev);*/
725 /*radeon_ring_fini(rdev);*/
726 /*evergreen_pcie_gart_fini(rdev);*/
727 rdev->accel_working = false;
728 }
729 if (rdev->accel_working) {
730 r = radeon_ib_pool_init(rdev);
731 if (r) {
732 DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
733 rdev->accel_working = false;
734 }
735 r = r600_ib_test(rdev);
736 if (r) {
737 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
738 rdev->accel_working = false;
739 }
740 }
741 return 0;
742}
743
744void evergreen_fini(struct radeon_device *rdev)
745{
746 radeon_pm_fini(rdev);
747 evergreen_suspend(rdev);
748#if 0
749 r600_blit_fini(rdev);
750 r600_irq_fini(rdev);
751 radeon_irq_kms_fini(rdev);
752 radeon_ring_fini(rdev);
753 r600_wb_fini(rdev);
754 evergreen_pcie_gart_fini(rdev);
755#endif
756 radeon_gem_fini(rdev);
757 radeon_fence_driver_fini(rdev);
758 radeon_clocks_fini(rdev);
759 radeon_agp_fini(rdev);
760 radeon_bo_fini(rdev);
761 radeon_atombios_fini(rdev);
762 kfree(rdev->bios);
763 rdev->bios = NULL;
764 radeon_dummy_page_fini(rdev);
765}
diff --git a/drivers/gpu/drm/radeon/evergreen_reg.h b/drivers/gpu/drm/radeon/evergreen_reg.h
new file mode 100644
index 000000000000..f7c7c9643433
--- /dev/null
+++ b/drivers/gpu/drm/radeon/evergreen_reg.h
@@ -0,0 +1,176 @@
1/*
2 * Copyright 2010 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24#ifndef __EVERGREEN_REG_H__
25#define __EVERGREEN_REG_H__
26
27/* evergreen */
28#define EVERGREEN_VGA_MEMORY_BASE_ADDRESS 0x310
#define EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH 0x324
#define EVERGREEN_D3VGA_CONTROL 0x3e0
#define EVERGREEN_D4VGA_CONTROL 0x3e4
#define EVERGREEN_D5VGA_CONTROL 0x3e8
#define EVERGREEN_D6VGA_CONTROL 0x3ec

#define EVERGREEN_P1PLL_SS_CNTL 0x414
#define EVERGREEN_P2PLL_SS_CNTL 0x454
# define EVERGREEN_PxPLL_SS_EN (1 << 12)
/* GRPH blocks at 0x6800, 0x7400, 0x10000, 0x10c00, 0x11800, 0x12400 */
#define EVERGREEN_GRPH_ENABLE 0x6800
#define EVERGREEN_GRPH_CONTROL 0x6804
# define EVERGREEN_GRPH_DEPTH(x) (((x) & 0x3) << 0)
# define EVERGREEN_GRPH_DEPTH_8BPP 0
# define EVERGREEN_GRPH_DEPTH_16BPP 1
# define EVERGREEN_GRPH_DEPTH_32BPP 2
# define EVERGREEN_GRPH_FORMAT(x) (((x) & 0x7) << 8)
/* 8 BPP */
# define EVERGREEN_GRPH_FORMAT_INDEXED 0
/* 16 BPP */
# define EVERGREEN_GRPH_FORMAT_ARGB1555 0
# define EVERGREEN_GRPH_FORMAT_ARGB565 1
# define EVERGREEN_GRPH_FORMAT_ARGB4444 2
# define EVERGREEN_GRPH_FORMAT_AI88 3
# define EVERGREEN_GRPH_FORMAT_MONO16 4
# define EVERGREEN_GRPH_FORMAT_BGRA5551 5
/* 32 BPP */
# define EVERGREEN_GRPH_FORMAT_ARGB8888 0
# define EVERGREEN_GRPH_FORMAT_ARGB2101010 1
# define EVERGREEN_GRPH_FORMAT_32BPP_DIG 2
# define EVERGREEN_GRPH_FORMAT_8B_ARGB2101010 3
# define EVERGREEN_GRPH_FORMAT_BGRA1010102 4
# define EVERGREEN_GRPH_FORMAT_8B_BGRA1010102 5
# define EVERGREEN_GRPH_FORMAT_RGB111110 6
# define EVERGREEN_GRPH_FORMAT_BGR101111 7
#define EVERGREEN_GRPH_SWAP_CONTROL 0x680c
# define EVERGREEN_GRPH_ENDIAN_SWAP(x) (((x) & 0x3) << 0)
# define EVERGREEN_GRPH_ENDIAN_NONE 0
# define EVERGREEN_GRPH_ENDIAN_8IN16 1
# define EVERGREEN_GRPH_ENDIAN_8IN32 2
# define EVERGREEN_GRPH_ENDIAN_8IN64 3
# define EVERGREEN_GRPH_RED_CROSSBAR(x) (((x) & 0x3) << 4)
# define EVERGREEN_GRPH_RED_SEL_R 0
# define EVERGREEN_GRPH_RED_SEL_G 1
# define EVERGREEN_GRPH_RED_SEL_B 2
# define EVERGREEN_GRPH_RED_SEL_A 3
# define EVERGREEN_GRPH_GREEN_CROSSBAR(x) (((x) & 0x3) << 6)
# define EVERGREEN_GRPH_GREEN_SEL_G 0
# define EVERGREEN_GRPH_GREEN_SEL_B 1
# define EVERGREEN_GRPH_GREEN_SEL_A 2
# define EVERGREEN_GRPH_GREEN_SEL_R 3
# define EVERGREEN_GRPH_BLUE_CROSSBAR(x) (((x) & 0x3) << 8)
# define EVERGREEN_GRPH_BLUE_SEL_B 0
# define EVERGREEN_GRPH_BLUE_SEL_A 1
# define EVERGREEN_GRPH_BLUE_SEL_R 2
# define EVERGREEN_GRPH_BLUE_SEL_G 3
# define EVERGREEN_GRPH_ALPHA_CROSSBAR(x) (((x) & 0x3) << 10)
# define EVERGREEN_GRPH_ALPHA_SEL_A 0
# define EVERGREEN_GRPH_ALPHA_SEL_R 1
# define EVERGREEN_GRPH_ALPHA_SEL_G 2
# define EVERGREEN_GRPH_ALPHA_SEL_B 3
#define EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS 0x6810
#define EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS 0x6814
# define EVERGREEN_GRPH_DFQ_ENABLE (1 << 0)
# define EVERGREEN_GRPH_SURFACE_ADDRESS_MASK 0xffffff00
#define EVERGREEN_GRPH_PITCH 0x6818
#define EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH 0x681c
#define EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH 0x6820
#define EVERGREEN_GRPH_SURFACE_OFFSET_X 0x6824
#define EVERGREEN_GRPH_SURFACE_OFFSET_Y 0x6828
#define EVERGREEN_GRPH_X_START 0x682c
#define EVERGREEN_GRPH_Y_START 0x6830
#define EVERGREEN_GRPH_X_END 0x6834
#define EVERGREEN_GRPH_Y_END 0x6838

/* CUR blocks at 0x6998, 0x7598, 0x10198, 0x10d98, 0x11998, 0x12598 */
#define EVERGREEN_CUR_CONTROL 0x6998
# define EVERGREEN_CURSOR_EN (1 << 0)
# define EVERGREEN_CURSOR_MODE(x) (((x) & 0x3) << 8)
# define EVERGREEN_CURSOR_MONO 0
# define EVERGREEN_CURSOR_24_1 1
# define EVERGREEN_CURSOR_24_8_PRE_MULT 2
# define EVERGREEN_CURSOR_24_8_UNPRE_MULT 3
# define EVERGREEN_CURSOR_2X_MAGNIFY (1 << 16)
# define EVERGREEN_CURSOR_FORCE_MC_ON (1 << 20)
# define EVERGREEN_CURSOR_URGENT_CONTROL(x) (((x) & 0x7) << 24)
# define EVERGREEN_CURSOR_URGENT_ALWAYS 0
# define EVERGREEN_CURSOR_URGENT_1_8 1
# define EVERGREEN_CURSOR_URGENT_1_4 2
# define EVERGREEN_CURSOR_URGENT_3_8 3
# define EVERGREEN_CURSOR_URGENT_1_2 4
#define EVERGREEN_CUR_SURFACE_ADDRESS 0x699c
# define EVERGREEN_CUR_SURFACE_ADDRESS_MASK 0xfffff000
#define EVERGREEN_CUR_SIZE 0x69a0
#define EVERGREEN_CUR_SURFACE_ADDRESS_HIGH 0x69a4
#define EVERGREEN_CUR_POSITION 0x69a8
#define EVERGREEN_CUR_HOT_SPOT 0x69ac
#define EVERGREEN_CUR_COLOR1 0x69b0
#define EVERGREEN_CUR_COLOR2 0x69b4
#define EVERGREEN_CUR_UPDATE 0x69b8
# define EVERGREEN_CURSOR_UPDATE_PENDING (1 << 0)
# define EVERGREEN_CURSOR_UPDATE_TAKEN (1 << 1)
# define EVERGREEN_CURSOR_UPDATE_LOCK (1 << 16)
# define EVERGREEN_CURSOR_DISABLE_MULTIPLE_UPDATE (1 << 24)

/* LUT blocks at 0x69e0, 0x75e0, 0x101e0, 0x10de0, 0x119e0, 0x125e0 */
#define EVERGREEN_DC_LUT_RW_MODE 0x69e0
#define EVERGREEN_DC_LUT_RW_INDEX 0x69e4
#define EVERGREEN_DC_LUT_SEQ_COLOR 0x69e8
#define EVERGREEN_DC_LUT_PWL_DATA 0x69ec
#define EVERGREEN_DC_LUT_30_COLOR 0x69f0
#define EVERGREEN_DC_LUT_VGA_ACCESS_ENABLE 0x69f4
#define EVERGREEN_DC_LUT_WRITE_EN_MASK 0x69f8
#define EVERGREEN_DC_LUT_AUTOFILL 0x69fc
#define EVERGREEN_DC_LUT_CONTROL 0x6a00
#define EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE 0x6a04
#define EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN 0x6a08
#define EVERGREEN_DC_LUT_BLACK_OFFSET_RED 0x6a0c
#define EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE 0x6a10
#define EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN 0x6a14
#define EVERGREEN_DC_LUT_WHITE_OFFSET_RED 0x6a18

#define EVERGREEN_DATA_FORMAT 0x6b00
# define EVERGREEN_INTERLEAVE_EN (1 << 0)
#define EVERGREEN_DESKTOP_HEIGHT 0x6b04

#define EVERGREEN_VIEWPORT_START 0x6d70
#define EVERGREEN_VIEWPORT_SIZE 0x6d74

/* display controller offsets used for crtc/cur/lut/grph/viewport/etc. */
#define EVERGREEN_CRTC0_REGISTER_OFFSET (0x6df0 - 0x6df0)
#define EVERGREEN_CRTC1_REGISTER_OFFSET (0x79f0 - 0x6df0)
#define EVERGREEN_CRTC2_REGISTER_OFFSET (0x105f0 - 0x6df0)
#define EVERGREEN_CRTC3_REGISTER_OFFSET (0x111f0 - 0x6df0)
#define EVERGREEN_CRTC4_REGISTER_OFFSET (0x11df0 - 0x6df0)
#define EVERGREEN_CRTC5_REGISTER_OFFSET (0x129f0 - 0x6df0)

/* CRTC blocks at 0x6df0, 0x79f0, 0x105f0, 0x111f0, 0x11df0, 0x129f0 */
#define EVERGREEN_CRTC_CONTROL 0x6e70
# define EVERGREEN_CRTC_MASTER_EN (1 << 0)
#define EVERGREEN_CRTC_UPDATE_LOCK 0x6ed4

#define EVERGREEN_DC_GPIO_HPD_MASK 0x64b0
#define EVERGREEN_DC_GPIO_HPD_A 0x64b4
#define EVERGREEN_DC_GPIO_HPD_EN 0x64b8
#define EVERGREEN_DC_GPIO_HPD_Y 0x64bc

#endif
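The per-CRTC offsets above are relative to the CRTC0 display block, so the same GRPH/CUR/LUT/CRTC register names serve all six controllers once the offset for the target CRTC is added. The following is a minimal sketch of that addressing scheme, not code from this commit; WREG32()/RREG32() and struct radeon_device are assumed from the surrounding driver, and only the register names listed above are used.

/* Illustrative only: selecting a display controller via the offsets above. */
static const u32 example_crtc_offsets[6] = {
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET,
};

static void example_disable_crtc(struct radeon_device *rdev, int crtc)
{
	u32 reg = EVERGREEN_CRTC_CONTROL + example_crtc_offsets[crtc];

	/* clear MASTER_EN on the chosen controller; the other five are untouched */
	WREG32(reg, RREG32(reg) & ~EVERGREEN_CRTC_MASTER_EN);
}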
diff --git a/drivers/gpu/drm/radeon/mkregtable.c b/drivers/gpu/drm/radeon/mkregtable.c
index 0d79577c1576..607241c6a8a9 100644
--- a/drivers/gpu/drm/radeon/mkregtable.c
+++ b/drivers/gpu/drm/radeon/mkregtable.c
@@ -661,8 +661,10 @@ static int parser_auth(struct table *t, const char *filename)
 	fseek(file, 0, SEEK_SET);
 
 	/* get header */
-	if (fgets(buf, 1024, file) == NULL)
+	if (fgets(buf, 1024, file) == NULL) {
+		fclose(file);
 		return -1;
+	}
 
 	/* first line will contain the last register
 	 * and gpu name */
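The hunk above plugs a FILE handle leak by closing the file before the early return. A minimal standalone sketch of the same fix, written with a single cleanup label instead of per-return fclose() calls, is shown below; read_header() is a hypothetical helper, not part of mkregtable.c.

#include <stdio.h>

/* Hypothetical helper: one exit path owns the fclose(), so every early
 * return after fopen() releases the handle. */
static int read_header(const char *filename, char *buf, int len)
{
	FILE *file = fopen(filename, "r");
	int ret = -1;

	if (file == NULL)
		return -1;
	if (fgets(buf, len, file) == NULL)
		goto out;	/* missing header: still closes the file */
	ret = 0;
out:
	fclose(file);
	return ret;
}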
diff --git a/drivers/gpu/drm/radeon/r100.c b/drivers/gpu/drm/radeon/r100.c
index c9e93eabcf16..cf60c0b3ef15 100644
--- a/drivers/gpu/drm/radeon/r100.c
+++ b/drivers/gpu/drm/radeon/r100.c
@@ -26,11 +26,13 @@
  * Jerome Glisse
  */
 #include <linux/seq_file.h>
+#include <linux/slab.h>
 #include "drmP.h"
 #include "drm.h"
 #include "radeon_drm.h"
 #include "radeon_reg.h"
 #include "radeon.h"
+#include "radeon_asic.h"
 #include "r100d.h"
 #include "rs100d.h"
 #include "rv200d.h"
@@ -65,6 +67,96 @@ MODULE_FIRMWARE(FIRMWARE_R520);
65 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 67 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
66 */ 68 */
67 69
70/* hpd for digital panel detect/disconnect */
71bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
72{
73 bool connected = false;
74
75 switch (hpd) {
76 case RADEON_HPD_1:
77 if (RREG32(RADEON_FP_GEN_CNTL) & RADEON_FP_DETECT_SENSE)
78 connected = true;
79 break;
80 case RADEON_HPD_2:
81 if (RREG32(RADEON_FP2_GEN_CNTL) & RADEON_FP2_DETECT_SENSE)
82 connected = true;
83 break;
84 default:
85 break;
86 }
87 return connected;
88}
89
90void r100_hpd_set_polarity(struct radeon_device *rdev,
91 enum radeon_hpd_id hpd)
92{
93 u32 tmp;
94 bool connected = r100_hpd_sense(rdev, hpd);
95
96 switch (hpd) {
97 case RADEON_HPD_1:
98 tmp = RREG32(RADEON_FP_GEN_CNTL);
99 if (connected)
100 tmp &= ~RADEON_FP_DETECT_INT_POL;
101 else
102 tmp |= RADEON_FP_DETECT_INT_POL;
103 WREG32(RADEON_FP_GEN_CNTL, tmp);
104 break;
105 case RADEON_HPD_2:
106 tmp = RREG32(RADEON_FP2_GEN_CNTL);
107 if (connected)
108 tmp &= ~RADEON_FP2_DETECT_INT_POL;
109 else
110 tmp |= RADEON_FP2_DETECT_INT_POL;
111 WREG32(RADEON_FP2_GEN_CNTL, tmp);
112 break;
113 default:
114 break;
115 }
116}
117
118void r100_hpd_init(struct radeon_device *rdev)
119{
120 struct drm_device *dev = rdev->ddev;
121 struct drm_connector *connector;
122
123 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
124 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
125 switch (radeon_connector->hpd.hpd) {
126 case RADEON_HPD_1:
127 rdev->irq.hpd[0] = true;
128 break;
129 case RADEON_HPD_2:
130 rdev->irq.hpd[1] = true;
131 break;
132 default:
133 break;
134 }
135 }
136 if (rdev->irq.installed)
137 r100_irq_set(rdev);
138}
139
140void r100_hpd_fini(struct radeon_device *rdev)
141{
142 struct drm_device *dev = rdev->ddev;
143 struct drm_connector *connector;
144
145 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
146 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
147 switch (radeon_connector->hpd.hpd) {
148 case RADEON_HPD_1:
149 rdev->irq.hpd[0] = false;
150 break;
151 case RADEON_HPD_2:
152 rdev->irq.hpd[1] = false;
153 break;
154 default:
155 break;
156 }
157 }
158}
159
68/* 160/*
69 * PCI GART 161 * PCI GART
70 */ 162 */
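The hunk above adds the r100 hot-plug detect (HPD) helpers. The usual pattern with these two entry points is that r100_hpd_set_polarity() reads the current sense state and arms the interrupt for the opposite transition, so both plug and unplug events raise FP_DETECT. The sketch below is illustrative only, assuming the helpers exactly as added above; it is not part of the patch.

/* Illustrative sketch (not from the patch): re-arming HPD after an event. */
static void example_hpd_event(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = r100_hpd_sense(rdev, hpd);	/* current pin state */

	/* flip the interrupt polarity so the next transition also fires */
	r100_hpd_set_polarity(rdev, hpd);

	DRM_DEBUG("HPD %d is %s\n", (int)hpd,
		  connected ? "connected" : "disconnected");
}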
@@ -94,20 +186,26 @@ int r100_pci_gart_init(struct radeon_device *rdev)
 	return radeon_gart_table_ram_alloc(rdev);
 }
 
+/* required on r1xx, r2xx, r300, r(v)350, r420/r481, rs400/rs480 */
+void r100_enable_bm(struct radeon_device *rdev)
+{
+	uint32_t tmp;
+	/* Enable bus mastering */
+	tmp = RREG32(RADEON_BUS_CNTL) & ~RADEON_BUS_MASTER_DIS;
+	WREG32(RADEON_BUS_CNTL, tmp);
+}
+
 int r100_pci_gart_enable(struct radeon_device *rdev)
 {
 	uint32_t tmp;
 
+	radeon_gart_restore(rdev);
 	/* discard memory request outside of configured range */
 	tmp = RREG32(RADEON_AIC_CNTL) | RADEON_DIS_OUT_OF_PCI_GART_ACCESS;
 	WREG32(RADEON_AIC_CNTL, tmp);
 	/* set address range for PCI address translate */
-	WREG32(RADEON_AIC_LO_ADDR, rdev->mc.gtt_location);
-	tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1;
-	WREG32(RADEON_AIC_HI_ADDR, tmp);
-	/* Enable bus mastering */
-	tmp = RREG32(RADEON_BUS_CNTL) & ~RADEON_BUS_MASTER_DIS;
-	WREG32(RADEON_BUS_CNTL, tmp);
+	WREG32(RADEON_AIC_LO_ADDR, rdev->mc.gtt_start);
+	WREG32(RADEON_AIC_HI_ADDR, rdev->mc.gtt_end);
 	/* set PCI GART page-table base address */
 	WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr);
 	tmp = RREG32(RADEON_AIC_CNTL) | RADEON_PCIGART_TRANSLATE_EN;
@@ -139,15 +237,20 @@ int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr)
139 237
140void r100_pci_gart_fini(struct radeon_device *rdev) 238void r100_pci_gart_fini(struct radeon_device *rdev)
141{ 239{
240 radeon_gart_fini(rdev);
142 r100_pci_gart_disable(rdev); 241 r100_pci_gart_disable(rdev);
143 radeon_gart_table_ram_free(rdev); 242 radeon_gart_table_ram_free(rdev);
144 radeon_gart_fini(rdev);
145} 243}
146 244
147int r100_irq_set(struct radeon_device *rdev) 245int r100_irq_set(struct radeon_device *rdev)
148{ 246{
149 uint32_t tmp = 0; 247 uint32_t tmp = 0;
150 248
249 if (!rdev->irq.installed) {
250 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
251 WREG32(R_000040_GEN_INT_CNTL, 0);
252 return -EINVAL;
253 }
151 if (rdev->irq.sw_int) { 254 if (rdev->irq.sw_int) {
152 tmp |= RADEON_SW_INT_ENABLE; 255 tmp |= RADEON_SW_INT_ENABLE;
153 } 256 }
@@ -157,6 +260,12 @@ int r100_irq_set(struct radeon_device *rdev)
157 if (rdev->irq.crtc_vblank_int[1]) { 260 if (rdev->irq.crtc_vblank_int[1]) {
158 tmp |= RADEON_CRTC2_VBLANK_MASK; 261 tmp |= RADEON_CRTC2_VBLANK_MASK;
159 } 262 }
263 if (rdev->irq.hpd[0]) {
264 tmp |= RADEON_FP_DETECT_MASK;
265 }
266 if (rdev->irq.hpd[1]) {
267 tmp |= RADEON_FP2_DETECT_MASK;
268 }
160 WREG32(RADEON_GEN_INT_CNTL, tmp); 269 WREG32(RADEON_GEN_INT_CNTL, tmp);
161 return 0; 270 return 0;
162} 271}
@@ -175,8 +284,9 @@ void r100_irq_disable(struct radeon_device *rdev)
175static inline uint32_t r100_irq_ack(struct radeon_device *rdev) 284static inline uint32_t r100_irq_ack(struct radeon_device *rdev)
176{ 285{
177 uint32_t irqs = RREG32(RADEON_GEN_INT_STATUS); 286 uint32_t irqs = RREG32(RADEON_GEN_INT_STATUS);
178 uint32_t irq_mask = RADEON_SW_INT_TEST | RADEON_CRTC_VBLANK_STAT | 287 uint32_t irq_mask = RADEON_SW_INT_TEST |
179 RADEON_CRTC2_VBLANK_STAT; 288 RADEON_CRTC_VBLANK_STAT | RADEON_CRTC2_VBLANK_STAT |
289 RADEON_FP_DETECT_STAT | RADEON_FP2_DETECT_STAT;
180 290
181 if (irqs) { 291 if (irqs) {
182 WREG32(RADEON_GEN_INT_STATUS, irqs); 292 WREG32(RADEON_GEN_INT_STATUS, irqs);
@@ -187,6 +297,7 @@ static inline uint32_t r100_irq_ack(struct radeon_device *rdev)
187int r100_irq_process(struct radeon_device *rdev) 297int r100_irq_process(struct radeon_device *rdev)
188{ 298{
189 uint32_t status, msi_rearm; 299 uint32_t status, msi_rearm;
300 bool queue_hotplug = false;
190 301
191 status = r100_irq_ack(rdev); 302 status = r100_irq_ack(rdev);
192 if (!status) { 303 if (!status) {
@@ -203,12 +314,26 @@ int r100_irq_process(struct radeon_device *rdev)
203 /* Vertical blank interrupts */ 314 /* Vertical blank interrupts */
204 if (status & RADEON_CRTC_VBLANK_STAT) { 315 if (status & RADEON_CRTC_VBLANK_STAT) {
205 drm_handle_vblank(rdev->ddev, 0); 316 drm_handle_vblank(rdev->ddev, 0);
317 rdev->pm.vblank_sync = true;
318 wake_up(&rdev->irq.vblank_queue);
206 } 319 }
207 if (status & RADEON_CRTC2_VBLANK_STAT) { 320 if (status & RADEON_CRTC2_VBLANK_STAT) {
208 drm_handle_vblank(rdev->ddev, 1); 321 drm_handle_vblank(rdev->ddev, 1);
322 rdev->pm.vblank_sync = true;
323 wake_up(&rdev->irq.vblank_queue);
324 }
325 if (status & RADEON_FP_DETECT_STAT) {
326 queue_hotplug = true;
327 DRM_DEBUG("HPD1\n");
328 }
329 if (status & RADEON_FP2_DETECT_STAT) {
330 queue_hotplug = true;
331 DRM_DEBUG("HPD2\n");
209 } 332 }
210 status = r100_irq_ack(rdev); 333 status = r100_irq_ack(rdev);
211 } 334 }
335 if (queue_hotplug)
336 queue_work(rdev->wq, &rdev->hotplug_work);
212 if (rdev->msi_enabled) { 337 if (rdev->msi_enabled) {
213 switch (rdev->family) { 338 switch (rdev->family) {
214 case CHIP_RS400: 339 case CHIP_RS400:
@@ -235,14 +360,25 @@ u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc)
 	return RREG32(RADEON_CRTC2_CRNT_FRAME);
 }
 
+/* Whoever calls radeon_fence_emit should call ring_lock and ask
+ * for enough space (today the callers are ib schedule and buffer move) */
 void r100_fence_ring_emit(struct radeon_device *rdev,
 			  struct radeon_fence *fence)
 {
-	/* Who ever call radeon_fence_emit should call ring_lock and ask
-	 * for enough space (today caller are ib schedule and buffer move) */
+	/* We have to make sure that caches are flushed before
+	 * the CPU might read something from VRAM. */
+	radeon_ring_write(rdev, PACKET0(RADEON_RB3D_DSTCACHE_CTLSTAT, 0));
+	radeon_ring_write(rdev, RADEON_RB3D_DC_FLUSH_ALL);
+	radeon_ring_write(rdev, PACKET0(RADEON_RB3D_ZCACHE_CTLSTAT, 0));
+	radeon_ring_write(rdev, RADEON_RB3D_ZC_FLUSH_ALL);
 	/* Wait until IDLE & CLEAN */
-	radeon_ring_write(rdev, PACKET0(0x1720, 0));
-	radeon_ring_write(rdev, (1 << 16) | (1 << 17));
+	radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
+	radeon_ring_write(rdev, RADEON_WAIT_2D_IDLECLEAN | RADEON_WAIT_3D_IDLECLEAN);
+	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
+	radeon_ring_write(rdev, rdev->config.r100.hdp_cntl |
+				RADEON_HDP_READ_BUFFER_INVALIDATE);
+	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
+	radeon_ring_write(rdev, rdev->config.r100.hdp_cntl);
 	/* Emit fence sequence & fire IRQ */
 	radeon_ring_write(rdev, PACKET0(rdev->fence_drv.scratch_reg, 0));
 	radeon_ring_write(rdev, fence->seq);
@@ -255,24 +391,27 @@ int r100_wb_init(struct radeon_device *rdev)
 	int r;
 
 	if (rdev->wb.wb_obj == NULL) {
-		r = radeon_object_create(rdev, NULL, RADEON_GPU_PAGE_SIZE,
-					 true,
-					 RADEON_GEM_DOMAIN_GTT,
-					 false, &rdev->wb.wb_obj);
+		r = radeon_bo_create(rdev, NULL, RADEON_GPU_PAGE_SIZE, true,
+				     RADEON_GEM_DOMAIN_GTT,
+				     &rdev->wb.wb_obj);
 		if (r) {
-			DRM_ERROR("radeon: failed to create WB buffer (%d).\n", r);
+			dev_err(rdev->dev, "(%d) create WB buffer failed\n", r);
 			return r;
 		}
-		r = radeon_object_pin(rdev->wb.wb_obj,
-				      RADEON_GEM_DOMAIN_GTT,
-				      &rdev->wb.gpu_addr);
+		r = radeon_bo_reserve(rdev->wb.wb_obj, false);
+		if (unlikely(r != 0))
+			return r;
+		r = radeon_bo_pin(rdev->wb.wb_obj, RADEON_GEM_DOMAIN_GTT,
+				  &rdev->wb.gpu_addr);
 		if (r) {
-			DRM_ERROR("radeon: failed to pin WB buffer (%d).\n", r);
+			dev_err(rdev->dev, "(%d) pin WB buffer failed\n", r);
+			radeon_bo_unreserve(rdev->wb.wb_obj);
 			return r;
 		}
-		r = radeon_object_kmap(rdev->wb.wb_obj, (void **)&rdev->wb.wb);
+		r = radeon_bo_kmap(rdev->wb.wb_obj, (void **)&rdev->wb.wb);
+		radeon_bo_unreserve(rdev->wb.wb_obj);
 		if (r) {
-			DRM_ERROR("radeon: failed to map WB buffer (%d).\n", r);
+			dev_err(rdev->dev, "(%d) map WB buffer failed\n", r);
 			return r;
 		}
 	}
@@ -290,11 +429,19 @@ void r100_wb_disable(struct radeon_device *rdev)
290 429
291void r100_wb_fini(struct radeon_device *rdev) 430void r100_wb_fini(struct radeon_device *rdev)
292{ 431{
432 int r;
433
293 r100_wb_disable(rdev); 434 r100_wb_disable(rdev);
294 if (rdev->wb.wb_obj) { 435 if (rdev->wb.wb_obj) {
295 radeon_object_kunmap(rdev->wb.wb_obj); 436 r = radeon_bo_reserve(rdev->wb.wb_obj, false);
296 radeon_object_unpin(rdev->wb.wb_obj); 437 if (unlikely(r != 0)) {
297 radeon_object_unref(&rdev->wb.wb_obj); 438 dev_err(rdev->dev, "(%d) can't finish WB\n", r);
439 return;
440 }
441 radeon_bo_kunmap(rdev->wb.wb_obj);
442 radeon_bo_unpin(rdev->wb.wb_obj);
443 radeon_bo_unreserve(rdev->wb.wb_obj);
444 radeon_bo_unref(&rdev->wb.wb_obj);
298 rdev->wb.wb = NULL; 445 rdev->wb.wb = NULL;
299 rdev->wb.wb_obj = NULL; 446 rdev->wb.wb_obj = NULL;
300 } 447 }
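The two hunks above convert the write-back buffer from the old radeon_object_* helpers to the radeon_bo_* API, where pin, unpin and kmap must happen while the object is reserved. A condensed sketch of that pattern, inferred only from the calls shown in these hunks (example_pin_and_map() itself is hypothetical, and error unwinding of the pin is omitted for brevity):

/* Sketch of the reserve -> pin -> kmap -> unreserve sequence used above. */
static int example_pin_and_map(struct radeon_bo *bo, void **cpu_ptr, u64 *gpu_addr)
{
	int r;

	r = radeon_bo_reserve(bo, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_GTT, gpu_addr);
	if (r == 0)
		r = radeon_bo_kmap(bo, cpu_ptr);
	radeon_bo_unreserve(bo);	/* drop the reservation in every case */
	return r;
}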
@@ -598,6 +745,8 @@ int r100_cp_init(struct radeon_device *rdev, unsigned ring_size)
598 udelay(10); 745 udelay(10);
599 rdev->cp.rptr = RREG32(RADEON_CP_RB_RPTR); 746 rdev->cp.rptr = RREG32(RADEON_CP_RB_RPTR);
600 rdev->cp.wptr = RREG32(RADEON_CP_RB_WPTR); 747 rdev->cp.wptr = RREG32(RADEON_CP_RB_WPTR);
748 /* protect against crazy HW on resume */
749 rdev->cp.wptr &= rdev->cp.ptr_mask;
601 /* Set cp mode to bus mastering & enable cp*/ 750 /* Set cp mode to bus mastering & enable cp*/
602 WREG32(RADEON_CP_CSQ_MODE, 751 WREG32(RADEON_CP_CSQ_MODE,
603 REG_SET(RADEON_INDIRECT2_START, indirect2_start) | 752 REG_SET(RADEON_INDIRECT2_START, indirect2_start) |
@@ -1250,7 +1399,6 @@ static int r100_packet0_check(struct radeon_cs_parser *p,
 	case RADEON_TXFORMAT_ARGB4444:
 	case RADEON_TXFORMAT_VYUY422:
 	case RADEON_TXFORMAT_YVYU422:
-	case RADEON_TXFORMAT_DXT1:
 	case RADEON_TXFORMAT_SHADOW16:
 	case RADEON_TXFORMAT_LDUDV655:
 	case RADEON_TXFORMAT_DUDV88:
@@ -1258,12 +1406,19 @@ static int r100_packet0_check(struct radeon_cs_parser *p,
 		break;
 	case RADEON_TXFORMAT_ARGB8888:
 	case RADEON_TXFORMAT_RGBA8888:
-	case RADEON_TXFORMAT_DXT23:
-	case RADEON_TXFORMAT_DXT45:
 	case RADEON_TXFORMAT_SHADOW32:
 	case RADEON_TXFORMAT_LDUDUV8888:
 		track->textures[i].cpp = 4;
 		break;
+	case RADEON_TXFORMAT_DXT1:
+		track->textures[i].cpp = 1;
+		track->textures[i].compress_format = R100_TRACK_COMP_DXT1;
+		break;
+	case RADEON_TXFORMAT_DXT23:
+	case RADEON_TXFORMAT_DXT45:
+		track->textures[i].cpp = 1;
+		track->textures[i].compress_format = R100_TRACK_COMP_DXT35;
+		break;
 	}
 	track->textures[i].cube_info[4].width = 1 << ((idx_value >> 16) & 0xf);
 	track->textures[i].cube_info[4].height = 1 << ((idx_value >> 20) & 0xf);
@@ -1288,17 +1443,17 @@ static int r100_packet0_check(struct radeon_cs_parser *p,
1288 1443
1289int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p, 1444int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
1290 struct radeon_cs_packet *pkt, 1445 struct radeon_cs_packet *pkt,
1291 struct radeon_object *robj) 1446 struct radeon_bo *robj)
1292{ 1447{
1293 unsigned idx; 1448 unsigned idx;
1294 u32 value; 1449 u32 value;
1295 idx = pkt->idx + 1; 1450 idx = pkt->idx + 1;
1296 value = radeon_get_ib_value(p, idx + 2); 1451 value = radeon_get_ib_value(p, idx + 2);
1297 if ((value + 1) > radeon_object_size(robj)) { 1452 if ((value + 1) > radeon_bo_size(robj)) {
1298 DRM_ERROR("[drm] Buffer too small for PACKET3 INDX_BUFFER " 1453 DRM_ERROR("[drm] Buffer too small for PACKET3 INDX_BUFFER "
1299 "(need %u have %lu) !\n", 1454 "(need %u have %lu) !\n",
1300 value + 1, 1455 value + 1,
1301 radeon_object_size(robj)); 1456 radeon_bo_size(robj));
1302 return -EINVAL; 1457 return -EINVAL;
1303 } 1458 }
1304 return 0; 1459 return 0;
@@ -1363,6 +1518,7 @@ static int r100_packet3_check(struct radeon_cs_parser *p,
1363 DRM_ERROR("PRIM_WALK must be 3 for IMMD draw\n"); 1518 DRM_ERROR("PRIM_WALK must be 3 for IMMD draw\n");
1364 return -EINVAL; 1519 return -EINVAL;
1365 } 1520 }
1521 track->vtx_size = r100_get_vtx_size(radeon_get_ib_value(p, idx + 0));
1366 track->vap_vf_cntl = radeon_get_ib_value(p, idx + 1); 1522 track->vap_vf_cntl = radeon_get_ib_value(p, idx + 1);
1367 track->immd_dwords = pkt->count - 1; 1523 track->immd_dwords = pkt->count - 1;
1368 r = r100_cs_track_check(p->rdev, track); 1524 r = r100_cs_track_check(p->rdev, track);
@@ -1553,7 +1709,7 @@ int r100_gui_wait_for_idle(struct radeon_device *rdev)
1553 } 1709 }
1554 for (i = 0; i < rdev->usec_timeout; i++) { 1710 for (i = 0; i < rdev->usec_timeout; i++) {
1555 tmp = RREG32(RADEON_RBBM_STATUS); 1711 tmp = RREG32(RADEON_RBBM_STATUS);
1556 if (!(tmp & (1 << 31))) { 1712 if (!(tmp & RADEON_RBBM_ACTIVE)) {
1557 return 0; 1713 return 0;
1558 } 1714 }
1559 DRM_UDELAY(1); 1715 DRM_UDELAY(1);
@@ -1568,8 +1724,8 @@ int r100_mc_wait_for_idle(struct radeon_device *rdev)
1568 1724
1569 for (i = 0; i < rdev->usec_timeout; i++) { 1725 for (i = 0; i < rdev->usec_timeout; i++) {
1570 /* read MC_STATUS */ 1726 /* read MC_STATUS */
1571 tmp = RREG32(0x0150); 1727 tmp = RREG32(RADEON_MC_STATUS);
1572 if (tmp & (1 << 2)) { 1728 if (tmp & RADEON_MC_IDLE) {
1573 return 0; 1729 return 0;
1574 } 1730 }
1575 DRM_UDELAY(1); 1731 DRM_UDELAY(1);
@@ -1642,7 +1798,7 @@ int r100_gpu_reset(struct radeon_device *rdev)
1642 } 1798 }
1643 /* Check if GPU is idle */ 1799 /* Check if GPU is idle */
1644 status = RREG32(RADEON_RBBM_STATUS); 1800 status = RREG32(RADEON_RBBM_STATUS);
1645 if (status & (1 << 31)) { 1801 if (status & RADEON_RBBM_ACTIVE) {
1646 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status); 1802 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status);
1647 return -1; 1803 return -1;
1648 } 1804 }
@@ -1650,6 +1806,89 @@ int r100_gpu_reset(struct radeon_device *rdev)
1650 return 0; 1806 return 0;
1651} 1807}
1652 1808
1809void r100_set_common_regs(struct radeon_device *rdev)
1810{
1811 struct drm_device *dev = rdev->ddev;
1812 bool force_dac2 = false;
1813 u32 tmp;
1814
1815 /* set these so they don't interfere with anything */
1816 WREG32(RADEON_OV0_SCALE_CNTL, 0);
1817 WREG32(RADEON_SUBPIC_CNTL, 0);
1818 WREG32(RADEON_VIPH_CONTROL, 0);
1819 WREG32(RADEON_I2C_CNTL_1, 0);
1820 WREG32(RADEON_DVI_I2C_CNTL_1, 0);
1821 WREG32(RADEON_CAP0_TRIG_CNTL, 0);
1822 WREG32(RADEON_CAP1_TRIG_CNTL, 0);
1823
1824 /* always set up dac2 on rn50 and some rv100 as lots
1825 * of servers seem to wire it up to a VGA port but
1826 * don't report it in the bios connector
1827 * table.
1828 */
1829 switch (dev->pdev->device) {
1830 /* RN50 */
1831 case 0x515e:
1832 case 0x5969:
1833 force_dac2 = true;
1834 break;
1835 /* RV100*/
1836 case 0x5159:
1837 case 0x515a:
1838 /* DELL triple head servers */
1839 if ((dev->pdev->subsystem_vendor == 0x1028 /* DELL */) &&
1840 ((dev->pdev->subsystem_device == 0x016c) ||
1841 (dev->pdev->subsystem_device == 0x016d) ||
1842 (dev->pdev->subsystem_device == 0x016e) ||
1843 (dev->pdev->subsystem_device == 0x016f) ||
1844 (dev->pdev->subsystem_device == 0x0170) ||
1845 (dev->pdev->subsystem_device == 0x017d) ||
1846 (dev->pdev->subsystem_device == 0x017e) ||
1847 (dev->pdev->subsystem_device == 0x0183) ||
1848 (dev->pdev->subsystem_device == 0x018a) ||
1849 (dev->pdev->subsystem_device == 0x019a)))
1850 force_dac2 = true;
1851 break;
1852 }
1853
1854 if (force_dac2) {
1855 u32 disp_hw_debug = RREG32(RADEON_DISP_HW_DEBUG);
1856 u32 tv_dac_cntl = RREG32(RADEON_TV_DAC_CNTL);
1857 u32 dac2_cntl = RREG32(RADEON_DAC_CNTL2);
1858
1859 /* For CRT on DAC2, don't turn it on if BIOS didn't
1860 enable it, even it's detected.
1861 */
1862
1863 /* force it to crtc0 */
1864 dac2_cntl &= ~RADEON_DAC2_DAC_CLK_SEL;
1865 dac2_cntl |= RADEON_DAC2_DAC2_CLK_SEL;
1866 disp_hw_debug |= RADEON_CRT2_DISP1_SEL;
1867
1868 /* set up the TV DAC */
1869 tv_dac_cntl &= ~(RADEON_TV_DAC_PEDESTAL |
1870 RADEON_TV_DAC_STD_MASK |
1871 RADEON_TV_DAC_RDACPD |
1872 RADEON_TV_DAC_GDACPD |
1873 RADEON_TV_DAC_BDACPD |
1874 RADEON_TV_DAC_BGADJ_MASK |
1875 RADEON_TV_DAC_DACADJ_MASK);
1876 tv_dac_cntl |= (RADEON_TV_DAC_NBLANK |
1877 RADEON_TV_DAC_NHOLD |
1878 RADEON_TV_DAC_STD_PS2 |
1879 (0x58 << 16));
1880
1881 WREG32(RADEON_TV_DAC_CNTL, tv_dac_cntl);
1882 WREG32(RADEON_DISP_HW_DEBUG, disp_hw_debug);
1883 WREG32(RADEON_DAC_CNTL2, dac2_cntl);
1884 }
1885
1886 /* switch PM block to ACPI mode */
1887 tmp = RREG32_PLL(RADEON_PLL_PWRMGT_CNTL);
1888 tmp &= ~RADEON_PM_MODE_SEL;
1889 WREG32_PLL(RADEON_PLL_PWRMGT_CNTL, tmp);
1890
1891}
1653 1892
1654/* 1893/*
1655 * VRAM info 1894 * VRAM info
@@ -1730,17 +1969,20 @@ static u32 r100_get_accessible_vram(struct radeon_device *rdev)
1730void r100_vram_init_sizes(struct radeon_device *rdev) 1969void r100_vram_init_sizes(struct radeon_device *rdev)
1731{ 1970{
1732 u64 config_aper_size; 1971 u64 config_aper_size;
1733 u32 accessible;
1734 1972
1973 /* work out accessible VRAM */
1974 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
1975 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
1976 rdev->mc.visible_vram_size = r100_get_accessible_vram(rdev);
1977 /* FIXME we don't use the second aperture yet when we could use it */
1978 if (rdev->mc.visible_vram_size > rdev->mc.aper_size)
1979 rdev->mc.visible_vram_size = rdev->mc.aper_size;
1735 config_aper_size = RREG32(RADEON_CONFIG_APER_SIZE); 1980 config_aper_size = RREG32(RADEON_CONFIG_APER_SIZE);
1736
1737 if (rdev->flags & RADEON_IS_IGP) { 1981 if (rdev->flags & RADEON_IS_IGP) {
1738 uint32_t tom; 1982 uint32_t tom;
1739 /* read NB_TOM to get the amount of ram stolen for the GPU */ 1983 /* read NB_TOM to get the amount of ram stolen for the GPU */
1740 tom = RREG32(RADEON_NB_TOM); 1984 tom = RREG32(RADEON_NB_TOM);
1741 rdev->mc.real_vram_size = (((tom >> 16) - (tom & 0xffff) + 1) << 16); 1985 rdev->mc.real_vram_size = (((tom >> 16) - (tom & 0xffff) + 1) << 16);
1742 /* for IGPs we need to keep VRAM where it was put by the BIOS */
1743 rdev->mc.vram_location = (tom & 0xffff) << 16;
1744 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size); 1986 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size);
1745 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; 1987 rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
1746 } else { 1988 } else {
@@ -1752,30 +1994,19 @@ void r100_vram_init_sizes(struct radeon_device *rdev)
1752 rdev->mc.real_vram_size = 8192 * 1024; 1994 rdev->mc.real_vram_size = 8192 * 1024;
1753 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size); 1995 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size);
1754 } 1996 }
1755 /* let driver place VRAM */ 1997 /* Fix for RN50, M6, M7 with 8/16/32(??) MBs of VRAM -
1756 rdev->mc.vram_location = 0xFFFFFFFFUL; 1998 * Novell bug 204882 + along with lots of ubuntu ones
1757 /* Fix for RN50, M6, M7 with 8/16/32(??) MBs of VRAM - 1999 */
1758 * Novell bug 204882 + along with lots of ubuntu ones */
1759 if (config_aper_size > rdev->mc.real_vram_size) 2000 if (config_aper_size > rdev->mc.real_vram_size)
1760 rdev->mc.mc_vram_size = config_aper_size; 2001 rdev->mc.mc_vram_size = config_aper_size;
1761 else 2002 else
1762 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; 2003 rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
1763 } 2004 }
1764 2005 /* FIXME remove this once we support unmappable VRAM */
1765 /* work out accessible VRAM */ 2006 if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
1766 accessible = r100_get_accessible_vram(rdev);
1767
1768 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
1769 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
1770
1771 if (accessible > rdev->mc.aper_size)
1772 accessible = rdev->mc.aper_size;
1773
1774 if (rdev->mc.mc_vram_size > rdev->mc.aper_size)
1775 rdev->mc.mc_vram_size = rdev->mc.aper_size; 2007 rdev->mc.mc_vram_size = rdev->mc.aper_size;
1776
1777 if (rdev->mc.real_vram_size > rdev->mc.aper_size)
1778 rdev->mc.real_vram_size = rdev->mc.aper_size; 2008 rdev->mc.real_vram_size = rdev->mc.aper_size;
2009 }
1779} 2010}
1780 2011
1781void r100_vga_set_state(struct radeon_device *rdev, bool state) 2012void r100_vga_set_state(struct radeon_device *rdev, bool state)
@@ -1792,11 +2023,19 @@ void r100_vga_set_state(struct radeon_device *rdev, bool state)
1792 WREG32(RADEON_CONFIG_CNTL, temp); 2023 WREG32(RADEON_CONFIG_CNTL, temp);
1793} 2024}
1794 2025
1795void r100_vram_info(struct radeon_device *rdev) 2026void r100_mc_init(struct radeon_device *rdev)
1796{ 2027{
1797 r100_vram_get_type(rdev); 2028 u64 base;
1798 2029
2030 r100_vram_get_type(rdev);
1799 r100_vram_init_sizes(rdev); 2031 r100_vram_init_sizes(rdev);
2032 base = rdev->mc.aper_base;
2033 if (rdev->flags & RADEON_IS_IGP)
2034 base = (RREG32(RADEON_NB_TOM) & 0xffff) << 16;
2035 radeon_vram_location(rdev, &rdev->mc, base);
2036 if (!(rdev->flags & RADEON_IS_AGP))
2037 radeon_gtt_location(rdev, &rdev->mc);
2038 radeon_update_bandwidth_info(rdev);
1800} 2039}
1801 2040
1802 2041
@@ -2160,6 +2399,8 @@ void r100_bandwidth_update(struct radeon_device *rdev)
2160 uint32_t pixel_bytes1 = 0; 2399 uint32_t pixel_bytes1 = 0;
2161 uint32_t pixel_bytes2 = 0; 2400 uint32_t pixel_bytes2 = 0;
2162 2401
2402 radeon_update_display_priority(rdev);
2403
2163 if (rdev->mode_info.crtcs[0]->base.enabled) { 2404 if (rdev->mode_info.crtcs[0]->base.enabled) {
2164 mode1 = &rdev->mode_info.crtcs[0]->base.mode; 2405 mode1 = &rdev->mode_info.crtcs[0]->base.mode;
2165 pixel_bytes1 = rdev->mode_info.crtcs[0]->base.fb->bits_per_pixel / 8; 2406 pixel_bytes1 = rdev->mode_info.crtcs[0]->base.fb->bits_per_pixel / 8;
@@ -2188,11 +2429,8 @@ void r100_bandwidth_update(struct radeon_device *rdev)
 	/*
 	 * determine whether there is enough bandwidth for the current mode
 	 */
-	mclk_ff.full = rfixed_const(rdev->clock.default_mclk);
-	temp_ff.full = rfixed_const(100);
-	mclk_ff.full = rfixed_div(mclk_ff, temp_ff);
-	sclk_ff.full = rfixed_const(rdev->clock.default_sclk);
-	sclk_ff.full = rfixed_div(sclk_ff, temp_ff);
+	sclk_ff = rdev->pm.sclk;
+	mclk_ff = rdev->pm.mclk;
 
 	temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1);
 	temp_ff.full = rfixed_const(temp);
@@ -2588,13 +2826,14 @@ static inline void r100_cs_track_texture_print(struct r100_cs_track_texture *t)
2588 DRM_ERROR("coordinate type %d\n", t->tex_coord_type); 2826 DRM_ERROR("coordinate type %d\n", t->tex_coord_type);
2589 DRM_ERROR("width round to power of 2 %d\n", t->roundup_w); 2827 DRM_ERROR("width round to power of 2 %d\n", t->roundup_w);
2590 DRM_ERROR("height round to power of 2 %d\n", t->roundup_h); 2828 DRM_ERROR("height round to power of 2 %d\n", t->roundup_h);
2829 DRM_ERROR("compress format %d\n", t->compress_format);
2591} 2830}
2592 2831
2593static int r100_cs_track_cube(struct radeon_device *rdev, 2832static int r100_cs_track_cube(struct radeon_device *rdev,
2594 struct r100_cs_track *track, unsigned idx) 2833 struct r100_cs_track *track, unsigned idx)
2595{ 2834{
2596 unsigned face, w, h; 2835 unsigned face, w, h;
2597 struct radeon_object *cube_robj; 2836 struct radeon_bo *cube_robj;
2598 unsigned long size; 2837 unsigned long size;
2599 2838
2600 for (face = 0; face < 5; face++) { 2839 for (face = 0; face < 5; face++) {
@@ -2607,9 +2846,9 @@ static int r100_cs_track_cube(struct radeon_device *rdev,
2607 2846
2608 size += track->textures[idx].cube_info[face].offset; 2847 size += track->textures[idx].cube_info[face].offset;
2609 2848
2610 if (size > radeon_object_size(cube_robj)) { 2849 if (size > radeon_bo_size(cube_robj)) {
2611 DRM_ERROR("Cube texture offset greater than object size %lu %lu\n", 2850 DRM_ERROR("Cube texture offset greater than object size %lu %lu\n",
2612 size, radeon_object_size(cube_robj)); 2851 size, radeon_bo_size(cube_robj));
2613 r100_cs_track_texture_print(&track->textures[idx]); 2852 r100_cs_track_texture_print(&track->textures[idx]);
2614 return -1; 2853 return -1;
2615 } 2854 }
@@ -2617,12 +2856,42 @@ static int r100_cs_track_cube(struct radeon_device *rdev,
2617 return 0; 2856 return 0;
2618} 2857}
2619 2858
2859static int r100_track_compress_size(int compress_format, int w, int h)
2860{
2861 int block_width, block_height, block_bytes;
2862 int wblocks, hblocks;
2863 int min_wblocks;
2864 int sz;
2865
2866 block_width = 4;
2867 block_height = 4;
2868
2869 switch (compress_format) {
2870 case R100_TRACK_COMP_DXT1:
2871 block_bytes = 8;
2872 min_wblocks = 4;
2873 break;
2874 default:
2875 case R100_TRACK_COMP_DXT35:
2876 block_bytes = 16;
2877 min_wblocks = 2;
2878 break;
2879 }
2880
2881 hblocks = (h + block_height - 1) / block_height;
2882 wblocks = (w + block_width - 1) / block_width;
2883 if (wblocks < min_wblocks)
2884 wblocks = min_wblocks;
2885 sz = wblocks * hblocks * block_bytes;
2886 return sz;
2887}
2888
2620static int r100_cs_track_texture_check(struct radeon_device *rdev, 2889static int r100_cs_track_texture_check(struct radeon_device *rdev,
2621 struct r100_cs_track *track) 2890 struct r100_cs_track *track)
2622{ 2891{
2623 struct radeon_object *robj; 2892 struct radeon_bo *robj;
2624 unsigned long size; 2893 unsigned long size;
2625 unsigned u, i, w, h; 2894 unsigned u, i, w, h, d;
2626 int ret; 2895 int ret;
2627 2896
2628 for (u = 0; u < track->num_texture; u++) { 2897 for (u = 0; u < track->num_texture; u++) {
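The r100_track_compress_size() helper added above sizes DXT textures per 4x4 block: 8 bytes per block for DXT1 (with a minimum row width of 4 blocks) and 16 bytes per block for DXT3/5 (minimum of 2 blocks). A standalone restatement of that math with a couple of worked values follows; this is illustrative code, not part of the driver.

#include <stdio.h>

/* Same block math as the tracker helper, restated outside the driver. */
static int dxt_size(int bytes_per_block, int min_wblocks, int w, int h)
{
	int wblocks = (w + 3) / 4;
	int hblocks = (h + 3) / 4;

	if (wblocks < min_wblocks)
		wblocks = min_wblocks;
	return wblocks * hblocks * bytes_per_block;
}

int main(void)
{
	printf("64x64 DXT1:   %d bytes\n", dxt_size(8, 4, 64, 64));   /* 16*16*8  = 2048 */
	printf("64x64 DXT3/5: %d bytes\n", dxt_size(16, 2, 64, 64));  /* 16*16*16 = 4096 */
	printf("2x2   DXT1:   %d bytes\n", dxt_size(8, 4, 2, 2));     /* min row width: 32 */
	return 0;
}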
@@ -2654,14 +2923,25 @@ static int r100_cs_track_texture_check(struct radeon_device *rdev,
2654 h = h / (1 << i); 2923 h = h / (1 << i);
2655 if (track->textures[u].roundup_h) 2924 if (track->textures[u].roundup_h)
2656 h = roundup_pow_of_two(h); 2925 h = roundup_pow_of_two(h);
2657 size += w * h; 2926 if (track->textures[u].tex_coord_type == 1) {
2927 d = (1 << track->textures[u].txdepth) / (1 << i);
2928 if (!d)
2929 d = 1;
2930 } else {
2931 d = 1;
2932 }
2933 if (track->textures[u].compress_format) {
2934
2935 size += r100_track_compress_size(track->textures[u].compress_format, w, h) * d;
2936 /* compressed textures are block based */
2937 } else
2938 size += w * h * d;
2658 } 2939 }
2659 size *= track->textures[u].cpp; 2940 size *= track->textures[u].cpp;
2941
2660 switch (track->textures[u].tex_coord_type) { 2942 switch (track->textures[u].tex_coord_type) {
2661 case 0: 2943 case 0:
2662 break;
2663 case 1: 2944 case 1:
2664 size *= (1 << track->textures[u].txdepth);
2665 break; 2945 break;
2666 case 2: 2946 case 2:
2667 if (track->separate_cube) { 2947 if (track->separate_cube) {
@@ -2676,9 +2956,9 @@ static int r100_cs_track_texture_check(struct radeon_device *rdev,
2676 "%u\n", track->textures[u].tex_coord_type, u); 2956 "%u\n", track->textures[u].tex_coord_type, u);
2677 return -EINVAL; 2957 return -EINVAL;
2678 } 2958 }
2679 if (size > radeon_object_size(robj)) { 2959 if (size > radeon_bo_size(robj)) {
2680 DRM_ERROR("Texture of unit %u needs %lu bytes but is " 2960 DRM_ERROR("Texture of unit %u needs %lu bytes but is "
2681 "%lu\n", u, size, radeon_object_size(robj)); 2961 "%lu\n", u, size, radeon_bo_size(robj));
2682 r100_cs_track_texture_print(&track->textures[u]); 2962 r100_cs_track_texture_print(&track->textures[u]);
2683 return -EINVAL; 2963 return -EINVAL;
2684 } 2964 }
@@ -2695,15 +2975,19 @@ int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track)
2695 2975
2696 for (i = 0; i < track->num_cb; i++) { 2976 for (i = 0; i < track->num_cb; i++) {
2697 if (track->cb[i].robj == NULL) { 2977 if (track->cb[i].robj == NULL) {
2978 if (!(track->zb_cb_clear || track->color_channel_mask ||
2979 track->blend_read_enable)) {
2980 continue;
2981 }
2698 DRM_ERROR("[drm] No buffer for color buffer %d !\n", i); 2982 DRM_ERROR("[drm] No buffer for color buffer %d !\n", i);
2699 return -EINVAL; 2983 return -EINVAL;
2700 } 2984 }
2701 size = track->cb[i].pitch * track->cb[i].cpp * track->maxy; 2985 size = track->cb[i].pitch * track->cb[i].cpp * track->maxy;
2702 size += track->cb[i].offset; 2986 size += track->cb[i].offset;
2703 if (size > radeon_object_size(track->cb[i].robj)) { 2987 if (size > radeon_bo_size(track->cb[i].robj)) {
2704 DRM_ERROR("[drm] Buffer too small for color buffer %d " 2988 DRM_ERROR("[drm] Buffer too small for color buffer %d "
2705 "(need %lu have %lu) !\n", i, size, 2989 "(need %lu have %lu) !\n", i, size,
2706 radeon_object_size(track->cb[i].robj)); 2990 radeon_bo_size(track->cb[i].robj));
2707 DRM_ERROR("[drm] color buffer %d (%u %u %u %u)\n", 2991 DRM_ERROR("[drm] color buffer %d (%u %u %u %u)\n",
2708 i, track->cb[i].pitch, track->cb[i].cpp, 2992 i, track->cb[i].pitch, track->cb[i].cpp,
2709 track->cb[i].offset, track->maxy); 2993 track->cb[i].offset, track->maxy);
@@ -2717,10 +3001,10 @@ int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track)
2717 } 3001 }
2718 size = track->zb.pitch * track->zb.cpp * track->maxy; 3002 size = track->zb.pitch * track->zb.cpp * track->maxy;
2719 size += track->zb.offset; 3003 size += track->zb.offset;
2720 if (size > radeon_object_size(track->zb.robj)) { 3004 if (size > radeon_bo_size(track->zb.robj)) {
2721 DRM_ERROR("[drm] Buffer too small for z buffer " 3005 DRM_ERROR("[drm] Buffer too small for z buffer "
2722 "(need %lu have %lu) !\n", size, 3006 "(need %lu have %lu) !\n", size,
2723 radeon_object_size(track->zb.robj)); 3007 radeon_bo_size(track->zb.robj));
2724 DRM_ERROR("[drm] zbuffer (%u %u %u %u)\n", 3008 DRM_ERROR("[drm] zbuffer (%u %u %u %u)\n",
2725 track->zb.pitch, track->zb.cpp, 3009 track->zb.pitch, track->zb.cpp,
2726 track->zb.offset, track->maxy); 3010 track->zb.offset, track->maxy);
@@ -2728,7 +3012,11 @@ int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track)
2728 } 3012 }
2729 } 3013 }
2730 prim_walk = (track->vap_vf_cntl >> 4) & 0x3; 3014 prim_walk = (track->vap_vf_cntl >> 4) & 0x3;
2731 nverts = (track->vap_vf_cntl >> 16) & 0xFFFF; 3015 if (track->vap_vf_cntl & (1 << 14)) {
3016 nverts = track->vap_alt_nverts;
3017 } else {
3018 nverts = (track->vap_vf_cntl >> 16) & 0xFFFF;
3019 }
2732 switch (prim_walk) { 3020 switch (prim_walk) {
2733 case 1: 3021 case 1:
2734 for (i = 0; i < track->num_arrays; i++) { 3022 for (i = 0; i < track->num_arrays; i++) {
@@ -2738,11 +3026,12 @@ int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track)
2738 "bound\n", prim_walk, i); 3026 "bound\n", prim_walk, i);
2739 return -EINVAL; 3027 return -EINVAL;
2740 } 3028 }
2741 if (size > radeon_object_size(track->arrays[i].robj)) { 3029 if (size > radeon_bo_size(track->arrays[i].robj)) {
2742 DRM_ERROR("(PW %u) Vertex array %u need %lu dwords " 3030 dev_err(rdev->dev, "(PW %u) Vertex array %u "
2743 "have %lu dwords\n", prim_walk, i, 3031 "need %lu dwords have %lu dwords\n",
2744 size >> 2, 3032 prim_walk, i, size >> 2,
2745 radeon_object_size(track->arrays[i].robj) >> 2); 3033 radeon_bo_size(track->arrays[i].robj)
3034 >> 2);
2746 DRM_ERROR("Max indices %u\n", track->max_indx); 3035 DRM_ERROR("Max indices %u\n", track->max_indx);
2747 return -EINVAL; 3036 return -EINVAL;
2748 } 3037 }
@@ -2756,10 +3045,12 @@ int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track)
2756 "bound\n", prim_walk, i); 3045 "bound\n", prim_walk, i);
2757 return -EINVAL; 3046 return -EINVAL;
2758 } 3047 }
2759 if (size > radeon_object_size(track->arrays[i].robj)) { 3048 if (size > radeon_bo_size(track->arrays[i].robj)) {
2760 DRM_ERROR("(PW %u) Vertex array %u need %lu dwords " 3049 dev_err(rdev->dev, "(PW %u) Vertex array %u "
2761 "have %lu dwords\n", prim_walk, i, size >> 2, 3050 "need %lu dwords have %lu dwords\n",
2762 radeon_object_size(track->arrays[i].robj) >> 2); 3051 prim_walk, i, size >> 2,
3052 radeon_bo_size(track->arrays[i].robj)
3053 >> 2);
2763 return -EINVAL; 3054 return -EINVAL;
2764 } 3055 }
2765 } 3056 }
@@ -2821,6 +3112,7 @@ void r100_cs_track_clear(struct radeon_device *rdev, struct r100_cs_track *track
2821 track->arrays[i].esize = 0x7F; 3112 track->arrays[i].esize = 0x7F;
2822 } 3113 }
2823 for (i = 0; i < track->num_texture; i++) { 3114 for (i = 0; i < track->num_texture; i++) {
3115 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
2824 track->textures[i].pitch = 16536; 3116 track->textures[i].pitch = 16536;
2825 track->textures[i].width = 16536; 3117 track->textures[i].width = 16536;
2826 track->textures[i].height = 16536; 3118 track->textures[i].height = 16536;
@@ -3022,10 +3314,9 @@ void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save)
3022void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save) 3314void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save)
3023{ 3315{
3024 /* Update base address for crtc */ 3316 /* Update base address for crtc */
3025 WREG32(R_00023C_DISPLAY_BASE_ADDR, rdev->mc.vram_location); 3317 WREG32(R_00023C_DISPLAY_BASE_ADDR, rdev->mc.vram_start);
3026 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { 3318 if (!(rdev->flags & RADEON_SINGLE_CRTC)) {
3027 WREG32(R_00033C_CRTC2_DISPLAY_BASE_ADDR, 3319 WREG32(R_00033C_CRTC2_DISPLAY_BASE_ADDR, rdev->mc.vram_start);
3028 rdev->mc.vram_location);
3029 } 3320 }
3030 /* Restore CRTC registers */ 3321 /* Restore CRTC registers */
3031 WREG8(R_0003C2_GENMO_WT, save->GENMO_WT); 3322 WREG8(R_0003C2_GENMO_WT, save->GENMO_WT);
@@ -3101,6 +3392,9 @@ static int r100_startup(struct radeon_device *rdev)
3101{ 3392{
3102 int r; 3393 int r;
3103 3394
3395 /* set common regs */
3396 r100_set_common_regs(rdev);
3397 /* program mc */
3104 r100_mc_program(rdev); 3398 r100_mc_program(rdev);
3105 /* Resume clock */ 3399 /* Resume clock */
3106 r100_clock_startup(rdev); 3400 r100_clock_startup(rdev);
@@ -3108,14 +3402,15 @@ static int r100_startup(struct radeon_device *rdev)
3108 r100_gpu_init(rdev); 3402 r100_gpu_init(rdev);
3109 /* Initialize GART (initialize after TTM so we can allocate 3403 /* Initialize GART (initialize after TTM so we can allocate
3110 * memory through TTM but finalize after TTM) */ 3404 * memory through TTM but finalize after TTM) */
3405 r100_enable_bm(rdev);
3111 if (rdev->flags & RADEON_IS_PCI) { 3406 if (rdev->flags & RADEON_IS_PCI) {
3112 r = r100_pci_gart_enable(rdev); 3407 r = r100_pci_gart_enable(rdev);
3113 if (r) 3408 if (r)
3114 return r; 3409 return r;
3115 } 3410 }
3116 /* Enable IRQ */ 3411 /* Enable IRQ */
3117 rdev->irq.sw_int = true;
3118 r100_irq_set(rdev); 3412 r100_irq_set(rdev);
3413 rdev->config.r100.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
3119 /* 1M ring buffer */ 3414 /* 1M ring buffer */
3120 r = r100_cp_init(rdev, 1024 * 1024); 3415 r = r100_cp_init(rdev, 1024 * 1024);
3121 if (r) { 3416 if (r) {
@@ -3150,6 +3445,8 @@ int r100_resume(struct radeon_device *rdev)
3150 radeon_combios_asic_init(rdev->ddev); 3445 radeon_combios_asic_init(rdev->ddev);
3151 /* Resume clock after posting */ 3446 /* Resume clock after posting */
3152 r100_clock_startup(rdev); 3447 r100_clock_startup(rdev);
3448 /* Initialize surface registers */
3449 radeon_surface_init(rdev);
3153 return r100_startup(rdev); 3450 return r100_startup(rdev);
3154} 3451}
3155 3452
@@ -3165,49 +3462,22 @@ int r100_suspend(struct radeon_device *rdev)
3165 3462
3166void r100_fini(struct radeon_device *rdev) 3463void r100_fini(struct radeon_device *rdev)
3167{ 3464{
3168 r100_suspend(rdev); 3465 radeon_pm_fini(rdev);
3169 r100_cp_fini(rdev); 3466 r100_cp_fini(rdev);
3170 r100_wb_fini(rdev); 3467 r100_wb_fini(rdev);
3171 r100_ib_fini(rdev); 3468 r100_ib_fini(rdev);
3172 radeon_gem_fini(rdev); 3469 radeon_gem_fini(rdev);
3173 if (rdev->flags & RADEON_IS_PCI) 3470 if (rdev->flags & RADEON_IS_PCI)
3174 r100_pci_gart_fini(rdev); 3471 r100_pci_gart_fini(rdev);
3472 radeon_agp_fini(rdev);
3175 radeon_irq_kms_fini(rdev); 3473 radeon_irq_kms_fini(rdev);
3176 radeon_fence_driver_fini(rdev); 3474 radeon_fence_driver_fini(rdev);
3177 radeon_object_fini(rdev); 3475 radeon_bo_fini(rdev);
3178 radeon_atombios_fini(rdev); 3476 radeon_atombios_fini(rdev);
3179 kfree(rdev->bios); 3477 kfree(rdev->bios);
3180 rdev->bios = NULL; 3478 rdev->bios = NULL;
3181} 3479}
3182 3480
3183int r100_mc_init(struct radeon_device *rdev)
3184{
3185 int r;
3186 u32 tmp;
3187
3188 /* Setup GPU memory space */
3189 rdev->mc.vram_location = 0xFFFFFFFFUL;
3190 rdev->mc.gtt_location = 0xFFFFFFFFUL;
3191 if (rdev->flags & RADEON_IS_IGP) {
3192 tmp = G_00015C_MC_FB_START(RREG32(R_00015C_NB_TOM));
3193 rdev->mc.vram_location = tmp << 16;
3194 }
3195 if (rdev->flags & RADEON_IS_AGP) {
3196 r = radeon_agp_init(rdev);
3197 if (r) {
3198 printk(KERN_WARNING "[drm] Disabling AGP\n");
3199 rdev->flags &= ~RADEON_IS_AGP;
3200 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
3201 } else {
3202 rdev->mc.gtt_location = rdev->mc.agp_base;
3203 }
3204 }
3205 r = radeon_mc_setup(rdev);
3206 if (r)
3207 return r;
3208 return 0;
3209}
3210
3211int r100_init(struct radeon_device *rdev) 3481int r100_init(struct radeon_device *rdev)
3212{ 3482{
3213 int r; 3483 int r;
@@ -3242,20 +3512,23 @@ int r100_init(struct radeon_device *rdev)
3242 RREG32(R_0007C0_CP_STAT)); 3512 RREG32(R_0007C0_CP_STAT));
3243 } 3513 }
3244 /* check if cards are posted or not */ 3514 /* check if cards are posted or not */
3245 if (!radeon_card_posted(rdev) && rdev->bios) { 3515 if (radeon_boot_test_post_card(rdev) == false)
3246 DRM_INFO("GPU not posted. posting now...\n"); 3516 return -EINVAL;
3247 radeon_combios_asic_init(rdev->ddev);
3248 }
3249 /* Set asic errata */ 3517 /* Set asic errata */
3250 r100_errata(rdev); 3518 r100_errata(rdev);
3251 /* Initialize clocks */ 3519 /* Initialize clocks */
3252 radeon_get_clock_info(rdev->ddev); 3520 radeon_get_clock_info(rdev->ddev);
3253 /* Get vram informations */ 3521 /* Initialize power management */
3254 r100_vram_info(rdev); 3522 radeon_pm_init(rdev);
3255 /* Initialize memory controller (also test AGP) */ 3523 /* initialize AGP */
3256 r = r100_mc_init(rdev); 3524 if (rdev->flags & RADEON_IS_AGP) {
3257 if (r) 3525 r = radeon_agp_init(rdev);
3258 return r; 3526 if (r) {
3527 radeon_agp_disable(rdev);
3528 }
3529 }
3530 /* initialize VRAM */
3531 r100_mc_init(rdev);
3259 /* Fence driver */ 3532 /* Fence driver */
3260 r = radeon_fence_driver_init(rdev); 3533 r = radeon_fence_driver_init(rdev);
3261 if (r) 3534 if (r)
@@ -3264,7 +3537,7 @@ int r100_init(struct radeon_device *rdev)
3264 if (r) 3537 if (r)
3265 return r; 3538 return r;
3266 /* Memory manager */ 3539 /* Memory manager */
3267 r = radeon_object_init(rdev); 3540 r = radeon_bo_init(rdev);
3268 if (r) 3541 if (r)
3269 return r; 3542 return r;
3270 if (rdev->flags & RADEON_IS_PCI) { 3543 if (rdev->flags & RADEON_IS_PCI) {
@@ -3278,13 +3551,12 @@ int r100_init(struct radeon_device *rdev)
 	if (r) {
 		/* Something went wrong with the accel init; stop accel */
 		dev_err(rdev->dev, "Disabling GPU acceleration\n");
-		r100_suspend(rdev);
 		r100_cp_fini(rdev);
 		r100_wb_fini(rdev);
 		r100_ib_fini(rdev);
+		radeon_irq_kms_fini(rdev);
 		if (rdev->flags & RADEON_IS_PCI)
 			r100_pci_gart_fini(rdev);
-		radeon_irq_kms_fini(rdev);
 		rdev->accel_working = false;
 	}
 	return 0;
diff --git a/drivers/gpu/drm/radeon/r100_track.h b/drivers/gpu/drm/radeon/r100_track.h
index 0daf0d76a891..f47cdca1c004 100644
--- a/drivers/gpu/drm/radeon/r100_track.h
+++ b/drivers/gpu/drm/radeon/r100_track.h
@@ -10,26 +10,30 @@
10 * CS functions 10 * CS functions
11 */ 11 */
12struct r100_cs_track_cb { 12struct r100_cs_track_cb {
13 struct radeon_object *robj; 13 struct radeon_bo *robj;
14 unsigned pitch; 14 unsigned pitch;
15 unsigned cpp; 15 unsigned cpp;
16 unsigned offset; 16 unsigned offset;
17}; 17};
18 18
19struct r100_cs_track_array { 19struct r100_cs_track_array {
20 struct radeon_object *robj; 20 struct radeon_bo *robj;
21 unsigned esize; 21 unsigned esize;
22}; 22};
23 23
24struct r100_cs_cube_info { 24struct r100_cs_cube_info {
25 struct radeon_object *robj; 25 struct radeon_bo *robj;
26 unsigned offset; 26 unsigned offset;
27 unsigned width; 27 unsigned width;
28 unsigned height; 28 unsigned height;
29}; 29};
30 30
31#define R100_TRACK_COMP_NONE 0
32#define R100_TRACK_COMP_DXT1 1
33#define R100_TRACK_COMP_DXT35 2
34
31struct r100_cs_track_texture { 35struct r100_cs_track_texture {
32 struct radeon_object *robj; 36 struct radeon_bo *robj;
33 struct r100_cs_cube_info cube_info[5]; /* info for 5 non-primary faces */ 37 struct r100_cs_cube_info cube_info[5]; /* info for 5 non-primary faces */
34 unsigned pitch; 38 unsigned pitch;
35 unsigned width; 39 unsigned width;
@@ -44,6 +48,7 @@ struct r100_cs_track_texture {
44 bool enabled; 48 bool enabled;
45 bool roundup_w; 49 bool roundup_w;
46 bool roundup_h; 50 bool roundup_h;
51 unsigned compress_format;
47}; 52};
48 53
49struct r100_cs_track_limits { 54struct r100_cs_track_limits {
@@ -59,16 +64,19 @@ struct r100_cs_track {
59 unsigned maxy; 64 unsigned maxy;
60 unsigned vtx_size; 65 unsigned vtx_size;
61 unsigned vap_vf_cntl; 66 unsigned vap_vf_cntl;
67 unsigned vap_alt_nverts;
62 unsigned immd_dwords; 68 unsigned immd_dwords;
63 unsigned num_arrays; 69 unsigned num_arrays;
64 unsigned max_indx; 70 unsigned max_indx;
71 unsigned color_channel_mask;
65 struct r100_cs_track_array arrays[11]; 72 struct r100_cs_track_array arrays[11];
66 struct r100_cs_track_cb cb[R300_MAX_CB]; 73 struct r100_cs_track_cb cb[R300_MAX_CB];
67 struct r100_cs_track_cb zb; 74 struct r100_cs_track_cb zb;
68 struct r100_cs_track_texture textures[R300_TRACK_MAX_TEXTURE]; 75 struct r100_cs_track_texture textures[R300_TRACK_MAX_TEXTURE];
69 bool z_enabled; 76 bool z_enabled;
70 bool separate_cube; 77 bool separate_cube;
71 78 bool zb_cb_clear;
79 bool blend_read_enable;
72}; 80};
73 81
74int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track); 82int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track);
diff --git a/drivers/gpu/drm/radeon/r200.c b/drivers/gpu/drm/radeon/r200.c
index eb740fc3549f..85617c311212 100644
--- a/drivers/gpu/drm/radeon/r200.c
+++ b/drivers/gpu/drm/radeon/r200.c
@@ -30,7 +30,9 @@
30#include "radeon_drm.h" 30#include "radeon_drm.h"
31#include "radeon_reg.h" 31#include "radeon_reg.h"
32#include "radeon.h" 32#include "radeon.h"
33#include "radeon_asic.h"
33 34
35#include "r100d.h"
34#include "r200_reg_safe.h" 36#include "r200_reg_safe.h"
35 37
36#include "r100_track.h" 38#include "r100_track.h"
@@ -79,6 +81,51 @@ static int r200_get_vtx_size_0(uint32_t vtx_fmt_0)
79 return vtx_size; 81 return vtx_size;
80} 82}
81 83
84int r200_copy_dma(struct radeon_device *rdev,
85 uint64_t src_offset,
86 uint64_t dst_offset,
87 unsigned num_pages,
88 struct radeon_fence *fence)
89{
90 uint32_t size;
91 uint32_t cur_size;
92 int i, num_loops;
93 int r = 0;
94
95 /* radeon pitch is /64 */
96 size = num_pages << PAGE_SHIFT;
97 num_loops = DIV_ROUND_UP(size, 0x1FFFFF);
98 r = radeon_ring_lock(rdev, num_loops * 4 + 64);
99 if (r) {
100 DRM_ERROR("radeon: moving bo (%d).\n", r);
101 return r;
102 }
103 /* Must wait for 2D idle & clean before DMA or hangs might happen */
104 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
105 radeon_ring_write(rdev, (1 << 16));
106 for (i = 0; i < num_loops; i++) {
107 cur_size = size;
108 if (cur_size > 0x1FFFFF) {
109 cur_size = 0x1FFFFF;
110 }
111 size -= cur_size;
112 radeon_ring_write(rdev, PACKET0(0x720, 2));
113 radeon_ring_write(rdev, src_offset);
114 radeon_ring_write(rdev, dst_offset);
115 radeon_ring_write(rdev, cur_size | (1 << 31) | (1 << 30));
116 src_offset += cur_size;
117 dst_offset += cur_size;
118 }
119 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
120 radeon_ring_write(rdev, RADEON_WAIT_DMA_GUI_IDLE);
121 if (fence) {
122 r = radeon_fence_emit(rdev, fence);
123 }
124 radeon_ring_unlock_commit(rdev);
125 return r;
126}
127
128
82static int r200_get_vtx_size_1(uint32_t vtx_fmt_1) 129static int r200_get_vtx_size_1(uint32_t vtx_fmt_1)
83{ 130{
84 int vtx_size, i, tex_size; 131 int vtx_size, i, tex_size;
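The new r200_copy_dma() above splits a GTT/VRAM copy into DMA packets of at most 0x1FFFFF bytes each, so num_loops = DIV_ROUND_UP(size, 0x1FFFFF). A small standalone check of that chunking math (the R200_DMA_MAX_CHUNK name is illustrative, not from the driver): an 8 MiB move of 2048 pages needs 5 packets, the fifth carrying only the last 4 bytes.

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))
#define R200_DMA_MAX_CHUNK 0x1FFFFFu	/* per-packet byte limit used above */

int main(void)
{
	unsigned int num_pages = 2048;
	unsigned int size = num_pages << 12;	/* assuming 4 KiB pages */
	unsigned int num_loops = DIV_ROUND_UP(size, R200_DMA_MAX_CHUNK);

	printf("size=%u bytes -> %u DMA packets\n", size, num_loops);	/* 8388608 -> 5 */
	return 0;
}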
@@ -371,13 +418,16 @@ int r200_packet0_check(struct radeon_cs_parser *p,
371 case 5: 418 case 5:
372 case 6: 419 case 6:
373 case 7: 420 case 7:
421 /* 1D/2D */
374 track->textures[i].tex_coord_type = 0; 422 track->textures[i].tex_coord_type = 0;
375 break; 423 break;
376 case 1: 424 case 1:
377 track->textures[i].tex_coord_type = 1; 425 /* CUBE */
426 track->textures[i].tex_coord_type = 2;
378 break; 427 break;
379 case 2: 428 case 2:
380 track->textures[i].tex_coord_type = 2; 429 /* 3D */
430 track->textures[i].tex_coord_type = 1;
381 break; 431 break;
382 } 432 }
383 break; 433 break;
@@ -401,7 +451,6 @@ int r200_packet0_check(struct radeon_cs_parser *p,
401 case R200_TXFORMAT_Y8: 451 case R200_TXFORMAT_Y8:
402 track->textures[i].cpp = 1; 452 track->textures[i].cpp = 1;
403 break; 453 break;
404 case R200_TXFORMAT_DXT1:
405 case R200_TXFORMAT_AI88: 454 case R200_TXFORMAT_AI88:
406 case R200_TXFORMAT_ARGB1555: 455 case R200_TXFORMAT_ARGB1555:
407 case R200_TXFORMAT_RGB565: 456 case R200_TXFORMAT_RGB565:
@@ -418,9 +467,16 @@ int r200_packet0_check(struct radeon_cs_parser *p,
418 case R200_TXFORMAT_ABGR8888: 467 case R200_TXFORMAT_ABGR8888:
419 case R200_TXFORMAT_BGR111110: 468 case R200_TXFORMAT_BGR111110:
420 case R200_TXFORMAT_LDVDU8888: 469 case R200_TXFORMAT_LDVDU8888:
470 track->textures[i].cpp = 4;
471 break;
472 case R200_TXFORMAT_DXT1:
473 track->textures[i].cpp = 1;
474 track->textures[i].compress_format = R100_TRACK_COMP_DXT1;
475 break;
421 case R200_TXFORMAT_DXT23: 476 case R200_TXFORMAT_DXT23:
422 case R200_TXFORMAT_DXT45: 477 case R200_TXFORMAT_DXT45:
423 track->textures[i].cpp = 4; 478 track->textures[i].cpp = 1;
479 track->textures[i].compress_format = R100_TRACK_COMP_DXT1;
424 break; 480 break;
425 } 481 }
426 track->textures[i].cube_info[4].width = 1 << ((idx_value >> 16) & 0xf); 482 track->textures[i].cube_info[4].width = 1 << ((idx_value >> 16) & 0xf);
diff --git a/drivers/gpu/drm/radeon/r300.c b/drivers/gpu/drm/radeon/r300.c
index 2f43ee8e4048..a5ff8076b423 100644
--- a/drivers/gpu/drm/radeon/r300.c
+++ b/drivers/gpu/drm/radeon/r300.c
@@ -26,17 +26,27 @@
  * Jerome Glisse
  */
 #include <linux/seq_file.h>
+#include <linux/slab.h>
 #include "drmP.h"
 #include "drm.h"
 #include "radeon_reg.h"
 #include "radeon.h"
+#include "radeon_asic.h"
 #include "radeon_drm.h"
 #include "r100_track.h"
 #include "r300d.h"
 #include "rv350d.h"
 #include "r300_reg_safe.h"
 
-/* This files gather functions specifics to: r300,r350,rv350,rv370,rv380 */
+/* This file gathers functions specific to: r300,r350,rv350,rv370,rv380
+ *
+ * GPU Errata:
+ * - HOST_PATH_CNTL: the r300 family seems to dislike writes to HOST_PATH_CNTL
+ *   via MMIO to flush the host path read cache; this leads to a HARDLOCKUP.
+ *   However, scheduling such a write on the ring seems harmless; I suspect
+ *   the CP read collides with the flush somehow, or maybe the MC, hard to
+ *   tell. (Jerome Glisse)
+ */
 
 /*
  * rv370,rv380 PCIE GART
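The errata note above is why the HDP read-cache flush is emitted as CP ring packets instead of an MMIO write. The sketch below mirrors the r100_fence_ring_emit() change earlier in this diff; the rdev->config.r300.hdp_cntl field is assumed to parallel the r100 one shown there, so treat this as an illustration of the workaround rather than the exact r300 code.

/* Illustrative: flush the HDP read cache from the ring, not via MMIO. */
static void example_hdp_flush_via_ring(struct radeon_device *rdev)
{
	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl |
				RADEON_HDP_READ_BUFFER_INVALIDATE);
	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl);
}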
@@ -109,18 +119,19 @@ int rv370_pcie_gart_enable(struct radeon_device *rdev)
109 r = radeon_gart_table_vram_pin(rdev); 119 r = radeon_gart_table_vram_pin(rdev);
110 if (r) 120 if (r)
111 return r; 121 return r;
122 radeon_gart_restore(rdev);
112 /* discard memory request outside of configured range */ 123 /* discard memory request outside of configured range */
113 tmp = RADEON_PCIE_TX_GART_UNMAPPED_ACCESS_DISCARD; 124 tmp = RADEON_PCIE_TX_GART_UNMAPPED_ACCESS_DISCARD;
114 WREG32_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp); 125 WREG32_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp);
115 WREG32_PCIE(RADEON_PCIE_TX_GART_START_LO, rdev->mc.gtt_location); 126 WREG32_PCIE(RADEON_PCIE_TX_GART_START_LO, rdev->mc.gtt_start);
116 tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - RADEON_GPU_PAGE_SIZE; 127 tmp = rdev->mc.gtt_end & ~RADEON_GPU_PAGE_MASK;
117 WREG32_PCIE(RADEON_PCIE_TX_GART_END_LO, tmp); 128 WREG32_PCIE(RADEON_PCIE_TX_GART_END_LO, tmp);
118 WREG32_PCIE(RADEON_PCIE_TX_GART_START_HI, 0); 129 WREG32_PCIE(RADEON_PCIE_TX_GART_START_HI, 0);
119 WREG32_PCIE(RADEON_PCIE_TX_GART_END_HI, 0); 130 WREG32_PCIE(RADEON_PCIE_TX_GART_END_HI, 0);
120 table_addr = rdev->gart.table_addr; 131 table_addr = rdev->gart.table_addr;
121 WREG32_PCIE(RADEON_PCIE_TX_GART_BASE, table_addr); 132 WREG32_PCIE(RADEON_PCIE_TX_GART_BASE, table_addr);
122 /* FIXME: setup default page */ 133 /* FIXME: setup default page */
123 WREG32_PCIE(RADEON_PCIE_TX_DISCARD_RD_ADDR_LO, rdev->mc.vram_location); 134 WREG32_PCIE(RADEON_PCIE_TX_DISCARD_RD_ADDR_LO, rdev->mc.vram_start);
124 WREG32_PCIE(RADEON_PCIE_TX_DISCARD_RD_ADDR_HI, 0); 135 WREG32_PCIE(RADEON_PCIE_TX_DISCARD_RD_ADDR_HI, 0);
125 /* Clear error */ 136 /* Clear error */
126 WREG32_PCIE(0x18, 0); 137 WREG32_PCIE(0x18, 0);
@@ -137,22 +148,27 @@ int rv370_pcie_gart_enable(struct radeon_device *rdev)
137 148
138void rv370_pcie_gart_disable(struct radeon_device *rdev) 149void rv370_pcie_gart_disable(struct radeon_device *rdev)
139{ 150{
140 uint32_t tmp; 151 u32 tmp;
152 int r;
141 153
142 tmp = RREG32_PCIE(RADEON_PCIE_TX_GART_CNTL); 154 tmp = RREG32_PCIE(RADEON_PCIE_TX_GART_CNTL);
143 tmp |= RADEON_PCIE_TX_GART_UNMAPPED_ACCESS_DISCARD; 155 tmp |= RADEON_PCIE_TX_GART_UNMAPPED_ACCESS_DISCARD;
144 WREG32_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp & ~RADEON_PCIE_TX_GART_EN); 156 WREG32_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp & ~RADEON_PCIE_TX_GART_EN);
145 if (rdev->gart.table.vram.robj) { 157 if (rdev->gart.table.vram.robj) {
146 radeon_object_kunmap(rdev->gart.table.vram.robj); 158 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
147 radeon_object_unpin(rdev->gart.table.vram.robj); 159 if (likely(r == 0)) {
160 radeon_bo_kunmap(rdev->gart.table.vram.robj);
161 radeon_bo_unpin(rdev->gart.table.vram.robj);
162 radeon_bo_unreserve(rdev->gart.table.vram.robj);
163 }
148 } 164 }
149} 165}
150 166
151void rv370_pcie_gart_fini(struct radeon_device *rdev) 167void rv370_pcie_gart_fini(struct radeon_device *rdev)
152{ 168{
169 radeon_gart_fini(rdev);
153 rv370_pcie_gart_disable(rdev); 170 rv370_pcie_gart_disable(rdev);
154 radeon_gart_table_vram_free(rdev); 171 radeon_gart_table_vram_free(rdev);
155 radeon_gart_fini(rdev);
156} 172}
157 173
158void r300_fence_ring_emit(struct radeon_device *rdev, 174void r300_fence_ring_emit(struct radeon_device *rdev,
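The GART teardown above moves from the old radeon_object_* helpers to the radeon_bo_* API, which requires reserving a buffer object before its mapping and pin state may be touched. A minimal sketch of that pattern, assuming bo is a pinned, kernel-mapped struct radeon_bo (error handling beyond the reserve check is omitted):

	int r;

	r = radeon_bo_reserve(bo, false);   /* reserve the BO before touching it */
	if (likely(r == 0)) {
		radeon_bo_kunmap(bo);       /* drop the kernel CPU mapping */
		radeon_bo_unpin(bo);        /* allow the BO to be moved/evicted again */
		radeon_bo_unreserve(bo);    /* release the reservation */
	}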
@@ -161,18 +177,25 @@ void r300_fence_ring_emit(struct radeon_device *rdev,
161 /* Whoever calls radeon_fence_emit should call ring_lock and ask 177 /* Whoever calls radeon_fence_emit should call ring_lock and ask
162 * for enough space (today callers are ib schedule and buffer move) */ 178 * for enough space (today callers are ib schedule and buffer move) */
163 /* Write SC register so SC & US assert idle */ 179 /* Write SC register so SC & US assert idle */
164 radeon_ring_write(rdev, PACKET0(0x43E0, 0)); 180 radeon_ring_write(rdev, PACKET0(R300_RE_SCISSORS_TL, 0));
165 radeon_ring_write(rdev, 0); 181 radeon_ring_write(rdev, 0);
166 radeon_ring_write(rdev, PACKET0(0x43E4, 0)); 182 radeon_ring_write(rdev, PACKET0(R300_RE_SCISSORS_BR, 0));
167 radeon_ring_write(rdev, 0); 183 radeon_ring_write(rdev, 0);
168 /* Flush 3D cache */ 184 /* Flush 3D cache */
169 radeon_ring_write(rdev, PACKET0(0x4E4C, 0)); 185 radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
170 radeon_ring_write(rdev, (2 << 0)); 186 radeon_ring_write(rdev, R300_RB3D_DC_FLUSH);
171 radeon_ring_write(rdev, PACKET0(0x4F18, 0)); 187 radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
172 radeon_ring_write(rdev, (1 << 0)); 188 radeon_ring_write(rdev, R300_ZC_FLUSH);
173 /* Wait until IDLE & CLEAN */ 189 /* Wait until IDLE & CLEAN */
174 radeon_ring_write(rdev, PACKET0(0x1720, 0)); 190 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
175 radeon_ring_write(rdev, (1 << 17) | (1 << 16) | (1 << 9)); 191 radeon_ring_write(rdev, (RADEON_WAIT_3D_IDLECLEAN |
192 RADEON_WAIT_2D_IDLECLEAN |
193 RADEON_WAIT_DMA_GUI_IDLE));
194 radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
195 radeon_ring_write(rdev, rdev->config.r300.hdp_cntl |
196 RADEON_HDP_READ_BUFFER_INVALIDATE);
197 radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
198 radeon_ring_write(rdev, rdev->config.r300.hdp_cntl);
176 /* Emit fence sequence & fire IRQ */ 199 /* Emit fence sequence & fire IRQ */
177 radeon_ring_write(rdev, PACKET0(rdev->fence_drv.scratch_reg, 0)); 200 radeon_ring_write(rdev, PACKET0(rdev->fence_drv.scratch_reg, 0));
178 radeon_ring_write(rdev, fence->seq); 201 radeon_ring_write(rdev, fence->seq);
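The two extra HOST_PATH_CNTL writes emitted here implement the errata noted at the top of this file: the HDP read-buffer invalidate is scheduled on the CP ring instead of being written through MMIO, and the saved hdp_cntl value (cached in r300_startup later in this diff) is restored afterwards. If another path ever needed the same flush, it could be factored into a small helper; a hypothetical sketch (r300_hdp_flush_on_ring is not part of the driver, and the caller is assumed to already hold ring space):

static void r300_hdp_flush_on_ring(struct radeon_device *rdev)
{
	/* toggle HDP_READ_BUFFER_INVALIDATE on, then restore the cached value;
	 * doing this via MMIO can hard-lock r300-family chips */
	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl |
			  RADEON_HDP_READ_BUFFER_INVALIDATE);
	radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
	radeon_ring_write(rdev, rdev->config.r300.hdp_cntl);
}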
@@ -180,50 +203,6 @@ void r300_fence_ring_emit(struct radeon_device *rdev,
180 radeon_ring_write(rdev, RADEON_SW_INT_FIRE); 203 radeon_ring_write(rdev, RADEON_SW_INT_FIRE);
181} 204}
182 205
183int r300_copy_dma(struct radeon_device *rdev,
184 uint64_t src_offset,
185 uint64_t dst_offset,
186 unsigned num_pages,
187 struct radeon_fence *fence)
188{
189 uint32_t size;
190 uint32_t cur_size;
191 int i, num_loops;
192 int r = 0;
193
194 /* radeon pitch is /64 */
195 size = num_pages << PAGE_SHIFT;
196 num_loops = DIV_ROUND_UP(size, 0x1FFFFF);
197 r = radeon_ring_lock(rdev, num_loops * 4 + 64);
198 if (r) {
199 DRM_ERROR("radeon: moving bo (%d).\n", r);
200 return r;
201 }
202 /* Must wait for 2D idle & clean before DMA or hangs might happen */
203 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0 ));
204 radeon_ring_write(rdev, (1 << 16));
205 for (i = 0; i < num_loops; i++) {
206 cur_size = size;
207 if (cur_size > 0x1FFFFF) {
208 cur_size = 0x1FFFFF;
209 }
210 size -= cur_size;
211 radeon_ring_write(rdev, PACKET0(0x720, 2));
212 radeon_ring_write(rdev, src_offset);
213 radeon_ring_write(rdev, dst_offset);
214 radeon_ring_write(rdev, cur_size | (1 << 31) | (1 << 30));
215 src_offset += cur_size;
216 dst_offset += cur_size;
217 }
218 radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
219 radeon_ring_write(rdev, RADEON_WAIT_DMA_GUI_IDLE);
220 if (fence) {
221 r = radeon_fence_emit(rdev, fence);
222 }
223 radeon_ring_unlock_commit(rdev);
224 return r;
225}
226
227void r300_ring_start(struct radeon_device *rdev) 206void r300_ring_start(struct radeon_device *rdev)
228{ 207{
229 unsigned gb_tile_config; 208 unsigned gb_tile_config;
@@ -263,8 +242,8 @@ void r300_ring_start(struct radeon_device *rdev)
263 radeon_ring_write(rdev, 242 radeon_ring_write(rdev,
264 RADEON_WAIT_2D_IDLECLEAN | 243 RADEON_WAIT_2D_IDLECLEAN |
265 RADEON_WAIT_3D_IDLECLEAN); 244 RADEON_WAIT_3D_IDLECLEAN);
266 radeon_ring_write(rdev, PACKET0(0x170C, 0)); 245 radeon_ring_write(rdev, PACKET0(R300_DST_PIPE_CONFIG, 0));
267 radeon_ring_write(rdev, 1 << 31); 246 radeon_ring_write(rdev, R300_PIPE_AUTO_CONFIG);
268 radeon_ring_write(rdev, PACKET0(R300_GB_SELECT, 0)); 247 radeon_ring_write(rdev, PACKET0(R300_GB_SELECT, 0));
269 radeon_ring_write(rdev, 0); 248 radeon_ring_write(rdev, 0);
270 radeon_ring_write(rdev, PACKET0(R300_GB_ENABLE, 0)); 249 radeon_ring_write(rdev, PACKET0(R300_GB_ENABLE, 0));
@@ -331,8 +310,8 @@ int r300_mc_wait_for_idle(struct radeon_device *rdev)
331 310
332 for (i = 0; i < rdev->usec_timeout; i++) { 311 for (i = 0; i < rdev->usec_timeout; i++) {
333 /* read MC_STATUS */ 312 /* read MC_STATUS */
334 tmp = RREG32(0x0150); 313 tmp = RREG32(RADEON_MC_STATUS);
335 if (tmp & (1 << 4)) { 314 if (tmp & R300_MC_IDLE) {
336 return 0; 315 return 0;
337 } 316 }
338 DRM_UDELAY(1); 317 DRM_UDELAY(1);
@@ -345,12 +324,12 @@ void r300_gpu_init(struct radeon_device *rdev)
345 uint32_t gb_tile_config, tmp; 324 uint32_t gb_tile_config, tmp;
346 325
347 r100_hdp_reset(rdev); 326 r100_hdp_reset(rdev);
348 /* FIXME: rv380 one pipes ? */ 327 if ((rdev->family == CHIP_R300 && rdev->pdev->device != 0x4144) ||
349 if ((rdev->family == CHIP_R300) || (rdev->family == CHIP_R350)) { 328 (rdev->family == CHIP_R350 && rdev->pdev->device != 0x4148)) {
350 /* r300,r350 */ 329 /* r300,r350 */
351 rdev->num_gb_pipes = 2; 330 rdev->num_gb_pipes = 2;
352 } else { 331 } else {
353 /* rv350,rv370,rv380 */ 332 /* rv350,rv370,rv380,r300 AD, r350 AH */
354 rdev->num_gb_pipes = 1; 333 rdev->num_gb_pipes = 1;
355 } 334 }
356 rdev->num_z_pipes = 1; 335 rdev->num_z_pipes = 1;
@@ -377,8 +356,8 @@ void r300_gpu_init(struct radeon_device *rdev)
377 "programming pipes. Bad things might happen.\n"); 356 "programming pipes. Bad things might happen.\n");
378 } 357 }
379 358
380 tmp = RREG32(0x170C); 359 tmp = RREG32(R300_DST_PIPE_CONFIG);
381 WREG32(0x170C, tmp | (1 << 31)); 360 WREG32(R300_DST_PIPE_CONFIG, tmp | R300_PIPE_AUTO_CONFIG);
382 361
383 WREG32(R300_RB2D_DSTCACHE_MODE, 362 WREG32(R300_RB2D_DSTCACHE_MODE,
384 R300_DC_AUTOFLUSH_ENABLE | 363 R300_DC_AUTOFLUSH_ENABLE |
@@ -419,8 +398,8 @@ int r300_ga_reset(struct radeon_device *rdev)
419 /* GA still busy soft reset it */ 398 /* GA still busy soft reset it */
420 WREG32(0x429C, 0x200); 399 WREG32(0x429C, 0x200);
421 WREG32(R300_VAP_PVS_STATE_FLUSH_REG, 0); 400 WREG32(R300_VAP_PVS_STATE_FLUSH_REG, 0);
422 WREG32(0x43E0, 0); 401 WREG32(R300_RE_SCISSORS_TL, 0);
423 WREG32(0x43E4, 0); 402 WREG32(R300_RE_SCISSORS_BR, 0);
424 WREG32(0x24AC, 0); 403 WREG32(0x24AC, 0);
425 } 404 }
426 /* Wait to prevent race in RBBM_STATUS */ 405 /* Wait to prevent race in RBBM_STATUS */
@@ -470,7 +449,7 @@ int r300_gpu_reset(struct radeon_device *rdev)
470 } 449 }
471 /* Check if GPU is idle */ 450 /* Check if GPU is idle */
472 status = RREG32(RADEON_RBBM_STATUS); 451 status = RREG32(RADEON_RBBM_STATUS);
473 if (status & (1 << 31)) { 452 if (status & RADEON_RBBM_ACTIVE) {
474 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status); 453 DRM_ERROR("Failed to reset GPU (RBBM_STATUS=0x%08X)\n", status);
475 return -1; 454 return -1;
476 } 455 }
@@ -482,20 +461,29 @@ int r300_gpu_reset(struct radeon_device *rdev)
482/* 461/*
483 * r300,r350,rv350,rv380 VRAM info 462 * r300,r350,rv350,rv380 VRAM info
484 */ 463 */
485void r300_vram_info(struct radeon_device *rdev) 464void r300_mc_init(struct radeon_device *rdev)
486{ 465{
487 uint32_t tmp; 466 u64 base;
467 u32 tmp;
488 468
489 /* DDR for all card after R300 & IGP */ 469 /* DDR for all card after R300 & IGP */
490 rdev->mc.vram_is_ddr = true; 470 rdev->mc.vram_is_ddr = true;
491 tmp = RREG32(RADEON_MEM_CNTL); 471 tmp = RREG32(RADEON_MEM_CNTL);
492 if (tmp & R300_MEM_NUM_CHANNELS_MASK) { 472 tmp &= R300_MEM_NUM_CHANNELS_MASK;
493 rdev->mc.vram_width = 128; 473 switch (tmp) {
494 } else { 474 case 0: rdev->mc.vram_width = 64; break;
495 rdev->mc.vram_width = 64; 475 case 1: rdev->mc.vram_width = 128; break;
476 case 2: rdev->mc.vram_width = 256; break;
477 default: rdev->mc.vram_width = 128; break;
496 } 478 }
497
498 r100_vram_init_sizes(rdev); 479 r100_vram_init_sizes(rdev);
480 base = rdev->mc.aper_base;
481 if (rdev->flags & RADEON_IS_IGP)
482 base = (RREG32(RADEON_NB_TOM) & 0xffff) << 16;
483 radeon_vram_location(rdev, &rdev->mc, base);
484 if (!(rdev->flags & RADEON_IS_AGP))
485 radeon_gtt_location(rdev, &rdev->mc);
486 radeon_update_bandwidth_info(rdev);
499} 487}
500 488
501void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes) 489void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes)
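r300_mc_init() now decodes the memory channel field into a bus width instead of the old two-way 64/128-bit check. The same decode can be expressed as a lookup table; a sketch equivalent to the switch above (the driver itself keeps the switch):

	static const u16 mem_width[] = { 64, 128, 256 };   /* width for field values 0, 1, 2 */
	u32 chan = RREG32(RADEON_MEM_CNTL) & R300_MEM_NUM_CHANNELS_MASK;

	rdev->mc.vram_width = (chan < ARRAY_SIZE(mem_width)) ? mem_width[chan] : 128;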
@@ -557,6 +545,40 @@ void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes)
557 545
558} 546}
559 547
548int rv370_get_pcie_lanes(struct radeon_device *rdev)
549{
550 u32 link_width_cntl;
551
552 if (rdev->flags & RADEON_IS_IGP)
553 return 0;
554
555 if (!(rdev->flags & RADEON_IS_PCIE))
556 return 0;
557
558 /* FIXME wait for idle */
559
560 if (rdev->family < CHIP_R600)
561 link_width_cntl = RREG32_PCIE(RADEON_PCIE_LC_LINK_WIDTH_CNTL);
562 else
563 link_width_cntl = RREG32_PCIE_P(RADEON_PCIE_LC_LINK_WIDTH_CNTL);
564
565 switch ((link_width_cntl & RADEON_PCIE_LC_LINK_WIDTH_RD_MASK) >> RADEON_PCIE_LC_LINK_WIDTH_RD_SHIFT) {
566 case RADEON_PCIE_LC_LINK_WIDTH_X0:
567 return 0;
568 case RADEON_PCIE_LC_LINK_WIDTH_X1:
569 return 1;
570 case RADEON_PCIE_LC_LINK_WIDTH_X2:
571 return 2;
572 case RADEON_PCIE_LC_LINK_WIDTH_X4:
573 return 4;
574 case RADEON_PCIE_LC_LINK_WIDTH_X8:
575 return 8;
576 case RADEON_PCIE_LC_LINK_WIDTH_X16:
577 default:
578 return 16;
579 }
580}
581
560#if defined(CONFIG_DEBUG_FS) 582#if defined(CONFIG_DEBUG_FS)
561static int rv370_debugfs_pcie_gart_info(struct seq_file *m, void *data) 583static int rv370_debugfs_pcie_gart_info(struct seq_file *m, void *data)
562{ 584{
@@ -681,7 +703,17 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
681 r100_cs_dump_packet(p, pkt); 703 r100_cs_dump_packet(p, pkt);
682 return r; 704 return r;
683 } 705 }
684 ib[idx] = idx_value + ((u32)reloc->lobj.gpu_offset); 706
707 if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO)
708 tile_flags |= R300_TXO_MACRO_TILE;
709 if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO)
710 tile_flags |= R300_TXO_MICRO_TILE;
711 else if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO_SQUARE)
712 tile_flags |= R300_TXO_MICRO_TILE_SQUARE;
713
714 tmp = idx_value + ((u32)reloc->lobj.gpu_offset);
715 tmp |= tile_flags;
716 ib[idx] = tmp;
685 track->textures[i].robj = reloc->robj; 717 track->textures[i].robj = reloc->robj;
686 break; 718 break;
687 /* Tracked registers */ 719 /* Tracked registers */
@@ -697,6 +729,12 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
697 /* VAP_VF_MAX_VTX_INDX */ 729 /* VAP_VF_MAX_VTX_INDX */
698 track->max_indx = idx_value & 0x00FFFFFFUL; 730 track->max_indx = idx_value & 0x00FFFFFFUL;
699 break; 731 break;
732 case 0x2088:
733 /* VAP_ALT_NUM_VERTICES - only valid on r500 */
734 if (p->rdev->family < CHIP_RV515)
735 goto fail;
736 track->vap_alt_nverts = idx_value & 0xFFFFFF;
737 break;
700 case 0x43E4: 738 case 0x43E4:
701 /* SC_SCISSOR1 */ 739 /* SC_SCISSOR1 */
702 track->maxy = ((idx_value >> 13) & 0x1FFF) + 1; 740 track->maxy = ((idx_value >> 13) & 0x1FFF) + 1;
@@ -728,11 +766,12 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
728 tile_flags |= R300_COLOR_TILE_ENABLE; 766 tile_flags |= R300_COLOR_TILE_ENABLE;
729 if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO) 767 if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO)
730 tile_flags |= R300_COLOR_MICROTILE_ENABLE; 768 tile_flags |= R300_COLOR_MICROTILE_ENABLE;
769 else if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO_SQUARE)
770 tile_flags |= R300_COLOR_MICROTILE_SQUARE_ENABLE;
731 771
732 tmp = idx_value & ~(0x7 << 16); 772 tmp = idx_value & ~(0x7 << 16);
733 tmp |= tile_flags; 773 tmp |= tile_flags;
734 ib[idx] = tmp; 774 ib[idx] = tmp;
735
736 i = (reg - 0x4E38) >> 2; 775 i = (reg - 0x4E38) >> 2;
737 track->cb[i].pitch = idx_value & 0x3FFE; 776 track->cb[i].pitch = idx_value & 0x3FFE;
738 switch (((idx_value >> 21) & 0xF)) { 777 switch (((idx_value >> 21) & 0xF)) {
@@ -799,7 +838,9 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
799 if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO) 838 if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO)
800 tile_flags |= R300_DEPTHMACROTILE_ENABLE; 839 tile_flags |= R300_DEPTHMACROTILE_ENABLE;
801 if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO) 840 if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO)
802 tile_flags |= R300_DEPTHMICROTILE_TILED;; 841 tile_flags |= R300_DEPTHMICROTILE_TILED;
842 else if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO_SQUARE)
843 tile_flags |= R300_DEPTHMICROTILE_TILED_SQUARE;
803 844
804 tmp = idx_value & ~(0x7 << 16); 845 tmp = idx_value & ~(0x7 << 16);
805 tmp |= tile_flags; 846 tmp |= tile_flags;
@@ -847,7 +888,6 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
847 case R300_TX_FORMAT_Z6Y5X5: 888 case R300_TX_FORMAT_Z6Y5X5:
848 case R300_TX_FORMAT_W4Z4Y4X4: 889 case R300_TX_FORMAT_W4Z4Y4X4:
849 case R300_TX_FORMAT_W1Z5Y5X5: 890 case R300_TX_FORMAT_W1Z5Y5X5:
850 case R300_TX_FORMAT_DXT1:
851 case R300_TX_FORMAT_D3DMFT_CxV8U8: 891 case R300_TX_FORMAT_D3DMFT_CxV8U8:
852 case R300_TX_FORMAT_B8G8_B8G8: 892 case R300_TX_FORMAT_B8G8_B8G8:
853 case R300_TX_FORMAT_G8R8_G8B8: 893 case R300_TX_FORMAT_G8R8_G8B8:
@@ -861,8 +901,6 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
861 case 0x17: 901 case 0x17:
862 case R300_TX_FORMAT_FL_I32: 902 case R300_TX_FORMAT_FL_I32:
863 case 0x1e: 903 case 0x1e:
864 case R300_TX_FORMAT_DXT3:
865 case R300_TX_FORMAT_DXT5:
866 track->textures[i].cpp = 4; 904 track->textures[i].cpp = 4;
867 break; 905 break;
868 case R300_TX_FORMAT_W16Z16Y16X16: 906 case R300_TX_FORMAT_W16Z16Y16X16:
@@ -873,6 +911,23 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
873 case R300_TX_FORMAT_FL_R32G32B32A32: 911 case R300_TX_FORMAT_FL_R32G32B32A32:
874 track->textures[i].cpp = 16; 912 track->textures[i].cpp = 16;
875 break; 913 break;
914 case R300_TX_FORMAT_DXT1:
915 track->textures[i].cpp = 1;
916 track->textures[i].compress_format = R100_TRACK_COMP_DXT1;
917 break;
918 case R300_TX_FORMAT_ATI2N:
919 if (p->rdev->family < CHIP_R420) {
920 DRM_ERROR("Invalid texture format %u\n",
921 (idx_value & 0x1F));
922 return -EINVAL;
923 }
924 /* The same rules apply as for DXT3/5. */
925 /* Pass through. */
926 case R300_TX_FORMAT_DXT3:
927 case R300_TX_FORMAT_DXT5:
928 track->textures[i].cpp = 1;
929 track->textures[i].compress_format = R100_TRACK_COMP_DXT35;
930 break;
876 default: 931 default:
877 DRM_ERROR("Invalid texture format %u\n", 932 DRM_ERROR("Invalid texture format %u\n",
878 (idx_value & 0x1F)); 933 (idx_value & 0x1F));
@@ -932,6 +987,16 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
932 track->textures[i].width_11 = tmp; 987 track->textures[i].width_11 = tmp;
933 tmp = ((idx_value >> 16) & 1) << 11; 988 tmp = ((idx_value >> 16) & 1) << 11;
934 track->textures[i].height_11 = tmp; 989 track->textures[i].height_11 = tmp;
990
991 /* ATI1N */
992 if (idx_value & (1 << 14)) {
993 /* The same rules apply as for DXT1. */
994 track->textures[i].compress_format =
995 R100_TRACK_COMP_DXT1;
996 }
997 } else if (idx_value & (1 << 14)) {
998 DRM_ERROR("Forbidden bit TXFORMAT_MSB\n");
999 return -EINVAL;
935 } 1000 }
936 break; 1001 break;
937 case 0x4480: 1002 case 0x4480:
@@ -973,17 +1038,31 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
973 } 1038 }
974 ib[idx] = idx_value + ((u32)reloc->lobj.gpu_offset); 1039 ib[idx] = idx_value + ((u32)reloc->lobj.gpu_offset);
975 break; 1040 break;
1041 case 0x4e0c:
1042 /* RB3D_COLOR_CHANNEL_MASK */
1043 track->color_channel_mask = idx_value;
1044 break;
1045 case 0x4d1c:
1046 /* ZB_BW_CNTL */
1047 track->zb_cb_clear = !!(idx_value & (1 << 5));
1048 break;
1049 case 0x4e04:
1050 /* RB3D_BLENDCNTL */
1051 track->blend_read_enable = !!(idx_value & (1 << 2));
1052 break;
976 case 0x4be8: 1053 case 0x4be8:
977 /* valid register only on RV530 */ 1054 /* valid register only on RV530 */
978 if (p->rdev->family == CHIP_RV530) 1055 if (p->rdev->family == CHIP_RV530)
979 break; 1056 break;
980 /* fallthrough do not move */ 1057 /* fallthrough do not move */
981 default: 1058 default:
982 printk(KERN_ERR "Forbidden register 0x%04X in cs at %d\n", 1059 goto fail;
983 reg, idx);
984 return -EINVAL;
985 } 1060 }
986 return 0; 1061 return 0;
1062fail:
1063 printk(KERN_ERR "Forbidden register 0x%04X in cs at %d\n",
1064 reg, idx);
1065 return -EINVAL;
987} 1066}
988 1067
989static int r300_packet3_check(struct radeon_cs_parser *p, 1068static int r300_packet3_check(struct radeon_cs_parser *p,
@@ -1181,6 +1260,9 @@ static int r300_startup(struct radeon_device *rdev)
1181{ 1260{
1182 int r; 1261 int r;
1183 1262
1263 /* set common regs */
1264 r100_set_common_regs(rdev);
1265 /* program mc */
1184 r300_mc_program(rdev); 1266 r300_mc_program(rdev);
1185 /* Resume clock */ 1267 /* Resume clock */
1186 r300_clock_startup(rdev); 1268 r300_clock_startup(rdev);
@@ -1193,14 +1275,20 @@ static int r300_startup(struct radeon_device *rdev)
1193 if (r) 1275 if (r)
1194 return r; 1276 return r;
1195 } 1277 }
1278
1279 if (rdev->family == CHIP_R300 ||
1280 rdev->family == CHIP_R350 ||
1281 rdev->family == CHIP_RV350)
1282 r100_enable_bm(rdev);
1283
1196 if (rdev->flags & RADEON_IS_PCI) { 1284 if (rdev->flags & RADEON_IS_PCI) {
1197 r = r100_pci_gart_enable(rdev); 1285 r = r100_pci_gart_enable(rdev);
1198 if (r) 1286 if (r)
1199 return r; 1287 return r;
1200 } 1288 }
1201 /* Enable IRQ */ 1289 /* Enable IRQ */
1202 rdev->irq.sw_int = true;
1203 r100_irq_set(rdev); 1290 r100_irq_set(rdev);
1291 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
1204 /* 1M ring buffer */ 1292 /* 1M ring buffer */
1205 r = r100_cp_init(rdev, 1024 * 1024); 1293 r = r100_cp_init(rdev, 1024 * 1024);
1206 if (r) { 1294 if (r) {
@@ -1237,6 +1325,8 @@ int r300_resume(struct radeon_device *rdev)
1237 radeon_combios_asic_init(rdev->ddev); 1325 radeon_combios_asic_init(rdev->ddev);
1238 /* Resume clock after posting */ 1326 /* Resume clock after posting */
1239 r300_clock_startup(rdev); 1327 r300_clock_startup(rdev);
1328 /* Initialize surface registers */
1329 radeon_surface_init(rdev);
1240 return r300_startup(rdev); 1330 return r300_startup(rdev);
1241} 1331}
1242 1332
@@ -1254,7 +1344,7 @@ int r300_suspend(struct radeon_device *rdev)
1254 1344
1255void r300_fini(struct radeon_device *rdev) 1345void r300_fini(struct radeon_device *rdev)
1256{ 1346{
1257 r300_suspend(rdev); 1347 radeon_pm_fini(rdev);
1258 r100_cp_fini(rdev); 1348 r100_cp_fini(rdev);
1259 r100_wb_fini(rdev); 1349 r100_wb_fini(rdev);
1260 r100_ib_fini(rdev); 1350 r100_ib_fini(rdev);
@@ -1263,9 +1353,10 @@ void r300_fini(struct radeon_device *rdev)
1263 rv370_pcie_gart_fini(rdev); 1353 rv370_pcie_gart_fini(rdev);
1264 if (rdev->flags & RADEON_IS_PCI) 1354 if (rdev->flags & RADEON_IS_PCI)
1265 r100_pci_gart_fini(rdev); 1355 r100_pci_gart_fini(rdev);
1356 radeon_agp_fini(rdev);
1266 radeon_irq_kms_fini(rdev); 1357 radeon_irq_kms_fini(rdev);
1267 radeon_fence_driver_fini(rdev); 1358 radeon_fence_driver_fini(rdev);
1268 radeon_object_fini(rdev); 1359 radeon_bo_fini(rdev);
1269 radeon_atombios_fini(rdev); 1360 radeon_atombios_fini(rdev);
1270 kfree(rdev->bios); 1361 kfree(rdev->bios);
1271 rdev->bios = NULL; 1362 rdev->bios = NULL;
@@ -1303,20 +1394,23 @@ int r300_init(struct radeon_device *rdev)
1303 RREG32(R_0007C0_CP_STAT)); 1394 RREG32(R_0007C0_CP_STAT));
1304 } 1395 }
1305 /* check if cards are posted or not */ 1396 /* check if cards are posted or not */
1306 if (!radeon_card_posted(rdev) && rdev->bios) { 1397 if (radeon_boot_test_post_card(rdev) == false)
1307 DRM_INFO("GPU not posted. posting now...\n"); 1398 return -EINVAL;
1308 radeon_combios_asic_init(rdev->ddev);
1309 }
1310 /* Set asic errata */ 1399 /* Set asic errata */
1311 r300_errata(rdev); 1400 r300_errata(rdev);
1312 /* Initialize clocks */ 1401 /* Initialize clocks */
1313 radeon_get_clock_info(rdev->ddev); 1402 radeon_get_clock_info(rdev->ddev);
1314 /* Get vram informations */ 1403 /* Initialize power management */
1315 r300_vram_info(rdev); 1404 radeon_pm_init(rdev);
1316 /* Initialize memory controller (also test AGP) */ 1405 /* initialize AGP */
1317 r = r420_mc_init(rdev); 1406 if (rdev->flags & RADEON_IS_AGP) {
1318 if (r) 1407 r = radeon_agp_init(rdev);
1319 return r; 1408 if (r) {
1409 radeon_agp_disable(rdev);
1410 }
1411 }
1412 /* initialize memory controller */
1413 r300_mc_init(rdev);
1320 /* Fence driver */ 1414 /* Fence driver */
1321 r = radeon_fence_driver_init(rdev); 1415 r = radeon_fence_driver_init(rdev);
1322 if (r) 1416 if (r)
@@ -1325,7 +1419,7 @@ int r300_init(struct radeon_device *rdev)
1325 if (r) 1419 if (r)
1326 return r; 1420 return r;
1327 /* Memory manager */ 1421 /* Memory manager */
1328 r = radeon_object_init(rdev); 1422 r = radeon_bo_init(rdev);
1329 if (r) 1423 if (r)
1330 return r; 1424 return r;
1331 if (rdev->flags & RADEON_IS_PCIE) { 1425 if (rdev->flags & RADEON_IS_PCIE) {
@@ -1344,15 +1438,15 @@ int r300_init(struct radeon_device *rdev)
1344 if (r) { 1438 if (r) {
1345 /* Something went wrong with the accel init, stop accel */ 1439
1346 dev_err(rdev->dev, "Disabling GPU acceleration\n"); 1440 dev_err(rdev->dev, "Disabling GPU acceleration\n");
1347 r300_suspend(rdev);
1348 r100_cp_fini(rdev); 1441 r100_cp_fini(rdev);
1349 r100_wb_fini(rdev); 1442 r100_wb_fini(rdev);
1350 r100_ib_fini(rdev); 1443 r100_ib_fini(rdev);
1444 radeon_irq_kms_fini(rdev);
1351 if (rdev->flags & RADEON_IS_PCIE) 1445 if (rdev->flags & RADEON_IS_PCIE)
1352 rv370_pcie_gart_fini(rdev); 1446 rv370_pcie_gart_fini(rdev);
1353 if (rdev->flags & RADEON_IS_PCI) 1447 if (rdev->flags & RADEON_IS_PCI)
1354 r100_pci_gart_fini(rdev); 1448 r100_pci_gart_fini(rdev);
1355 radeon_irq_kms_fini(rdev); 1449 radeon_agp_fini(rdev);
1356 rdev->accel_working = false; 1450 rdev->accel_working = false;
1357 } 1451 }
1358 return 0; 1452 return 0;
diff --git a/drivers/gpu/drm/radeon/r300_cmdbuf.c b/drivers/gpu/drm/radeon/r300_cmdbuf.c
index cb2e470f97d4..c5c2742e4140 100644
--- a/drivers/gpu/drm/radeon/r300_cmdbuf.c
+++ b/drivers/gpu/drm/radeon/r300_cmdbuf.c
@@ -33,6 +33,7 @@
33 33
34#include "drmP.h" 34#include "drmP.h"
35#include "drm.h" 35#include "drm.h"
36#include "drm_buffer.h"
36#include "radeon_drm.h" 37#include "radeon_drm.h"
37#include "radeon_drv.h" 38#include "radeon_drv.h"
38#include "r300_reg.h" 39#include "r300_reg.h"
@@ -299,46 +300,42 @@ static __inline__ int r300_emit_carefully_checked_packet0(drm_radeon_private_t *
299 int reg; 300 int reg;
300 int sz; 301 int sz;
301 int i; 302 int i;
302 int values[64]; 303 u32 *value;
303 RING_LOCALS; 304 RING_LOCALS;
304 305
305 sz = header.packet0.count; 306 sz = header.packet0.count;
306 reg = (header.packet0.reghi << 8) | header.packet0.reglo; 307 reg = (header.packet0.reghi << 8) | header.packet0.reglo;
307 308
308 if ((sz > 64) || (sz < 0)) { 309 if ((sz > 64) || (sz < 0)) {
309 DRM_ERROR 310 DRM_ERROR("Cannot emit more than 64 values at a time (reg=%04x sz=%d)\n",
310 ("Cannot emit more than 64 values at a time (reg=%04x sz=%d)\n", 311 reg, sz);
311 reg, sz);
312 return -EINVAL; 312 return -EINVAL;
313 } 313 }
314
314 for (i = 0; i < sz; i++) { 315 for (i = 0; i < sz; i++) {
315 values[i] = ((int *)cmdbuf->buf)[i];
316 switch (r300_reg_flags[(reg >> 2) + i]) { 316 switch (r300_reg_flags[(reg >> 2) + i]) {
317 case MARK_SAFE: 317 case MARK_SAFE:
318 break; 318 break;
319 case MARK_CHECK_OFFSET: 319 case MARK_CHECK_OFFSET:
320 if (!radeon_check_offset(dev_priv, (u32) values[i])) { 320 value = drm_buffer_pointer_to_dword(cmdbuf->buffer, i);
321 DRM_ERROR 321 if (!radeon_check_offset(dev_priv, *value)) {
322 ("Offset failed range check (reg=%04x sz=%d)\n", 322 DRM_ERROR("Offset failed range check (reg=%04x sz=%d)\n",
323 reg, sz); 323 reg, sz);
324 return -EINVAL; 324 return -EINVAL;
325 } 325 }
326 break; 326 break;
327 default: 327 default:
328 DRM_ERROR("Register %04x failed check as flag=%02x\n", 328 DRM_ERROR("Register %04x failed check as flag=%02x\n",
329 reg + i * 4, r300_reg_flags[(reg >> 2) + i]); 329 reg + i * 4, r300_reg_flags[(reg >> 2) + i]);
330 return -EINVAL; 330 return -EINVAL;
331 } 331 }
332 } 332 }
333 333
334 BEGIN_RING(1 + sz); 334 BEGIN_RING(1 + sz);
335 OUT_RING(CP_PACKET0(reg, sz - 1)); 335 OUT_RING(CP_PACKET0(reg, sz - 1));
336 OUT_RING_TABLE(values, sz); 336 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz);
337 ADVANCE_RING(); 337 ADVANCE_RING();
338 338
339 cmdbuf->buf += sz * 4;
340 cmdbuf->bufsz -= sz * 4;
341
342 return 0; 339 return 0;
343} 340}
344 341
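From this hunk on, the command-stream code stops doing its own cmdbuf->buf / cmdbuf->bufsz bookkeeping and relies on the drm_buffer helpers, which keep an iterator inside the buffer object. A minimal sketch of the consumption pattern the converted hunks follow (the wrapper name is hypothetical; the drm_buffer_* calls are the ones used throughout this file):

static u32 *r300_peek_and_skip(drm_radeon_kcmd_buffer_t *cmdbuf, int sz)
{
	u32 *first;

	/* refuse to read past the end of the user-supplied stream */
	if (drm_buffer_unprocessed(cmdbuf->buffer) < sz * 4)
		return NULL;

	/* peek at dword 0 in place; this does not consume anything */
	first = drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);

	/* advance the iterator past the sz dwords once they are handled;
	 * OUT_RING_DRM_BUFFER() advances it the same way when emitting */
	drm_buffer_advance(cmdbuf->buffer, sz * 4);
	return first;
}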
@@ -362,7 +359,7 @@ static __inline__ int r300_emit_packet0(drm_radeon_private_t *dev_priv,
362 if (!sz) 359 if (!sz)
363 return 0; 360 return 0;
364 361
365 if (sz * 4 > cmdbuf->bufsz) 362 if (sz * 4 > drm_buffer_unprocessed(cmdbuf->buffer))
366 return -EINVAL; 363 return -EINVAL;
367 364
368 if (reg + sz * 4 >= 0x10000) { 365 if (reg + sz * 4 >= 0x10000) {
@@ -380,12 +377,9 @@ static __inline__ int r300_emit_packet0(drm_radeon_private_t *dev_priv,
380 377
381 BEGIN_RING(1 + sz); 378 BEGIN_RING(1 + sz);
382 OUT_RING(CP_PACKET0(reg, sz - 1)); 379 OUT_RING(CP_PACKET0(reg, sz - 1));
383 OUT_RING_TABLE((int *)cmdbuf->buf, sz); 380 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz);
384 ADVANCE_RING(); 381 ADVANCE_RING();
385 382
386 cmdbuf->buf += sz * 4;
387 cmdbuf->bufsz -= sz * 4;
388
389 return 0; 383 return 0;
390} 384}
391 385
@@ -407,7 +401,7 @@ static __inline__ int r300_emit_vpu(drm_radeon_private_t *dev_priv,
407 401
408 if (!sz) 402 if (!sz)
409 return 0; 403 return 0;
410 if (sz * 16 > cmdbuf->bufsz) 404 if (sz * 16 > drm_buffer_unprocessed(cmdbuf->buffer))
411 return -EINVAL; 405 return -EINVAL;
412 406
413 /* VAP is very sensitive so we purge cache before we program it 407 /* VAP is very sensitive so we purge cache before we program it
@@ -426,7 +420,7 @@ static __inline__ int r300_emit_vpu(drm_radeon_private_t *dev_priv,
426 BEGIN_RING(3 + sz * 4); 420 BEGIN_RING(3 + sz * 4);
427 OUT_RING_REG(R300_VAP_PVS_UPLOAD_ADDRESS, addr); 421 OUT_RING_REG(R300_VAP_PVS_UPLOAD_ADDRESS, addr);
428 OUT_RING(CP_PACKET0_TABLE(R300_VAP_PVS_UPLOAD_DATA, sz * 4 - 1)); 422 OUT_RING(CP_PACKET0_TABLE(R300_VAP_PVS_UPLOAD_DATA, sz * 4 - 1));
429 OUT_RING_TABLE((int *)cmdbuf->buf, sz * 4); 423 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz * 4);
430 ADVANCE_RING(); 424 ADVANCE_RING();
431 425
432 BEGIN_RING(2); 426 BEGIN_RING(2);
@@ -434,9 +428,6 @@ static __inline__ int r300_emit_vpu(drm_radeon_private_t *dev_priv,
434 OUT_RING(0); 428 OUT_RING(0);
435 ADVANCE_RING(); 429 ADVANCE_RING();
436 430
437 cmdbuf->buf += sz * 16;
438 cmdbuf->bufsz -= sz * 16;
439
440 return 0; 431 return 0;
441} 432}
442 433
@@ -449,14 +440,14 @@ static __inline__ int r300_emit_clear(drm_radeon_private_t *dev_priv,
449{ 440{
450 RING_LOCALS; 441 RING_LOCALS;
451 442
452 if (8 * 4 > cmdbuf->bufsz) 443 if (8 * 4 > drm_buffer_unprocessed(cmdbuf->buffer))
453 return -EINVAL; 444 return -EINVAL;
454 445
455 BEGIN_RING(10); 446 BEGIN_RING(10);
456 OUT_RING(CP_PACKET3(R200_3D_DRAW_IMMD_2, 8)); 447 OUT_RING(CP_PACKET3(R200_3D_DRAW_IMMD_2, 8));
457 OUT_RING(R300_PRIM_TYPE_POINT | R300_PRIM_WALK_RING | 448 OUT_RING(R300_PRIM_TYPE_POINT | R300_PRIM_WALK_RING |
458 (1 << R300_PRIM_NUM_VERTICES_SHIFT)); 449 (1 << R300_PRIM_NUM_VERTICES_SHIFT));
459 OUT_RING_TABLE((int *)cmdbuf->buf, 8); 450 OUT_RING_DRM_BUFFER(cmdbuf->buffer, 8);
460 ADVANCE_RING(); 451 ADVANCE_RING();
461 452
462 BEGIN_RING(4); 453 BEGIN_RING(4);
@@ -468,9 +459,6 @@ static __inline__ int r300_emit_clear(drm_radeon_private_t *dev_priv,
468 /* set flush flag */ 459 /* set flush flag */
469 dev_priv->track_flush |= RADEON_FLUSH_EMITED; 460 dev_priv->track_flush |= RADEON_FLUSH_EMITED;
470 461
471 cmdbuf->buf += 8 * 4;
472 cmdbuf->bufsz -= 8 * 4;
473
474 return 0; 462 return 0;
475} 463}
476 464
@@ -480,28 +468,29 @@ static __inline__ int r300_emit_3d_load_vbpntr(drm_radeon_private_t *dev_priv,
480{ 468{
481 int count, i, k; 469 int count, i, k;
482#define MAX_ARRAY_PACKET 64 470#define MAX_ARRAY_PACKET 64
483 u32 payload[MAX_ARRAY_PACKET]; 471 u32 *data;
484 u32 narrays; 472 u32 narrays;
485 RING_LOCALS; 473 RING_LOCALS;
486 474
487 count = (header >> 16) & 0x3fff; 475 count = (header & RADEON_CP_PACKET_COUNT_MASK) >> 16;
488 476
489 if ((count + 1) > MAX_ARRAY_PACKET) { 477 if ((count + 1) > MAX_ARRAY_PACKET) {
490 DRM_ERROR("Too large payload in 3D_LOAD_VBPNTR (count=%d)\n", 478 DRM_ERROR("Too large payload in 3D_LOAD_VBPNTR (count=%d)\n",
491 count); 479 count);
492 return -EINVAL; 480 return -EINVAL;
493 } 481 }
494 memset(payload, 0, MAX_ARRAY_PACKET * 4);
495 memcpy(payload, cmdbuf->buf + 4, (count + 1) * 4);
496
497 /* carefully check packet contents */ 482 /* carefully check packet contents */
498 483
499 narrays = payload[0]; 484 /* We have already read the header so advance the buffer. */
485 drm_buffer_advance(cmdbuf->buffer, 4);
486
487 narrays = *(u32 *)drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);
500 k = 0; 488 k = 0;
501 i = 1; 489 i = 1;
502 while ((k < narrays) && (i < (count + 1))) { 490 while ((k < narrays) && (i < (count + 1))) {
503 i++; /* skip attribute field */ 491 i++; /* skip attribute field */
504 if (!radeon_check_offset(dev_priv, payload[i])) { 492 data = drm_buffer_pointer_to_dword(cmdbuf->buffer, i);
493 if (!radeon_check_offset(dev_priv, *data)) {
505 DRM_ERROR 494 DRM_ERROR
506 ("Offset failed range check (k=%d i=%d) while processing 3D_LOAD_VBPNTR packet.\n", 495 ("Offset failed range check (k=%d i=%d) while processing 3D_LOAD_VBPNTR packet.\n",
507 k, i); 496 k, i);
@@ -512,7 +501,8 @@ static __inline__ int r300_emit_3d_load_vbpntr(drm_radeon_private_t *dev_priv,
512 if (k == narrays) 501 if (k == narrays)
513 break; 502 break;
514 /* have one more to process, they come in pairs */ 503 /* have one more to process, they come in pairs */
515 if (!radeon_check_offset(dev_priv, payload[i])) { 504 data = drm_buffer_pointer_to_dword(cmdbuf->buffer, i);
505 if (!radeon_check_offset(dev_priv, *data)) {
516 DRM_ERROR 506 DRM_ERROR
517 ("Offset failed range check (k=%d i=%d) while processing 3D_LOAD_VBPNTR packet.\n", 507 ("Offset failed range check (k=%d i=%d) while processing 3D_LOAD_VBPNTR packet.\n",
518 k, i); 508 k, i);
@@ -533,30 +523,30 @@ static __inline__ int r300_emit_3d_load_vbpntr(drm_radeon_private_t *dev_priv,
533 523
534 BEGIN_RING(count + 2); 524 BEGIN_RING(count + 2);
535 OUT_RING(header); 525 OUT_RING(header);
536 OUT_RING_TABLE(payload, count + 1); 526 OUT_RING_DRM_BUFFER(cmdbuf->buffer, count + 1);
537 ADVANCE_RING(); 527 ADVANCE_RING();
538 528
539 cmdbuf->buf += (count + 2) * 4;
540 cmdbuf->bufsz -= (count + 2) * 4;
541
542 return 0; 529 return 0;
543} 530}
544 531
545static __inline__ int r300_emit_bitblt_multi(drm_radeon_private_t *dev_priv, 532static __inline__ int r300_emit_bitblt_multi(drm_radeon_private_t *dev_priv,
546 drm_radeon_kcmd_buffer_t *cmdbuf) 533 drm_radeon_kcmd_buffer_t *cmdbuf)
547{ 534{
548 u32 *cmd = (u32 *) cmdbuf->buf; 535 u32 *cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);
549 int count, ret; 536 int count, ret;
550 RING_LOCALS; 537 RING_LOCALS;
551 538
552 count=(cmd[0]>>16) & 0x3fff;
553 539
554 if (cmd[0] & 0x8000) { 540 count = (*cmd & RADEON_CP_PACKET_COUNT_MASK) >> 16;
555 u32 offset;
556 541
557 if (cmd[1] & (RADEON_GMC_SRC_PITCH_OFFSET_CNTL 542 if (*cmd & 0x8000) {
543 u32 offset;
544 u32 *cmd1 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 1);
545 if (*cmd1 & (RADEON_GMC_SRC_PITCH_OFFSET_CNTL
558 | RADEON_GMC_DST_PITCH_OFFSET_CNTL)) { 546 | RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
559 offset = cmd[2] << 10; 547
548 u32 *cmd2 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 2);
549 offset = *cmd2 << 10;
560 ret = !radeon_check_offset(dev_priv, offset); 550 ret = !radeon_check_offset(dev_priv, offset);
561 if (ret) { 551 if (ret) {
562 DRM_ERROR("Invalid bitblt first offset is %08X\n", offset); 552 DRM_ERROR("Invalid bitblt first offset is %08X\n", offset);
@@ -564,9 +554,10 @@ static __inline__ int r300_emit_bitblt_multi(drm_radeon_private_t *dev_priv,
564 } 554 }
565 } 555 }
566 556
567 if ((cmd[1] & RADEON_GMC_SRC_PITCH_OFFSET_CNTL) && 557 if ((*cmd1 & RADEON_GMC_SRC_PITCH_OFFSET_CNTL) &&
568 (cmd[1] & RADEON_GMC_DST_PITCH_OFFSET_CNTL)) { 558 (*cmd1 & RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
569 offset = cmd[3] << 10; 559 u32 *cmd3 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 3);
560 offset = *cmd3 << 10;
570 ret = !radeon_check_offset(dev_priv, offset); 561 ret = !radeon_check_offset(dev_priv, offset);
571 if (ret) { 562 if (ret) {
572 DRM_ERROR("Invalid bitblt second offset is %08X\n", offset); 563 DRM_ERROR("Invalid bitblt second offset is %08X\n", offset);
@@ -577,28 +568,25 @@ static __inline__ int r300_emit_bitblt_multi(drm_radeon_private_t *dev_priv,
577 } 568 }
578 569
579 BEGIN_RING(count+2); 570 BEGIN_RING(count+2);
580 OUT_RING(cmd[0]); 571 OUT_RING_DRM_BUFFER(cmdbuf->buffer, count + 2);
581 OUT_RING_TABLE((int *)(cmdbuf->buf + 4), count + 1);
582 ADVANCE_RING(); 572 ADVANCE_RING();
583 573
584 cmdbuf->buf += (count+2)*4;
585 cmdbuf->bufsz -= (count+2)*4;
586
587 return 0; 574 return 0;
588} 575}
589 576
590static __inline__ int r300_emit_draw_indx_2(drm_radeon_private_t *dev_priv, 577static __inline__ int r300_emit_draw_indx_2(drm_radeon_private_t *dev_priv,
591 drm_radeon_kcmd_buffer_t *cmdbuf) 578 drm_radeon_kcmd_buffer_t *cmdbuf)
592{ 579{
593 u32 *cmd; 580 u32 *cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);
581 u32 *cmd1 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 1);
594 int count; 582 int count;
595 int expected_count; 583 int expected_count;
596 RING_LOCALS; 584 RING_LOCALS;
597 585
598 cmd = (u32 *) cmdbuf->buf; 586 count = (*cmd & RADEON_CP_PACKET_COUNT_MASK) >> 16;
599 count = (cmd[0]>>16) & 0x3fff; 587
600 expected_count = cmd[1] >> 16; 588 expected_count = *cmd1 >> 16;
601 if (!(cmd[1] & R300_VAP_VF_CNTL__INDEX_SIZE_32bit)) 589 if (!(*cmd1 & R300_VAP_VF_CNTL__INDEX_SIZE_32bit))
602 expected_count = (expected_count+1)/2; 590 expected_count = (expected_count+1)/2;
603 591
604 if (count && count != expected_count) { 592 if (count && count != expected_count) {
@@ -608,55 +596,53 @@ static __inline__ int r300_emit_draw_indx_2(drm_radeon_private_t *dev_priv,
608 } 596 }
609 597
610 BEGIN_RING(count+2); 598 BEGIN_RING(count+2);
611 OUT_RING(cmd[0]); 599 OUT_RING_DRM_BUFFER(cmdbuf->buffer, count + 2);
612 OUT_RING_TABLE((int *)(cmdbuf->buf + 4), count + 1);
613 ADVANCE_RING(); 600 ADVANCE_RING();
614 601
615 cmdbuf->buf += (count+2)*4;
616 cmdbuf->bufsz -= (count+2)*4;
617
618 if (!count) { 602 if (!count) {
619 drm_r300_cmd_header_t header; 603 drm_r300_cmd_header_t stack_header, *header;
604 u32 *cmd1, *cmd2, *cmd3;
620 605
621 if (cmdbuf->bufsz < 4*4 + sizeof(header)) { 606 if (drm_buffer_unprocessed(cmdbuf->buffer)
607 < 4*4 + sizeof(stack_header)) {
622 DRM_ERROR("3D_DRAW_INDX_2: expect subsequent INDX_BUFFER, but stream is too short.\n"); 608 DRM_ERROR("3D_DRAW_INDX_2: expect subsequent INDX_BUFFER, but stream is too short.\n");
623 return -EINVAL; 609 return -EINVAL;
624 } 610 }
625 611
626 header.u = *(unsigned int *)cmdbuf->buf; 612 header = drm_buffer_read_object(cmdbuf->buffer,
613 sizeof(stack_header), &stack_header);
627 614
628 cmdbuf->buf += sizeof(header); 615 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);
629 cmdbuf->bufsz -= sizeof(header); 616 cmd1 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 1);
630 cmd = (u32 *) cmdbuf->buf; 617 cmd2 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 2);
618 cmd3 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 3);
631 619
632 if (header.header.cmd_type != R300_CMD_PACKET3 || 620 if (header->header.cmd_type != R300_CMD_PACKET3 ||
633 header.packet3.packet != R300_CMD_PACKET3_RAW || 621 header->packet3.packet != R300_CMD_PACKET3_RAW ||
634 cmd[0] != CP_PACKET3(RADEON_CP_INDX_BUFFER, 2)) { 622 *cmd != CP_PACKET3(RADEON_CP_INDX_BUFFER, 2)) {
635 DRM_ERROR("3D_DRAW_INDX_2: expect subsequent INDX_BUFFER.\n"); 623 DRM_ERROR("3D_DRAW_INDX_2: expect subsequent INDX_BUFFER.\n");
636 return -EINVAL; 624 return -EINVAL;
637 } 625 }
638 626
639 if ((cmd[1] & 0x8000ffff) != 0x80000810) { 627 if ((*cmd1 & 0x8000ffff) != 0x80000810) {
640 DRM_ERROR("Invalid indx_buffer reg address %08X\n", cmd[1]); 628 DRM_ERROR("Invalid indx_buffer reg address %08X\n",
629 *cmd1);
641 return -EINVAL; 630 return -EINVAL;
642 } 631 }
643 if (!radeon_check_offset(dev_priv, cmd[2])) { 632 if (!radeon_check_offset(dev_priv, *cmd2)) {
644 DRM_ERROR("Invalid indx_buffer offset is %08X\n", cmd[2]); 633 DRM_ERROR("Invalid indx_buffer offset is %08X\n",
634 *cmd2);
645 return -EINVAL; 635 return -EINVAL;
646 } 636 }
647 if (cmd[3] != expected_count) { 637 if (*cmd3 != expected_count) {
648 DRM_ERROR("INDX_BUFFER: buffer size %i, expected %i\n", 638 DRM_ERROR("INDX_BUFFER: buffer size %i, expected %i\n",
649 cmd[3], expected_count); 639 *cmd3, expected_count);
650 return -EINVAL; 640 return -EINVAL;
651 } 641 }
652 642
653 BEGIN_RING(4); 643 BEGIN_RING(4);
654 OUT_RING(cmd[0]); 644 OUT_RING_DRM_BUFFER(cmdbuf->buffer, 4);
655 OUT_RING_TABLE((int *)(cmdbuf->buf + 4), 3);
656 ADVANCE_RING(); 645 ADVANCE_RING();
657
658 cmdbuf->buf += 4*4;
659 cmdbuf->bufsz -= 4*4;
660 } 646 }
661 647
662 return 0; 648 return 0;
@@ -665,39 +651,39 @@ static __inline__ int r300_emit_draw_indx_2(drm_radeon_private_t *dev_priv,
665static __inline__ int r300_emit_raw_packet3(drm_radeon_private_t *dev_priv, 651static __inline__ int r300_emit_raw_packet3(drm_radeon_private_t *dev_priv,
666 drm_radeon_kcmd_buffer_t *cmdbuf) 652 drm_radeon_kcmd_buffer_t *cmdbuf)
667{ 653{
668 u32 header; 654 u32 *header;
669 int count; 655 int count;
670 RING_LOCALS; 656 RING_LOCALS;
671 657
672 if (4 > cmdbuf->bufsz) 658 if (4 > drm_buffer_unprocessed(cmdbuf->buffer))
673 return -EINVAL; 659 return -EINVAL;
674 660
675 /* Fixme !! This simply emits a packet without much checking. 661 /* Fixme !! This simply emits a packet without much checking.
676 We need to be smarter. */ 662 We need to be smarter. */
677 663
678 /* obtain first word - actual packet3 header */ 664 /* obtain first word - actual packet3 header */
679 header = *(u32 *) cmdbuf->buf; 665 header = drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);
680 666
681 /* Is it packet 3 ? */ 667 /* Is it packet 3 ? */
682 if ((header >> 30) != 0x3) { 668 if ((*header >> 30) != 0x3) {
683 DRM_ERROR("Not a packet3 header (0x%08x)\n", header); 669 DRM_ERROR("Not a packet3 header (0x%08x)\n", *header);
684 return -EINVAL; 670 return -EINVAL;
685 } 671 }
686 672
687 count = (header >> 16) & 0x3fff; 673 count = (*header >> 16) & 0x3fff;
688 674
689 /* Check again now that we know how much data to expect */ 675 /* Check again now that we know how much data to expect */
690 if ((count + 2) * 4 > cmdbuf->bufsz) { 676 if ((count + 2) * 4 > drm_buffer_unprocessed(cmdbuf->buffer)) {
691 DRM_ERROR 677 DRM_ERROR
692 ("Expected packet3 of length %d but have only %d bytes left\n", 678 ("Expected packet3 of length %d but have only %d bytes left\n",
693 (count + 2) * 4, cmdbuf->bufsz); 679 (count + 2) * 4, drm_buffer_unprocessed(cmdbuf->buffer));
694 return -EINVAL; 680 return -EINVAL;
695 } 681 }
696 682
697 /* Is it a packet type we know about ? */ 683 /* Is it a packet type we know about ? */
698 switch (header & 0xff00) { 684 switch (*header & 0xff00) {
699 case RADEON_3D_LOAD_VBPNTR: /* load vertex array pointers */ 685 case RADEON_3D_LOAD_VBPNTR: /* load vertex array pointers */
700 return r300_emit_3d_load_vbpntr(dev_priv, cmdbuf, header); 686 return r300_emit_3d_load_vbpntr(dev_priv, cmdbuf, *header);
701 687
702 case RADEON_CNTL_BITBLT_MULTI: 688 case RADEON_CNTL_BITBLT_MULTI:
703 return r300_emit_bitblt_multi(dev_priv, cmdbuf); 689 return r300_emit_bitblt_multi(dev_priv, cmdbuf);
@@ -723,18 +709,14 @@ static __inline__ int r300_emit_raw_packet3(drm_radeon_private_t *dev_priv,
723 /* these packets are safe */ 709 /* these packets are safe */
724 break; 710 break;
725 default: 711 default:
726 DRM_ERROR("Unknown packet3 header (0x%08x)\n", header); 712 DRM_ERROR("Unknown packet3 header (0x%08x)\n", *header);
727 return -EINVAL; 713 return -EINVAL;
728 } 714 }
729 715
730 BEGIN_RING(count + 2); 716 BEGIN_RING(count + 2);
731 OUT_RING(header); 717 OUT_RING_DRM_BUFFER(cmdbuf->buffer, count + 2);
732 OUT_RING_TABLE((int *)(cmdbuf->buf + 4), count + 1);
733 ADVANCE_RING(); 718 ADVANCE_RING();
734 719
735 cmdbuf->buf += (count + 2) * 4;
736 cmdbuf->bufsz -= (count + 2) * 4;
737
738 return 0; 720 return 0;
739} 721}
740 722
@@ -748,8 +730,7 @@ static __inline__ int r300_emit_packet3(drm_radeon_private_t *dev_priv,
748{ 730{
749 int n; 731 int n;
750 int ret; 732 int ret;
751 char *orig_buf = cmdbuf->buf; 733 int orig_iter = cmdbuf->buffer->iterator;
752 int orig_bufsz = cmdbuf->bufsz;
753 734
754 /* This is a do-while-loop so that we run the interior at least once, 735 /* This is a do-while-loop so that we run the interior at least once,
755 * even if cmdbuf->nbox is 0. Compare r300_emit_cliprects for rationale. 736 * even if cmdbuf->nbox is 0. Compare r300_emit_cliprects for rationale.
@@ -761,8 +742,7 @@ static __inline__ int r300_emit_packet3(drm_radeon_private_t *dev_priv,
761 if (ret) 742 if (ret)
762 return ret; 743 return ret;
763 744
764 cmdbuf->buf = orig_buf; 745 cmdbuf->buffer->iterator = orig_iter;
765 cmdbuf->bufsz = orig_bufsz;
766 } 746 }
767 747
768 switch (header.packet3.packet) { 748 switch (header.packet3.packet) {
@@ -785,9 +765,9 @@ static __inline__ int r300_emit_packet3(drm_radeon_private_t *dev_priv,
785 break; 765 break;
786 766
787 default: 767 default:
788 DRM_ERROR("bad packet3 type %i at %p\n", 768 DRM_ERROR("bad packet3 type %i at byte %d\n",
789 header.packet3.packet, 769 header.packet3.packet,
790 cmdbuf->buf - sizeof(header)); 770 cmdbuf->buffer->iterator - (int)sizeof(header));
791 return -EINVAL; 771 return -EINVAL;
792 } 772 }
793 773
@@ -923,12 +903,13 @@ static int r300_scratch(drm_radeon_private_t *dev_priv,
923 drm_r300_cmd_header_t header) 903 drm_r300_cmd_header_t header)
924{ 904{
925 u32 *ref_age_base; 905 u32 *ref_age_base;
926 u32 i, buf_idx, h_pending; 906 u32 i, *buf_idx, h_pending;
927 u64 ptr_addr; 907 u64 *ptr_addr;
908 u64 stack_ptr_addr;
928 RING_LOCALS; 909 RING_LOCALS;
929 910
930 if (cmdbuf->bufsz < 911 if (drm_buffer_unprocessed(cmdbuf->buffer) <
931 (sizeof(u64) + header.scratch.n_bufs * sizeof(buf_idx))) { 912 (sizeof(u64) + header.scratch.n_bufs * sizeof(*buf_idx))) {
932 return -EINVAL; 913 return -EINVAL;
933 } 914 }
934 915
@@ -938,36 +919,35 @@ static int r300_scratch(drm_radeon_private_t *dev_priv,
938 919
939 dev_priv->scratch_ages[header.scratch.reg]++; 920 dev_priv->scratch_ages[header.scratch.reg]++;
940 921
941 ptr_addr = get_unaligned((u64 *)cmdbuf->buf); 922 ptr_addr = drm_buffer_read_object(cmdbuf->buffer,
942 ref_age_base = (u32 *)(unsigned long)ptr_addr; 923 sizeof(stack_ptr_addr), &stack_ptr_addr);
943 924 ref_age_base = (u32 *)(unsigned long)get_unaligned(ptr_addr);
944 cmdbuf->buf += sizeof(u64);
945 cmdbuf->bufsz -= sizeof(u64);
946 925
947 for (i=0; i < header.scratch.n_bufs; i++) { 926 for (i=0; i < header.scratch.n_bufs; i++) {
948 buf_idx = *(u32 *)cmdbuf->buf; 927 buf_idx = drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);
949 buf_idx *= 2; /* 8 bytes per buf */ 928 *buf_idx *= 2; /* 8 bytes per buf */
950 929
951 if (DRM_COPY_TO_USER(ref_age_base + buf_idx, &dev_priv->scratch_ages[header.scratch.reg], sizeof(u32))) { 930 if (DRM_COPY_TO_USER(ref_age_base + *buf_idx,
931 &dev_priv->scratch_ages[header.scratch.reg],
932 sizeof(u32)))
952 return -EINVAL; 933 return -EINVAL;
953 }
954 934
955 if (DRM_COPY_FROM_USER(&h_pending, ref_age_base + buf_idx + 1, sizeof(u32))) { 935 if (DRM_COPY_FROM_USER(&h_pending,
936 ref_age_base + *buf_idx + 1,
937 sizeof(u32)))
956 return -EINVAL; 938 return -EINVAL;
957 }
958 939
959 if (h_pending == 0) { 940 if (h_pending == 0)
960 return -EINVAL; 941 return -EINVAL;
961 }
962 942
963 h_pending--; 943 h_pending--;
964 944
965 if (DRM_COPY_TO_USER(ref_age_base + buf_idx + 1, &h_pending, sizeof(u32))) { 945 if (DRM_COPY_TO_USER(ref_age_base + *buf_idx + 1,
946 &h_pending,
947 sizeof(u32)))
966 return -EINVAL; 948 return -EINVAL;
967 }
968 949
969 cmdbuf->buf += sizeof(buf_idx); 950 drm_buffer_advance(cmdbuf->buffer, sizeof(*buf_idx));
970 cmdbuf->bufsz -= sizeof(buf_idx);
971 } 951 }
972 952
973 BEGIN_RING(2); 953 BEGIN_RING(2);
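The buf_idx *= 2 above reflects the layout of the user-space reference-age area: each buffer gets two dwords, an age the kernel copies out and a pending count that both sides update. A sketch of the implied layout, with a hypothetical struct name (the driver indexes the raw u32 array directly):

struct r300_scratch_ref {        /* hypothetical; mirrors ref_age_base[buf_idx * 2] */
	u32 age;                 /* last scratch age written back by the kernel */
	u32 pending;             /* writes user space still expects; decremented here */
};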
@@ -990,7 +970,7 @@ static inline int r300_emit_r500fp(drm_radeon_private_t *dev_priv,
990 int sz; 970 int sz;
991 int addr; 971 int addr;
992 int type; 972 int type;
993 int clamp; 973 int isclamp;
994 int stride; 974 int stride;
995 RING_LOCALS; 975 RING_LOCALS;
996 976
@@ -999,29 +979,26 @@ static inline int r300_emit_r500fp(drm_radeon_private_t *dev_priv,
999 addr = ((header.r500fp.adrhi_flags & 1) << 8) | header.r500fp.adrlo; 979 addr = ((header.r500fp.adrhi_flags & 1) << 8) | header.r500fp.adrlo;
1000 980
1001 type = !!(header.r500fp.adrhi_flags & R500FP_CONSTANT_TYPE); 981 type = !!(header.r500fp.adrhi_flags & R500FP_CONSTANT_TYPE);
1002 clamp = !!(header.r500fp.adrhi_flags & R500FP_CONSTANT_CLAMP); 982 isclamp = !!(header.r500fp.adrhi_flags & R500FP_CONSTANT_CLAMP);
1003 983
1004 addr |= (type << 16); 984 addr |= (type << 16);
1005 addr |= (clamp << 17); 985 addr |= (isclamp << 17);
1006 986
1007 stride = type ? 4 : 6; 987 stride = type ? 4 : 6;
1008 988
1009 DRM_DEBUG("r500fp %d %d type: %d\n", sz, addr, type); 989 DRM_DEBUG("r500fp %d %d type: %d\n", sz, addr, type);
1010 if (!sz) 990 if (!sz)
1011 return 0; 991 return 0;
1012 if (sz * stride * 4 > cmdbuf->bufsz) 992 if (sz * stride * 4 > drm_buffer_unprocessed(cmdbuf->buffer))
1013 return -EINVAL; 993 return -EINVAL;
1014 994
1015 BEGIN_RING(3 + sz * stride); 995 BEGIN_RING(3 + sz * stride);
1016 OUT_RING_REG(R500_GA_US_VECTOR_INDEX, addr); 996 OUT_RING_REG(R500_GA_US_VECTOR_INDEX, addr);
1017 OUT_RING(CP_PACKET0_TABLE(R500_GA_US_VECTOR_DATA, sz * stride - 1)); 997 OUT_RING(CP_PACKET0_TABLE(R500_GA_US_VECTOR_DATA, sz * stride - 1));
1018 OUT_RING_TABLE((int *)cmdbuf->buf, sz * stride); 998 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz * stride);
1019 999
1020 ADVANCE_RING(); 1000 ADVANCE_RING();
1021 1001
1022 cmdbuf->buf += sz * stride * 4;
1023 cmdbuf->bufsz -= sz * stride * 4;
1024
1025 return 0; 1002 return 0;
1026} 1003}
1027 1004
@@ -1053,19 +1030,18 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
1053 goto cleanup; 1030 goto cleanup;
1054 } 1031 }
1055 1032
1056 while (cmdbuf->bufsz >= sizeof(drm_r300_cmd_header_t)) { 1033 while (drm_buffer_unprocessed(cmdbuf->buffer)
1034 >= sizeof(drm_r300_cmd_header_t)) {
1057 int idx; 1035 int idx;
1058 drm_r300_cmd_header_t header; 1036 drm_r300_cmd_header_t *header, stack_header;
1059
1060 header.u = *(unsigned int *)cmdbuf->buf;
1061 1037
1062 cmdbuf->buf += sizeof(header); 1038 header = drm_buffer_read_object(cmdbuf->buffer,
1063 cmdbuf->bufsz -= sizeof(header); 1039 sizeof(stack_header), &stack_header);
1064 1040
1065 switch (header.header.cmd_type) { 1041 switch (header->header.cmd_type) {
1066 case R300_CMD_PACKET0: 1042 case R300_CMD_PACKET0:
1067 DRM_DEBUG("R300_CMD_PACKET0\n"); 1043 DRM_DEBUG("R300_CMD_PACKET0\n");
1068 ret = r300_emit_packet0(dev_priv, cmdbuf, header); 1044 ret = r300_emit_packet0(dev_priv, cmdbuf, *header);
1069 if (ret) { 1045 if (ret) {
1070 DRM_ERROR("r300_emit_packet0 failed\n"); 1046 DRM_ERROR("r300_emit_packet0 failed\n");
1071 goto cleanup; 1047 goto cleanup;
@@ -1074,7 +1050,7 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
1074 1050
1075 case R300_CMD_VPU: 1051 case R300_CMD_VPU:
1076 DRM_DEBUG("R300_CMD_VPU\n"); 1052 DRM_DEBUG("R300_CMD_VPU\n");
1077 ret = r300_emit_vpu(dev_priv, cmdbuf, header); 1053 ret = r300_emit_vpu(dev_priv, cmdbuf, *header);
1078 if (ret) { 1054 if (ret) {
1079 DRM_ERROR("r300_emit_vpu failed\n"); 1055 DRM_ERROR("r300_emit_vpu failed\n");
1080 goto cleanup; 1056 goto cleanup;
@@ -1083,7 +1059,7 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
1083 1059
1084 case R300_CMD_PACKET3: 1060 case R300_CMD_PACKET3:
1085 DRM_DEBUG("R300_CMD_PACKET3\n"); 1061 DRM_DEBUG("R300_CMD_PACKET3\n");
1086 ret = r300_emit_packet3(dev_priv, cmdbuf, header); 1062 ret = r300_emit_packet3(dev_priv, cmdbuf, *header);
1087 if (ret) { 1063 if (ret) {
1088 DRM_ERROR("r300_emit_packet3 failed\n"); 1064 DRM_ERROR("r300_emit_packet3 failed\n");
1089 goto cleanup; 1065 goto cleanup;
@@ -1117,8 +1093,8 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
1117 int i; 1093 int i;
1118 RING_LOCALS; 1094 RING_LOCALS;
1119 1095
1120 BEGIN_RING(header.delay.count); 1096 BEGIN_RING(header->delay.count);
1121 for (i = 0; i < header.delay.count; i++) 1097 for (i = 0; i < header->delay.count; i++)
1122 OUT_RING(RADEON_CP_PACKET2); 1098 OUT_RING(RADEON_CP_PACKET2);
1123 ADVANCE_RING(); 1099 ADVANCE_RING();
1124 } 1100 }
@@ -1126,7 +1102,7 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
1126 1102
1127 case R300_CMD_DMA_DISCARD: 1103 case R300_CMD_DMA_DISCARD:
1128 DRM_DEBUG("RADEON_CMD_DMA_DISCARD\n"); 1104 DRM_DEBUG("RADEON_CMD_DMA_DISCARD\n");
1129 idx = header.dma.buf_idx; 1105 idx = header->dma.buf_idx;
1130 if (idx < 0 || idx >= dma->buf_count) { 1106 if (idx < 0 || idx >= dma->buf_count) {
1131 DRM_ERROR("buffer index %d (of %d max)\n", 1107 DRM_ERROR("buffer index %d (of %d max)\n",
1132 idx, dma->buf_count - 1); 1108 idx, dma->buf_count - 1);
@@ -1149,12 +1125,12 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
1149 1125
1150 case R300_CMD_WAIT: 1126 case R300_CMD_WAIT:
1151 DRM_DEBUG("R300_CMD_WAIT\n"); 1127 DRM_DEBUG("R300_CMD_WAIT\n");
1152 r300_cmd_wait(dev_priv, header); 1128 r300_cmd_wait(dev_priv, *header);
1153 break; 1129 break;
1154 1130
1155 case R300_CMD_SCRATCH: 1131 case R300_CMD_SCRATCH:
1156 DRM_DEBUG("R300_CMD_SCRATCH\n"); 1132 DRM_DEBUG("R300_CMD_SCRATCH\n");
1157 ret = r300_scratch(dev_priv, cmdbuf, header); 1133 ret = r300_scratch(dev_priv, cmdbuf, *header);
1158 if (ret) { 1134 if (ret) {
1159 DRM_ERROR("r300_scratch failed\n"); 1135 DRM_ERROR("r300_scratch failed\n");
1160 goto cleanup; 1136 goto cleanup;
@@ -1168,16 +1144,16 @@ int r300_do_cp_cmdbuf(struct drm_device *dev,
1168 goto cleanup; 1144 goto cleanup;
1169 } 1145 }
1170 DRM_DEBUG("R300_CMD_R500FP\n"); 1146 DRM_DEBUG("R300_CMD_R500FP\n");
1171 ret = r300_emit_r500fp(dev_priv, cmdbuf, header); 1147 ret = r300_emit_r500fp(dev_priv, cmdbuf, *header);
1172 if (ret) { 1148 if (ret) {
1173 DRM_ERROR("r300_emit_r500fp failed\n"); 1149 DRM_ERROR("r300_emit_r500fp failed\n");
1174 goto cleanup; 1150 goto cleanup;
1175 } 1151 }
1176 break; 1152 break;
1177 default: 1153 default:
1178 DRM_ERROR("bad cmd_type %i at %p\n", 1154 DRM_ERROR("bad cmd_type %i at byte %d\n",
1179 header.header.cmd_type, 1155 header->header.cmd_type,
1180 cmdbuf->buf - sizeof(header)); 1156 cmdbuf->buffer->iterator - (int)sizeof(*header));
1181 ret = -EINVAL; 1157 ret = -EINVAL;
1182 goto cleanup; 1158 goto cleanup;
1183 } 1159 }
diff --git a/drivers/gpu/drm/radeon/r300_reg.h b/drivers/gpu/drm/radeon/r300_reg.h
index 4b7afef35a65..1a0d5362cd79 100644
--- a/drivers/gpu/drm/radeon/r300_reg.h
+++ b/drivers/gpu/drm/radeon/r300_reg.h
@@ -900,6 +900,7 @@
900# define R300_TX_FORMAT_FL_I32 0x1B 900# define R300_TX_FORMAT_FL_I32 0x1B
901# define R300_TX_FORMAT_FL_I32A32 0x1C 901# define R300_TX_FORMAT_FL_I32A32 0x1C
902# define R300_TX_FORMAT_FL_R32G32B32A32 0x1D 902# define R300_TX_FORMAT_FL_R32G32B32A32 0x1D
903# define R300_TX_FORMAT_ATI2N 0x1F
903 /* alpha modes, convenience mostly */ 904 /* alpha modes, convenience mostly */
904 /* if you have alpha, pick constant appropriate to the 905 /* if you have alpha, pick constant appropriate to the
905 number of channels (1 for I8, 2 for I8A8, 4 for R8G8B8A8, etc */ 906 number of channels (1 for I8, 2 for I8A8, 4 for R8G8B8A8, etc */
@@ -951,6 +952,7 @@
951# define R300_TXO_ENDIAN_HALFDW_SWAP (3 << 0) 952# define R300_TXO_ENDIAN_HALFDW_SWAP (3 << 0)
952# define R300_TXO_MACRO_TILE (1 << 2) 953# define R300_TXO_MACRO_TILE (1 << 2)
953# define R300_TXO_MICRO_TILE (1 << 3) 954# define R300_TXO_MICRO_TILE (1 << 3)
955# define R300_TXO_MICRO_TILE_SQUARE (2 << 3)
954# define R300_TXO_OFFSET_MASK 0xffffffe0 956# define R300_TXO_OFFSET_MASK 0xffffffe0
955# define R300_TXO_OFFSET_SHIFT 5 957# define R300_TXO_OFFSET_SHIFT 5
956 /* END: Guess from R200 */ 958 /* END: Guess from R200 */
@@ -1359,6 +1361,7 @@
1359# define R300_COLORPITCH_MASK 0x00001FF8 /* GUESS */ 1361# define R300_COLORPITCH_MASK 0x00001FF8 /* GUESS */
1360# define R300_COLOR_TILE_ENABLE (1 << 16) /* GUESS */ 1362# define R300_COLOR_TILE_ENABLE (1 << 16) /* GUESS */
1361# define R300_COLOR_MICROTILE_ENABLE (1 << 17) /* GUESS */ 1363# define R300_COLOR_MICROTILE_ENABLE (1 << 17) /* GUESS */
1364# define R300_COLOR_MICROTILE_SQUARE_ENABLE (2 << 17)
1362# define R300_COLOR_ENDIAN_NO_SWAP (0 << 18) /* GUESS */ 1365# define R300_COLOR_ENDIAN_NO_SWAP (0 << 18) /* GUESS */
1363# define R300_COLOR_ENDIAN_WORD_SWAP (1 << 18) /* GUESS */ 1366# define R300_COLOR_ENDIAN_WORD_SWAP (1 << 18) /* GUESS */
1364# define R300_COLOR_ENDIAN_DWORD_SWAP (2 << 18) /* GUESS */ 1367# define R300_COLOR_ENDIAN_DWORD_SWAP (2 << 18) /* GUESS */
diff --git a/drivers/gpu/drm/radeon/r420.c b/drivers/gpu/drm/radeon/r420.c
index 1cefdbcc0850..c2bda4ad62e7 100644
--- a/drivers/gpu/drm/radeon/r420.c
+++ b/drivers/gpu/drm/radeon/r420.c
@@ -26,34 +26,20 @@
26 * Jerome Glisse 26 * Jerome Glisse
27 */ 27 */
28#include <linux/seq_file.h> 28#include <linux/seq_file.h>
29#include <linux/slab.h>
29#include "drmP.h" 30#include "drmP.h"
30#include "radeon_reg.h" 31#include "radeon_reg.h"
31#include "radeon.h" 32#include "radeon.h"
33#include "radeon_asic.h"
32#include "atom.h" 34#include "atom.h"
35#include "r100d.h"
33#include "r420d.h" 36#include "r420d.h"
37#include "r420_reg_safe.h"
34 38
35int r420_mc_init(struct radeon_device *rdev) 39static void r420_set_reg_safe(struct radeon_device *rdev)
36{ 40{
37 int r; 41 rdev->config.r300.reg_safe_bm = r420_reg_safe_bm;
38 42 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r420_reg_safe_bm);
39 /* Setup GPU memory space */
40 rdev->mc.vram_location = 0xFFFFFFFFUL;
41 rdev->mc.gtt_location = 0xFFFFFFFFUL;
42 if (rdev->flags & RADEON_IS_AGP) {
43 r = radeon_agp_init(rdev);
44 if (r) {
45 printk(KERN_WARNING "[drm] Disabling AGP\n");
46 rdev->flags &= ~RADEON_IS_AGP;
47 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
48 } else {
49 rdev->mc.gtt_location = rdev->mc.agp_base;
50 }
51 }
52 r = radeon_mc_setup(rdev);
53 if (r) {
54 return r;
55 }
56 return 0;
57} 43}
58 44
59void r420_pipes_init(struct radeon_device *rdev) 45void r420_pipes_init(struct radeon_device *rdev)
@@ -63,7 +49,8 @@ void r420_pipes_init(struct radeon_device *rdev)
63 unsigned num_pipes; 49 unsigned num_pipes;
64 50
65 /* GA_ENHANCE workaround TCL deadlock issue */ 51 /* GA_ENHANCE workaround TCL deadlock issue */
66 WREG32(0x4274, (1 << 0) | (1 << 1) | (1 << 2) | (1 << 3)); 52 WREG32(R300_GA_ENHANCE, R300_GA_DEADLOCK_CNTL | R300_GA_FASTSYNC_CNTL |
53 (1 << 2) | (1 << 3));
67 /* add idle wait as per freedesktop.org bug 24041 */ 54 /* add idle wait as per freedesktop.org bug 24041 */
68 if (r100_gui_wait_for_idle(rdev)) { 55 if (r100_gui_wait_for_idle(rdev)) {
69 printk(KERN_WARNING "Failed to wait GUI idle while " 56 printk(KERN_WARNING "Failed to wait GUI idle while "
@@ -72,6 +59,12 @@ void r420_pipes_init(struct radeon_device *rdev)
72 /* get max number of pipes */ 59 /* get max number of pipes */
73 gb_pipe_select = RREG32(0x402C); 60 gb_pipe_select = RREG32(0x402C);
74 num_pipes = ((gb_pipe_select >> 12) & 3) + 1; 61 num_pipes = ((gb_pipe_select >> 12) & 3) + 1;
62
63 /* SE chips have 1 pipe */
64 if ((rdev->pdev->device == 0x5e4c) ||
65 (rdev->pdev->device == 0x5e4f))
66 num_pipes = 1;
67
75 rdev->num_gb_pipes = num_pipes; 68 rdev->num_gb_pipes = num_pipes;
76 tmp = 0; 69 tmp = 0;
77 switch (num_pipes) { 70 switch (num_pipes) {
@@ -91,17 +84,17 @@ void r420_pipes_init(struct radeon_device *rdev)
91 tmp = (7 << 1); 84 tmp = (7 << 1);
92 break; 85 break;
93 } 86 }
94 WREG32(0x42C8, (1 << num_pipes) - 1); 87 WREG32(R500_SU_REG_DEST, (1 << num_pipes) - 1);
95 /* Sub pixel 1/12 so we can have 4K rendering according to doc */ 88 /* Sub pixel 1/12 so we can have 4K rendering according to doc */
96 tmp |= (1 << 4) | (1 << 0); 89 tmp |= R300_TILE_SIZE_16 | R300_ENABLE_TILING;
97 WREG32(0x4018, tmp); 90 WREG32(R300_GB_TILE_CONFIG, tmp);
98 if (r100_gui_wait_for_idle(rdev)) { 91 if (r100_gui_wait_for_idle(rdev)) {
99 printk(KERN_WARNING "Failed to wait GUI idle while " 92 printk(KERN_WARNING "Failed to wait GUI idle while "
100 "programming pipes. Bad things might happen.\n"); 93 "programming pipes. Bad things might happen.\n");
101 } 94 }
102 95
103 tmp = RREG32(0x170C); 96 tmp = RREG32(R300_DST_PIPE_CONFIG);
104 WREG32(0x170C, tmp | (1 << 31)); 97 WREG32(R300_DST_PIPE_CONFIG, tmp | R300_PIPE_AUTO_CONFIG);
105 98
106 WREG32(R300_RB2D_DSTCACHE_MODE, 99 WREG32(R300_RB2D_DSTCACHE_MODE,
107 RREG32(R300_RB2D_DSTCACHE_MODE) | 100 RREG32(R300_RB2D_DSTCACHE_MODE) |
@@ -165,10 +158,41 @@ static void r420_clock_resume(struct radeon_device *rdev)
165 WREG32_PLL(R_00000D_SCLK_CNTL, sclk_cntl); 158 WREG32_PLL(R_00000D_SCLK_CNTL, sclk_cntl);
166} 159}
167 160
161static void r420_cp_errata_init(struct radeon_device *rdev)
162{
163 /* RV410 and R420 can lock up if CP DMA to host memory happens
164 * while the 2D engine is busy.
165 *
166 * The proper workaround is to queue a RESYNC at the beginning
167 * of the CP init, apparently.
168 */
169 radeon_scratch_get(rdev, &rdev->config.r300.resync_scratch);
170 radeon_ring_lock(rdev, 8);
171 radeon_ring_write(rdev, PACKET0(R300_CP_RESYNC_ADDR, 1));
172 radeon_ring_write(rdev, rdev->config.r300.resync_scratch);
173 radeon_ring_write(rdev, 0xDEADBEEF);
174 radeon_ring_unlock_commit(rdev);
175}
176
177static void r420_cp_errata_fini(struct radeon_device *rdev)
178{
179 /* Catch the RESYNC we dispatched all the way back,
180 * at the very beginning of the CP init.
181 */
182 radeon_ring_lock(rdev, 8);
183 radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
184 radeon_ring_write(rdev, R300_RB3D_DC_FINISH);
185 radeon_ring_unlock_commit(rdev);
186 radeon_scratch_free(rdev, rdev->config.r300.resync_scratch);
187}
188
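For context, the two errata helpers above are meant to bracket CP use on these chips: the RESYNC is queued right after the CP ring comes up, and is caught again just before the CP is shut down. A minimal sketch of that ordering, as wired up later in this patch by r420_startup() and r420_suspend() (simplified, error handling omitted):

	/* bring-up: start the CP, then immediately queue the RESYNC */
	r100_cp_init(rdev, 1024 * 1024);
	r420_cp_errata_init(rdev);

	/* tear-down: catch the RESYNC before the CP is disabled */
	r420_cp_errata_fini(rdev);
	r100_cp_disable(rdev);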
168static int r420_startup(struct radeon_device *rdev) 189static int r420_startup(struct radeon_device *rdev)
169{ 190{
170 int r; 191 int r;
171 192
193 /* set common regs */
194 r100_set_common_regs(rdev);
195 /* program mc */
172 r300_mc_program(rdev); 196 r300_mc_program(rdev);
173 /* Resume clock */ 197 /* Resume clock */
174 r420_clock_resume(rdev); 198 r420_clock_resume(rdev);
@@ -186,14 +210,15 @@ static int r420_startup(struct radeon_device *rdev)
186 } 210 }
187 r420_pipes_init(rdev); 211 r420_pipes_init(rdev);
188 /* Enable IRQ */ 212 /* Enable IRQ */
189 rdev->irq.sw_int = true;
190 r100_irq_set(rdev); 213 r100_irq_set(rdev);
214 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
191 /* 1M ring buffer */ 215 /* 1M ring buffer */
192 r = r100_cp_init(rdev, 1024 * 1024); 216 r = r100_cp_init(rdev, 1024 * 1024);
193 if (r) { 217 if (r) {
194 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); 218 dev_err(rdev->dev, "failed initializing CP (%d).\n", r);
195 return r; 219 return r;
196 } 220 }
221 r420_cp_errata_init(rdev);
197 r = r100_wb_init(rdev); 222 r = r100_wb_init(rdev);
198 if (r) { 223 if (r) {
199 dev_err(rdev->dev, "failed initializing WB (%d).\n", r); 224 dev_err(rdev->dev, "failed initializing WB (%d).\n", r);
@@ -229,12 +254,14 @@ int r420_resume(struct radeon_device *rdev)
229 } 254 }
230 /* Resume clock after posting */ 255 /* Resume clock after posting */
231 r420_clock_resume(rdev); 256 r420_clock_resume(rdev);
232 257 /* Initialize surface registers */
258 radeon_surface_init(rdev);
233 return r420_startup(rdev); 259 return r420_startup(rdev);
234} 260}
235 261
236int r420_suspend(struct radeon_device *rdev) 262int r420_suspend(struct radeon_device *rdev)
237{ 263{
264 r420_cp_errata_fini(rdev);
238 r100_cp_disable(rdev); 265 r100_cp_disable(rdev);
239 r100_wb_disable(rdev); 266 r100_wb_disable(rdev);
240 r100_irq_disable(rdev); 267 r100_irq_disable(rdev);
@@ -247,6 +274,7 @@ int r420_suspend(struct radeon_device *rdev)
247 274
248void r420_fini(struct radeon_device *rdev) 275void r420_fini(struct radeon_device *rdev)
249{ 276{
277 radeon_pm_fini(rdev);
250 r100_cp_fini(rdev); 278 r100_cp_fini(rdev);
251 r100_wb_fini(rdev); 279 r100_wb_fini(rdev);
252 r100_ib_fini(rdev); 280 r100_ib_fini(rdev);
@@ -258,7 +286,7 @@ void r420_fini(struct radeon_device *rdev)
258 radeon_agp_fini(rdev); 286 radeon_agp_fini(rdev);
259 radeon_irq_kms_fini(rdev); 287 radeon_irq_kms_fini(rdev);
260 radeon_fence_driver_fini(rdev); 288 radeon_fence_driver_fini(rdev);
261 radeon_object_fini(rdev); 289 radeon_bo_fini(rdev);
262 if (rdev->is_atom_bios) { 290 if (rdev->is_atom_bios) {
263 radeon_atombios_fini(rdev); 291 radeon_atombios_fini(rdev);
264 } else { 292 } else {
@@ -301,25 +329,22 @@ int r420_init(struct radeon_device *rdev)
301 RREG32(R_0007C0_CP_STAT)); 329 RREG32(R_0007C0_CP_STAT));
302 } 330 }
303 /* check if cards are posted or not */ 331 /* check if cards are posted or not */
304 if (!radeon_card_posted(rdev) && rdev->bios) { 332 if (radeon_boot_test_post_card(rdev) == false)
305 DRM_INFO("GPU not posted. posting now...\n"); 333 return -EINVAL;
306 if (rdev->is_atom_bios) { 334
307 atom_asic_init(rdev->mode_info.atom_context);
308 } else {
309 radeon_combios_asic_init(rdev->ddev);
310 }
311 }
312 /* Initialize clocks */ 335 /* Initialize clocks */
313 radeon_get_clock_info(rdev->ddev); 336 radeon_get_clock_info(rdev->ddev);
314 /* Initialize power management */ 337 /* Initialize power management */
315 radeon_pm_init(rdev); 338 radeon_pm_init(rdev);
316 /* Get vram informations */ 339 /* initialize AGP */
317 r300_vram_info(rdev); 340 if (rdev->flags & RADEON_IS_AGP) {
318 /* Initialize memory controller (also test AGP) */ 341 r = radeon_agp_init(rdev);
319 r = r420_mc_init(rdev); 342 if (r) {
320 if (r) { 343 radeon_agp_disable(rdev);
321 return r; 344 }
322 } 345 }
346 /* initialize memory controller */
347 r300_mc_init(rdev);
323 r420_debugfs(rdev); 348 r420_debugfs(rdev);
324 /* Fence driver */ 349 /* Fence driver */
325 r = radeon_fence_driver_init(rdev); 350 r = radeon_fence_driver_init(rdev);
@@ -331,10 +356,13 @@ int r420_init(struct radeon_device *rdev)
331 return r; 356 return r;
332 } 357 }
333 /* Memory manager */ 358 /* Memory manager */
334 r = radeon_object_init(rdev); 359 r = radeon_bo_init(rdev);
335 if (r) { 360 if (r) {
336 return r; 361 return r;
337 } 362 }
363 if (rdev->family == CHIP_R420)
364 r100_enable_bm(rdev);
365
338 if (rdev->flags & RADEON_IS_PCIE) { 366 if (rdev->flags & RADEON_IS_PCIE) {
339 r = rv370_pcie_gart_init(rdev); 367 r = rv370_pcie_gart_init(rdev);
340 if (r) 368 if (r)
@@ -345,22 +373,21 @@ int r420_init(struct radeon_device *rdev)
345 if (r) 373 if (r)
346 return r; 374 return r;
347 } 375 }
348 r300_set_reg_safe(rdev); 376 r420_set_reg_safe(rdev);
349 rdev->accel_working = true; 377 rdev->accel_working = true;
350 r = r420_startup(rdev); 378 r = r420_startup(rdev);
351 if (r) { 379 if (r) {
352 /* Something went wrong with the accel init, stop accel */ 380 /* Something went wrong with the accel init, stop accel */
353 dev_err(rdev->dev, "Disabling GPU acceleration\n"); 381 dev_err(rdev->dev, "Disabling GPU acceleration\n");
354 r420_suspend(rdev);
355 r100_cp_fini(rdev); 382 r100_cp_fini(rdev);
356 r100_wb_fini(rdev); 383 r100_wb_fini(rdev);
357 r100_ib_fini(rdev); 384 r100_ib_fini(rdev);
385 radeon_irq_kms_fini(rdev);
358 if (rdev->flags & RADEON_IS_PCIE) 386 if (rdev->flags & RADEON_IS_PCIE)
359 rv370_pcie_gart_fini(rdev); 387 rv370_pcie_gart_fini(rdev);
360 if (rdev->flags & RADEON_IS_PCI) 388 if (rdev->flags & RADEON_IS_PCI)
361 r100_pci_gart_fini(rdev); 389 r100_pci_gart_fini(rdev);
362 radeon_agp_fini(rdev); 390 radeon_agp_fini(rdev);
363 radeon_irq_kms_fini(rdev);
364 rdev->accel_working = false; 391 rdev->accel_working = false;
365 } 392 }
366 return 0; 393 return 0;
diff --git a/drivers/gpu/drm/radeon/r500_reg.h b/drivers/gpu/drm/radeon/r500_reg.h
index 7baa73955563..0cf2ad2a5585 100644
--- a/drivers/gpu/drm/radeon/r500_reg.h
+++ b/drivers/gpu/drm/radeon/r500_reg.h
@@ -716,53 +716,63 @@
716 716
717#define AVIVO_DVOA_BIT_DEPTH_CONTROL 0x7988 717#define AVIVO_DVOA_BIT_DEPTH_CONTROL 0x7988
718 718
719#define AVIVO_GPIO_0 0x7e30 719#define AVIVO_DC_GPIO_HPD_A 0x7e94
720#define AVIVO_GPIO_1 0x7e40
721#define AVIVO_GPIO_2 0x7e50
722#define AVIVO_GPIO_3 0x7e60
723
724#define AVIVO_DC_GPIO_HPD_Y 0x7e9c 720#define AVIVO_DC_GPIO_HPD_Y 0x7e9c
725 721
726#define AVIVO_I2C_STATUS 0x7d30 722#define AVIVO_DC_I2C_STATUS1 0x7d30
727# define AVIVO_I2C_STATUS_DONE (1 << 0) 723# define AVIVO_DC_I2C_DONE (1 << 0)
728# define AVIVO_I2C_STATUS_NACK (1 << 1) 724# define AVIVO_DC_I2C_NACK (1 << 1)
729# define AVIVO_I2C_STATUS_HALT (1 << 2) 725# define AVIVO_DC_I2C_HALT (1 << 2)
730# define AVIVO_I2C_STATUS_GO (1 << 3) 726# define AVIVO_DC_I2C_GO (1 << 3)
731# define AVIVO_I2C_STATUS_MASK 0x7 727#define AVIVO_DC_I2C_RESET 0x7d34
732/* If radeon_mm_i2c is to be believed, this is HALT, NACK, and maybe 728# define AVIVO_DC_I2C_SOFT_RESET (1 << 0)
733 * DONE? */ 729# define AVIVO_DC_I2C_ABORT (1 << 8)
734# define AVIVO_I2C_STATUS_CMD_RESET 0x7 730#define AVIVO_DC_I2C_CONTROL1 0x7d38
735# define AVIVO_I2C_STATUS_CMD_WAIT (1 << 3) 731# define AVIVO_DC_I2C_START (1 << 0)
736#define AVIVO_I2C_STOP 0x7d34 732# define AVIVO_DC_I2C_STOP (1 << 1)
737#define AVIVO_I2C_START_CNTL 0x7d38 733# define AVIVO_DC_I2C_RECEIVE (1 << 2)
738# define AVIVO_I2C_START (1 << 8) 734# define AVIVO_DC_I2C_EN (1 << 8)
739# define AVIVO_I2C_CONNECTOR0 (0 << 16) 735# define AVIVO_DC_I2C_PIN_SELECT(x) ((x) << 16)
740# define AVIVO_I2C_CONNECTOR1 (1 << 16) 736# define AVIVO_SEL_DDC1 0
741#define R520_I2C_START (1<<0) 737# define AVIVO_SEL_DDC2 1
742#define R520_I2C_STOP (1<<1) 738# define AVIVO_SEL_DDC3 2
743#define R520_I2C_RX (1<<2) 739#define AVIVO_DC_I2C_CONTROL2 0x7d3c
744#define R520_I2C_EN (1<<8) 740# define AVIVO_DC_I2C_ADDR_COUNT(x) ((x) << 0)
745#define R520_I2C_DDC1 (0<<16) 741# define AVIVO_DC_I2C_DATA_COUNT(x) ((x) << 8)
746#define R520_I2C_DDC2 (1<<16) 742#define AVIVO_DC_I2C_CONTROL3 0x7d40
747#define R520_I2C_DDC3 (2<<16) 743# define AVIVO_DC_I2C_DATA_DRIVE_EN (1 << 0)
748#define R520_I2C_DDC_MASK (3<<16) 744# define AVIVO_DC_I2C_DATA_DRIVE_SEL (1 << 1)
749#define AVIVO_I2C_CONTROL2 0x7d3c 745# define AVIVO_DC_I2C_CLK_DRIVE_EN (1 << 7)
750# define AVIVO_I2C_7D3C_SIZE_SHIFT 8 746# define AVIVO_DC_I2C_RD_INTRA_BYTE_DELAY(x) ((x) << 8)
751# define AVIVO_I2C_7D3C_SIZE_MASK (0xf << 8) 747# define AVIVO_DC_I2C_WR_INTRA_BYTE_DELAY(x) ((x) << 16)
752#define AVIVO_I2C_CONTROL3 0x7d40 748# define AVIVO_DC_I2C_TIME_LIMIT(x) ((x) << 24)
753/* Reading is done 4 bytes at a time: read the bottom 8 bits from 749#define AVIVO_DC_I2C_DATA 0x7d44
754 * 7d44, four times in a row. 750#define AVIVO_DC_I2C_INTERRUPT_CONTROL 0x7d48
755 * Writing is a little more complex. First write DATA with 751# define AVIVO_DC_I2C_INTERRUPT_STATUS (1 << 0)
756 * 0xnnnnnnzz, then 0xnnnnnnyy, where nnnnnn is some non-deterministic 752# define AVIVO_DC_I2C_INTERRUPT_AK (1 << 8)
757 * magic number, zz is, I think, the slave address, and yy is the byte 753# define AVIVO_DC_I2C_INTERRUPT_ENABLE (1 << 16)
758 * you want to write. */ 754#define AVIVO_DC_I2C_ARBITRATION 0x7d50
759#define AVIVO_I2C_DATA 0x7d44 755# define AVIVO_DC_I2C_SW_WANTS_TO_USE_I2C (1 << 0)
760#define R520_I2C_ADDR_COUNT_MASK (0x7) 756# define AVIVO_DC_I2C_SW_CAN_USE_I2C (1 << 1)
761#define R520_I2C_DATA_COUNT_SHIFT (8) 757# define AVIVO_DC_I2C_SW_DONE_USING_I2C (1 << 8)
762#define R520_I2C_DATA_COUNT_MASK (0xF00) 758# define AVIVO_DC_I2C_HW_NEEDS_I2C (1 << 9)
763#define AVIVO_I2C_CNTL 0x7d50 759# define AVIVO_DC_I2C_ABORT_HDCP_I2C (1 << 16)
764# define AVIVO_I2C_EN (1 << 0) 760# define AVIVO_DC_I2C_HW_USING_I2C (1 << 17)
765# define AVIVO_I2C_RESET (1 << 8) 761
762#define AVIVO_DC_GPIO_DDC1_MASK 0x7e40
763#define AVIVO_DC_GPIO_DDC1_A 0x7e44
764#define AVIVO_DC_GPIO_DDC1_EN 0x7e48
765#define AVIVO_DC_GPIO_DDC1_Y 0x7e4c
766
767#define AVIVO_DC_GPIO_DDC2_MASK 0x7e50
768#define AVIVO_DC_GPIO_DDC2_A 0x7e54
769#define AVIVO_DC_GPIO_DDC2_EN 0x7e58
770#define AVIVO_DC_GPIO_DDC2_Y 0x7e5c
771
772#define AVIVO_DC_GPIO_DDC3_MASK 0x7e60
773#define AVIVO_DC_GPIO_DDC3_A 0x7e64
774#define AVIVO_DC_GPIO_DDC3_EN 0x7e68
775#define AVIVO_DC_GPIO_DDC3_Y 0x7e6c
766 776
767#define AVIVO_DISP_INTERRUPT_STATUS 0x7edc 777#define AVIVO_DISP_INTERRUPT_STATUS 0x7edc
768# define AVIVO_D1_VBLANK_INTERRUPT (1 << 4) 778# define AVIVO_D1_VBLANK_INTERRUPT (1 << 4)
diff --git a/drivers/gpu/drm/radeon/r520.c b/drivers/gpu/drm/radeon/r520.c
index f7435185c0a6..3c44b8d39318 100644
--- a/drivers/gpu/drm/radeon/r520.c
+++ b/drivers/gpu/drm/radeon/r520.c
@@ -27,6 +27,7 @@
27 */ 27 */
28#include "drmP.h" 28#include "drmP.h"
29#include "radeon.h" 29#include "radeon.h"
30#include "radeon_asic.h"
30#include "atom.h" 31#include "atom.h"
31#include "r520d.h" 32#include "r520d.h"
32 33
@@ -119,19 +120,15 @@ static void r520_vram_get_type(struct radeon_device *rdev)
119 rdev->mc.vram_width *= 2; 120 rdev->mc.vram_width *= 2;
120} 121}
121 122
122void r520_vram_info(struct radeon_device *rdev) 123void r520_mc_init(struct radeon_device *rdev)
123{ 124{
124 fixed20_12 a;
125 125
126 r520_vram_get_type(rdev); 126 r520_vram_get_type(rdev);
127
128 r100_vram_init_sizes(rdev); 127 r100_vram_init_sizes(rdev);
129 /* FIXME: we should enforce default clock in case GPU is not in 128 radeon_vram_location(rdev, &rdev->mc, 0);
130 * default setup 129 if (!(rdev->flags & RADEON_IS_AGP))
131 */ 130 radeon_gtt_location(rdev, &rdev->mc);
132 a.full = rfixed_const(100); 131 radeon_update_bandwidth_info(rdev);
133 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
134 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
135} 132}
136 133
137void r520_mc_program(struct radeon_device *rdev) 134void r520_mc_program(struct radeon_device *rdev)
@@ -185,8 +182,8 @@ static int r520_startup(struct radeon_device *rdev)
185 return r; 182 return r;
186 } 183 }
187 /* Enable IRQ */ 184 /* Enable IRQ */
188 rdev->irq.sw_int = true;
189 rs600_irq_set(rdev); 185 rs600_irq_set(rdev);
186 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
190 /* 1M ring buffer */ 187 /* 1M ring buffer */
191 r = r100_cp_init(rdev, 1024 * 1024); 188 r = r100_cp_init(rdev, 1024 * 1024);
192 if (r) { 189 if (r) {
@@ -221,6 +218,8 @@ int r520_resume(struct radeon_device *rdev)
221 atom_asic_init(rdev->mode_info.atom_context); 218 atom_asic_init(rdev->mode_info.atom_context);
222 /* Resume clock after posting */ 219 /* Resume clock after posting */
223 rv515_clock_startup(rdev); 220 rv515_clock_startup(rdev);
221 /* Initialize surface registers */
222 radeon_surface_init(rdev);
224 return r520_startup(rdev); 223 return r520_startup(rdev);
225} 224}
226 225
@@ -254,6 +253,9 @@ int r520_init(struct radeon_device *rdev)
254 RREG32(R_0007C0_CP_STAT)); 253 RREG32(R_0007C0_CP_STAT));
255 } 254 }
256 /* check if cards are posted or not */ 255 /* check if cards are posted or not */
256 if (radeon_boot_test_post_card(rdev) == false)
257 return -EINVAL;
258
257 if (!radeon_card_posted(rdev) && rdev->bios) { 259 if (!radeon_card_posted(rdev) && rdev->bios) {
258 DRM_INFO("GPU not posted. posting now...\n"); 260 DRM_INFO("GPU not posted. posting now...\n");
259 atom_asic_init(rdev->mode_info.atom_context); 261 atom_asic_init(rdev->mode_info.atom_context);
@@ -262,12 +264,15 @@ int r520_init(struct radeon_device *rdev)
262 radeon_get_clock_info(rdev->ddev); 264 radeon_get_clock_info(rdev->ddev);
263 /* Initialize power management */ 265 /* Initialize power management */
264 radeon_pm_init(rdev); 266 radeon_pm_init(rdev);
265 /* Get vram informations */ 267 /* initialize AGP */
266 r520_vram_info(rdev); 268 if (rdev->flags & RADEON_IS_AGP) {
267 /* Initialize memory controller (also test AGP) */ 269 r = radeon_agp_init(rdev);
268 r = r420_mc_init(rdev); 270 if (r) {
269 if (r) 271 radeon_agp_disable(rdev);
270 return r; 272 }
273 }
274 /* initialize memory controller */
275 r520_mc_init(rdev);
271 rv515_debugfs(rdev); 276 rv515_debugfs(rdev);
272 /* Fence driver */ 277 /* Fence driver */
273 r = radeon_fence_driver_init(rdev); 278 r = radeon_fence_driver_init(rdev);
@@ -277,7 +282,7 @@ int r520_init(struct radeon_device *rdev)
277 if (r) 282 if (r)
278 return r; 283 return r;
279 /* Memory manager */ 284 /* Memory manager */
280 r = radeon_object_init(rdev); 285 r = radeon_bo_init(rdev);
281 if (r) 286 if (r)
282 return r; 287 return r;
283 r = rv370_pcie_gart_init(rdev); 288 r = rv370_pcie_gart_init(rdev);
@@ -289,13 +294,12 @@ int r520_init(struct radeon_device *rdev)
289 if (r) { 294 if (r) {
290 /* Something went wrong with the accel init, stop accel */ 295 /* Something went wrong with the accel init, stop accel */
291 dev_err(rdev->dev, "Disabling GPU acceleration\n"); 296 dev_err(rdev->dev, "Disabling GPU acceleration\n");
292 rv515_suspend(rdev);
293 r100_cp_fini(rdev); 297 r100_cp_fini(rdev);
294 r100_wb_fini(rdev); 298 r100_wb_fini(rdev);
295 r100_ib_fini(rdev); 299 r100_ib_fini(rdev);
300 radeon_irq_kms_fini(rdev);
296 rv370_pcie_gart_fini(rdev); 301 rv370_pcie_gart_fini(rdev);
297 radeon_agp_fini(rdev); 302 radeon_agp_fini(rdev);
298 radeon_irq_kms_fini(rdev);
299 rdev->accel_working = false; 303 rdev->accel_working = false;
300 } 304 }
301 return 0; 305 return 0;
diff --git a/drivers/gpu/drm/radeon/r600.c b/drivers/gpu/drm/radeon/r600.c
index 278f646bc18e..8f3454e2056a 100644
--- a/drivers/gpu/drm/radeon/r600.c
+++ b/drivers/gpu/drm/radeon/r600.c
@@ -25,12 +25,14 @@
25 * Alex Deucher 25 * Alex Deucher
26 * Jerome Glisse 26 * Jerome Glisse
27 */ 27 */
28#include <linux/slab.h>
28#include <linux/seq_file.h> 29#include <linux/seq_file.h>
29#include <linux/firmware.h> 30#include <linux/firmware.h>
30#include <linux/platform_device.h> 31#include <linux/platform_device.h>
31#include "drmP.h" 32#include "drmP.h"
32#include "radeon_drm.h" 33#include "radeon_drm.h"
33#include "radeon.h" 34#include "radeon.h"
35#include "radeon_asic.h"
34#include "radeon_mode.h" 36#include "radeon_mode.h"
35#include "r600d.h" 37#include "r600d.h"
36#include "atom.h" 38#include "atom.h"
@@ -38,8 +40,10 @@
38 40
39#define PFP_UCODE_SIZE 576 41#define PFP_UCODE_SIZE 576
40#define PM4_UCODE_SIZE 1792 42#define PM4_UCODE_SIZE 1792
43#define RLC_UCODE_SIZE 768
41#define R700_PFP_UCODE_SIZE 848 44#define R700_PFP_UCODE_SIZE 848
42#define R700_PM4_UCODE_SIZE 1360 45#define R700_PM4_UCODE_SIZE 1360
46#define R700_RLC_UCODE_SIZE 1024
43 47
44/* Firmware Names */ 48/* Firmware Names */
45MODULE_FIRMWARE("radeon/R600_pfp.bin"); 49MODULE_FIRMWARE("radeon/R600_pfp.bin");
@@ -62,6 +66,8 @@ MODULE_FIRMWARE("radeon/RV730_pfp.bin");
62MODULE_FIRMWARE("radeon/RV730_me.bin"); 66MODULE_FIRMWARE("radeon/RV730_me.bin");
63MODULE_FIRMWARE("radeon/RV710_pfp.bin"); 67MODULE_FIRMWARE("radeon/RV710_pfp.bin");
64MODULE_FIRMWARE("radeon/RV710_me.bin"); 68MODULE_FIRMWARE("radeon/RV710_me.bin");
69MODULE_FIRMWARE("radeon/R600_rlc.bin");
70MODULE_FIRMWARE("radeon/R700_rlc.bin");
65 71
66int r600_debugfs_mc_info_init(struct radeon_device *rdev); 72int r600_debugfs_mc_info_init(struct radeon_device *rdev);
67 73
@@ -70,26 +76,293 @@ int r600_mc_wait_for_idle(struct radeon_device *rdev);
70void r600_gpu_init(struct radeon_device *rdev); 76void r600_gpu_init(struct radeon_device *rdev);
71void r600_fini(struct radeon_device *rdev); 77void r600_fini(struct radeon_device *rdev);
72 78
73/* 79/* hpd for digital panel detect/disconnect */
74 * R600 PCIE GART 80bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
75 */
76int r600_gart_clear_page(struct radeon_device *rdev, int i)
77{ 81{
78 void __iomem *ptr = (void *)rdev->gart.table.vram.ptr; 82 bool connected = false;
79 u64 pte;
80 83
81 if (i < 0 || i > rdev->gart.num_gpu_pages) 84 if (ASIC_IS_DCE3(rdev)) {
82 return -EINVAL; 85 switch (hpd) {
83 pte = 0; 86 case RADEON_HPD_1:
84 writeq(pte, ((void __iomem *)ptr) + (i * 8)); 87 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
85 return 0; 88 connected = true;
89 break;
90 case RADEON_HPD_2:
91 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
92 connected = true;
93 break;
94 case RADEON_HPD_3:
95 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
96 connected = true;
97 break;
98 case RADEON_HPD_4:
99 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
100 connected = true;
101 break;
102 /* DCE 3.2 */
103 case RADEON_HPD_5:
104 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
105 connected = true;
106 break;
107 case RADEON_HPD_6:
108 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
109 connected = true;
110 break;
111 default:
112 break;
113 }
114 } else {
115 switch (hpd) {
116 case RADEON_HPD_1:
117 if (RREG32(DC_HOT_PLUG_DETECT1_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
118 connected = true;
119 break;
120 case RADEON_HPD_2:
121 if (RREG32(DC_HOT_PLUG_DETECT2_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
122 connected = true;
123 break;
124 case RADEON_HPD_3:
125 if (RREG32(DC_HOT_PLUG_DETECT3_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
126 connected = true;
127 break;
128 default:
129 break;
130 }
131 }
132 return connected;
133}
134
135void r600_hpd_set_polarity(struct radeon_device *rdev,
136 enum radeon_hpd_id hpd)
137{
138 u32 tmp;
139 bool connected = r600_hpd_sense(rdev, hpd);
140
141 if (ASIC_IS_DCE3(rdev)) {
142 switch (hpd) {
143 case RADEON_HPD_1:
144 tmp = RREG32(DC_HPD1_INT_CONTROL);
145 if (connected)
146 tmp &= ~DC_HPDx_INT_POLARITY;
147 else
148 tmp |= DC_HPDx_INT_POLARITY;
149 WREG32(DC_HPD1_INT_CONTROL, tmp);
150 break;
151 case RADEON_HPD_2:
152 tmp = RREG32(DC_HPD2_INT_CONTROL);
153 if (connected)
154 tmp &= ~DC_HPDx_INT_POLARITY;
155 else
156 tmp |= DC_HPDx_INT_POLARITY;
157 WREG32(DC_HPD2_INT_CONTROL, tmp);
158 break;
159 case RADEON_HPD_3:
160 tmp = RREG32(DC_HPD3_INT_CONTROL);
161 if (connected)
162 tmp &= ~DC_HPDx_INT_POLARITY;
163 else
164 tmp |= DC_HPDx_INT_POLARITY;
165 WREG32(DC_HPD3_INT_CONTROL, tmp);
166 break;
167 case RADEON_HPD_4:
168 tmp = RREG32(DC_HPD4_INT_CONTROL);
169 if (connected)
170 tmp &= ~DC_HPDx_INT_POLARITY;
171 else
172 tmp |= DC_HPDx_INT_POLARITY;
173 WREG32(DC_HPD4_INT_CONTROL, tmp);
174 break;
175 case RADEON_HPD_5:
176 tmp = RREG32(DC_HPD5_INT_CONTROL);
177 if (connected)
178 tmp &= ~DC_HPDx_INT_POLARITY;
179 else
180 tmp |= DC_HPDx_INT_POLARITY;
181 WREG32(DC_HPD5_INT_CONTROL, tmp);
182 break;
183 /* DCE 3.2 */
184 case RADEON_HPD_6:
185 tmp = RREG32(DC_HPD6_INT_CONTROL);
186 if (connected)
187 tmp &= ~DC_HPDx_INT_POLARITY;
188 else
189 tmp |= DC_HPDx_INT_POLARITY;
190 WREG32(DC_HPD6_INT_CONTROL, tmp);
191 break;
192 default:
193 break;
194 }
195 } else {
196 switch (hpd) {
197 case RADEON_HPD_1:
198 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
199 if (connected)
200 tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
201 else
202 tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
203 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
204 break;
205 case RADEON_HPD_2:
206 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
207 if (connected)
208 tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
209 else
210 tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
211 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
212 break;
213 case RADEON_HPD_3:
214 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
215 if (connected)
216 tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
217 else
218 tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
219 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
220 break;
221 default:
222 break;
223 }
224 }
225}
226
227void r600_hpd_init(struct radeon_device *rdev)
228{
229 struct drm_device *dev = rdev->ddev;
230 struct drm_connector *connector;
231
232 if (ASIC_IS_DCE3(rdev)) {
233 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) | DC_HPDx_RX_INT_TIMER(0xfa);
234 if (ASIC_IS_DCE32(rdev))
235 tmp |= DC_HPDx_EN;
236
237 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
238 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
239 switch (radeon_connector->hpd.hpd) {
240 case RADEON_HPD_1:
241 WREG32(DC_HPD1_CONTROL, tmp);
242 rdev->irq.hpd[0] = true;
243 break;
244 case RADEON_HPD_2:
245 WREG32(DC_HPD2_CONTROL, tmp);
246 rdev->irq.hpd[1] = true;
247 break;
248 case RADEON_HPD_3:
249 WREG32(DC_HPD3_CONTROL, tmp);
250 rdev->irq.hpd[2] = true;
251 break;
252 case RADEON_HPD_4:
253 WREG32(DC_HPD4_CONTROL, tmp);
254 rdev->irq.hpd[3] = true;
255 break;
256 /* DCE 3.2 */
257 case RADEON_HPD_5:
258 WREG32(DC_HPD5_CONTROL, tmp);
259 rdev->irq.hpd[4] = true;
260 break;
261 case RADEON_HPD_6:
262 WREG32(DC_HPD6_CONTROL, tmp);
263 rdev->irq.hpd[5] = true;
264 break;
265 default:
266 break;
267 }
268 }
269 } else {
270 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
271 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
272 switch (radeon_connector->hpd.hpd) {
273 case RADEON_HPD_1:
274 WREG32(DC_HOT_PLUG_DETECT1_CONTROL, DC_HOT_PLUG_DETECTx_EN);
275 rdev->irq.hpd[0] = true;
276 break;
277 case RADEON_HPD_2:
278 WREG32(DC_HOT_PLUG_DETECT2_CONTROL, DC_HOT_PLUG_DETECTx_EN);
279 rdev->irq.hpd[1] = true;
280 break;
281 case RADEON_HPD_3:
282 WREG32(DC_HOT_PLUG_DETECT3_CONTROL, DC_HOT_PLUG_DETECTx_EN);
283 rdev->irq.hpd[2] = true;
284 break;
285 default:
286 break;
287 }
288 }
289 }
290 if (rdev->irq.installed)
291 r600_irq_set(rdev);
292}
293
294void r600_hpd_fini(struct radeon_device *rdev)
295{
296 struct drm_device *dev = rdev->ddev;
297 struct drm_connector *connector;
298
299 if (ASIC_IS_DCE3(rdev)) {
300 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
301 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
302 switch (radeon_connector->hpd.hpd) {
303 case RADEON_HPD_1:
304 WREG32(DC_HPD1_CONTROL, 0);
305 rdev->irq.hpd[0] = false;
306 break;
307 case RADEON_HPD_2:
308 WREG32(DC_HPD2_CONTROL, 0);
309 rdev->irq.hpd[1] = false;
310 break;
311 case RADEON_HPD_3:
312 WREG32(DC_HPD3_CONTROL, 0);
313 rdev->irq.hpd[2] = false;
314 break;
315 case RADEON_HPD_4:
316 WREG32(DC_HPD4_CONTROL, 0);
317 rdev->irq.hpd[3] = false;
318 break;
319 /* DCE 3.2 */
320 case RADEON_HPD_5:
321 WREG32(DC_HPD5_CONTROL, 0);
322 rdev->irq.hpd[4] = false;
323 break;
324 case RADEON_HPD_6:
325 WREG32(DC_HPD6_CONTROL, 0);
326 rdev->irq.hpd[5] = false;
327 break;
328 default:
329 break;
330 }
331 }
332 } else {
333 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
334 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
335 switch (radeon_connector->hpd.hpd) {
336 case RADEON_HPD_1:
337 WREG32(DC_HOT_PLUG_DETECT1_CONTROL, 0);
338 rdev->irq.hpd[0] = false;
339 break;
340 case RADEON_HPD_2:
341 WREG32(DC_HOT_PLUG_DETECT2_CONTROL, 0);
342 rdev->irq.hpd[1] = false;
343 break;
344 case RADEON_HPD_3:
345 WREG32(DC_HOT_PLUG_DETECT3_CONTROL, 0);
346 rdev->irq.hpd[2] = false;
347 break;
348 default:
349 break;
350 }
351 }
352 }
86} 353}
87 354
355/*
356 * R600 PCIE GART
357 */
88void r600_pcie_gart_tlb_flush(struct radeon_device *rdev) 358void r600_pcie_gart_tlb_flush(struct radeon_device *rdev)
89{ 359{
90 unsigned i; 360 unsigned i;
91 u32 tmp; 361 u32 tmp;
92 362
363 /* flush hdp cache so updates hit vram */
364 WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
365
93 WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12); 366 WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
94 WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12); 367 WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
95 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1)); 368 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
@@ -136,6 +409,7 @@ int r600_pcie_gart_enable(struct radeon_device *rdev)
136 r = radeon_gart_table_vram_pin(rdev); 409 r = radeon_gart_table_vram_pin(rdev);
137 if (r) 410 if (r)
138 return r; 411 return r;
412 radeon_gart_restore(rdev);
139 413
140 /* Setup L2 cache */ 414 /* Setup L2 cache */
141 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING | 415 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
@@ -180,7 +454,7 @@ int r600_pcie_gart_enable(struct radeon_device *rdev)
180void r600_pcie_gart_disable(struct radeon_device *rdev) 454void r600_pcie_gart_disable(struct radeon_device *rdev)
181{ 455{
182 u32 tmp; 456 u32 tmp;
183 int i; 457 int i, r;
184 458
185 /* Disable all tables */ 459 /* Disable all tables */
186 for (i = 0; i < 7; i++) 460 for (i = 0; i < 7; i++)
@@ -208,16 +482,20 @@ void r600_pcie_gart_disable(struct radeon_device *rdev)
208 WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp); 482 WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp);
209 WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp); 483 WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
210 if (rdev->gart.table.vram.robj) { 484 if (rdev->gart.table.vram.robj) {
211 radeon_object_kunmap(rdev->gart.table.vram.robj); 485 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
212 radeon_object_unpin(rdev->gart.table.vram.robj); 486 if (likely(r == 0)) {
487 radeon_bo_kunmap(rdev->gart.table.vram.robj);
488 radeon_bo_unpin(rdev->gart.table.vram.robj);
489 radeon_bo_unreserve(rdev->gart.table.vram.robj);
490 }
213 } 491 }
214} 492}
215 493
216void r600_pcie_gart_fini(struct radeon_device *rdev) 494void r600_pcie_gart_fini(struct radeon_device *rdev)
217{ 495{
496 radeon_gart_fini(rdev);
218 r600_pcie_gart_disable(rdev); 497 r600_pcie_gart_disable(rdev);
219 radeon_gart_table_vram_free(rdev); 498 radeon_gart_table_vram_free(rdev);
220 radeon_gart_fini(rdev);
221} 499}
222 500
223void r600_agp_enable(struct radeon_device *rdev) 501void r600_agp_enable(struct radeon_device *rdev)
@@ -335,12 +613,72 @@ static void r600_mc_program(struct radeon_device *rdev)
335 rv515_vga_render_disable(rdev); 613 rv515_vga_render_disable(rdev);
336} 614}
337 615
616/**
617 * r600_vram_gtt_location - try to find VRAM & GTT location
618 * @rdev: radeon device structure holding all necessary information
619 * @mc: memory controller structure holding memory information
620 *
621 * This function tries to place VRAM at the same address in the GPU's address
622 * space as it has in the CPU (PCI) address space, as some GPUs seem to have
623 * issues when it is reprogrammed to a different address.
624 *
625 * If there is not enough space to fit the non-visible VRAM after the
626 * aperture then we limit the VRAM size to the aperture.
627 *
628 * If we are using AGP then place VRAM adjacent to the AGP aperture, as we need
629 * them to be contiguous from the GPU's point of view so that we can program the
630 * GPU to catch accesses outside of them (weird GPU policy, see ??).
631 *
632 * This function never fails; the worst case is limiting VRAM or GTT.
633 *
634 * Note: GTT start, end and size should be initialized before calling this
635 * function on AGP platforms.
636 */
637void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
638{
639 u64 size_bf, size_af;
640
641 if (mc->mc_vram_size > 0xE0000000) {
642 /* leave room for at least 512M GTT */
643 dev_warn(rdev->dev, "limiting VRAM\n");
644 mc->real_vram_size = 0xE0000000;
645 mc->mc_vram_size = 0xE0000000;
646 }
647 if (rdev->flags & RADEON_IS_AGP) {
648 size_bf = mc->gtt_start;
649 size_af = 0xFFFFFFFF - mc->gtt_end + 1;
650 if (size_bf > size_af) {
651 if (mc->mc_vram_size > size_bf) {
652 dev_warn(rdev->dev, "limiting VRAM\n");
653 mc->real_vram_size = size_bf;
654 mc->mc_vram_size = size_bf;
655 }
656 mc->vram_start = mc->gtt_start - mc->mc_vram_size;
657 } else {
658 if (mc->mc_vram_size > size_af) {
659 dev_warn(rdev->dev, "limiting VRAM\n");
660 mc->real_vram_size = size_af;
661 mc->mc_vram_size = size_af;
662 }
663 mc->vram_start = mc->gtt_end;
664 }
665 mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
666 dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
667 mc->mc_vram_size >> 20, mc->vram_start,
668 mc->vram_end, mc->real_vram_size >> 20);
669 } else {
670 u64 base = 0;
671 if (rdev->flags & RADEON_IS_IGP)
672 base = (RREG32(MC_VM_FB_LOCATION) & 0xFFFF) << 24;
673 radeon_vram_location(rdev, &rdev->mc, base);
674 radeon_gtt_location(rdev, mc);
675 }
676}
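To make the AGP branch of r600_vram_gtt_location() above concrete, here is a worked example with purely hypothetical numbers (a 512MB card whose AGP aperture sits at 0xD0000000-0xDFFFFFFF); the figures just trace the arithmetic of the function and are not taken from real hardware:

	/* gtt_start = 0xD0000000, gtt_end = 0xDFFFFFFF, mc_vram_size = 0x20000000
	 *
	 *   size_bf = gtt_start                = 0xD0000000  (space below the aperture)
	 *   size_af = 0xFFFFFFFF - gtt_end + 1 = 0x20000001  (space above the aperture)
	 *
	 * size_bf > size_af and the VRAM fits below the aperture, so:
	 *
	 *   vram_start = gtt_start - mc_vram_size      = 0xB0000000
	 *   vram_end   = vram_start + mc_vram_size - 1 = 0xCFFFFFFF
	 */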
677
338int r600_mc_init(struct radeon_device *rdev) 678int r600_mc_init(struct radeon_device *rdev)
339{ 679{
340 fixed20_12 a;
341 u32 tmp; 680 u32 tmp;
342 int chansize, numchan; 681 int chansize, numchan;
343 int r;
344 682
345 /* Get VRAM information */ 683 /* Get VRAM information */
346 rdev->mc.vram_is_ddr = true; 684 rdev->mc.vram_is_ddr = true;
@@ -375,74 +713,17 @@ int r600_mc_init(struct radeon_device *rdev)
375 /* Setup GPU memory space */ 713 /* Setup GPU memory space */
376 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE); 714 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
377 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE); 715 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
378 716 rdev->mc.visible_vram_size = rdev->mc.aper_size;
379 if (rdev->mc.mc_vram_size > rdev->mc.aper_size) 717 /* FIXME remove this once we support unmappable VRAM */
718 if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
380 rdev->mc.mc_vram_size = rdev->mc.aper_size; 719 rdev->mc.mc_vram_size = rdev->mc.aper_size;
381
382 if (rdev->mc.real_vram_size > rdev->mc.aper_size)
383 rdev->mc.real_vram_size = rdev->mc.aper_size; 720 rdev->mc.real_vram_size = rdev->mc.aper_size;
721 }
722 r600_vram_gtt_location(rdev, &rdev->mc);
384 723
385 if (rdev->flags & RADEON_IS_AGP) { 724 if (rdev->flags & RADEON_IS_IGP)
386 r = radeon_agp_init(rdev); 725 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
387 if (r) 726 radeon_update_bandwidth_info(rdev);
388 return r;
389 /* gtt_size is setup by radeon_agp_init */
390 rdev->mc.gtt_location = rdev->mc.agp_base;
391 tmp = 0xFFFFFFFFUL - rdev->mc.agp_base - rdev->mc.gtt_size;
392 /* Try to put vram before or after AGP because we
393 * we want SYSTEM_APERTURE to cover both VRAM and
394 * AGP so that GPU can catch out of VRAM/AGP access
395 */
396 if (rdev->mc.gtt_location > rdev->mc.mc_vram_size) {
397 /* Enought place before */
398 rdev->mc.vram_location = rdev->mc.gtt_location -
399 rdev->mc.mc_vram_size;
400 } else if (tmp > rdev->mc.mc_vram_size) {
401 /* Enought place after */
402 rdev->mc.vram_location = rdev->mc.gtt_location +
403 rdev->mc.gtt_size;
404 } else {
405 /* Try to setup VRAM then AGP might not
406 * not work on some card
407 */
408 rdev->mc.vram_location = 0x00000000UL;
409 rdev->mc.gtt_location = rdev->mc.mc_vram_size;
410 }
411 } else {
412 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
413 rdev->mc.vram_location = (RREG32(MC_VM_FB_LOCATION) &
414 0xFFFF) << 24;
415 tmp = rdev->mc.vram_location + rdev->mc.mc_vram_size;
416 if ((0xFFFFFFFFUL - tmp) >= rdev->mc.gtt_size) {
417 /* Enough place after vram */
418 rdev->mc.gtt_location = tmp;
419 } else if (rdev->mc.vram_location >= rdev->mc.gtt_size) {
420 /* Enough place before vram */
421 rdev->mc.gtt_location = 0;
422 } else {
423 /* Not enough place after or before shrink
424 * gart size
425 */
426 if (rdev->mc.vram_location > (0xFFFFFFFFUL - tmp)) {
427 rdev->mc.gtt_location = 0;
428 rdev->mc.gtt_size = rdev->mc.vram_location;
429 } else {
430 rdev->mc.gtt_location = tmp;
431 rdev->mc.gtt_size = 0xFFFFFFFFUL - tmp;
432 }
433 }
434 rdev->mc.gtt_location = rdev->mc.mc_vram_size;
435 }
436 rdev->mc.vram_start = rdev->mc.vram_location;
437 rdev->mc.vram_end = rdev->mc.vram_location + rdev->mc.mc_vram_size - 1;
438 rdev->mc.gtt_start = rdev->mc.gtt_location;
439 rdev->mc.gtt_end = rdev->mc.gtt_location + rdev->mc.gtt_size - 1;
440 /* FIXME: we should enforce default clock in case GPU is not in
441 * default setup
442 */
443 a.full = rfixed_const(100);
444 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
445 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
446 return 0; 727 return 0;
447} 728}
448 729
@@ -697,6 +978,9 @@ void r600_gpu_init(struct radeon_device *rdev)
697{ 978{
698 u32 tiling_config; 979 u32 tiling_config;
699 u32 ramcfg; 980 u32 ramcfg;
981 u32 backend_map;
982 u32 cc_rb_backend_disable;
983 u32 cc_gc_shader_pipe_config;
700 u32 tmp; 984 u32 tmp;
701 int i, j; 985 int i, j;
702 u32 sq_config; 986 u32 sq_config;
@@ -806,8 +1090,11 @@ void r600_gpu_init(struct radeon_device *rdev)
806 default: 1090 default:
807 break; 1091 break;
808 } 1092 }
1093 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
1094 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
809 tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT); 1095 tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
810 tiling_config |= GROUP_SIZE(0); 1096 tiling_config |= GROUP_SIZE(0);
1097 rdev->config.r600.tiling_group_size = 256;
811 tmp = (ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT; 1098 tmp = (ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
812 if (tmp > 3) { 1099 if (tmp > 3) {
813 tiling_config |= ROW_TILING(3); 1100 tiling_config |= ROW_TILING(3);
@@ -817,24 +1104,34 @@ void r600_gpu_init(struct radeon_device *rdev)
817 tiling_config |= SAMPLE_SPLIT(tmp); 1104 tiling_config |= SAMPLE_SPLIT(tmp);
818 } 1105 }
819 tiling_config |= BANK_SWAPS(1); 1106 tiling_config |= BANK_SWAPS(1);
820 tmp = r600_get_tile_pipe_to_backend_map(rdev->config.r600.max_tile_pipes, 1107
821 rdev->config.r600.max_backends, 1108 cc_rb_backend_disable = RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000;
822 (0xff << rdev->config.r600.max_backends) & 0xff); 1109 cc_rb_backend_disable |=
823 tiling_config |= BACKEND_MAP(tmp); 1110 BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << rdev->config.r600.max_backends) & R6XX_MAX_BACKENDS_MASK);
1111
1112 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
1113 cc_gc_shader_pipe_config |=
1114 INACTIVE_QD_PIPES((R6XX_MAX_PIPES_MASK << rdev->config.r600.max_pipes) & R6XX_MAX_PIPES_MASK);
1115 cc_gc_shader_pipe_config |=
1116 INACTIVE_SIMDS((R6XX_MAX_SIMDS_MASK << rdev->config.r600.max_simds) & R6XX_MAX_SIMDS_MASK);
1117
1118 backend_map = r600_get_tile_pipe_to_backend_map(rdev->config.r600.max_tile_pipes,
1119 (R6XX_MAX_BACKENDS -
1120 r600_count_pipe_bits((cc_rb_backend_disable &
1121 R6XX_MAX_BACKENDS_MASK) >> 16)),
1122 (cc_rb_backend_disable >> 16));
1123
1124 tiling_config |= BACKEND_MAP(backend_map);
824 WREG32(GB_TILING_CONFIG, tiling_config); 1125 WREG32(GB_TILING_CONFIG, tiling_config);
825 WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff); 1126 WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff);
826 WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff); 1127 WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff);
827 1128
828 tmp = BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << rdev->config.r600.max_backends) & R6XX_MAX_BACKENDS_MASK);
829 WREG32(CC_RB_BACKEND_DISABLE, tmp);
830
831 /* Setup pipes */ 1129 /* Setup pipes */
832 tmp = INACTIVE_QD_PIPES((R6XX_MAX_PIPES_MASK << rdev->config.r600.max_pipes) & R6XX_MAX_PIPES_MASK); 1130 WREG32(CC_RB_BACKEND_DISABLE, cc_rb_backend_disable);
833 tmp |= INACTIVE_SIMDS((R6XX_MAX_SIMDS_MASK << rdev->config.r600.max_simds) & R6XX_MAX_SIMDS_MASK); 1131 WREG32(CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
834 WREG32(CC_GC_SHADER_PIPE_CONFIG, tmp); 1132 WREG32(GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
835 WREG32(GC_USER_SHADER_PIPE_CONFIG, tmp);
836 1133
837 tmp = R6XX_MAX_BACKENDS - r600_count_pipe_bits(tmp & INACTIVE_QD_PIPES_MASK); 1134 tmp = R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
838 WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK); 1135 WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK);
839 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK); 1136 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK);
840 1137
@@ -1101,7 +1398,6 @@ void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
1101 (void)RREG32(PCIE_PORT_DATA); 1398 (void)RREG32(PCIE_PORT_DATA);
1102} 1399}
1103 1400
1104
1105/* 1401/*
1106 * CP & Ring 1402 * CP & Ring
1107 */ 1403 */
@@ -1110,11 +1406,12 @@ void r600_cp_stop(struct radeon_device *rdev)
1110 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1)); 1406 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
1111} 1407}
1112 1408
1113int r600_cp_init_microcode(struct radeon_device *rdev) 1409int r600_init_microcode(struct radeon_device *rdev)
1114{ 1410{
1115 struct platform_device *pdev; 1411 struct platform_device *pdev;
1116 const char *chip_name; 1412 const char *chip_name;
1117 size_t pfp_req_size, me_req_size; 1413 const char *rlc_chip_name;
1414 size_t pfp_req_size, me_req_size, rlc_req_size;
1118 char fw_name[30]; 1415 char fw_name[30];
1119 int err; 1416 int err;
1120 1417
@@ -1128,30 +1425,62 @@ int r600_cp_init_microcode(struct radeon_device *rdev)
1128 } 1425 }
1129 1426
1130 switch (rdev->family) { 1427 switch (rdev->family) {
1131 case CHIP_R600: chip_name = "R600"; break; 1428 case CHIP_R600:
1132 case CHIP_RV610: chip_name = "RV610"; break; 1429 chip_name = "R600";
1133 case CHIP_RV630: chip_name = "RV630"; break; 1430 rlc_chip_name = "R600";
1134 case CHIP_RV620: chip_name = "RV620"; break; 1431 break;
1135 case CHIP_RV635: chip_name = "RV635"; break; 1432 case CHIP_RV610:
1136 case CHIP_RV670: chip_name = "RV670"; break; 1433 chip_name = "RV610";
1434 rlc_chip_name = "R600";
1435 break;
1436 case CHIP_RV630:
1437 chip_name = "RV630";
1438 rlc_chip_name = "R600";
1439 break;
1440 case CHIP_RV620:
1441 chip_name = "RV620";
1442 rlc_chip_name = "R600";
1443 break;
1444 case CHIP_RV635:
1445 chip_name = "RV635";
1446 rlc_chip_name = "R600";
1447 break;
1448 case CHIP_RV670:
1449 chip_name = "RV670";
1450 rlc_chip_name = "R600";
1451 break;
1137 case CHIP_RS780: 1452 case CHIP_RS780:
1138 case CHIP_RS880: chip_name = "RS780"; break; 1453 case CHIP_RS880:
1139 case CHIP_RV770: chip_name = "RV770"; break; 1454 chip_name = "RS780";
1455 rlc_chip_name = "R600";
1456 break;
1457 case CHIP_RV770:
1458 chip_name = "RV770";
1459 rlc_chip_name = "R700";
1460 break;
1140 case CHIP_RV730: 1461 case CHIP_RV730:
1141 case CHIP_RV740: chip_name = "RV730"; break; 1462 case CHIP_RV740:
1142 case CHIP_RV710: chip_name = "RV710"; break; 1463 chip_name = "RV730";
1464 rlc_chip_name = "R700";
1465 break;
1466 case CHIP_RV710:
1467 chip_name = "RV710";
1468 rlc_chip_name = "R700";
1469 break;
1143 default: BUG(); 1470 default: BUG();
1144 } 1471 }
1145 1472
1146 if (rdev->family >= CHIP_RV770) { 1473 if (rdev->family >= CHIP_RV770) {
1147 pfp_req_size = R700_PFP_UCODE_SIZE * 4; 1474 pfp_req_size = R700_PFP_UCODE_SIZE * 4;
1148 me_req_size = R700_PM4_UCODE_SIZE * 4; 1475 me_req_size = R700_PM4_UCODE_SIZE * 4;
1476 rlc_req_size = R700_RLC_UCODE_SIZE * 4;
1149 } else { 1477 } else {
1150 pfp_req_size = PFP_UCODE_SIZE * 4; 1478 pfp_req_size = PFP_UCODE_SIZE * 4;
1151 me_req_size = PM4_UCODE_SIZE * 12; 1479 me_req_size = PM4_UCODE_SIZE * 12;
1480 rlc_req_size = RLC_UCODE_SIZE * 4;
1152 } 1481 }
1153 1482
1154 DRM_INFO("Loading %s CP Microcode\n", chip_name); 1483 DRM_INFO("Loading %s Microcode\n", chip_name);
1155 1484
1156 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name); 1485 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
1157 err = request_firmware(&rdev->pfp_fw, fw_name, &pdev->dev); 1486 err = request_firmware(&rdev->pfp_fw, fw_name, &pdev->dev);
@@ -1175,6 +1504,18 @@ int r600_cp_init_microcode(struct radeon_device *rdev)
1175 rdev->me_fw->size, fw_name); 1504 rdev->me_fw->size, fw_name);
1176 err = -EINVAL; 1505 err = -EINVAL;
1177 } 1506 }
1507
1508 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
1509 err = request_firmware(&rdev->rlc_fw, fw_name, &pdev->dev);
1510 if (err)
1511 goto out;
1512 if (rdev->rlc_fw->size != rlc_req_size) {
1513 printk(KERN_ERR
1514 "r600_rlc: Bogus length %zu in firmware \"%s\"\n",
1515 rdev->rlc_fw->size, fw_name);
1516 err = -EINVAL;
1517 }
1518
1178out: 1519out:
1179 platform_device_unregister(pdev); 1520 platform_device_unregister(pdev);
1180 1521
@@ -1187,6 +1528,8 @@ out:
1187 rdev->pfp_fw = NULL; 1528 rdev->pfp_fw = NULL;
1188 release_firmware(rdev->me_fw); 1529 release_firmware(rdev->me_fw);
1189 rdev->me_fw = NULL; 1530 rdev->me_fw = NULL;
1531 release_firmware(rdev->rlc_fw);
1532 rdev->rlc_fw = NULL;
1190 } 1533 }
1191 return err; 1534 return err;
1192} 1535}
@@ -1324,6 +1667,12 @@ void r600_ring_init(struct radeon_device *rdev, unsigned ring_size)
1324 rdev->cp.align_mask = 16 - 1; 1667 rdev->cp.align_mask = 16 - 1;
1325} 1668}
1326 1669
1670void r600_cp_fini(struct radeon_device *rdev)
1671{
1672 r600_cp_stop(rdev);
1673 radeon_ring_fini(rdev);
1674}
1675
1327 1676
1328/* 1677/*
1329 * GPU scratch registers helpers function. 1678 * GPU scratch registers helpers function.
@@ -1381,10 +1730,16 @@ int r600_ring_test(struct radeon_device *rdev)
1381 1730
1382void r600_wb_disable(struct radeon_device *rdev) 1731void r600_wb_disable(struct radeon_device *rdev)
1383{ 1732{
1733 int r;
1734
1384 WREG32(SCRATCH_UMSK, 0); 1735 WREG32(SCRATCH_UMSK, 0);
1385 if (rdev->wb.wb_obj) { 1736 if (rdev->wb.wb_obj) {
1386 radeon_object_kunmap(rdev->wb.wb_obj); 1737 r = radeon_bo_reserve(rdev->wb.wb_obj, false);
1387 radeon_object_unpin(rdev->wb.wb_obj); 1738 if (unlikely(r != 0))
1739 return;
1740 radeon_bo_kunmap(rdev->wb.wb_obj);
1741 radeon_bo_unpin(rdev->wb.wb_obj);
1742 radeon_bo_unreserve(rdev->wb.wb_obj);
1388 } 1743 }
1389} 1744}
1390 1745
@@ -1392,7 +1747,7 @@ void r600_wb_fini(struct radeon_device *rdev)
1392{ 1747{
1393 r600_wb_disable(rdev); 1748 r600_wb_disable(rdev);
1394 if (rdev->wb.wb_obj) { 1749 if (rdev->wb.wb_obj) {
1395 radeon_object_unref(&rdev->wb.wb_obj); 1750 radeon_bo_unref(&rdev->wb.wb_obj);
1396 rdev->wb.wb = NULL; 1751 rdev->wb.wb = NULL;
1397 rdev->wb.wb_obj = NULL; 1752 rdev->wb.wb_obj = NULL;
1398 } 1753 }
@@ -1403,22 +1758,29 @@ int r600_wb_enable(struct radeon_device *rdev)
1403 int r; 1758 int r;
1404 1759
1405 if (rdev->wb.wb_obj == NULL) { 1760 if (rdev->wb.wb_obj == NULL) {
1406 r = radeon_object_create(rdev, NULL, RADEON_GPU_PAGE_SIZE, true, 1761 r = radeon_bo_create(rdev, NULL, RADEON_GPU_PAGE_SIZE, true,
1407 RADEON_GEM_DOMAIN_GTT, false, &rdev->wb.wb_obj); 1762 RADEON_GEM_DOMAIN_GTT, &rdev->wb.wb_obj);
1408 if (r) { 1763 if (r) {
1409 dev_warn(rdev->dev, "failed to create WB buffer (%d).\n", r); 1764 dev_warn(rdev->dev, "(%d) create WB bo failed\n", r);
1410 return r; 1765 return r;
1411 } 1766 }
1412 r = radeon_object_pin(rdev->wb.wb_obj, RADEON_GEM_DOMAIN_GTT, 1767 r = radeon_bo_reserve(rdev->wb.wb_obj, false);
1768 if (unlikely(r != 0)) {
1769 r600_wb_fini(rdev);
1770 return r;
1771 }
1772 r = radeon_bo_pin(rdev->wb.wb_obj, RADEON_GEM_DOMAIN_GTT,
1413 &rdev->wb.gpu_addr); 1773 &rdev->wb.gpu_addr);
1414 if (r) { 1774 if (r) {
1415 dev_warn(rdev->dev, "failed to pin WB buffer (%d).\n", r); 1775 radeon_bo_unreserve(rdev->wb.wb_obj);
1776 dev_warn(rdev->dev, "(%d) pin WB bo failed\n", r);
1416 r600_wb_fini(rdev); 1777 r600_wb_fini(rdev);
1417 return r; 1778 return r;
1418 } 1779 }
1419 r = radeon_object_kmap(rdev->wb.wb_obj, (void **)&rdev->wb.wb); 1780 r = radeon_bo_kmap(rdev->wb.wb_obj, (void **)&rdev->wb.wb);
1781 radeon_bo_unreserve(rdev->wb.wb_obj);
1420 if (r) { 1782 if (r) {
1421 dev_warn(rdev->dev, "failed to map WB buffer (%d).\n", r); 1783 dev_warn(rdev->dev, "(%d) map WB bo failed\n", r);
1422 r600_wb_fini(rdev); 1784 r600_wb_fini(rdev);
1423 return r; 1785 return r;
1424 } 1786 }
@@ -1433,41 +1795,41 @@ int r600_wb_enable(struct radeon_device *rdev)
1433void r600_fence_ring_emit(struct radeon_device *rdev, 1795void r600_fence_ring_emit(struct radeon_device *rdev,
1434 struct radeon_fence *fence) 1796 struct radeon_fence *fence)
1435{ 1797{
1798 /* Also consider EVENT_WRITE_EOP; it handles the interrupts + timestamps + events */
1799
1800 radeon_ring_write(rdev, PACKET3(PACKET3_EVENT_WRITE, 0));
1801 radeon_ring_write(rdev, CACHE_FLUSH_AND_INV_EVENT);
1802 /* wait for 3D idle clean */
1803 radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1));
1804 radeon_ring_write(rdev, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
1805 radeon_ring_write(rdev, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
1436 /* Emit fence sequence & fire IRQ */ 1806 /* Emit fence sequence & fire IRQ */
1437 radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1)); 1807 radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1));
1438 radeon_ring_write(rdev, ((rdev->fence_drv.scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2)); 1808 radeon_ring_write(rdev, ((rdev->fence_drv.scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
1439 radeon_ring_write(rdev, fence->seq); 1809 radeon_ring_write(rdev, fence->seq);
1440} 1810 /* CP_INTERRUPT packet 3 no longer exists, use packet 0 */
1441 1811 radeon_ring_write(rdev, PACKET0(CP_INT_STATUS, 0));
1442int r600_copy_dma(struct radeon_device *rdev, 1812 radeon_ring_write(rdev, RB_INT_STAT);
1443 uint64_t src_offset,
1444 uint64_t dst_offset,
1445 unsigned num_pages,
1446 struct radeon_fence *fence)
1447{
1448 /* FIXME: implement */
1449 return 0;
1450} 1813}
1451 1814
1452int r600_copy_blit(struct radeon_device *rdev, 1815int r600_copy_blit(struct radeon_device *rdev,
1453 uint64_t src_offset, uint64_t dst_offset, 1816 uint64_t src_offset, uint64_t dst_offset,
1454 unsigned num_pages, struct radeon_fence *fence) 1817 unsigned num_pages, struct radeon_fence *fence)
1455{ 1818{
1456 r600_blit_prepare_copy(rdev, num_pages * RADEON_GPU_PAGE_SIZE); 1819 int r;
1820
1821 mutex_lock(&rdev->r600_blit.mutex);
1822 rdev->r600_blit.vb_ib = NULL;
1823 r = r600_blit_prepare_copy(rdev, num_pages * RADEON_GPU_PAGE_SIZE);
1824 if (r) {
1825 if (rdev->r600_blit.vb_ib)
1826 radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
1827 mutex_unlock(&rdev->r600_blit.mutex);
1828 return r;
1829 }
1457 r600_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE); 1830 r600_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE);
1458 r600_blit_done_copy(rdev, fence); 1831 r600_blit_done_copy(rdev, fence);
1459 return 0; 1832 mutex_unlock(&rdev->r600_blit.mutex);
1460}
1461
1462int r600_irq_process(struct radeon_device *rdev)
1463{
1464 /* FIXME: implement */
1465 return 0;
1466}
1467
1468int r600_irq_set(struct radeon_device *rdev)
1469{
1470 /* FIXME: implement */
1471 return 0; 1833 return 0;
1472} 1834}
1473 1835
@@ -1506,6 +1868,14 @@ int r600_startup(struct radeon_device *rdev)
1506{ 1868{
1507 int r; 1869 int r;
1508 1870
1871 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
1872 r = r600_init_microcode(rdev);
1873 if (r) {
1874 DRM_ERROR("Failed to load firmware!\n");
1875 return r;
1876 }
1877 }
1878
1509 r600_mc_program(rdev); 1879 r600_mc_program(rdev);
1510 if (rdev->flags & RADEON_IS_AGP) { 1880 if (rdev->flags & RADEON_IS_AGP) {
1511 r600_agp_enable(rdev); 1881 r600_agp_enable(rdev);
@@ -1515,13 +1885,33 @@ int r600_startup(struct radeon_device *rdev)
1515 return r; 1885 return r;
1516 } 1886 }
1517 r600_gpu_init(rdev); 1887 r600_gpu_init(rdev);
1518 1888 r = r600_blit_init(rdev);
1519 r = radeon_object_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
1520 &rdev->r600_blit.shader_gpu_addr);
1521 if (r) { 1889 if (r) {
1522 DRM_ERROR("failed to pin blit object %d\n", r); 1890 r600_blit_fini(rdev);
1891 rdev->asic->copy = NULL;
1892 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
1893 }
1894 /* pin copy shader into vram */
1895 if (rdev->r600_blit.shader_obj) {
1896 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1897 if (unlikely(r != 0))
1898 return r;
1899 r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
1900 &rdev->r600_blit.shader_gpu_addr);
1901 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1902 if (r) {
1903 dev_err(rdev->dev, "(%d) pin blit object failed\n", r);
1904 return r;
1905 }
1906 }
1907 /* Enable IRQ */
1908 r = r600_irq_init(rdev);
1909 if (r) {
1910 DRM_ERROR("radeon: IH init failed (%d).\n", r);
1911 radeon_irq_kms_fini(rdev);
1523 return r; 1912 return r;
1524 } 1913 }
1914 r600_irq_set(rdev);
1525 1915
1526 r = radeon_ring_init(rdev, rdev->cp.ring_size); 1916 r = radeon_ring_init(rdev, rdev->cp.ring_size);
1527 if (r) 1917 if (r)
@@ -1578,18 +1968,35 @@ int r600_resume(struct radeon_device *rdev)
1578 DRM_ERROR("radeon: failed testing IB (%d).\n", r); 1968 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
1579 return r; 1969 return r;
1580 } 1970 }
1971
1972 r = r600_audio_init(rdev);
1973 if (r) {
1974 DRM_ERROR("radeon: audio resume failed\n");
1975 return r;
1976 }
1977
1581 return r; 1978 return r;
1582} 1979}
1583 1980
1584int r600_suspend(struct radeon_device *rdev) 1981int r600_suspend(struct radeon_device *rdev)
1585{ 1982{
1983 int r;
1984
1985 r600_audio_fini(rdev);
1586 /* FIXME: we should wait for ring to be empty */ 1986 /* FIXME: we should wait for ring to be empty */
1587 r600_cp_stop(rdev); 1987 r600_cp_stop(rdev);
1588 rdev->cp.ready = false; 1988 rdev->cp.ready = false;
1989 r600_irq_suspend(rdev);
1589 r600_wb_disable(rdev); 1990 r600_wb_disable(rdev);
1590 r600_pcie_gart_disable(rdev); 1991 r600_pcie_gart_disable(rdev);
1591 /* unpin shaders bo */ 1992 /* unpin shaders bo */
1592 radeon_object_unpin(rdev->r600_blit.shader_obj); 1993 if (rdev->r600_blit.shader_obj) {
1994 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1995 if (!r) {
1996 radeon_bo_unpin(rdev->r600_blit.shader_obj);
1997 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1998 }
1999 }
1593 return 0; 2000 return 0;
1594} 2001}
1595 2002
@@ -1627,7 +2034,11 @@ int r600_init(struct radeon_device *rdev)
1627 if (r) 2034 if (r)
1628 return r; 2035 return r;
1629 /* Post card if necessary */ 2036 /* Post card if necessary */
1630 if (!r600_card_posted(rdev) && rdev->bios) { 2037 if (!r600_card_posted(rdev)) {
2038 if (!rdev->bios) {
2039 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2040 return -EINVAL;
2041 }
1631 DRM_INFO("GPU not posted. posting now...\n"); 2042 DRM_INFO("GPU not posted. posting now...\n");
1632 atom_asic_init(rdev->mode_info.atom_context); 2043 atom_asic_init(rdev->mode_info.atom_context);
1633 } 2044 }
@@ -1646,73 +2057,79 @@ int r600_init(struct radeon_device *rdev)
1646 r = radeon_fence_driver_init(rdev); 2057 r = radeon_fence_driver_init(rdev);
1647 if (r) 2058 if (r)
1648 return r; 2059 return r;
2060 if (rdev->flags & RADEON_IS_AGP) {
2061 r = radeon_agp_init(rdev);
2062 if (r)
2063 radeon_agp_disable(rdev);
2064 }
1649 r = r600_mc_init(rdev); 2065 r = r600_mc_init(rdev);
1650 if (r) 2066 if (r)
1651 return r; 2067 return r;
1652 /* Memory manager */ 2068 /* Memory manager */
1653 r = radeon_object_init(rdev); 2069 r = radeon_bo_init(rdev);
2070 if (r)
2071 return r;
2072
2073 r = radeon_irq_kms_init(rdev);
1654 if (r) 2074 if (r)
1655 return r; 2075 return r;
2076
1656 rdev->cp.ring_obj = NULL; 2077 rdev->cp.ring_obj = NULL;
1657 r600_ring_init(rdev, 1024 * 1024); 2078 r600_ring_init(rdev, 1024 * 1024);
1658 2079
1659 if (!rdev->me_fw || !rdev->pfp_fw) { 2080 rdev->ih.ring_obj = NULL;
1660 r = r600_cp_init_microcode(rdev); 2081 r600_ih_ring_init(rdev, 64 * 1024);
1661 if (r) {
1662 DRM_ERROR("Failed to load firmware!\n");
1663 return r;
1664 }
1665 }
1666 2082
1667 r = r600_pcie_gart_init(rdev); 2083 r = r600_pcie_gart_init(rdev);
1668 if (r) 2084 if (r)
1669 return r; 2085 return r;
1670 2086
1671 rdev->accel_working = true; 2087 rdev->accel_working = true;
1672 r = r600_blit_init(rdev);
1673 if (r) {
1674 DRM_ERROR("radeon: failled blitter (%d).\n", r);
1675 return r;
1676 }
1677
1678 r = r600_startup(rdev); 2088 r = r600_startup(rdev);
1679 if (r) { 2089 if (r) {
1680 r600_suspend(rdev); 2090 dev_err(rdev->dev, "disabling GPU acceleration\n");
2091 r600_cp_fini(rdev);
1681 r600_wb_fini(rdev); 2092 r600_wb_fini(rdev);
1682 radeon_ring_fini(rdev); 2093 r600_irq_fini(rdev);
2094 radeon_irq_kms_fini(rdev);
1683 r600_pcie_gart_fini(rdev); 2095 r600_pcie_gart_fini(rdev);
1684 rdev->accel_working = false; 2096 rdev->accel_working = false;
1685 } 2097 }
1686 if (rdev->accel_working) { 2098 if (rdev->accel_working) {
1687 r = radeon_ib_pool_init(rdev); 2099 r = radeon_ib_pool_init(rdev);
1688 if (r) { 2100 if (r) {
1689 DRM_ERROR("radeon: failled initializing IB pool (%d).\n", r); 2101 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
1690 rdev->accel_working = false;
1691 }
1692 r = r600_ib_test(rdev);
1693 if (r) {
1694 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
1695 rdev->accel_working = false; 2102 rdev->accel_working = false;
2103 } else {
2104 r = r600_ib_test(rdev);
2105 if (r) {
2106 dev_err(rdev->dev, "IB test failed (%d).\n", r);
2107 rdev->accel_working = false;
2108 }
1696 } 2109 }
1697 } 2110 }
2111
2112 r = r600_audio_init(rdev);
2113 if (r)
2114 return r; /* TODO error handling */
1698 return 0; 2115 return 0;
1699} 2116}
1700 2117
1701void r600_fini(struct radeon_device *rdev) 2118void r600_fini(struct radeon_device *rdev)
1702{ 2119{
1703 /* Suspend operations */ 2120 radeon_pm_fini(rdev);
1704 r600_suspend(rdev); 2121 r600_audio_fini(rdev);
1705
1706 r600_blit_fini(rdev); 2122 r600_blit_fini(rdev);
1707 radeon_ring_fini(rdev); 2123 r600_cp_fini(rdev);
1708 r600_wb_fini(rdev); 2124 r600_wb_fini(rdev);
2125 r600_irq_fini(rdev);
2126 radeon_irq_kms_fini(rdev);
1709 r600_pcie_gart_fini(rdev); 2127 r600_pcie_gart_fini(rdev);
2128 radeon_agp_fini(rdev);
1710 radeon_gem_fini(rdev); 2129 radeon_gem_fini(rdev);
1711 radeon_fence_driver_fini(rdev); 2130 radeon_fence_driver_fini(rdev);
1712 radeon_clocks_fini(rdev); 2131 radeon_clocks_fini(rdev);
1713 if (rdev->flags & RADEON_IS_AGP) 2132 radeon_bo_fini(rdev);
1714 radeon_agp_fini(rdev);
1715 radeon_object_fini(rdev);
1716 radeon_atombios_fini(rdev); 2133 radeon_atombios_fini(rdev);
1717 kfree(rdev->bios); 2134 kfree(rdev->bios);
1718 rdev->bios = NULL; 2135 rdev->bios = NULL;
@@ -1798,8 +2215,672 @@ int r600_ib_test(struct radeon_device *rdev)
1798 return r; 2215 return r;
1799} 2216}
1800 2217
2218/*
2219 * Interrupts
2220 *
 2221 * Interrupts use a ring buffer on r6xx/r7xx hardware. It works much
 2222 * the same as the CP ring buffer, but in reverse: rather than the CPU
 2223 * writing to the ring and the GPU consuming, the GPU writes to the ring
 2224 * and the host consumes. As the host irq handler processes interrupts,
 2225 * it increments the rptr. When the rptr catches up with the wptr, all
 2226 * the current interrupts have been processed.
2227 */
2228
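A minimal sketch of the consumption model described above (editorial illustration, not part of this patch): handle_vector() is a hypothetical per-entry handler, and wptr is assumed to have been read back from the hardware the way r600_get_ih_wptr() does further down.

static void r600_ih_consume_sketch(struct radeon_device *rdev, u32 wptr)
{
	u32 rptr = rdev->ih.rptr;	/* last entry the host has processed */

	while (rptr != wptr) {		/* the GPU is ahead of us */
		handle_vector(&rdev->ih.ring[rptr / 4]);	/* one 128-bit entry */
		rptr = (rptr + 16) & rdev->ih.ptr_mask;		/* entries are 16 bytes */
	}
	rdev->ih.rptr = rptr;		/* rptr has caught up with wptr */
	WREG32(IH_RB_RPTR, rptr);	/* tell the hardware how far we got */
}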
2229void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size)
2230{
2231 u32 rb_bufsz;
2232
2233 /* Align ring size */
2234 rb_bufsz = drm_order(ring_size / 4);
2235 ring_size = (1 << rb_bufsz) * 4;
2236 rdev->ih.ring_size = ring_size;
2237 rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
2238 rdev->ih.rptr = 0;
2239}
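A quick worked example of the sizing above (editorial note, assuming drm_order() returns the power-of-two exponent rounded up): the 64 KiB IH ring requested in r600_init() gives rb_bufsz = drm_order(65536 / 4) = 14, so ring_size = (1 << 14) * 4 = 65536 bytes and ptr_mask = 0xffff, which lets byte offsets wrap with a single AND.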
2240
2241static int r600_ih_ring_alloc(struct radeon_device *rdev)
2242{
2243 int r;
2244
2245 /* Allocate ring buffer */
2246 if (rdev->ih.ring_obj == NULL) {
2247 r = radeon_bo_create(rdev, NULL, rdev->ih.ring_size,
2248 true,
2249 RADEON_GEM_DOMAIN_GTT,
2250 &rdev->ih.ring_obj);
2251 if (r) {
2252 DRM_ERROR("radeon: failed to create ih ring buffer (%d).\n", r);
2253 return r;
2254 }
2255 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
2256 if (unlikely(r != 0))
2257 return r;
2258 r = radeon_bo_pin(rdev->ih.ring_obj,
2259 RADEON_GEM_DOMAIN_GTT,
2260 &rdev->ih.gpu_addr);
2261 if (r) {
2262 radeon_bo_unreserve(rdev->ih.ring_obj);
2263 DRM_ERROR("radeon: failed to pin ih ring buffer (%d).\n", r);
2264 return r;
2265 }
2266 r = radeon_bo_kmap(rdev->ih.ring_obj,
2267 (void **)&rdev->ih.ring);
2268 radeon_bo_unreserve(rdev->ih.ring_obj);
2269 if (r) {
2270 DRM_ERROR("radeon: failed to map ih ring buffer (%d).\n", r);
2271 return r;
2272 }
2273 }
2274 return 0;
2275}
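The reserve/pin/kmap/unreserve sequence above is the radeon_bo pattern this patch switches the driver to throughout (the blit shader object handling in r600_suspend() follows the same reserve/unreserve shape). A minimal sketch of the pattern with error handling trimmed; pin_and_map_sketch() is purely illustrative and not part of the patch.

static int pin_and_map_sketch(struct radeon_bo *bo, u32 domain,
			      u64 *gpu_addr, void **cpu_ptr)
{
	int r;

	r = radeon_bo_reserve(bo, false);		/* lock the object */
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(bo, domain, gpu_addr);	/* keep it resident */
	if (!r)
		r = radeon_bo_kmap(bo, cpu_ptr);	/* CPU-visible mapping */
	radeon_bo_unreserve(bo);			/* unlock; pin and mapping remain */
	return r;
}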
2276
2277static void r600_ih_ring_fini(struct radeon_device *rdev)
2278{
2279 int r;
2280 if (rdev->ih.ring_obj) {
2281 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
2282 if (likely(r == 0)) {
2283 radeon_bo_kunmap(rdev->ih.ring_obj);
2284 radeon_bo_unpin(rdev->ih.ring_obj);
2285 radeon_bo_unreserve(rdev->ih.ring_obj);
2286 }
2287 radeon_bo_unref(&rdev->ih.ring_obj);
2288 rdev->ih.ring = NULL;
2289 rdev->ih.ring_obj = NULL;
2290 }
2291}
2292
2293static void r600_rlc_stop(struct radeon_device *rdev)
2294{
2295
2296 if (rdev->family >= CHIP_RV770) {
2297 /* r7xx asics need to soft reset RLC before halting */
2298 WREG32(SRBM_SOFT_RESET, SOFT_RESET_RLC);
2299 RREG32(SRBM_SOFT_RESET);
2300 udelay(15000);
2301 WREG32(SRBM_SOFT_RESET, 0);
2302 RREG32(SRBM_SOFT_RESET);
2303 }
2304
2305 WREG32(RLC_CNTL, 0);
2306}
2307
2308static void r600_rlc_start(struct radeon_device *rdev)
2309{
2310 WREG32(RLC_CNTL, RLC_ENABLE);
2311}
2312
2313static int r600_rlc_init(struct radeon_device *rdev)
2314{
2315 u32 i;
2316 const __be32 *fw_data;
2317
2318 if (!rdev->rlc_fw)
2319 return -EINVAL;
2320
2321 r600_rlc_stop(rdev);
2322
2323 WREG32(RLC_HB_BASE, 0);
2324 WREG32(RLC_HB_CNTL, 0);
2325 WREG32(RLC_HB_RPTR, 0);
2326 WREG32(RLC_HB_WPTR, 0);
2327 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
2328 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
2329 WREG32(RLC_MC_CNTL, 0);
2330 WREG32(RLC_UCODE_CNTL, 0);
2331
2332 fw_data = (const __be32 *)rdev->rlc_fw->data;
2333 if (rdev->family >= CHIP_RV770) {
2334 for (i = 0; i < R700_RLC_UCODE_SIZE; i++) {
2335 WREG32(RLC_UCODE_ADDR, i);
2336 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
2337 }
2338 } else {
2339 for (i = 0; i < RLC_UCODE_SIZE; i++) {
2340 WREG32(RLC_UCODE_ADDR, i);
2341 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
2342 }
2343 }
2344 WREG32(RLC_UCODE_ADDR, 0);
2345
2346 r600_rlc_start(rdev);
2347
2348 return 0;
2349}
2350
2351static void r600_enable_interrupts(struct radeon_device *rdev)
2352{
2353 u32 ih_cntl = RREG32(IH_CNTL);
2354 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
2355
2356 ih_cntl |= ENABLE_INTR;
2357 ih_rb_cntl |= IH_RB_ENABLE;
2358 WREG32(IH_CNTL, ih_cntl);
2359 WREG32(IH_RB_CNTL, ih_rb_cntl);
2360 rdev->ih.enabled = true;
2361}
2362
2363static void r600_disable_interrupts(struct radeon_device *rdev)
2364{
2365 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
2366 u32 ih_cntl = RREG32(IH_CNTL);
2367
2368 ih_rb_cntl &= ~IH_RB_ENABLE;
2369 ih_cntl &= ~ENABLE_INTR;
2370 WREG32(IH_RB_CNTL, ih_rb_cntl);
2371 WREG32(IH_CNTL, ih_cntl);
2372 /* set rptr, wptr to 0 */
2373 WREG32(IH_RB_RPTR, 0);
2374 WREG32(IH_RB_WPTR, 0);
2375 rdev->ih.enabled = false;
2376 rdev->ih.wptr = 0;
2377 rdev->ih.rptr = 0;
2378}
2379
2380static void r600_disable_interrupt_state(struct radeon_device *rdev)
2381{
2382 u32 tmp;
2383
2384 WREG32(CP_INT_CNTL, 0);
2385 WREG32(GRBM_INT_CNTL, 0);
2386 WREG32(DxMODE_INT_MASK, 0);
2387 if (ASIC_IS_DCE3(rdev)) {
2388 WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0);
2389 WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0);
2390 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2391 WREG32(DC_HPD1_INT_CONTROL, tmp);
2392 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2393 WREG32(DC_HPD2_INT_CONTROL, tmp);
2394 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2395 WREG32(DC_HPD3_INT_CONTROL, tmp);
2396 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2397 WREG32(DC_HPD4_INT_CONTROL, tmp);
2398 if (ASIC_IS_DCE32(rdev)) {
2399 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2400 WREG32(DC_HPD5_INT_CONTROL, tmp);
2401 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2402 WREG32(DC_HPD6_INT_CONTROL, tmp);
2403 }
2404 } else {
2405 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
2406 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
2407 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
2408 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
2409 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
2410 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
2411 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
2412 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
2413 }
2414}
2415
2416int r600_irq_init(struct radeon_device *rdev)
2417{
2418 int ret = 0;
2419 int rb_bufsz;
2420 u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
2421
2422 /* allocate ring */
2423 ret = r600_ih_ring_alloc(rdev);
2424 if (ret)
2425 return ret;
2426
2427 /* disable irqs */
2428 r600_disable_interrupts(rdev);
2429
2430 /* init rlc */
2431 ret = r600_rlc_init(rdev);
2432 if (ret) {
2433 r600_ih_ring_fini(rdev);
2434 return ret;
2435 }
2436
2437 /* setup interrupt control */
2438 /* set dummy read address to ring address */
2439 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
2440 interrupt_cntl = RREG32(INTERRUPT_CNTL);
2441 /* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
2442 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
2443 */
2444 interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
2445 /* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */
2446 interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
2447 WREG32(INTERRUPT_CNTL, interrupt_cntl);
2448
2449 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
2450 rb_bufsz = drm_order(rdev->ih.ring_size / 4);
2451
2452 ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
2453 IH_WPTR_OVERFLOW_CLEAR |
2454 (rb_bufsz << 1));
2455 /* WPTR writeback, not yet */
2456 /*ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;*/
2457 WREG32(IH_RB_WPTR_ADDR_LO, 0);
2458 WREG32(IH_RB_WPTR_ADDR_HI, 0);
2459
2460 WREG32(IH_RB_CNTL, ih_rb_cntl);
2461
2462 /* set rptr, wptr to 0 */
2463 WREG32(IH_RB_RPTR, 0);
2464 WREG32(IH_RB_WPTR, 0);
2465
2466 /* Default settings for IH_CNTL (disabled at first) */
2467 ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10);
2468 /* RPTR_REARM only works if msi's are enabled */
2469 if (rdev->msi_enabled)
2470 ih_cntl |= RPTR_REARM;
2471
2472#ifdef __BIG_ENDIAN
2473 ih_cntl |= IH_MC_SWAP(IH_MC_SWAP_32BIT);
2474#endif
2475 WREG32(IH_CNTL, ih_cntl);
2476
2477 /* force the active interrupt state to all disabled */
2478 r600_disable_interrupt_state(rdev);
2479
2480 /* enable irqs */
2481 r600_enable_interrupts(rdev);
2482
2483 return ret;
2484}
2485
2486void r600_irq_suspend(struct radeon_device *rdev)
2487{
2488 r600_disable_interrupts(rdev);
2489 r600_rlc_stop(rdev);
2490}
2491
2492void r600_irq_fini(struct radeon_device *rdev)
2493{
2494 r600_irq_suspend(rdev);
2495 r600_ih_ring_fini(rdev);
2496}
2497
2498int r600_irq_set(struct radeon_device *rdev)
2499{
2500 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2501 u32 mode_int = 0;
2502 u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0;
2503
2504 if (!rdev->irq.installed) {
2505 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
2506 return -EINVAL;
2507 }
2508 /* don't enable anything if the ih is disabled */
2509 if (!rdev->ih.enabled) {
2510 r600_disable_interrupts(rdev);
2511 /* force the active interrupt state to all disabled */
2512 r600_disable_interrupt_state(rdev);
2513 return 0;
2514 }
2515
2516 if (ASIC_IS_DCE3(rdev)) {
2517 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
2518 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
2519 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2520 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
2521 if (ASIC_IS_DCE32(rdev)) {
2522 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
2523 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
2524 }
2525 } else {
2526 hpd1 = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & ~DC_HPDx_INT_EN;
2527 hpd2 = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & ~DC_HPDx_INT_EN;
2528 hpd3 = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2529 }
2530
2531 if (rdev->irq.sw_int) {
2532 DRM_DEBUG("r600_irq_set: sw int\n");
2533 cp_int_cntl |= RB_INT_ENABLE;
2534 }
2535 if (rdev->irq.crtc_vblank_int[0]) {
2536 DRM_DEBUG("r600_irq_set: vblank 0\n");
2537 mode_int |= D1MODE_VBLANK_INT_MASK;
2538 }
2539 if (rdev->irq.crtc_vblank_int[1]) {
2540 DRM_DEBUG("r600_irq_set: vblank 1\n");
2541 mode_int |= D2MODE_VBLANK_INT_MASK;
2542 }
2543 if (rdev->irq.hpd[0]) {
2544 DRM_DEBUG("r600_irq_set: hpd 1\n");
2545 hpd1 |= DC_HPDx_INT_EN;
2546 }
2547 if (rdev->irq.hpd[1]) {
2548 DRM_DEBUG("r600_irq_set: hpd 2\n");
2549 hpd2 |= DC_HPDx_INT_EN;
2550 }
2551 if (rdev->irq.hpd[2]) {
2552 DRM_DEBUG("r600_irq_set: hpd 3\n");
2553 hpd3 |= DC_HPDx_INT_EN;
2554 }
2555 if (rdev->irq.hpd[3]) {
2556 DRM_DEBUG("r600_irq_set: hpd 4\n");
2557 hpd4 |= DC_HPDx_INT_EN;
2558 }
2559 if (rdev->irq.hpd[4]) {
2560 DRM_DEBUG("r600_irq_set: hpd 5\n");
2561 hpd5 |= DC_HPDx_INT_EN;
2562 }
2563 if (rdev->irq.hpd[5]) {
2564 DRM_DEBUG("r600_irq_set: hpd 6\n");
2565 hpd6 |= DC_HPDx_INT_EN;
2566 }
2567
2568 WREG32(CP_INT_CNTL, cp_int_cntl);
2569 WREG32(DxMODE_INT_MASK, mode_int);
2570 if (ASIC_IS_DCE3(rdev)) {
2571 WREG32(DC_HPD1_INT_CONTROL, hpd1);
2572 WREG32(DC_HPD2_INT_CONTROL, hpd2);
2573 WREG32(DC_HPD3_INT_CONTROL, hpd3);
2574 WREG32(DC_HPD4_INT_CONTROL, hpd4);
2575 if (ASIC_IS_DCE32(rdev)) {
2576 WREG32(DC_HPD5_INT_CONTROL, hpd5);
2577 WREG32(DC_HPD6_INT_CONTROL, hpd6);
2578 }
2579 } else {
2580 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
2581 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
2582 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, hpd3);
2583 }
2584
2585 return 0;
2586}
2587
2588static inline void r600_irq_ack(struct radeon_device *rdev,
2589 u32 *disp_int,
2590 u32 *disp_int_cont,
2591 u32 *disp_int_cont2)
2592{
2593 u32 tmp;
2594
2595 if (ASIC_IS_DCE3(rdev)) {
2596 *disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
2597 *disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
2598 *disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
2599 } else {
2600 *disp_int = RREG32(DISP_INTERRUPT_STATUS);
2601 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
2602 *disp_int_cont2 = 0;
2603 }
2604
2605 if (*disp_int & LB_D1_VBLANK_INTERRUPT)
2606 WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
2607 if (*disp_int & LB_D1_VLINE_INTERRUPT)
2608 WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
2609 if (*disp_int & LB_D2_VBLANK_INTERRUPT)
2610 WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
2611 if (*disp_int & LB_D2_VLINE_INTERRUPT)
2612 WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
2613 if (*disp_int & DC_HPD1_INTERRUPT) {
2614 if (ASIC_IS_DCE3(rdev)) {
2615 tmp = RREG32(DC_HPD1_INT_CONTROL);
2616 tmp |= DC_HPDx_INT_ACK;
2617 WREG32(DC_HPD1_INT_CONTROL, tmp);
2618 } else {
2619 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
2620 tmp |= DC_HPDx_INT_ACK;
2621 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
2622 }
2623 }
2624 if (*disp_int & DC_HPD2_INTERRUPT) {
2625 if (ASIC_IS_DCE3(rdev)) {
2626 tmp = RREG32(DC_HPD2_INT_CONTROL);
2627 tmp |= DC_HPDx_INT_ACK;
2628 WREG32(DC_HPD2_INT_CONTROL, tmp);
2629 } else {
2630 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
2631 tmp |= DC_HPDx_INT_ACK;
2632 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
2633 }
2634 }
2635 if (*disp_int_cont & DC_HPD3_INTERRUPT) {
2636 if (ASIC_IS_DCE3(rdev)) {
2637 tmp = RREG32(DC_HPD3_INT_CONTROL);
2638 tmp |= DC_HPDx_INT_ACK;
2639 WREG32(DC_HPD3_INT_CONTROL, tmp);
2640 } else {
2641 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
2642 tmp |= DC_HPDx_INT_ACK;
2643 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
2644 }
2645 }
2646 if (*disp_int_cont & DC_HPD4_INTERRUPT) {
2647 tmp = RREG32(DC_HPD4_INT_CONTROL);
2648 tmp |= DC_HPDx_INT_ACK;
2649 WREG32(DC_HPD4_INT_CONTROL, tmp);
2650 }
2651 if (ASIC_IS_DCE32(rdev)) {
2652 if (*disp_int_cont2 & DC_HPD5_INTERRUPT) {
2653 tmp = RREG32(DC_HPD5_INT_CONTROL);
2654 tmp |= DC_HPDx_INT_ACK;
2655 WREG32(DC_HPD5_INT_CONTROL, tmp);
2656 }
2657 if (*disp_int_cont2 & DC_HPD6_INTERRUPT) {
 2658 			tmp = RREG32(DC_HPD6_INT_CONTROL);
2659 tmp |= DC_HPDx_INT_ACK;
2660 WREG32(DC_HPD6_INT_CONTROL, tmp);
2661 }
2662 }
2663}
2664
2665void r600_irq_disable(struct radeon_device *rdev)
2666{
2667 u32 disp_int, disp_int_cont, disp_int_cont2;
2668
2669 r600_disable_interrupts(rdev);
2670 /* Wait and acknowledge irq */
2671 mdelay(1);
2672 r600_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2);
2673 r600_disable_interrupt_state(rdev);
2674}
2675
2676static inline u32 r600_get_ih_wptr(struct radeon_device *rdev)
2677{
2678 u32 wptr, tmp;
1801 2679
2680 /* XXX use writeback */
2681 wptr = RREG32(IH_RB_WPTR);
1802 2682
2683 if (wptr & RB_OVERFLOW) {
 2684 		/* When a ring buffer overflow happens, start parsing interrupts
 2685 		 * from the last vector that was not overwritten (wptr + 16).
 2686 		 * Hopefully this allows us to catch up.
 2687 		 */
2688 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
2689 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
2690 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
2691 tmp = RREG32(IH_RB_CNTL);
2692 tmp |= IH_WPTR_OVERFLOW_CLEAR;
2693 WREG32(IH_RB_CNTL, tmp);
2694 }
2695 return (wptr & rdev->ih.ptr_mask);
2696}
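Editorial worked example of the overflow path above: with ptr_mask = 0xffff, a read-back of wptr = 0x0120 with RB_OVERFLOW set moves rptr to (0x0120 + 16) & 0xffff = 0x0130, i.e. to the oldest entry that has not just been overwritten, and the overflow condition is cleared by writing IH_WPTR_OVERFLOW_CLEAR back into IH_RB_CNTL.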
2697
2698/* r600 IV Ring
2699 * Each IV ring entry is 128 bits:
2700 * [7:0] - interrupt source id
2701 * [31:8] - reserved
2702 * [59:32] - interrupt source data
2703 * [127:60] - reserved
2704 *
2705 * The basic interrupt vector entries
2706 * are decoded as follows:
2707 * src_id src_data description
2708 * 1 0 D1 Vblank
2709 * 1 1 D1 Vline
2710 * 5 0 D2 Vblank
2711 * 5 1 D2 Vline
2712 * 19 0 FP Hot plug detection A
2713 * 19 1 FP Hot plug detection B
2714 * 19 2 DAC A auto-detection
2715 * 19 3 DAC B auto-detection
2716 * 176 - CP_INT RB
2717 * 177 - CP_INT IB1
2718 * 178 - CP_INT IB2
2719 * 181 - EOP Interrupt
2720 * 233 - GUI Idle
2721 *
2722 * Note, these are based on r600 and may need to be
2723 * adjusted or added to on newer asics
2724 */
2725
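A minimal decode of a single entry, matching the layout above (editorial sketch; the masks mirror what r600_irq_process() does below, and r600_iv_decode_sketch() is not part of the patch).

static void r600_iv_decode_sketch(const u32 *ring, u32 rptr,
				  u32 *src_id, u32 *src_data)
{
	u32 i = rptr / 4;			/* rptr is in bytes, the ring is a u32 array */

	*src_id   = ring[i] & 0xff;		/* bits [7:0] of dword 0 */
	*src_data = ring[i + 1] & 0x0fffffff;	/* bits [59:32], i.e. the low 28 bits of dword 1 */
	/* dwords 2 and 3 of the entry are reserved on r600 */
}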
2726int r600_irq_process(struct radeon_device *rdev)
2727{
2728 u32 wptr = r600_get_ih_wptr(rdev);
2729 u32 rptr = rdev->ih.rptr;
2730 u32 src_id, src_data;
2731 u32 ring_index, disp_int, disp_int_cont, disp_int_cont2;
2732 unsigned long flags;
2733 bool queue_hotplug = false;
2734
2735 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
2736 if (!rdev->ih.enabled)
2737 return IRQ_NONE;
2738
2739 spin_lock_irqsave(&rdev->ih.lock, flags);
2740
2741 if (rptr == wptr) {
2742 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2743 return IRQ_NONE;
2744 }
2745 if (rdev->shutdown) {
2746 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2747 return IRQ_NONE;
2748 }
2749
2750restart_ih:
2751 /* display interrupts */
2752 r600_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2);
2753
2754 rdev->ih.wptr = wptr;
2755 while (rptr != wptr) {
2756 /* wptr/rptr are in bytes! */
2757 ring_index = rptr / 4;
2758 src_id = rdev->ih.ring[ring_index] & 0xff;
2759 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
2760
2761 switch (src_id) {
2762 case 1: /* D1 vblank/vline */
2763 switch (src_data) {
2764 case 0: /* D1 vblank */
2765 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
2766 drm_handle_vblank(rdev->ddev, 0);
2767 rdev->pm.vblank_sync = true;
2768 wake_up(&rdev->irq.vblank_queue);
2769 disp_int &= ~LB_D1_VBLANK_INTERRUPT;
2770 DRM_DEBUG("IH: D1 vblank\n");
2771 }
2772 break;
2773 case 1: /* D1 vline */
2774 if (disp_int & LB_D1_VLINE_INTERRUPT) {
2775 disp_int &= ~LB_D1_VLINE_INTERRUPT;
2776 DRM_DEBUG("IH: D1 vline\n");
2777 }
2778 break;
2779 default:
2780 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2781 break;
2782 }
2783 break;
2784 case 5: /* D2 vblank/vline */
2785 switch (src_data) {
2786 case 0: /* D2 vblank */
2787 if (disp_int & LB_D2_VBLANK_INTERRUPT) {
2788 drm_handle_vblank(rdev->ddev, 1);
2789 rdev->pm.vblank_sync = true;
2790 wake_up(&rdev->irq.vblank_queue);
2791 disp_int &= ~LB_D2_VBLANK_INTERRUPT;
2792 DRM_DEBUG("IH: D2 vblank\n");
2793 }
2794 break;
 2795 		case 1: /* D2 vline */
2796 if (disp_int & LB_D2_VLINE_INTERRUPT) {
2797 disp_int &= ~LB_D2_VLINE_INTERRUPT;
2798 DRM_DEBUG("IH: D2 vline\n");
2799 }
2800 break;
2801 default:
2802 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2803 break;
2804 }
2805 break;
2806 case 19: /* HPD/DAC hotplug */
2807 switch (src_data) {
2808 case 0:
2809 if (disp_int & DC_HPD1_INTERRUPT) {
2810 disp_int &= ~DC_HPD1_INTERRUPT;
2811 queue_hotplug = true;
2812 DRM_DEBUG("IH: HPD1\n");
2813 }
2814 break;
2815 case 1:
2816 if (disp_int & DC_HPD2_INTERRUPT) {
2817 disp_int &= ~DC_HPD2_INTERRUPT;
2818 queue_hotplug = true;
2819 DRM_DEBUG("IH: HPD2\n");
2820 }
2821 break;
2822 case 4:
2823 if (disp_int_cont & DC_HPD3_INTERRUPT) {
2824 disp_int_cont &= ~DC_HPD3_INTERRUPT;
2825 queue_hotplug = true;
2826 DRM_DEBUG("IH: HPD3\n");
2827 }
2828 break;
2829 case 5:
2830 if (disp_int_cont & DC_HPD4_INTERRUPT) {
2831 disp_int_cont &= ~DC_HPD4_INTERRUPT;
2832 queue_hotplug = true;
2833 DRM_DEBUG("IH: HPD4\n");
2834 }
2835 break;
2836 case 10:
2837 if (disp_int_cont2 & DC_HPD5_INTERRUPT) {
2838 disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
2839 queue_hotplug = true;
2840 DRM_DEBUG("IH: HPD5\n");
2841 }
2842 break;
2843 case 12:
2844 if (disp_int_cont2 & DC_HPD6_INTERRUPT) {
2845 disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
2846 queue_hotplug = true;
2847 DRM_DEBUG("IH: HPD6\n");
2848 }
2849 break;
2850 default:
2851 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2852 break;
2853 }
2854 break;
2855 case 176: /* CP_INT in ring buffer */
2856 case 177: /* CP_INT in IB1 */
2857 case 178: /* CP_INT in IB2 */
2858 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
2859 radeon_fence_process(rdev);
2860 break;
2861 case 181: /* CP EOP event */
2862 DRM_DEBUG("IH: CP EOP\n");
2863 break;
2864 default:
2865 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2866 break;
2867 }
2868
2869 /* wptr/rptr are in bytes! */
2870 rptr += 16;
2871 rptr &= rdev->ih.ptr_mask;
2872 }
2873 /* make sure wptr hasn't changed while processing */
2874 wptr = r600_get_ih_wptr(rdev);
2875 if (wptr != rdev->ih.wptr)
2876 goto restart_ih;
2877 if (queue_hotplug)
2878 queue_work(rdev->wq, &rdev->hotplug_work);
2879 rdev->ih.rptr = rptr;
2880 WREG32(IH_RB_RPTR, rdev->ih.rptr);
2881 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2882 return IRQ_HANDLED;
2883}
1803 2884
1804/* 2885/*
1805 * Debugfs info 2886 * Debugfs info
@@ -1811,21 +2892,21 @@ static int r600_debugfs_cp_ring_info(struct seq_file *m, void *data)
1811 struct drm_info_node *node = (struct drm_info_node *) m->private; 2892 struct drm_info_node *node = (struct drm_info_node *) m->private;
1812 struct drm_device *dev = node->minor->dev; 2893 struct drm_device *dev = node->minor->dev;
1813 struct radeon_device *rdev = dev->dev_private; 2894 struct radeon_device *rdev = dev->dev_private;
1814 uint32_t rdp, wdp;
1815 unsigned count, i, j; 2895 unsigned count, i, j;
1816 2896
1817 radeon_ring_free_size(rdev); 2897 radeon_ring_free_size(rdev);
1818 rdp = RREG32(CP_RB_RPTR); 2898 count = (rdev->cp.ring_size / 4) - rdev->cp.ring_free_dw;
1819 wdp = RREG32(CP_RB_WPTR);
1820 count = (rdp + rdev->cp.ring_size - wdp) & rdev->cp.ptr_mask;
1821 seq_printf(m, "CP_STAT 0x%08x\n", RREG32(CP_STAT)); 2899 seq_printf(m, "CP_STAT 0x%08x\n", RREG32(CP_STAT));
1822 seq_printf(m, "CP_RB_WPTR 0x%08x\n", wdp); 2900 seq_printf(m, "CP_RB_WPTR 0x%08x\n", RREG32(CP_RB_WPTR));
1823 seq_printf(m, "CP_RB_RPTR 0x%08x\n", rdp); 2901 seq_printf(m, "CP_RB_RPTR 0x%08x\n", RREG32(CP_RB_RPTR));
2902 seq_printf(m, "driver's copy of the CP_RB_WPTR 0x%08x\n", rdev->cp.wptr);
2903 seq_printf(m, "driver's copy of the CP_RB_RPTR 0x%08x\n", rdev->cp.rptr);
1824 seq_printf(m, "%u free dwords in ring\n", rdev->cp.ring_free_dw); 2904 seq_printf(m, "%u free dwords in ring\n", rdev->cp.ring_free_dw);
1825 seq_printf(m, "%u dwords in ring\n", count); 2905 seq_printf(m, "%u dwords in ring\n", count);
2906 i = rdev->cp.rptr;
1826 for (j = 0; j <= count; j++) { 2907 for (j = 0; j <= count; j++) {
1827 i = (rdp + j) & rdev->cp.ptr_mask;
1828 seq_printf(m, "r[%04d]=0x%08x\n", i, rdev->cp.ring[i]); 2908 seq_printf(m, "r[%04d]=0x%08x\n", i, rdev->cp.ring[i]);
2909 i = (i + 1) & rdev->cp.ptr_mask;
1829 } 2910 }
1830 return 0; 2911 return 0;
1831} 2912}
@@ -1855,3 +2936,18 @@ int r600_debugfs_mc_info_init(struct radeon_device *rdev)
1855 return 0; 2936 return 0;
1856#endif 2937#endif
1857} 2938}
2939
2940/**
2941 * r600_ioctl_wait_idle - flush host path cache on wait idle ioctl
 2942 * @rdev: radeon device structure
 2943 * @bo: buffer object struct which userspace is waiting for idle
 2944 *
 2945 * Some R6XX/R7XX chips don't seem to take into account an HDP flush
 2946 * performed through the ring buffer; this leads to rendering corruption,
 2947 * see http://bugzilla.kernel.org/show_bug.cgi?id=15186. To avoid this we
 2948 * perform the HDP flush directly by writing the register through MMIO.
2949 */
2950void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo)
2951{
2952 WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2953}
diff --git a/drivers/gpu/drm/radeon/r600_audio.c b/drivers/gpu/drm/radeon/r600_audio.c
new file mode 100644
index 000000000000..1d898051c631
--- /dev/null
+++ b/drivers/gpu/drm/radeon/r600_audio.c
@@ -0,0 +1,243 @@
1/*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Christian König.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Christian König
25 */
26#include "drmP.h"
27#include "radeon.h"
28#include "radeon_reg.h"
29#include "atom.h"
30
 31#define AUDIO_TIMER_INTERVALL 100 /* 1/10 second should be enough */
32
33/*
34 * check if the chipset is supported
35 */
36static int r600_audio_chipset_supported(struct radeon_device *rdev)
37{
38 return (rdev->family >= CHIP_R600 && rdev->family < CHIP_CEDAR)
39 || rdev->family == CHIP_RS600
40 || rdev->family == CHIP_RS690
41 || rdev->family == CHIP_RS740;
42}
43
44/*
45 * current number of channels
46 */
47static int r600_audio_channels(struct radeon_device *rdev)
48{
49 return (RREG32(R600_AUDIO_RATE_BPS_CHANNEL) & 0x7) + 1;
50}
51
52/*
53 * current bits per sample
54 */
55static int r600_audio_bits_per_sample(struct radeon_device *rdev)
56{
57 uint32_t value = (RREG32(R600_AUDIO_RATE_BPS_CHANNEL) & 0xF0) >> 4;
58 switch (value) {
59 case 0x0: return 8;
60 case 0x1: return 16;
61 case 0x2: return 20;
62 case 0x3: return 24;
63 case 0x4: return 32;
64 }
65
 66	DRM_ERROR("Unknown bits per sample 0x%x, using 16 instead.\n", (int)value);
67
68 return 16;
69}
70
71/*
72 * current sampling rate in HZ
73 */
74static int r600_audio_rate(struct radeon_device *rdev)
75{
76 uint32_t value = RREG32(R600_AUDIO_RATE_BPS_CHANNEL);
77 uint32_t result;
78
79 if (value & 0x4000)
80 result = 44100;
81 else
82 result = 48000;
83
84 result *= ((value >> 11) & 0x7) + 1;
85 result /= ((value >> 8) & 0x7) + 1;
86
87 return result;
88}
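Editorial worked example of the decode above: with bit 14 set and the two 3-bit fields reading 1 and 0, the rate is 44100 * (1 + 1) / (0 + 1) = 88200 Hz; with bit 14 clear and both fields zero it is simply 48000 Hz.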
89
90/*
91 * iec 60958 status bits
92 */
93static uint8_t r600_audio_status_bits(struct radeon_device *rdev)
94{
95 return RREG32(R600_AUDIO_STATUS_BITS) & 0xff;
96}
97
98/*
99 * iec 60958 category code
100 */
101static uint8_t r600_audio_category_code(struct radeon_device *rdev)
102{
103 return (RREG32(R600_AUDIO_STATUS_BITS) >> 8) & 0xff;
104}
105
106/*
107 * update all hdmi interfaces with current audio parameters
108 */
109static void r600_audio_update_hdmi(unsigned long param)
110{
111 struct radeon_device *rdev = (struct radeon_device *)param;
112 struct drm_device *dev = rdev->ddev;
113
114 int channels = r600_audio_channels(rdev);
115 int rate = r600_audio_rate(rdev);
116 int bps = r600_audio_bits_per_sample(rdev);
117 uint8_t status_bits = r600_audio_status_bits(rdev);
118 uint8_t category_code = r600_audio_category_code(rdev);
119
120 struct drm_encoder *encoder;
121 int changes = 0;
122
123 changes |= channels != rdev->audio_channels;
124 changes |= rate != rdev->audio_rate;
125 changes |= bps != rdev->audio_bits_per_sample;
126 changes |= status_bits != rdev->audio_status_bits;
127 changes |= category_code != rdev->audio_category_code;
128
129 if (changes) {
130 rdev->audio_channels = channels;
131 rdev->audio_rate = rate;
132 rdev->audio_bits_per_sample = bps;
133 rdev->audio_status_bits = status_bits;
134 rdev->audio_category_code = category_code;
135 }
136
137 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
138 if (changes || r600_hdmi_buffer_status_changed(encoder))
139 r600_hdmi_update_audio_settings(
140 encoder, channels,
141 rate, bps, status_bits,
142 category_code);
143 }
144
145 mod_timer(&rdev->audio_timer,
146 jiffies + msecs_to_jiffies(AUDIO_TIMER_INTERVALL));
147}
148
149/*
150 * turn on/off audio engine
151 */
152static void r600_audio_engine_enable(struct radeon_device *rdev, bool enable)
153{
 154	DRM_INFO("%s audio support\n", enable ? "Enabling" : "Disabling");
155 WREG32_P(R600_AUDIO_ENABLE, enable ? 0x81000000 : 0x0, ~0x81000000);
156}
157
158/*
159 * initialize the audio vars and register the update timer
160 */
161int r600_audio_init(struct radeon_device *rdev)
162{
163 if (!radeon_audio || !r600_audio_chipset_supported(rdev))
164 return 0;
165
166 r600_audio_engine_enable(rdev, true);
167
168 rdev->audio_channels = -1;
169 rdev->audio_rate = -1;
170 rdev->audio_bits_per_sample = -1;
171 rdev->audio_status_bits = 0;
172 rdev->audio_category_code = 0;
173
174 setup_timer(
175 &rdev->audio_timer,
176 r600_audio_update_hdmi,
177 (unsigned long)rdev);
178
179 mod_timer(&rdev->audio_timer, jiffies + 1);
180
181 return 0;
182}
183
184/*
 185 * attach the audio codec to the clock source of the encoder
186 */
187void r600_audio_set_clock(struct drm_encoder *encoder, int clock)
188{
189 struct drm_device *dev = encoder->dev;
190 struct radeon_device *rdev = dev->dev_private;
191 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
192 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
193 int base_rate = 48000;
194
195 switch (radeon_encoder->encoder_id) {
196 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
197 case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
198 WREG32_P(R600_AUDIO_TIMING, 0, ~0x301);
199 break;
200 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
201 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
202 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
203 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
204 WREG32_P(R600_AUDIO_TIMING, 0x100, ~0x301);
205 break;
206 default:
207 DRM_ERROR("Unsupported encoder type 0x%02X\n",
208 radeon_encoder->encoder_id);
209 return;
210 }
211
212 switch (dig->dig_encoder) {
213 case 0:
214 WREG32(R600_AUDIO_PLL1_MUL, base_rate * 50);
215 WREG32(R600_AUDIO_PLL1_DIV, clock * 100);
216 WREG32(R600_AUDIO_CLK_SRCSEL, 0);
217 break;
218
219 case 1:
220 WREG32(R600_AUDIO_PLL2_MUL, base_rate * 50);
221 WREG32(R600_AUDIO_PLL2_DIV, clock * 100);
222 WREG32(R600_AUDIO_CLK_SRCSEL, 1);
223 break;
224 default:
225 dev_err(rdev->dev, "Unsupported DIG on encoder 0x%02X\n",
226 radeon_encoder->encoder_id);
227 return;
228 }
229}
230
231/*
232 * release the audio timer
233 * TODO: How to do this correctly on SMP systems?
234 */
235void r600_audio_fini(struct radeon_device *rdev)
236{
237 if (!radeon_audio || !r600_audio_chipset_supported(rdev))
238 return;
239
240 del_timer(&rdev->audio_timer);
241
242 r600_audio_engine_enable(rdev, false);
243}
diff --git a/drivers/gpu/drm/radeon/r600_blit.c b/drivers/gpu/drm/radeon/r600_blit.c
index 5ea432347589..f4fb88ece2bb 100644
--- a/drivers/gpu/drm/radeon/r600_blit.c
+++ b/drivers/gpu/drm/radeon/r600_blit.c
@@ -49,7 +49,7 @@ set_render_target(drm_radeon_private_t *dev_priv, int format, int w, int h, u64
49 RING_LOCALS; 49 RING_LOCALS;
50 DRM_DEBUG("\n"); 50 DRM_DEBUG("\n");
51 51
52 h = (h + 7) & ~7; 52 h = ALIGN(h, 8);
53 if (h < 8) 53 if (h < 8)
54 h = 8; 54 h = 8;
55 55
diff --git a/drivers/gpu/drm/radeon/r600_blit_kms.c b/drivers/gpu/drm/radeon/r600_blit_kms.c
index dbf716e1fbf3..f6c6c77db7e0 100644
--- a/drivers/gpu/drm/radeon/r600_blit_kms.c
+++ b/drivers/gpu/drm/radeon/r600_blit_kms.c
@@ -25,7 +25,7 @@ set_render_target(struct radeon_device *rdev, int format,
25 u32 cb_color_info; 25 u32 cb_color_info;
26 int pitch, slice; 26 int pitch, slice;
27 27
28 h = (h + 7) & ~7; 28 h = ALIGN(h, 8);
29 if (h < 8) 29 if (h < 8)
30 h = 8; 30 h = 8;
31 31
@@ -396,15 +396,13 @@ set_default_state(struct radeon_device *rdev)
396 NUM_ES_STACK_ENTRIES(num_es_stack_entries)); 396 NUM_ES_STACK_ENTRIES(num_es_stack_entries));
397 397
398 /* emit an IB pointing at default state */ 398 /* emit an IB pointing at default state */
399 dwords = (rdev->r600_blit.state_len + 0xf) & ~0xf; 399 dwords = ALIGN(rdev->r600_blit.state_len, 0x10);
400 gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.state_offset; 400 gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.state_offset;
401 radeon_ring_write(rdev, PACKET3(PACKET3_INDIRECT_BUFFER, 2)); 401 radeon_ring_write(rdev, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
402 radeon_ring_write(rdev, gpu_addr & 0xFFFFFFFC); 402 radeon_ring_write(rdev, gpu_addr & 0xFFFFFFFC);
403 radeon_ring_write(rdev, upper_32_bits(gpu_addr) & 0xFF); 403 radeon_ring_write(rdev, upper_32_bits(gpu_addr) & 0xFF);
404 radeon_ring_write(rdev, dwords); 404 radeon_ring_write(rdev, dwords);
405 405
406 radeon_ring_write(rdev, PACKET3(PACKET3_EVENT_WRITE, 0));
407 radeon_ring_write(rdev, CACHE_FLUSH_AND_INV_EVENT);
408 /* SQ config */ 406 /* SQ config */
409 radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 6)); 407 radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 6));
410 radeon_ring_write(rdev, (SQ_CONFIG - PACKET3_SET_CONFIG_REG_OFFSET) >> 2); 408 radeon_ring_write(rdev, (SQ_CONFIG - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
@@ -449,6 +447,7 @@ int r600_blit_init(struct radeon_device *rdev)
449 u32 packet2s[16]; 447 u32 packet2s[16];
450 int num_packet2s = 0; 448 int num_packet2s = 0;
451 449
450 mutex_init(&rdev->r600_blit.mutex);
452 rdev->r600_blit.state_offset = 0; 451 rdev->r600_blit.state_offset = 0;
453 452
454 if (rdev->family >= CHIP_RV770) 453 if (rdev->family >= CHIP_RV770)
@@ -473,9 +472,8 @@ int r600_blit_init(struct radeon_device *rdev)
473 obj_size += r6xx_ps_size * 4; 472 obj_size += r6xx_ps_size * 4;
474 obj_size = ALIGN(obj_size, 256); 473 obj_size = ALIGN(obj_size, 256);
475 474
476 r = radeon_object_create(rdev, NULL, obj_size, 475 r = radeon_bo_create(rdev, NULL, obj_size, true, RADEON_GEM_DOMAIN_VRAM,
477 true, RADEON_GEM_DOMAIN_VRAM, 476 &rdev->r600_blit.shader_obj);
478 false, &rdev->r600_blit.shader_obj);
479 if (r) { 477 if (r) {
480 DRM_ERROR("r600 failed to allocate shader\n"); 478 DRM_ERROR("r600 failed to allocate shader\n");
481 return r; 479 return r;
@@ -485,12 +483,14 @@ int r600_blit_init(struct radeon_device *rdev)
485 obj_size, 483 obj_size,
486 rdev->r600_blit.vs_offset, rdev->r600_blit.ps_offset); 484 rdev->r600_blit.vs_offset, rdev->r600_blit.ps_offset);
487 485
488 r = radeon_object_kmap(rdev->r600_blit.shader_obj, &ptr); 486 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
487 if (unlikely(r != 0))
488 return r;
489 r = radeon_bo_kmap(rdev->r600_blit.shader_obj, &ptr);
489 if (r) { 490 if (r) {
490 DRM_ERROR("failed to map blit object %d\n", r); 491 DRM_ERROR("failed to map blit object %d\n", r);
491 return r; 492 return r;
492 } 493 }
493
494 if (rdev->family >= CHIP_RV770) 494 if (rdev->family >= CHIP_RV770)
495 memcpy_toio(ptr + rdev->r600_blit.state_offset, 495 memcpy_toio(ptr + rdev->r600_blit.state_offset,
496 r7xx_default_state, rdev->r600_blit.state_len * 4); 496 r7xx_default_state, rdev->r600_blit.state_len * 4);
@@ -500,19 +500,28 @@ int r600_blit_init(struct radeon_device *rdev)
500 if (num_packet2s) 500 if (num_packet2s)
501 memcpy_toio(ptr + rdev->r600_blit.state_offset + (rdev->r600_blit.state_len * 4), 501 memcpy_toio(ptr + rdev->r600_blit.state_offset + (rdev->r600_blit.state_len * 4),
502 packet2s, num_packet2s * 4); 502 packet2s, num_packet2s * 4);
503
504
505 memcpy(ptr + rdev->r600_blit.vs_offset, r6xx_vs, r6xx_vs_size * 4); 503 memcpy(ptr + rdev->r600_blit.vs_offset, r6xx_vs, r6xx_vs_size * 4);
506 memcpy(ptr + rdev->r600_blit.ps_offset, r6xx_ps, r6xx_ps_size * 4); 504 memcpy(ptr + rdev->r600_blit.ps_offset, r6xx_ps, r6xx_ps_size * 4);
507 505 radeon_bo_kunmap(rdev->r600_blit.shader_obj);
508 radeon_object_kunmap(rdev->r600_blit.shader_obj); 506 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
509 return 0; 507 return 0;
510} 508}
511 509
512void r600_blit_fini(struct radeon_device *rdev) 510void r600_blit_fini(struct radeon_device *rdev)
513{ 511{
514 radeon_object_unpin(rdev->r600_blit.shader_obj); 512 int r;
515 radeon_object_unref(&rdev->r600_blit.shader_obj); 513
514 if (rdev->r600_blit.shader_obj == NULL)
515 return;
516 /* If we can't reserve the bo, unref should be enough to destroy
517 * it when it becomes idle.
518 */
519 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
520 if (!r) {
521 radeon_bo_unpin(rdev->r600_blit.shader_obj);
522 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
523 }
524 radeon_bo_unref(&rdev->r600_blit.shader_obj);
516} 525}
517 526
518int r600_vb_ib_get(struct radeon_device *rdev) 527int r600_vb_ib_get(struct radeon_device *rdev)
@@ -532,9 +541,6 @@ int r600_vb_ib_get(struct radeon_device *rdev)
532void r600_vb_ib_put(struct radeon_device *rdev) 541void r600_vb_ib_put(struct radeon_device *rdev)
533{ 542{
534 radeon_fence_emit(rdev, rdev->r600_blit.vb_ib->fence); 543 radeon_fence_emit(rdev, rdev->r600_blit.vb_ib->fence);
535 mutex_lock(&rdev->ib_pool.mutex);
536 list_add_tail(&rdev->r600_blit.vb_ib->list, &rdev->ib_pool.scheduled_ibs);
537 mutex_unlock(&rdev->ib_pool.mutex);
538 radeon_ib_free(rdev, &rdev->r600_blit.vb_ib); 544 radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
539} 545}
540 546
@@ -547,7 +553,8 @@ int r600_blit_prepare_copy(struct radeon_device *rdev, int size_bytes)
547 int dwords_per_loop = 76, num_loops; 553 int dwords_per_loop = 76, num_loops;
548 554
549 r = r600_vb_ib_get(rdev); 555 r = r600_vb_ib_get(rdev);
550 WARN_ON(r); 556 if (r)
557 return r;
551 558
552 /* set_render_target emits 2 extra dwords on rv6xx */ 559 /* set_render_target emits 2 extra dwords on rv6xx */
553 if (rdev->family > CHIP_R600 && rdev->family < CHIP_RV770) 560 if (rdev->family > CHIP_R600 && rdev->family < CHIP_RV770)
@@ -569,11 +576,12 @@ int r600_blit_prepare_copy(struct radeon_device *rdev, int size_bytes)
569 ring_size = num_loops * dwords_per_loop; 576 ring_size = num_loops * dwords_per_loop;
570 /* set default + shaders */ 577 /* set default + shaders */
571 ring_size += 40; /* shaders + def state */ 578 ring_size += 40; /* shaders + def state */
572 ring_size += 3; /* fence emit for VB IB */ 579 ring_size += 10; /* fence emit for VB IB */
573 ring_size += 5; /* done copy */ 580 ring_size += 5; /* done copy */
574 ring_size += 3; /* fence emit for done copy */ 581 ring_size += 10; /* fence emit for done copy */
575 r = radeon_ring_lock(rdev, ring_size); 582 r = radeon_ring_lock(rdev, ring_size);
576 WARN_ON(r); 583 if (r)
584 return r;
577 585
578 set_default_state(rdev); /* 14 */ 586 set_default_state(rdev); /* 14 */
579 set_shaders(rdev); /* 26 */ 587 set_shaders(rdev); /* 26 */
@@ -584,13 +592,6 @@ void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence)
584{ 592{
585 int r; 593 int r;
586 594
587 radeon_ring_write(rdev, PACKET3(PACKET3_EVENT_WRITE, 0));
588 radeon_ring_write(rdev, CACHE_FLUSH_AND_INV_EVENT);
589 /* wait for 3D idle clean */
590 radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1));
591 radeon_ring_write(rdev, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
592 radeon_ring_write(rdev, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
593
594 if (rdev->r600_blit.vb_ib) 595 if (rdev->r600_blit.vb_ib)
595 r600_vb_ib_put(rdev); 596 r600_vb_ib_put(rdev);
596 597
diff --git a/drivers/gpu/drm/radeon/r600_blit_shaders.c b/drivers/gpu/drm/radeon/r600_blit_shaders.c
index d745e815c2e8..0271b53fa2dd 100644
--- a/drivers/gpu/drm/radeon/r600_blit_shaders.c
+++ b/drivers/gpu/drm/radeon/r600_blit_shaders.c
@@ -1,7 +1,42 @@
1/*
2 * Copyright 2009 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 *
23 * Authors:
24 * Alex Deucher <alexander.deucher@amd.com>
25 */
1 26
2#include <linux/types.h> 27#include <linux/types.h>
3#include <linux/kernel.h> 28#include <linux/kernel.h>
4 29
30/*
31 * R6xx+ cards need to use the 3D engine to blit data which requires
32 * quite a bit of hw state setup. Rather than pull the whole 3D driver
33 * (which normally generates the 3D state) into the DRM, we opt to use
 34 * statically generated state tables. The register state and shaders
35 * were hand generated to support blitting functionality. See the 3D
36 * driver or documentation for descriptions of the registers and
37 * shader instructions.
38 */
39
5const u32 r6xx_default_state[] = 40const u32 r6xx_default_state[] =
6{ 41{
7 0xc0002400, 42 0xc0002400,
@@ -9,11 +44,6 @@ const u32 r6xx_default_state[] =
9 0xc0012800, 44 0xc0012800,
10 0x80000000, 45 0x80000000,
11 0x80000000, 46 0x80000000,
12 0xc0004600,
13 0x00000016,
14 0xc0016800,
15 0x00000010,
16 0x00028000,
17 0xc0016800, 47 0xc0016800,
18 0x00000010, 48 0x00000010,
19 0x00008000, 49 0x00008000,
@@ -531,11 +561,6 @@ const u32 r7xx_default_state[] =
531 0xc0012800, 561 0xc0012800,
532 0x80000000, 562 0x80000000,
533 0x80000000, 563 0x80000000,
534 0xc0004600,
535 0x00000016,
536 0xc0016800,
537 0x00000010,
538 0x00028000,
539 0xc0016800, 564 0xc0016800,
540 0x00000010, 565 0x00000010,
541 0x00008000, 566 0x00008000,
diff --git a/drivers/gpu/drm/radeon/r600_cp.c b/drivers/gpu/drm/radeon/r600_cp.c
index 6d5a711c2e91..68e6f4349309 100644
--- a/drivers/gpu/drm/radeon/r600_cp.c
+++ b/drivers/gpu/drm/radeon/r600_cp.c
@@ -734,8 +734,8 @@ static void r600_gfx_init(struct drm_device *dev,
734 u32 hdp_host_path_cntl; 734 u32 hdp_host_path_cntl;
735 u32 backend_map; 735 u32 backend_map;
736 u32 gb_tiling_config = 0; 736 u32 gb_tiling_config = 0;
737 u32 cc_rb_backend_disable = 0; 737 u32 cc_rb_backend_disable;
738 u32 cc_gc_shader_pipe_config = 0; 738 u32 cc_gc_shader_pipe_config;
739 u32 ramcfg; 739 u32 ramcfg;
740 740
741 /* setup chip specs */ 741 /* setup chip specs */
@@ -857,29 +857,44 @@ static void r600_gfx_init(struct drm_device *dev,
857 857
858 gb_tiling_config |= R600_BANK_SWAPS(1); 858 gb_tiling_config |= R600_BANK_SWAPS(1);
859 859
860 backend_map = r600_get_tile_pipe_to_backend_map(dev_priv->r600_max_tile_pipes, 860 cc_rb_backend_disable = RADEON_READ(R600_CC_RB_BACKEND_DISABLE) & 0x00ff0000;
861 dev_priv->r600_max_backends, 861 cc_rb_backend_disable |=
862 (0xff << dev_priv->r600_max_backends) & 0xff); 862 R600_BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R6XX_MAX_BACKENDS_MASK);
863 gb_tiling_config |= R600_BACKEND_MAP(backend_map);
864 863
865 cc_gc_shader_pipe_config = 864 cc_gc_shader_pipe_config = RADEON_READ(R600_CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
865 cc_gc_shader_pipe_config |=
866 R600_INACTIVE_QD_PIPES((R6XX_MAX_PIPES_MASK << dev_priv->r600_max_pipes) & R6XX_MAX_PIPES_MASK); 866 R600_INACTIVE_QD_PIPES((R6XX_MAX_PIPES_MASK << dev_priv->r600_max_pipes) & R6XX_MAX_PIPES_MASK);
867 cc_gc_shader_pipe_config |= 867 cc_gc_shader_pipe_config |=
868 R600_INACTIVE_SIMDS((R6XX_MAX_SIMDS_MASK << dev_priv->r600_max_simds) & R6XX_MAX_SIMDS_MASK); 868 R600_INACTIVE_SIMDS((R6XX_MAX_SIMDS_MASK << dev_priv->r600_max_simds) & R6XX_MAX_SIMDS_MASK);
869 869
870 cc_rb_backend_disable = 870 backend_map = r600_get_tile_pipe_to_backend_map(dev_priv->r600_max_tile_pipes,
871 R600_BACKEND_DISABLE((R6XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R6XX_MAX_BACKENDS_MASK); 871 (R6XX_MAX_BACKENDS -
872 r600_count_pipe_bits((cc_rb_backend_disable &
873 R6XX_MAX_BACKENDS_MASK) >> 16)),
874 (cc_rb_backend_disable >> 16));
875 gb_tiling_config |= R600_BACKEND_MAP(backend_map);
872 876
873 RADEON_WRITE(R600_GB_TILING_CONFIG, gb_tiling_config); 877 RADEON_WRITE(R600_GB_TILING_CONFIG, gb_tiling_config);
874 RADEON_WRITE(R600_DCP_TILING_CONFIG, (gb_tiling_config & 0xffff)); 878 RADEON_WRITE(R600_DCP_TILING_CONFIG, (gb_tiling_config & 0xffff));
875 RADEON_WRITE(R600_HDP_TILING_CONFIG, (gb_tiling_config & 0xffff)); 879 RADEON_WRITE(R600_HDP_TILING_CONFIG, (gb_tiling_config & 0xffff));
880 if (gb_tiling_config & 0xc0) {
881 dev_priv->r600_group_size = 512;
882 } else {
883 dev_priv->r600_group_size = 256;
884 }
885 dev_priv->r600_npipes = 1 << ((gb_tiling_config >> 1) & 0x7);
886 if (gb_tiling_config & 0x30) {
887 dev_priv->r600_nbanks = 8;
888 } else {
889 dev_priv->r600_nbanks = 4;
890 }
876 891
877 RADEON_WRITE(R600_CC_RB_BACKEND_DISABLE, cc_rb_backend_disable); 892 RADEON_WRITE(R600_CC_RB_BACKEND_DISABLE, cc_rb_backend_disable);
878 RADEON_WRITE(R600_CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config); 893 RADEON_WRITE(R600_CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
879 RADEON_WRITE(R600_GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config); 894 RADEON_WRITE(R600_GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
880 895
881 num_qd_pipes = 896 num_qd_pipes =
882 R6XX_MAX_BACKENDS - r600_count_pipe_bits(cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK); 897 R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK) >> 8);
883 RADEON_WRITE(R600_VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & R600_DEALLOC_DIST_MASK); 898 RADEON_WRITE(R600_VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & R600_DEALLOC_DIST_MASK);
884 RADEON_WRITE(R600_VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & R600_VTX_REUSE_DEPTH_MASK); 899 RADEON_WRITE(R600_VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & R600_VTX_REUSE_DEPTH_MASK);
885 900
@@ -1151,7 +1166,8 @@ static void r600_gfx_init(struct drm_device *dev,
1151 1166
1152} 1167}
1153 1168
1154static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes, 1169static u32 r700_get_tile_pipe_to_backend_map(drm_radeon_private_t *dev_priv,
1170 u32 num_tile_pipes,
1155 u32 num_backends, 1171 u32 num_backends,
1156 u32 backend_disable_mask) 1172 u32 backend_disable_mask)
1157{ 1173{
@@ -1162,6 +1178,7 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
1162 u32 swizzle_pipe[R7XX_MAX_PIPES]; 1178 u32 swizzle_pipe[R7XX_MAX_PIPES];
1163 u32 cur_backend; 1179 u32 cur_backend;
1164 u32 i; 1180 u32 i;
1181 bool force_no_swizzle;
1165 1182
1166 if (num_tile_pipes > R7XX_MAX_PIPES) 1183 if (num_tile_pipes > R7XX_MAX_PIPES)
1167 num_tile_pipes = R7XX_MAX_PIPES; 1184 num_tile_pipes = R7XX_MAX_PIPES;
@@ -1191,6 +1208,18 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
1191 if (enabled_backends_count != num_backends) 1208 if (enabled_backends_count != num_backends)
1192 num_backends = enabled_backends_count; 1209 num_backends = enabled_backends_count;
1193 1210
1211 switch (dev_priv->flags & RADEON_FAMILY_MASK) {
1212 case CHIP_RV770:
1213 case CHIP_RV730:
1214 force_no_swizzle = false;
1215 break;
1216 case CHIP_RV710:
1217 case CHIP_RV740:
1218 default:
1219 force_no_swizzle = true;
1220 break;
1221 }
1222
1194 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * R7XX_MAX_PIPES); 1223 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * R7XX_MAX_PIPES);
1195 switch (num_tile_pipes) { 1224 switch (num_tile_pipes) {
1196 case 1: 1225 case 1:
@@ -1201,49 +1230,100 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
1201 swizzle_pipe[1] = 1; 1230 swizzle_pipe[1] = 1;
1202 break; 1231 break;
1203 case 3: 1232 case 3:
1204 swizzle_pipe[0] = 0; 1233 if (force_no_swizzle) {
1205 swizzle_pipe[1] = 2; 1234 swizzle_pipe[0] = 0;
1206 swizzle_pipe[2] = 1; 1235 swizzle_pipe[1] = 1;
1236 swizzle_pipe[2] = 2;
1237 } else {
1238 swizzle_pipe[0] = 0;
1239 swizzle_pipe[1] = 2;
1240 swizzle_pipe[2] = 1;
1241 }
1207 break; 1242 break;
1208 case 4: 1243 case 4:
1209 swizzle_pipe[0] = 0; 1244 if (force_no_swizzle) {
1210 swizzle_pipe[1] = 2; 1245 swizzle_pipe[0] = 0;
1211 swizzle_pipe[2] = 3; 1246 swizzle_pipe[1] = 1;
1212 swizzle_pipe[3] = 1; 1247 swizzle_pipe[2] = 2;
1248 swizzle_pipe[3] = 3;
1249 } else {
1250 swizzle_pipe[0] = 0;
1251 swizzle_pipe[1] = 2;
1252 swizzle_pipe[2] = 3;
1253 swizzle_pipe[3] = 1;
1254 }
1213 break; 1255 break;
1214 case 5: 1256 case 5:
1215 swizzle_pipe[0] = 0; 1257 if (force_no_swizzle) {
1216 swizzle_pipe[1] = 2; 1258 swizzle_pipe[0] = 0;
1217 swizzle_pipe[2] = 4; 1259 swizzle_pipe[1] = 1;
1218 swizzle_pipe[3] = 1; 1260 swizzle_pipe[2] = 2;
1219 swizzle_pipe[4] = 3; 1261 swizzle_pipe[3] = 3;
1262 swizzle_pipe[4] = 4;
1263 } else {
1264 swizzle_pipe[0] = 0;
1265 swizzle_pipe[1] = 2;
1266 swizzle_pipe[2] = 4;
1267 swizzle_pipe[3] = 1;
1268 swizzle_pipe[4] = 3;
1269 }
1220 break; 1270 break;
1221 case 6: 1271 case 6:
1222 swizzle_pipe[0] = 0; 1272 if (force_no_swizzle) {
1223 swizzle_pipe[1] = 2; 1273 swizzle_pipe[0] = 0;
1224 swizzle_pipe[2] = 4; 1274 swizzle_pipe[1] = 1;
1225 swizzle_pipe[3] = 5; 1275 swizzle_pipe[2] = 2;
1226 swizzle_pipe[4] = 3; 1276 swizzle_pipe[3] = 3;
1227 swizzle_pipe[5] = 1; 1277 swizzle_pipe[4] = 4;
1278 swizzle_pipe[5] = 5;
1279 } else {
1280 swizzle_pipe[0] = 0;
1281 swizzle_pipe[1] = 2;
1282 swizzle_pipe[2] = 4;
1283 swizzle_pipe[3] = 5;
1284 swizzle_pipe[4] = 3;
1285 swizzle_pipe[5] = 1;
1286 }
1228 break; 1287 break;
1229 case 7: 1288 case 7:
1230 swizzle_pipe[0] = 0; 1289 if (force_no_swizzle) {
1231 swizzle_pipe[1] = 2; 1290 swizzle_pipe[0] = 0;
1232 swizzle_pipe[2] = 4; 1291 swizzle_pipe[1] = 1;
1233 swizzle_pipe[3] = 6; 1292 swizzle_pipe[2] = 2;
1234 swizzle_pipe[4] = 3; 1293 swizzle_pipe[3] = 3;
1235 swizzle_pipe[5] = 1; 1294 swizzle_pipe[4] = 4;
1236 swizzle_pipe[6] = 5; 1295 swizzle_pipe[5] = 5;
1296 swizzle_pipe[6] = 6;
1297 } else {
1298 swizzle_pipe[0] = 0;
1299 swizzle_pipe[1] = 2;
1300 swizzle_pipe[2] = 4;
1301 swizzle_pipe[3] = 6;
1302 swizzle_pipe[4] = 3;
1303 swizzle_pipe[5] = 1;
1304 swizzle_pipe[6] = 5;
1305 }
1237 break; 1306 break;
1238 case 8: 1307 case 8:
1239 swizzle_pipe[0] = 0; 1308 if (force_no_swizzle) {
1240 swizzle_pipe[1] = 2; 1309 swizzle_pipe[0] = 0;
1241 swizzle_pipe[2] = 4; 1310 swizzle_pipe[1] = 1;
1242 swizzle_pipe[3] = 6; 1311 swizzle_pipe[2] = 2;
1243 swizzle_pipe[4] = 3; 1312 swizzle_pipe[3] = 3;
1244 swizzle_pipe[5] = 1; 1313 swizzle_pipe[4] = 4;
1245 swizzle_pipe[6] = 7; 1314 swizzle_pipe[5] = 5;
1246 swizzle_pipe[7] = 5; 1315 swizzle_pipe[6] = 6;
1316 swizzle_pipe[7] = 7;
1317 } else {
1318 swizzle_pipe[0] = 0;
1319 swizzle_pipe[1] = 2;
1320 swizzle_pipe[2] = 4;
1321 swizzle_pipe[3] = 6;
1322 swizzle_pipe[4] = 3;
1323 swizzle_pipe[5] = 1;
1324 swizzle_pipe[6] = 7;
1325 swizzle_pipe[7] = 5;
1326 }
1247 break; 1327 break;
1248 } 1328 }
1249 1329
@@ -1264,8 +1344,10 @@ static void r700_gfx_init(struct drm_device *dev,
1264 drm_radeon_private_t *dev_priv) 1344 drm_radeon_private_t *dev_priv)
1265{ 1345{
1266 int i, j, num_qd_pipes; 1346 int i, j, num_qd_pipes;
1347 u32 ta_aux_cntl;
1267 u32 sx_debug_1; 1348 u32 sx_debug_1;
1268 u32 smx_dc_ctl0; 1349 u32 smx_dc_ctl0;
1350 u32 db_debug3;
1269 u32 num_gs_verts_per_thread; 1351 u32 num_gs_verts_per_thread;
1270 u32 vgt_gs_per_es; 1352 u32 vgt_gs_per_es;
1271 u32 gs_prim_buffer_depth = 0; 1353 u32 gs_prim_buffer_depth = 0;
@@ -1276,8 +1358,8 @@ static void r700_gfx_init(struct drm_device *dev,
1276 u32 sq_dyn_gpr_size_simd_ab_0; 1358 u32 sq_dyn_gpr_size_simd_ab_0;
1277 u32 backend_map; 1359 u32 backend_map;
1278 u32 gb_tiling_config = 0; 1360 u32 gb_tiling_config = 0;
1279 u32 cc_rb_backend_disable = 0; 1361 u32 cc_rb_backend_disable;
1280 u32 cc_gc_shader_pipe_config = 0; 1362 u32 cc_gc_shader_pipe_config;
1281 u32 mc_arb_ramcfg; 1363 u32 mc_arb_ramcfg;
1282 u32 db_debug4; 1364 u32 db_debug4;
1283 1365
@@ -1428,22 +1510,41 @@ static void r700_gfx_init(struct drm_device *dev,
1428 1510
1429 gb_tiling_config |= R600_BANK_SWAPS(1); 1511 gb_tiling_config |= R600_BANK_SWAPS(1);
1430 1512
1431 backend_map = r700_get_tile_pipe_to_backend_map(dev_priv->r600_max_tile_pipes, 1513 cc_rb_backend_disable = RADEON_READ(R600_CC_RB_BACKEND_DISABLE) & 0x00ff0000;
1432 dev_priv->r600_max_backends, 1514 cc_rb_backend_disable |=
1433 (0xff << dev_priv->r600_max_backends) & 0xff); 1515 R600_BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R7XX_MAX_BACKENDS_MASK);
1434 gb_tiling_config |= R600_BACKEND_MAP(backend_map);
1435 1516
1436 cc_gc_shader_pipe_config = 1517 cc_gc_shader_pipe_config = RADEON_READ(R600_CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
1518 cc_gc_shader_pipe_config |=
1437 R600_INACTIVE_QD_PIPES((R7XX_MAX_PIPES_MASK << dev_priv->r600_max_pipes) & R7XX_MAX_PIPES_MASK); 1519 R600_INACTIVE_QD_PIPES((R7XX_MAX_PIPES_MASK << dev_priv->r600_max_pipes) & R7XX_MAX_PIPES_MASK);
1438 cc_gc_shader_pipe_config |= 1520 cc_gc_shader_pipe_config |=
1439 R600_INACTIVE_SIMDS((R7XX_MAX_SIMDS_MASK << dev_priv->r600_max_simds) & R7XX_MAX_SIMDS_MASK); 1521 R600_INACTIVE_SIMDS((R7XX_MAX_SIMDS_MASK << dev_priv->r600_max_simds) & R7XX_MAX_SIMDS_MASK);
1440 1522
1441 cc_rb_backend_disable = 1523 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV740)
1442 R600_BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << dev_priv->r600_max_backends) & R7XX_MAX_BACKENDS_MASK); 1524 backend_map = 0x28;
1525 else
1526 backend_map = r700_get_tile_pipe_to_backend_map(dev_priv,
1527 dev_priv->r600_max_tile_pipes,
1528 (R7XX_MAX_BACKENDS -
1529 r600_count_pipe_bits((cc_rb_backend_disable &
1530 R7XX_MAX_BACKENDS_MASK) >> 16)),
1531 (cc_rb_backend_disable >> 16));
1532 gb_tiling_config |= R600_BACKEND_MAP(backend_map);
1443 1533
1444 RADEON_WRITE(R600_GB_TILING_CONFIG, gb_tiling_config); 1534 RADEON_WRITE(R600_GB_TILING_CONFIG, gb_tiling_config);
1445 RADEON_WRITE(R600_DCP_TILING_CONFIG, (gb_tiling_config & 0xffff)); 1535 RADEON_WRITE(R600_DCP_TILING_CONFIG, (gb_tiling_config & 0xffff));
1446 RADEON_WRITE(R600_HDP_TILING_CONFIG, (gb_tiling_config & 0xffff)); 1536 RADEON_WRITE(R600_HDP_TILING_CONFIG, (gb_tiling_config & 0xffff));
1537 if (gb_tiling_config & 0xc0) {
1538 dev_priv->r600_group_size = 512;
1539 } else {
1540 dev_priv->r600_group_size = 256;
1541 }
1542 dev_priv->r600_npipes = 1 << ((gb_tiling_config >> 1) & 0x7);
1543 if (gb_tiling_config & 0x30) {
1544 dev_priv->r600_nbanks = 8;
1545 } else {
1546 dev_priv->r600_nbanks = 4;
1547 }
1447 1548
1448 RADEON_WRITE(R600_CC_RB_BACKEND_DISABLE, cc_rb_backend_disable); 1549 RADEON_WRITE(R600_CC_RB_BACKEND_DISABLE, cc_rb_backend_disable);
1449 RADEON_WRITE(R600_CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config); 1550 RADEON_WRITE(R600_CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
@@ -1456,7 +1557,7 @@ static void r700_gfx_init(struct drm_device *dev,
1456 RADEON_WRITE(R700_CGTS_USER_TCC_DISABLE, 0); 1557 RADEON_WRITE(R700_CGTS_USER_TCC_DISABLE, 0);
1457 1558
1458 num_qd_pipes = 1559 num_qd_pipes =
1459 R7XX_MAX_BACKENDS - r600_count_pipe_bits(cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK); 1560 R7XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & R600_INACTIVE_QD_PIPES_MASK) >> 8);
1460 RADEON_WRITE(R600_VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & R600_DEALLOC_DIST_MASK); 1561 RADEON_WRITE(R600_VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & R600_DEALLOC_DIST_MASK);
1461 RADEON_WRITE(R600_VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & R600_VTX_REUSE_DEPTH_MASK); 1562 RADEON_WRITE(R600_VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & R600_VTX_REUSE_DEPTH_MASK);
1462 1563
@@ -1466,10 +1567,8 @@ static void r700_gfx_init(struct drm_device *dev,
1466 1567
1467 RADEON_WRITE(R600_CP_MEQ_THRESHOLDS, R700_STQ_SPLIT(0x30)); 1568 RADEON_WRITE(R600_CP_MEQ_THRESHOLDS, R700_STQ_SPLIT(0x30));
1468 1569
1469 RADEON_WRITE(R600_TA_CNTL_AUX, (R600_DISABLE_CUBE_ANISO | 1570 ta_aux_cntl = RADEON_READ(R600_TA_CNTL_AUX);
1470 R600_SYNC_GRADIENT | 1571 RADEON_WRITE(R600_TA_CNTL_AUX, ta_aux_cntl | R600_DISABLE_CUBE_ANISO);
1471 R600_SYNC_WALKER |
1472 R600_SYNC_ALIGNER));
1473 1572
1474 sx_debug_1 = RADEON_READ(R700_SX_DEBUG_1); 1573 sx_debug_1 = RADEON_READ(R700_SX_DEBUG_1);
1475 sx_debug_1 |= R700_ENABLE_NEW_SMX_ADDRESS; 1574 sx_debug_1 |= R700_ENABLE_NEW_SMX_ADDRESS;
@@ -1480,14 +1579,28 @@ static void r700_gfx_init(struct drm_device *dev,
1480 smx_dc_ctl0 |= R700_CACHE_DEPTH((dev_priv->r700_sx_num_of_sets * 64) - 1); 1579 smx_dc_ctl0 |= R700_CACHE_DEPTH((dev_priv->r700_sx_num_of_sets * 64) - 1);
1481 RADEON_WRITE(R600_SMX_DC_CTL0, smx_dc_ctl0); 1580 RADEON_WRITE(R600_SMX_DC_CTL0, smx_dc_ctl0);
1482 1581
1483 RADEON_WRITE(R700_SMX_EVENT_CTL, (R700_ES_FLUSH_CTL(4) | 1582 if ((dev_priv->flags & RADEON_FAMILY_MASK) != CHIP_RV740)
1484 R700_GS_FLUSH_CTL(4) | 1583 RADEON_WRITE(R700_SMX_EVENT_CTL, (R700_ES_FLUSH_CTL(4) |
1485 R700_ACK_FLUSH_CTL(3) | 1584 R700_GS_FLUSH_CTL(4) |
1486 R700_SYNC_FLUSH_CTL)); 1585 R700_ACK_FLUSH_CTL(3) |
1586 R700_SYNC_FLUSH_CTL));
1487 1587
1488 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV770) 1588 db_debug3 = RADEON_READ(R700_DB_DEBUG3);
1489 RADEON_WRITE(R700_DB_DEBUG3, R700_DB_CLK_OFF_DELAY(0x1f)); 1589 db_debug3 &= ~R700_DB_CLK_OFF_DELAY(0x1f);
1490 else { 1590 switch (dev_priv->flags & RADEON_FAMILY_MASK) {
1591 case CHIP_RV770:
1592 case CHIP_RV740:
1593 db_debug3 |= R700_DB_CLK_OFF_DELAY(0x1f);
1594 break;
1595 case CHIP_RV710:
1596 case CHIP_RV730:
1597 default:
1598 db_debug3 |= R700_DB_CLK_OFF_DELAY(2);
1599 break;
1600 }
1601 RADEON_WRITE(R700_DB_DEBUG3, db_debug3);
1602
1603 if ((dev_priv->flags & RADEON_FAMILY_MASK) != CHIP_RV770) {
1491 db_debug4 = RADEON_READ(RV700_DB_DEBUG4); 1604 db_debug4 = RADEON_READ(RV700_DB_DEBUG4);
1492 db_debug4 |= RV700_DISABLE_TILE_COVERED_FOR_PS_ITER; 1605 db_debug4 |= RV700_DISABLE_TILE_COVERED_FOR_PS_ITER;
1493 RADEON_WRITE(RV700_DB_DEBUG4, db_debug4); 1606 RADEON_WRITE(RV700_DB_DEBUG4, db_debug4);
@@ -1516,10 +1629,10 @@ static void r700_gfx_init(struct drm_device *dev,
1516 R600_ALU_UPDATE_FIFO_HIWATER(0x8)); 1629 R600_ALU_UPDATE_FIFO_HIWATER(0x8));
1517 switch (dev_priv->flags & RADEON_FAMILY_MASK) { 1630 switch (dev_priv->flags & RADEON_FAMILY_MASK) {
1518 case CHIP_RV770: 1631 case CHIP_RV770:
1519 sq_ms_fifo_sizes |= R600_FETCH_FIFO_HIWATER(0x1);
1520 break;
1521 case CHIP_RV730: 1632 case CHIP_RV730:
1522 case CHIP_RV710: 1633 case CHIP_RV710:
1634 sq_ms_fifo_sizes |= R600_FETCH_FIFO_HIWATER(0x1);
1635 break;
1523 case CHIP_RV740: 1636 case CHIP_RV740:
1524 default: 1637 default:
1525 sq_ms_fifo_sizes |= R600_FETCH_FIFO_HIWATER(0x4); 1638 sq_ms_fifo_sizes |= R600_FETCH_FIFO_HIWATER(0x4);
@@ -2526,3 +2639,12 @@ out:
2526 mutex_unlock(&dev_priv->cs_mutex); 2639 mutex_unlock(&dev_priv->cs_mutex);
2527 return r; 2640 return r;
2528} 2641}
2642
2643void r600_cs_legacy_get_tiling_conf(struct drm_device *dev, u32 *npipes, u32 *nbanks, u32 *group_size)
2644{
2645 struct drm_radeon_private *dev_priv = dev->dev_private;
2646
2647 *npipes = dev_priv->r600_npipes;
2648 *nbanks = dev_priv->r600_nbanks;
2649 *group_size = dev_priv->r600_group_size;
2650}
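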
diff --git a/drivers/gpu/drm/radeon/r600_cs.c b/drivers/gpu/drm/radeon/r600_cs.c
index 0d820764f340..c39c1bc13016 100644
--- a/drivers/gpu/drm/radeon/r600_cs.c
+++ b/drivers/gpu/drm/radeon/r600_cs.c
@@ -28,6 +28,7 @@
28#include "drmP.h" 28#include "drmP.h"
29#include "radeon.h" 29#include "radeon.h"
30#include "r600d.h" 30#include "r600d.h"
31#include "r600_reg_safe.h"
31 32
32static int r600_cs_packet_next_reloc_mm(struct radeon_cs_parser *p, 33static int r600_cs_packet_next_reloc_mm(struct radeon_cs_parser *p,
33 struct radeon_cs_reloc **cs_reloc); 34 struct radeon_cs_reloc **cs_reloc);
@@ -35,6 +36,315 @@ static int r600_cs_packet_next_reloc_nomm(struct radeon_cs_parser *p,
35 struct radeon_cs_reloc **cs_reloc); 36 struct radeon_cs_reloc **cs_reloc);
36typedef int (*next_reloc_t)(struct radeon_cs_parser*, struct radeon_cs_reloc**); 37typedef int (*next_reloc_t)(struct radeon_cs_parser*, struct radeon_cs_reloc**);
37static next_reloc_t r600_cs_packet_next_reloc = &r600_cs_packet_next_reloc_mm; 38static next_reloc_t r600_cs_packet_next_reloc = &r600_cs_packet_next_reloc_mm;
39extern void r600_cs_legacy_get_tiling_conf(struct drm_device *dev, u32 *npipes, u32 *nbanks, u32 *group_size);
40
41
42struct r600_cs_track {
43 /* configuration we mirror so that we use the same code between kms/ums */
44 u32 group_size;
45 u32 nbanks;
46 u32 npipes;
47 /* values we track */
48 u32 sq_config;
49 u32 nsamples;
50 u32 cb_color_base_last[8];
51 struct radeon_bo *cb_color_bo[8];
52 u32 cb_color_bo_offset[8];
53 struct radeon_bo *cb_color_frag_bo[8];
54 struct radeon_bo *cb_color_tile_bo[8];
55 u32 cb_color_info[8];
56 u32 cb_color_size_idx[8];
57 u32 cb_target_mask;
58 u32 cb_shader_mask;
59 u32 cb_color_size[8];
60 u32 vgt_strmout_en;
61 u32 vgt_strmout_buffer_en;
62 u32 db_depth_control;
63 u32 db_depth_info;
64 u32 db_depth_size_idx;
65 u32 db_depth_view;
66 u32 db_depth_size;
67 u32 db_offset;
68 struct radeon_bo *db_bo;
69};
70
71static inline int r600_bpe_from_format(u32 *bpe, u32 format)
72{
73 switch (format) {
74 case V_038004_COLOR_8:
75 case V_038004_COLOR_4_4:
76 case V_038004_COLOR_3_3_2:
77 case V_038004_FMT_1:
78 *bpe = 1;
79 break;
80 case V_038004_COLOR_16:
81 case V_038004_COLOR_16_FLOAT:
82 case V_038004_COLOR_8_8:
83 case V_038004_COLOR_5_6_5:
84 case V_038004_COLOR_6_5_5:
85 case V_038004_COLOR_1_5_5_5:
86 case V_038004_COLOR_4_4_4_4:
87 case V_038004_COLOR_5_5_5_1:
88 *bpe = 2;
89 break;
90 case V_038004_FMT_8_8_8:
91 *bpe = 3;
92 break;
93 case V_038004_COLOR_32:
94 case V_038004_COLOR_32_FLOAT:
95 case V_038004_COLOR_16_16:
96 case V_038004_COLOR_16_16_FLOAT:
97 case V_038004_COLOR_8_24:
98 case V_038004_COLOR_8_24_FLOAT:
99 case V_038004_COLOR_24_8:
100 case V_038004_COLOR_24_8_FLOAT:
101 case V_038004_COLOR_10_11_11:
102 case V_038004_COLOR_10_11_11_FLOAT:
103 case V_038004_COLOR_11_11_10:
104 case V_038004_COLOR_11_11_10_FLOAT:
105 case V_038004_COLOR_2_10_10_10:
106 case V_038004_COLOR_8_8_8_8:
107 case V_038004_COLOR_10_10_10_2:
108 case V_038004_FMT_5_9_9_9_SHAREDEXP:
109 case V_038004_FMT_32_AS_8:
110 case V_038004_FMT_32_AS_8_8:
111 *bpe = 4;
112 break;
113 case V_038004_COLOR_X24_8_32_FLOAT:
114 case V_038004_COLOR_32_32:
115 case V_038004_COLOR_32_32_FLOAT:
116 case V_038004_COLOR_16_16_16_16:
117 case V_038004_COLOR_16_16_16_16_FLOAT:
118 *bpe = 8;
119 break;
120 case V_038004_FMT_16_16_16:
121 case V_038004_FMT_16_16_16_FLOAT:
122 *bpe = 6;
123 break;
124 case V_038004_FMT_32_32_32:
125 case V_038004_FMT_32_32_32_FLOAT:
126 *bpe = 12;
127 break;
128 case V_038004_COLOR_32_32_32_32:
129 case V_038004_COLOR_32_32_32_32_FLOAT:
130 *bpe = 16;
131 break;
132 case V_038004_FMT_GB_GR:
133 case V_038004_FMT_BG_RG:
134 case V_038004_COLOR_INVALID:
135 *bpe = 16;
136 return -EINVAL;
137 }
138 return 0;
139}
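As a small illustration of how the bytes-per-element values above are consumed, the linear footprint of a color surface is just pitch * height * bpe; the 1280x720 dimensions below are hypothetical example values, not taken from the driver.

#include <stdio.h>

/* Linear footprint of a hypothetical 1280x720 surface in a 4-byte format
 * (one of the bpe = 4 entries in the table above). */
int main(void)
{
        unsigned pitch = 1280, height = 720, bpe = 4;

        printf("%u bytes\n", pitch * height * bpe); /* 3686400 */
        return 0;
}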
140
141static void r600_cs_track_init(struct r600_cs_track *track)
142{
143 int i;
144
145 /* assume DX9 mode */
146 track->sq_config = DX9_CONSTS;
147 for (i = 0; i < 8; i++) {
148 track->cb_color_base_last[i] = 0;
149 track->cb_color_size[i] = 0;
150 track->cb_color_size_idx[i] = 0;
151 track->cb_color_info[i] = 0;
152 track->cb_color_bo[i] = NULL;
153 track->cb_color_bo_offset[i] = 0xFFFFFFFF;
154 }
155 track->cb_target_mask = 0xFFFFFFFF;
156 track->cb_shader_mask = 0xFFFFFFFF;
157 track->db_bo = NULL;
158 /* assume the biggest format and that htile is enabled */
159 track->db_depth_info = 7 | (1 << 25);
160 track->db_depth_view = 0xFFFFC000;
161 track->db_depth_size = 0xFFFFFFFF;
162 track->db_depth_size_idx = 0;
163 track->db_depth_control = 0xFFFFFFFF;
164}
165
166static inline int r600_cs_track_validate_cb(struct radeon_cs_parser *p, int i)
167{
168 struct r600_cs_track *track = p->track;
169 u32 bpe = 0, pitch, slice_tile_max, size, tmp, height;
170 volatile u32 *ib = p->ib->ptr;
171
172 if (G_0280A0_TILE_MODE(track->cb_color_info[i])) {
173 dev_warn(p->dev, "FMASK or CMASK buffer are not supported by this kernel\n");
174 return -EINVAL;
175 }
176 size = radeon_bo_size(track->cb_color_bo[i]);
177 if (r600_bpe_from_format(&bpe, G_0280A0_FORMAT(track->cb_color_info[i]))) {
178 dev_warn(p->dev, "%s:%d cb invalid format %d for %d (0x%08X)\n",
179 __func__, __LINE__, G_0280A0_FORMAT(track->cb_color_info[i]),
180 i, track->cb_color_info[i]);
181 return -EINVAL;
182 }
183 pitch = (G_028060_PITCH_TILE_MAX(track->cb_color_size[i]) + 1) << 3;
184 slice_tile_max = G_028060_SLICE_TILE_MAX(track->cb_color_size[i]) + 1;
185 if (!pitch) {
186 dev_warn(p->dev, "%s:%d cb pitch (%d) for %d invalid (0x%08X)\n",
187 __func__, __LINE__, pitch, i, track->cb_color_size[i]);
188 return -EINVAL;
189 }
190 height = size / (pitch * bpe);
191 if (height > 8192)
192 height = 8192;
193 switch (G_0280A0_ARRAY_MODE(track->cb_color_info[i])) {
194 case V_0280A0_ARRAY_LINEAR_GENERAL:
195 case V_0280A0_ARRAY_LINEAR_ALIGNED:
196 if (pitch & 0x3f) {
197 dev_warn(p->dev, "%s:%d cb pitch (%d x %d = %d) invalid\n",
198 __func__, __LINE__, pitch, bpe, pitch * bpe);
199 return -EINVAL;
200 }
201 if ((pitch * bpe) & (track->group_size - 1)) {
202 dev_warn(p->dev, "%s:%d cb pitch (%d) invalid\n",
203 __func__, __LINE__, pitch);
204 return -EINVAL;
205 }
206 break;
207 case V_0280A0_ARRAY_1D_TILED_THIN1:
208 if ((pitch * 8 * bpe * track->nsamples) & (track->group_size - 1)) {
209 dev_warn(p->dev, "%s:%d cb pitch (%d) invalid\n",
210 __func__, __LINE__, pitch);
211 return -EINVAL;
212 }
213 height &= ~0x7;
214 if (!height)
215 height = 8;
216 break;
217 case V_0280A0_ARRAY_2D_TILED_THIN1:
218 if (pitch & ((8 * track->nbanks) - 1)) {
219 dev_warn(p->dev, "%s:%d cb pitch (%d) invalid\n",
220 __func__, __LINE__, pitch);
221 return -EINVAL;
222 }
223 tmp = pitch * 8 * bpe * track->nsamples;
224 tmp = tmp / track->nbanks;
225 if (tmp & (track->group_size - 1)) {
226 dev_warn(p->dev, "%s:%d cb pitch (%d) invalid\n",
227 __func__, __LINE__, pitch);
228 return -EINVAL;
229 }
230 height &= ~((16 * track->npipes) - 1);
231 if (!height)
232 height = 16 * track->npipes;
233 break;
234 default:
235 dev_warn(p->dev, "%s invalid tiling %d for %d (0x%08X)\n", __func__,
236 G_0280A0_ARRAY_MODE(track->cb_color_info[i]), i,
237 track->cb_color_info[i]);
238 return -EINVAL;
239 }
240 /* check offset */
241 tmp = height * pitch;
242 if ((tmp + track->cb_color_bo_offset[i]) > radeon_bo_size(track->cb_color_bo[i])) {
243 dev_warn(p->dev, "%s offset[%d] %d to big\n", __func__, i, track->cb_color_bo_offset[i]);
244 return -EINVAL;
245 }
246 /* limit max tile */
247 tmp = (height * pitch) >> 6;
248 if (tmp < slice_tile_max)
249 slice_tile_max = tmp;
250 tmp = S_028060_PITCH_TILE_MAX((pitch >> 3) - 1) |
251 S_028060_SLICE_TILE_MAX(slice_tile_max - 1);
252 ib[track->cb_color_size_idx[i]] = tmp;
253 return 0;
254}
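A worked example of the arithmetic in r600_cs_track_validate_cb() above, using made-up values (PITCH_TILE_MAX = 159, an 8_8_8_8-style format with bpe = 4, a 4 MiB color buffer) and the linear-aligned rules; this is a user-space sketch, not driver code.

#include <stdio.h>

/* Hypothetical CB check: pitch comes from (PITCH_TILE_MAX + 1) * 8 pixels,
 * height from the bo size, then the linear-aligned alignment rules above. */
int main(void)
{
        unsigned pitch_tile_max = 159, bpe = 4, group_size = 256;
        unsigned bo_size = 4u << 20;                    /* 4 MiB */
        unsigned pitch = (pitch_tile_max + 1) << 3;     /* 1280 pixels */
        unsigned height = bo_size / (pitch * bpe);      /* 819 lines */

        if (height > 8192)
                height = 8192;
        printf("pitch %u, height %u\n", pitch, height);
        printf("64-pixel aligned: %s\n", (pitch & 0x3f) ? "no" : "yes");
        printf("row bytes multiple of group size: %s\n",
               ((pitch * bpe) & (group_size - 1)) ? "no" : "yes");
        return 0;
}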
255
256static int r600_cs_track_check(struct radeon_cs_parser *p)
257{
258 struct r600_cs_track *track = p->track;
259 u32 tmp;
260 int r, i;
261 volatile u32 *ib = p->ib->ptr;
262
263 /* on legacy kernels we don't perform the advanced checks */
264 if (p->rdev == NULL)
265 return 0;
266 /* we don't support the SMX output buffer yet */
267 if (track->vgt_strmout_en || track->vgt_strmout_buffer_en) {
268 dev_warn(p->dev, "this kernel doesn't support SMX output buffer\n");
269 return -EINVAL;
270 }
271 /* check that we have a cb for each enabled target, we don't check
272 * shader_mask because it seems mesa isn't always setting it :(
273 */
274 tmp = track->cb_target_mask;
275 for (i = 0; i < 8; i++) {
276 if ((tmp >> (i * 4)) & 0xF) {
277 /* at least one component is enabled */
278 if (track->cb_color_bo[i] == NULL) {
279 dev_warn(p->dev, "%s:%d mask 0x%08X | 0x%08X no cb for %d\n",
280 __func__, __LINE__, track->cb_target_mask, track->cb_shader_mask, i);
281 return -EINVAL;
282 }
283 /* perform rewrite of CB_COLOR[0-7]_SIZE */
284 r = r600_cs_track_validate_cb(p, i);
285 if (r)
286 return r;
287 }
288 }
289 /* Check depth buffer */
290 if (G_028800_STENCIL_ENABLE(track->db_depth_control) ||
291 G_028800_Z_ENABLE(track->db_depth_control)) {
292 u32 nviews, bpe, ntiles;
293 if (track->db_bo == NULL) {
294 dev_warn(p->dev, "z/stencil with no depth buffer\n");
295 return -EINVAL;
296 }
297 if (G_028010_TILE_SURFACE_ENABLE(track->db_depth_info)) {
298 dev_warn(p->dev, "this kernel doesn't support z/stencil htile\n");
299 return -EINVAL;
300 }
301 switch (G_028010_FORMAT(track->db_depth_info)) {
302 case V_028010_DEPTH_16:
303 bpe = 2;
304 break;
305 case V_028010_DEPTH_X8_24:
306 case V_028010_DEPTH_8_24:
307 case V_028010_DEPTH_X8_24_FLOAT:
308 case V_028010_DEPTH_8_24_FLOAT:
309 case V_028010_DEPTH_32_FLOAT:
310 bpe = 4;
311 break;
312 case V_028010_DEPTH_X24_8_32_FLOAT:
313 bpe = 8;
314 break;
315 default:
316 dev_warn(p->dev, "z/stencil with invalid format %d\n", G_028010_FORMAT(track->db_depth_info));
317 return -EINVAL;
318 }
319 if ((track->db_depth_size & 0xFFFFFC00) == 0xFFFFFC00) {
320 if (!track->db_depth_size_idx) {
321 dev_warn(p->dev, "z/stencil buffer size not set\n");
322 return -EINVAL;
323 }
324 printk_once(KERN_WARNING "You have old & broken userspace please consider updating mesa\n");
325 tmp = radeon_bo_size(track->db_bo) - track->db_offset;
326 tmp = (tmp / bpe) >> 6;
327 if (!tmp) {
328 dev_warn(p->dev, "z/stencil buffer too small (0x%08X %d %d %ld)\n",
329 track->db_depth_size, bpe, track->db_offset,
330 radeon_bo_size(track->db_bo));
331 return -EINVAL;
332 }
333 ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF);
334 } else {
335 ntiles = G_028000_SLICE_TILE_MAX(track->db_depth_size) + 1;
336 nviews = G_028004_SLICE_MAX(track->db_depth_view) + 1;
337 tmp = ntiles * bpe * 64 * nviews;
338 if ((tmp + track->db_offset) > radeon_bo_size(track->db_bo)) {
339 dev_warn(p->dev, "z/stencil buffer too small (0x%08X %d %d %d -> %d have %ld)\n",
340 track->db_depth_size, ntiles, nviews, bpe, tmp + track->db_offset,
341 radeon_bo_size(track->db_bo));
342 return -EINVAL;
343 }
344 }
345 }
346 return 0;
347}
38 348
39/** 349/**
40 * r600_cs_packet_parse() - parse cp packet and point ib index to next packet 350 * r600_cs_packet_parse() - parse cp packet and point ib index to next packet
@@ -170,13 +480,35 @@ static int r600_cs_packet_next_reloc_nomm(struct radeon_cs_parser *p,
170 idx, relocs_chunk->length_dw); 480 idx, relocs_chunk->length_dw);
171 return -EINVAL; 481 return -EINVAL;
172 } 482 }
173 *cs_reloc = &p->relocs[0]; 483 *cs_reloc = p->relocs;
174 (*cs_reloc)->lobj.gpu_offset = (u64)relocs_chunk->kdata[idx + 3] << 32; 484 (*cs_reloc)->lobj.gpu_offset = (u64)relocs_chunk->kdata[idx + 3] << 32;
175 (*cs_reloc)->lobj.gpu_offset |= relocs_chunk->kdata[idx + 0]; 485 (*cs_reloc)->lobj.gpu_offset |= relocs_chunk->kdata[idx + 0];
176 return 0; 486 return 0;
177} 487}
178 488
179/** 489/**
490 * r600_cs_packet_next_is_pkt3_nop() - test if next packet is packet3 nop for reloc
491 * @parser: parser structure holding parsing context.
492 *
493 * Check whether the next packet is a relocation packet3 (NOP). Returns 1
494 * if it is and 0 otherwise; the parser index is not advanced.
495 **/
496static inline int r600_cs_packet_next_is_pkt3_nop(struct radeon_cs_parser *p)
497{
498 struct radeon_cs_packet p3reloc;
499 int r;
500
501 r = r600_cs_packet_parse(p, &p3reloc, p->idx);
502 if (r) {
503 return 0;
504 }
505 if (p3reloc.type != PACKET_TYPE3 || p3reloc.opcode != PACKET3_NOP) {
506 return 0;
507 }
508 return 1;
509}
510
511/**
180 * r600_cs_packet_next_vline() - parse userspace VLINE packet 512 * r600_cs_packet_next_vline() - parse userspace VLINE packet
181 * @parser: parser structure holding parsing context. 513 * @parser: parser structure holding parsing context.
182 * 514 *
@@ -333,10 +665,390 @@ static int r600_cs_parse_packet0(struct radeon_cs_parser *p,
333 return 0; 665 return 0;
334} 666}
335 667
668/**
669 * r600_cs_check_reg() - check if register is authorized or not
670 * @parser: parser structure holding parsing context
671 * @reg: register we are testing
672 * @idx: index into the cs buffer
673 *
674 * This function will test against r600_reg_safe_bm and return 0
675 * if the register is safe. If the register is not flagged as safe this
676 * function will test it against a list of registers needing special handling.
677 */
678static inline int r600_cs_check_reg(struct radeon_cs_parser *p, u32 reg, u32 idx)
679{
680 struct r600_cs_track *track = (struct r600_cs_track *)p->track;
681 struct radeon_cs_reloc *reloc;
682 u32 last_reg = ARRAY_SIZE(r600_reg_safe_bm);
683 u32 m, i, tmp, *ib;
684 int r;
685
686 i = (reg >> 7);
687 if (i >= last_reg) {
688 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx);
689 return -EINVAL;
690 }
691 m = 1 << ((reg >> 2) & 31);
692 if (!(r600_reg_safe_bm[i] & m))
693 return 0;
694 ib = p->ib->ptr;
695 switch (reg) {
696 /* force the following regs to 0 in an attempt to disable the out buffer
697 * until we better understand how it works and can perform proper
698 * security checks on it (Jerome)
699 */
700 case R_0288A8_SQ_ESGS_RING_ITEMSIZE:
701 case R_008C44_SQ_ESGS_RING_SIZE:
702 case R_0288B0_SQ_ESTMP_RING_ITEMSIZE:
703 case R_008C54_SQ_ESTMP_RING_SIZE:
704 case R_0288C0_SQ_FBUF_RING_ITEMSIZE:
705 case R_008C74_SQ_FBUF_RING_SIZE:
706 case R_0288B4_SQ_GSTMP_RING_ITEMSIZE:
707 case R_008C5C_SQ_GSTMP_RING_SIZE:
708 case R_0288AC_SQ_GSVS_RING_ITEMSIZE:
709 case R_008C4C_SQ_GSVS_RING_SIZE:
710 case R_0288BC_SQ_PSTMP_RING_ITEMSIZE:
711 case R_008C6C_SQ_PSTMP_RING_SIZE:
712 case R_0288C4_SQ_REDUC_RING_ITEMSIZE:
713 case R_008C7C_SQ_REDUC_RING_SIZE:
714 case R_0288B8_SQ_VSTMP_RING_ITEMSIZE:
715 case R_008C64_SQ_VSTMP_RING_SIZE:
716 case R_0288C8_SQ_GS_VERT_ITEMSIZE:
717 /* get the value to populate the IB, don't remove */
718 tmp = radeon_get_ib_value(p, idx);
719 ib[idx] = 0;
720 break;
721 case SQ_CONFIG:
722 track->sq_config = radeon_get_ib_value(p, idx);
723 break;
724 case R_028800_DB_DEPTH_CONTROL:
725 track->db_depth_control = radeon_get_ib_value(p, idx);
726 break;
727 case R_028010_DB_DEPTH_INFO:
728 track->db_depth_info = radeon_get_ib_value(p, idx);
729 break;
730 case R_028004_DB_DEPTH_VIEW:
731 track->db_depth_view = radeon_get_ib_value(p, idx);
732 break;
733 case R_028000_DB_DEPTH_SIZE:
734 track->db_depth_size = radeon_get_ib_value(p, idx);
735 track->db_depth_size_idx = idx;
736 break;
737 case R_028AB0_VGT_STRMOUT_EN:
738 track->vgt_strmout_en = radeon_get_ib_value(p, idx);
739 break;
740 case R_028B20_VGT_STRMOUT_BUFFER_EN:
741 track->vgt_strmout_buffer_en = radeon_get_ib_value(p, idx);
742 break;
743 case R_028238_CB_TARGET_MASK:
744 track->cb_target_mask = radeon_get_ib_value(p, idx);
745 break;
746 case R_02823C_CB_SHADER_MASK:
747 track->cb_shader_mask = radeon_get_ib_value(p, idx);
748 break;
749 case R_028C04_PA_SC_AA_CONFIG:
750 tmp = G_028C04_MSAA_NUM_SAMPLES(radeon_get_ib_value(p, idx));
751 track->nsamples = 1 << tmp;
752 break;
753 case R_0280A0_CB_COLOR0_INFO:
754 case R_0280A4_CB_COLOR1_INFO:
755 case R_0280A8_CB_COLOR2_INFO:
756 case R_0280AC_CB_COLOR3_INFO:
757 case R_0280B0_CB_COLOR4_INFO:
758 case R_0280B4_CB_COLOR5_INFO:
759 case R_0280B8_CB_COLOR6_INFO:
760 case R_0280BC_CB_COLOR7_INFO:
761 tmp = (reg - R_0280A0_CB_COLOR0_INFO) / 4;
762 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx);
763 break;
764 case R_028060_CB_COLOR0_SIZE:
765 case R_028064_CB_COLOR1_SIZE:
766 case R_028068_CB_COLOR2_SIZE:
767 case R_02806C_CB_COLOR3_SIZE:
768 case R_028070_CB_COLOR4_SIZE:
769 case R_028074_CB_COLOR5_SIZE:
770 case R_028078_CB_COLOR6_SIZE:
771 case R_02807C_CB_COLOR7_SIZE:
772 tmp = (reg - R_028060_CB_COLOR0_SIZE) / 4;
773 track->cb_color_size[tmp] = radeon_get_ib_value(p, idx);
774 track->cb_color_size_idx[tmp] = idx;
775 break;
776 /* These registers were added late; there is userspace
777 * which does provide a relocation for them but sets a
778 * 0 offset. In order to avoid breaking old userspace
779 * we detect this and set the address to point to the last
780 * CB_COLOR0_BASE. Note that if userspace doesn't set
781 * CB_COLOR0_BASE before this register we will report an
782 * error. Old userspace always sets CB_COLOR0_BASE
783 * before any of this.
784 */
785 case R_0280E0_CB_COLOR0_FRAG:
786 case R_0280E4_CB_COLOR1_FRAG:
787 case R_0280E8_CB_COLOR2_FRAG:
788 case R_0280EC_CB_COLOR3_FRAG:
789 case R_0280F0_CB_COLOR4_FRAG:
790 case R_0280F4_CB_COLOR5_FRAG:
791 case R_0280F8_CB_COLOR6_FRAG:
792 case R_0280FC_CB_COLOR7_FRAG:
793 tmp = (reg - R_0280E0_CB_COLOR0_FRAG) / 4;
794 if (!r600_cs_packet_next_is_pkt3_nop(p)) {
795 if (!track->cb_color_base_last[tmp]) {
796 dev_err(p->dev, "Broken old userspace ? no cb_color0_base supplied before trying to write 0x%08X\n", reg);
797 return -EINVAL;
798 }
799 ib[idx] = track->cb_color_base_last[tmp];
800 printk_once(KERN_WARNING "You have old & broken userspace "
801 "please consider updating mesa & xf86-video-ati\n");
802 track->cb_color_frag_bo[tmp] = track->cb_color_bo[tmp];
803 } else {
804 r = r600_cs_packet_next_reloc(p, &reloc);
805 if (r) {
806 dev_err(p->dev, "bad SET_CONTEXT_REG 0x%04X\n", reg);
807 return -EINVAL;
808 }
809 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
810 track->cb_color_frag_bo[tmp] = reloc->robj;
811 }
812 break;
813 case R_0280C0_CB_COLOR0_TILE:
814 case R_0280C4_CB_COLOR1_TILE:
815 case R_0280C8_CB_COLOR2_TILE:
816 case R_0280CC_CB_COLOR3_TILE:
817 case R_0280D0_CB_COLOR4_TILE:
818 case R_0280D4_CB_COLOR5_TILE:
819 case R_0280D8_CB_COLOR6_TILE:
820 case R_0280DC_CB_COLOR7_TILE:
821 tmp = (reg - R_0280C0_CB_COLOR0_TILE) / 4;
822 if (!r600_cs_packet_next_is_pkt3_nop(p)) {
823 if (!track->cb_color_base_last[tmp]) {
824 dev_err(p->dev, "Broken old userspace ? no cb_color0_base supplied before trying to write 0x%08X\n", reg);
825 return -EINVAL;
826 }
827 ib[idx] = track->cb_color_base_last[tmp];
828 printk_once(KERN_WARNING "You have old & broken userspace "
829 "please consider updating mesa & xf86-video-ati\n");
830 track->cb_color_tile_bo[tmp] = track->cb_color_bo[tmp];
831 } else {
832 r = r600_cs_packet_next_reloc(p, &reloc);
833 if (r) {
834 dev_err(p->dev, "bad SET_CONTEXT_REG 0x%04X\n", reg);
835 return -EINVAL;
836 }
837 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
838 track->cb_color_tile_bo[tmp] = reloc->robj;
839 }
840 break;
841 case CB_COLOR0_BASE:
842 case CB_COLOR1_BASE:
843 case CB_COLOR2_BASE:
844 case CB_COLOR3_BASE:
845 case CB_COLOR4_BASE:
846 case CB_COLOR5_BASE:
847 case CB_COLOR6_BASE:
848 case CB_COLOR7_BASE:
849 r = r600_cs_packet_next_reloc(p, &reloc);
850 if (r) {
851 dev_warn(p->dev, "bad SET_CONTEXT_REG "
852 "0x%04X\n", reg);
853 return -EINVAL;
854 }
855 tmp = (reg - CB_COLOR0_BASE) / 4;
856 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx);
857 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
858 track->cb_color_base_last[tmp] = ib[idx];
859 track->cb_color_bo[tmp] = reloc->robj;
860 break;
861 case DB_DEPTH_BASE:
862 r = r600_cs_packet_next_reloc(p, &reloc);
863 if (r) {
864 dev_warn(p->dev, "bad SET_CONTEXT_REG "
865 "0x%04X\n", reg);
866 return -EINVAL;
867 }
868 track->db_offset = radeon_get_ib_value(p, idx);
869 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
870 track->db_bo = reloc->robj;
871 break;
872 case DB_HTILE_DATA_BASE:
873 case SQ_PGM_START_FS:
874 case SQ_PGM_START_ES:
875 case SQ_PGM_START_VS:
876 case SQ_PGM_START_GS:
877 case SQ_PGM_START_PS:
878 case SQ_ALU_CONST_CACHE_GS_0:
879 case SQ_ALU_CONST_CACHE_GS_1:
880 case SQ_ALU_CONST_CACHE_GS_2:
881 case SQ_ALU_CONST_CACHE_GS_3:
882 case SQ_ALU_CONST_CACHE_GS_4:
883 case SQ_ALU_CONST_CACHE_GS_5:
884 case SQ_ALU_CONST_CACHE_GS_6:
885 case SQ_ALU_CONST_CACHE_GS_7:
886 case SQ_ALU_CONST_CACHE_GS_8:
887 case SQ_ALU_CONST_CACHE_GS_9:
888 case SQ_ALU_CONST_CACHE_GS_10:
889 case SQ_ALU_CONST_CACHE_GS_11:
890 case SQ_ALU_CONST_CACHE_GS_12:
891 case SQ_ALU_CONST_CACHE_GS_13:
892 case SQ_ALU_CONST_CACHE_GS_14:
893 case SQ_ALU_CONST_CACHE_GS_15:
894 case SQ_ALU_CONST_CACHE_PS_0:
895 case SQ_ALU_CONST_CACHE_PS_1:
896 case SQ_ALU_CONST_CACHE_PS_2:
897 case SQ_ALU_CONST_CACHE_PS_3:
898 case SQ_ALU_CONST_CACHE_PS_4:
899 case SQ_ALU_CONST_CACHE_PS_5:
900 case SQ_ALU_CONST_CACHE_PS_6:
901 case SQ_ALU_CONST_CACHE_PS_7:
902 case SQ_ALU_CONST_CACHE_PS_8:
903 case SQ_ALU_CONST_CACHE_PS_9:
904 case SQ_ALU_CONST_CACHE_PS_10:
905 case SQ_ALU_CONST_CACHE_PS_11:
906 case SQ_ALU_CONST_CACHE_PS_12:
907 case SQ_ALU_CONST_CACHE_PS_13:
908 case SQ_ALU_CONST_CACHE_PS_14:
909 case SQ_ALU_CONST_CACHE_PS_15:
910 case SQ_ALU_CONST_CACHE_VS_0:
911 case SQ_ALU_CONST_CACHE_VS_1:
912 case SQ_ALU_CONST_CACHE_VS_2:
913 case SQ_ALU_CONST_CACHE_VS_3:
914 case SQ_ALU_CONST_CACHE_VS_4:
915 case SQ_ALU_CONST_CACHE_VS_5:
916 case SQ_ALU_CONST_CACHE_VS_6:
917 case SQ_ALU_CONST_CACHE_VS_7:
918 case SQ_ALU_CONST_CACHE_VS_8:
919 case SQ_ALU_CONST_CACHE_VS_9:
920 case SQ_ALU_CONST_CACHE_VS_10:
921 case SQ_ALU_CONST_CACHE_VS_11:
922 case SQ_ALU_CONST_CACHE_VS_12:
923 case SQ_ALU_CONST_CACHE_VS_13:
924 case SQ_ALU_CONST_CACHE_VS_14:
925 case SQ_ALU_CONST_CACHE_VS_15:
926 r = r600_cs_packet_next_reloc(p, &reloc);
927 if (r) {
928 dev_warn(p->dev, "bad SET_CONTEXT_REG "
929 "0x%04X\n", reg);
930 return -EINVAL;
931 }
932 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
933 break;
934 default:
935 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx);
936 return -EINVAL;
937 }
938 return 0;
939}
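The bitmap test at the top of r600_cs_check_reg() packs one bit per register dword: reg >> 7 picks a 32-bit word of r600_reg_safe_bm (32 registers per word) and (reg >> 2) & 31 the bit inside it; a clear bit means the register is allowed as-is, a set bit routes it to the switch above. A sketch of that indexing against a made-up bitmap; reg_needs_check() is not a driver function.

#include <stdio.h>

/* Same indexing as the r600_reg_safe_bm lookup above, with a hypothetical
 * two-word bitmap. Returns -1 for out-of-range, 0 for "allowed as-is",
 * 1 for "needs the special-case switch". */
static int reg_needs_check(const unsigned *bm, unsigned nwords, unsigned reg)
{
        unsigned i = reg >> 7;                 /* 32 registers per bitmap word */
        unsigned m = 1u << ((reg >> 2) & 31);  /* bit within that word */

        if (i >= nwords)
                return -1;
        return (bm[i] & m) != 0;
}

int main(void)
{
        unsigned bm[2] = { 0x00000010, 0 };    /* made-up contents */

        printf("reg 0x0010 -> %d\n", reg_needs_check(bm, 2, 0x0010)); /* 1 */
        printf("reg 0x0020 -> %d\n", reg_needs_check(bm, 2, 0x0020)); /* 0 */
        return 0;
}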
940
941static inline unsigned minify(unsigned size, unsigned levels)
942{
943 size = size >> levels;
944 if (size < 1)
945 size = 1;
946 return size;
947}
948
949static void r600_texture_size(unsigned nfaces, unsigned blevel, unsigned nlevels,
950 unsigned w0, unsigned h0, unsigned d0, unsigned bpe,
951 unsigned *l0_size, unsigned *mipmap_size)
952{
953 unsigned offset, i, level, face;
954 unsigned width, height, depth, rowstride, size;
955
956 w0 = minify(w0, 0);
957 h0 = minify(h0, 0);
958 d0 = minify(d0, 0);
959 for (i = 0, offset = 0, level = blevel; i < nlevels; i++, level++) {
960 width = minify(w0, i);
961 height = minify(h0, i);
962 depth = minify(d0, i);
963 for (face = 0; face < nfaces; face++) {
964 rowstride = ((width * bpe) + 255) & ~255;
965 size = height * rowstride * depth;
966 offset += size;
967 offset = (offset + 0x1f) & ~0x1f;
968 }
969 }
970 *l0_size = (((w0 * bpe) + 255) & ~255) * h0 * d0;
971 *mipmap_size = offset;
972 if (!blevel)
973 *mipmap_size -= *l0_size;
974 if (!nlevels)
975 *mipmap_size = *l0_size;
976}
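A worked example of the accumulation in r600_texture_size() above for a hypothetical 100x64 2D texture with bpe = 4, one face, three levels and base level 0 (rows padded to 256 bytes, each face aligned to 32 bytes); this is a simplified user-space sketch.

#include <stdio.h>

/* Per-level size accumulation matching the logic above, simplified to a
 * single face and depth 1; all dimensions are made-up example values. */
int main(void)
{
        unsigned w0 = 100, h0 = 64, bpe = 4, offset = 0, l0_size = 0, i;

        for (i = 0; i < 3; i++) {
                unsigned width = (w0 >> i) ? (w0 >> i) : 1;
                unsigned height = (h0 >> i) ? (h0 >> i) : 1;
                unsigned rowstride = ((width * bpe) + 255) & ~255u;
                unsigned size = height * rowstride;

                offset = (offset + size + 0x1f) & ~0x1fu;
                if (i == 0)
                        l0_size = size;
                printf("level %u: %u bytes (running total %u)\n", i, size, offset);
        }
        printf("l0_size %u, mipmap_size %u\n", l0_size, offset - l0_size);
        return 0;
}

For these inputs it reports a 32768-byte level 0 and a 12288-byte mip chain, which matches what the driver function above would return for the same texture.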
977
978/**
979 * r600_check_texture_resource() - check if register is authorized or not
980 * @p: parser structure holding parsing context
981 * @idx: index into the cs buffer
982 * @texture: texture's bo structure
983 * @mipmap: mipmap's bo structure
984 *
985 * This function will check that the resource has valid fields and that
986 * the texture and mipmap bo objects are big enough to cover this resource.
987 */
988static inline int r600_check_texture_resource(struct radeon_cs_parser *p, u32 idx,
989 struct radeon_bo *texture,
990 struct radeon_bo *mipmap)
991{
992 u32 nfaces, nlevels, blevel, w0, h0, d0, bpe = 0;
993 u32 word0, word1, l0_size, mipmap_size;
994
995 /* on legacy kernels we don't perform the advanced checks */
996 if (p->rdev == NULL)
997 return 0;
998 word0 = radeon_get_ib_value(p, idx + 0);
999 word1 = radeon_get_ib_value(p, idx + 1);
1000 w0 = G_038000_TEX_WIDTH(word0) + 1;
1001 h0 = G_038004_TEX_HEIGHT(word1) + 1;
1002 d0 = G_038004_TEX_DEPTH(word1);
1003 nfaces = 1;
1004 switch (G_038000_DIM(word0)) {
1005 case V_038000_SQ_TEX_DIM_1D:
1006 case V_038000_SQ_TEX_DIM_2D:
1007 case V_038000_SQ_TEX_DIM_3D:
1008 break;
1009 case V_038000_SQ_TEX_DIM_CUBEMAP:
1010 nfaces = 6;
1011 break;
1012 case V_038000_SQ_TEX_DIM_1D_ARRAY:
1013 case V_038000_SQ_TEX_DIM_2D_ARRAY:
1014 case V_038000_SQ_TEX_DIM_2D_MSAA:
1015 case V_038000_SQ_TEX_DIM_2D_ARRAY_MSAA:
1016 default:
1017 dev_warn(p->dev, "this kernel doesn't support texture dim %d\n", G_038000_DIM(word0));
1018 return -EINVAL;
1019 }
1020 if (r600_bpe_from_format(&bpe, G_038004_DATA_FORMAT(word1))) {
1021 dev_warn(p->dev, "%s:%d texture invalid format %d\n",
1022 __func__, __LINE__, G_038004_DATA_FORMAT(word1));
1023 return -EINVAL;
1024 }
1025 word0 = radeon_get_ib_value(p, idx + 4);
1026 word1 = radeon_get_ib_value(p, idx + 5);
1027 blevel = G_038010_BASE_LEVEL(word0);
1028 nlevels = G_038014_LAST_LEVEL(word1);
1029 r600_texture_size(nfaces, blevel, nlevels, w0, h0, d0, bpe, &l0_size, &mipmap_size);
1030 /* using get ib will give us the offset into the texture bo */
1031 word0 = radeon_get_ib_value(p, idx + 2);
1032 if ((l0_size + word0) > radeon_bo_size(texture)) {
1033 dev_warn(p->dev, "texture bo too small (%d %d %d %d -> %d have %ld)\n",
1034 w0, h0, bpe, word0, l0_size, radeon_bo_size(texture));
1035 return -EINVAL;
1036 }
1037 /* using get ib will give us the offset into the mipmap bo */
1038 word0 = radeon_get_ib_value(p, idx + 3);
1039 if ((mipmap_size + word0) > radeon_bo_size(mipmap)) {
1040 dev_warn(p->dev, "mipmap bo too small (%d %d %d %d %d %d -> %d have %ld)\n",
1041 w0, h0, bpe, blevel, nlevels, word0, mipmap_size, radeon_bo_size(mipmap));
1042 return -EINVAL;
1043 }
1044 return 0;
1045}
1046
336static int r600_packet3_check(struct radeon_cs_parser *p, 1047static int r600_packet3_check(struct radeon_cs_parser *p,
337 struct radeon_cs_packet *pkt) 1048 struct radeon_cs_packet *pkt)
338{ 1049{
339 struct radeon_cs_reloc *reloc; 1050 struct radeon_cs_reloc *reloc;
1051 struct r600_cs_track *track;
340 volatile u32 *ib; 1052 volatile u32 *ib;
341 unsigned idx; 1053 unsigned idx;
342 unsigned i; 1054 unsigned i;
@@ -344,6 +1056,7 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
344 int r; 1056 int r;
345 u32 idx_value; 1057 u32 idx_value;
346 1058
1059 track = (struct r600_cs_track *)p->track;
347 ib = p->ib->ptr; 1060 ib = p->ib->ptr;
348 idx = pkt->idx + 1; 1061 idx = pkt->idx + 1;
349 idx_value = radeon_get_ib_value(p, idx); 1062 idx_value = radeon_get_ib_value(p, idx);
@@ -380,12 +1093,22 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
380 } 1093 }
381 ib[idx+0] = idx_value + (u32)(reloc->lobj.gpu_offset & 0xffffffff); 1094 ib[idx+0] = idx_value + (u32)(reloc->lobj.gpu_offset & 0xffffffff);
382 ib[idx+1] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff; 1095 ib[idx+1] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
1096 r = r600_cs_track_check(p);
1097 if (r) {
1098 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
1099 return r;
1100 }
383 break; 1101 break;
384 case PACKET3_DRAW_INDEX_AUTO: 1102 case PACKET3_DRAW_INDEX_AUTO:
385 if (pkt->count != 1) { 1103 if (pkt->count != 1) {
386 DRM_ERROR("bad DRAW_INDEX_AUTO\n"); 1104 DRM_ERROR("bad DRAW_INDEX_AUTO\n");
387 return -EINVAL; 1105 return -EINVAL;
388 } 1106 }
1107 r = r600_cs_track_check(p);
1108 if (r) {
1109 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx);
1110 return r;
1111 }
389 break; 1112 break;
390 case PACKET3_DRAW_INDEX_IMMD_BE: 1113 case PACKET3_DRAW_INDEX_IMMD_BE:
391 case PACKET3_DRAW_INDEX_IMMD: 1114 case PACKET3_DRAW_INDEX_IMMD:
@@ -393,6 +1116,11 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
393 DRM_ERROR("bad DRAW_INDEX_IMMD\n"); 1116 DRM_ERROR("bad DRAW_INDEX_IMMD\n");
394 return -EINVAL; 1117 return -EINVAL;
395 } 1118 }
1119 r = r600_cs_track_check(p);
1120 if (r) {
1121 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
1122 return r;
1123 }
396 break; 1124 break;
397 case PACKET3_WAIT_REG_MEM: 1125 case PACKET3_WAIT_REG_MEM:
398 if (pkt->count != 5) { 1126 if (pkt->count != 5) {
@@ -465,30 +1193,9 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
465 } 1193 }
466 for (i = 0; i < pkt->count; i++) { 1194 for (i = 0; i < pkt->count; i++) {
467 reg = start_reg + (4 * i); 1195 reg = start_reg + (4 * i);
468 switch (reg) { 1196 r = r600_cs_check_reg(p, reg, idx+1+i);
469 case SQ_ESGS_RING_BASE: 1197 if (r)
470 case SQ_GSVS_RING_BASE: 1198 return r;
471 case SQ_ESTMP_RING_BASE:
472 case SQ_GSTMP_RING_BASE:
473 case SQ_VSTMP_RING_BASE:
474 case SQ_PSTMP_RING_BASE:
475 case SQ_FBUF_RING_BASE:
476 case SQ_REDUC_RING_BASE:
477 case SX_MEMORY_EXPORT_BASE:
478 r = r600_cs_packet_next_reloc(p, &reloc);
479 if (r) {
480 DRM_ERROR("bad SET_CONFIG_REG "
481 "0x%04X\n", reg);
482 return -EINVAL;
483 }
484 ib[idx+1+i] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
485 break;
486 case CP_COHER_BASE:
487 /* use PACKET3_SURFACE_SYNC */
488 return -EINVAL;
489 default:
490 break;
491 }
492 } 1199 }
493 break; 1200 break;
494 case PACKET3_SET_CONTEXT_REG: 1201 case PACKET3_SET_CONTEXT_REG:
@@ -502,55 +1209,9 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
502 } 1209 }
503 for (i = 0; i < pkt->count; i++) { 1210 for (i = 0; i < pkt->count; i++) {
504 reg = start_reg + (4 * i); 1211 reg = start_reg + (4 * i);
505 switch (reg) { 1212 r = r600_cs_check_reg(p, reg, idx+1+i);
506 case DB_DEPTH_BASE: 1213 if (r)
507 case DB_HTILE_DATA_BASE: 1214 return r;
508 case CB_COLOR0_BASE:
509 case CB_COLOR1_BASE:
510 case CB_COLOR2_BASE:
511 case CB_COLOR3_BASE:
512 case CB_COLOR4_BASE:
513 case CB_COLOR5_BASE:
514 case CB_COLOR6_BASE:
515 case CB_COLOR7_BASE:
516 case SQ_PGM_START_FS:
517 case SQ_PGM_START_ES:
518 case SQ_PGM_START_VS:
519 case SQ_PGM_START_GS:
520 case SQ_PGM_START_PS:
521 r = r600_cs_packet_next_reloc(p, &reloc);
522 if (r) {
523 DRM_ERROR("bad SET_CONTEXT_REG "
524 "0x%04X\n", reg);
525 return -EINVAL;
526 }
527 ib[idx+1+i] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
528 break;
529 case VGT_DMA_BASE:
530 case VGT_DMA_BASE_HI:
531 /* These should be handled by DRAW_INDEX packet 3 */
532 case VGT_STRMOUT_BASE_OFFSET_0:
533 case VGT_STRMOUT_BASE_OFFSET_1:
534 case VGT_STRMOUT_BASE_OFFSET_2:
535 case VGT_STRMOUT_BASE_OFFSET_3:
536 case VGT_STRMOUT_BASE_OFFSET_HI_0:
537 case VGT_STRMOUT_BASE_OFFSET_HI_1:
538 case VGT_STRMOUT_BASE_OFFSET_HI_2:
539 case VGT_STRMOUT_BASE_OFFSET_HI_3:
540 case VGT_STRMOUT_BUFFER_BASE_0:
541 case VGT_STRMOUT_BUFFER_BASE_1:
542 case VGT_STRMOUT_BUFFER_BASE_2:
543 case VGT_STRMOUT_BUFFER_BASE_3:
544 case VGT_STRMOUT_BUFFER_OFFSET_0:
545 case VGT_STRMOUT_BUFFER_OFFSET_1:
546 case VGT_STRMOUT_BUFFER_OFFSET_2:
547 case VGT_STRMOUT_BUFFER_OFFSET_3:
548 /* These should be handled by STRMOUT_BUFFER packet 3 */
549 DRM_ERROR("bad context reg: 0x%08x\n", reg);
550 return -EINVAL;
551 default:
552 break;
553 }
554 } 1215 }
555 break; 1216 break;
556 case PACKET3_SET_RESOURCE: 1217 case PACKET3_SET_RESOURCE:
@@ -567,6 +1228,9 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
567 return -EINVAL; 1228 return -EINVAL;
568 } 1229 }
569 for (i = 0; i < (pkt->count / 7); i++) { 1230 for (i = 0; i < (pkt->count / 7); i++) {
1231 struct radeon_bo *texture, *mipmap;
1232 u32 size, offset;
1233
570 switch (G__SQ_VTX_CONSTANT_TYPE(radeon_get_ib_value(p, idx+(i*7)+6+1))) { 1234 switch (G__SQ_VTX_CONSTANT_TYPE(radeon_get_ib_value(p, idx+(i*7)+6+1))) {
571 case SQ_TEX_VTX_VALID_TEXTURE: 1235 case SQ_TEX_VTX_VALID_TEXTURE:
572 /* tex base */ 1236 /* tex base */
@@ -576,6 +1240,7 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
576 return -EINVAL; 1240 return -EINVAL;
577 } 1241 }
578 ib[idx+1+(i*7)+2] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff); 1242 ib[idx+1+(i*7)+2] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
1243 texture = reloc->robj;
579 /* tex mip base */ 1244 /* tex mip base */
580 r = r600_cs_packet_next_reloc(p, &reloc); 1245 r = r600_cs_packet_next_reloc(p, &reloc);
581 if (r) { 1246 if (r) {
@@ -583,6 +1248,11 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
583 return -EINVAL; 1248 return -EINVAL;
584 } 1249 }
585 ib[idx+1+(i*7)+3] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff); 1250 ib[idx+1+(i*7)+3] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
1251 mipmap = reloc->robj;
1252 r = r600_check_texture_resource(p, idx+(i*7)+1,
1253 texture, mipmap);
1254 if (r)
1255 return r;
586 break; 1256 break;
587 case SQ_TEX_VTX_VALID_BUFFER: 1257 case SQ_TEX_VTX_VALID_BUFFER:
588 /* vtx base */ 1258 /* vtx base */
@@ -591,6 +1261,13 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
591 DRM_ERROR("bad SET_RESOURCE\n"); 1261 DRM_ERROR("bad SET_RESOURCE\n");
592 return -EINVAL; 1262 return -EINVAL;
593 } 1263 }
1264 offset = radeon_get_ib_value(p, idx+1+(i*7)+0);
1265 size = radeon_get_ib_value(p, idx+1+(i*7)+1);
1266 if (p->rdev && (size + offset) > radeon_bo_size(reloc->robj)) {
1267 /* force size to size of the buffer */
1268 dev_warn(p->dev, "vbo resource seems too big for the bo\n");
1269 ib[idx+1+(i*7)+1] = radeon_bo_size(reloc->robj);
1270 }
594 ib[idx+1+(i*7)+0] += (u32)((reloc->lobj.gpu_offset) & 0xffffffff); 1271 ib[idx+1+(i*7)+0] += (u32)((reloc->lobj.gpu_offset) & 0xffffffff);
595 ib[idx+1+(i*7)+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff; 1272 ib[idx+1+(i*7)+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
596 break; 1273 break;
@@ -603,13 +1280,15 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
603 } 1280 }
604 break; 1281 break;
605 case PACKET3_SET_ALU_CONST: 1282 case PACKET3_SET_ALU_CONST:
606 start_reg = (idx_value << 2) + PACKET3_SET_ALU_CONST_OFFSET; 1283 if (track->sq_config & DX9_CONSTS) {
607 end_reg = 4 * pkt->count + start_reg - 4; 1284 start_reg = (idx_value << 2) + PACKET3_SET_ALU_CONST_OFFSET;
608 if ((start_reg < PACKET3_SET_ALU_CONST_OFFSET) || 1285 end_reg = 4 * pkt->count + start_reg - 4;
609 (start_reg >= PACKET3_SET_ALU_CONST_END) || 1286 if ((start_reg < PACKET3_SET_ALU_CONST_OFFSET) ||
610 (end_reg >= PACKET3_SET_ALU_CONST_END)) { 1287 (start_reg >= PACKET3_SET_ALU_CONST_END) ||
611 DRM_ERROR("bad SET_ALU_CONST\n"); 1288 (end_reg >= PACKET3_SET_ALU_CONST_END)) {
612 return -EINVAL; 1289 DRM_ERROR("bad SET_ALU_CONST\n");
1290 return -EINVAL;
1291 }
613 } 1292 }
614 break; 1293 break;
615 case PACKET3_SET_BOOL_CONST: 1294 case PACKET3_SET_BOOL_CONST:
@@ -678,11 +1357,31 @@ static int r600_packet3_check(struct radeon_cs_parser *p,
678int r600_cs_parse(struct radeon_cs_parser *p) 1357int r600_cs_parse(struct radeon_cs_parser *p)
679{ 1358{
680 struct radeon_cs_packet pkt; 1359 struct radeon_cs_packet pkt;
1360 struct r600_cs_track *track;
681 int r; 1361 int r;
682 1362
1363 if (p->track == NULL) {
1364 /* initialize tracker, we are in kms */
1365 track = kzalloc(sizeof(*track), GFP_KERNEL);
1366 if (track == NULL)
1367 return -ENOMEM;
1368 r600_cs_track_init(track);
1369 if (p->rdev->family < CHIP_RV770) {
1370 track->npipes = p->rdev->config.r600.tiling_npipes;
1371 track->nbanks = p->rdev->config.r600.tiling_nbanks;
1372 track->group_size = p->rdev->config.r600.tiling_group_size;
1373 } else if (p->rdev->family <= CHIP_RV740) {
1374 track->npipes = p->rdev->config.rv770.tiling_npipes;
1375 track->nbanks = p->rdev->config.rv770.tiling_nbanks;
1376 track->group_size = p->rdev->config.rv770.tiling_group_size;
1377 }
1378 p->track = track;
1379 }
683 do { 1380 do {
684 r = r600_cs_packet_parse(p, &pkt, p->idx); 1381 r = r600_cs_packet_parse(p, &pkt, p->idx);
685 if (r) { 1382 if (r) {
1383 kfree(p->track);
1384 p->track = NULL;
686 return r; 1385 return r;
687 } 1386 }
688 p->idx += pkt.count + 2; 1387 p->idx += pkt.count + 2;
@@ -697,9 +1396,13 @@ int r600_cs_parse(struct radeon_cs_parser *p)
697 break; 1396 break;
698 default: 1397 default:
699 DRM_ERROR("Unknown packet type %d !\n", pkt.type); 1398 DRM_ERROR("Unknown packet type %d !\n", pkt.type);
1399 kfree(p->track);
1400 p->track = NULL;
700 return -EINVAL; 1401 return -EINVAL;
701 } 1402 }
702 if (r) { 1403 if (r) {
1404 kfree(p->track);
1405 p->track = NULL;
703 return r; 1406 return r;
704 } 1407 }
705 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); 1408 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);
@@ -709,6 +1412,8 @@ int r600_cs_parse(struct radeon_cs_parser *p)
709 mdelay(1); 1412 mdelay(1);
710 } 1413 }
711#endif 1414#endif
1415 kfree(p->track);
1416 p->track = NULL;
712 return 0; 1417 return 0;
713} 1418}
714 1419
@@ -717,7 +1422,7 @@ static int r600_cs_parser_relocs_legacy(struct radeon_cs_parser *p)
717 if (p->chunk_relocs_idx == -1) { 1422 if (p->chunk_relocs_idx == -1) {
718 return 0; 1423 return 0;
719 } 1424 }
720 p->relocs = kcalloc(1, sizeof(struct radeon_cs_reloc), GFP_KERNEL); 1425 p->relocs = kzalloc(sizeof(struct radeon_cs_reloc), GFP_KERNEL);
721 if (p->relocs == NULL) { 1426 if (p->relocs == NULL) {
722 return -ENOMEM; 1427 return -ENOMEM;
723 } 1428 }
@@ -751,15 +1456,24 @@ int r600_cs_legacy(struct drm_device *dev, void *data, struct drm_file *filp,
751{ 1456{
752 struct radeon_cs_parser parser; 1457 struct radeon_cs_parser parser;
753 struct radeon_cs_chunk *ib_chunk; 1458 struct radeon_cs_chunk *ib_chunk;
754 struct radeon_ib fake_ib; 1459 struct radeon_ib fake_ib;
1460 struct r600_cs_track *track;
755 int r; 1461 int r;
756 1462
1463 /* initialize tracker */
1464 track = kzalloc(sizeof(*track), GFP_KERNEL);
1465 if (track == NULL)
1466 return -ENOMEM;
1467 r600_cs_track_init(track);
1468 r600_cs_legacy_get_tiling_conf(dev, &track->npipes, &track->nbanks, &track->group_size);
757 /* initialize parser */ 1469 /* initialize parser */
758 memset(&parser, 0, sizeof(struct radeon_cs_parser)); 1470 memset(&parser, 0, sizeof(struct radeon_cs_parser));
759 parser.filp = filp; 1471 parser.filp = filp;
1472 parser.dev = &dev->pdev->dev;
760 parser.rdev = NULL; 1473 parser.rdev = NULL;
761 parser.family = family; 1474 parser.family = family;
762 parser.ib = &fake_ib; 1475 parser.ib = &fake_ib;
1476 parser.track = track;
763 fake_ib.ptr = ib; 1477 fake_ib.ptr = ib;
764 r = radeon_cs_parser_init(&parser, data); 1478 r = radeon_cs_parser_init(&parser, data);
765 if (r) { 1479 if (r) {
diff --git a/drivers/gpu/drm/radeon/r600_hdmi.c b/drivers/gpu/drm/radeon/r600_hdmi.c
new file mode 100644
index 000000000000..2616b822ba68
--- /dev/null
+++ b/drivers/gpu/drm/radeon/r600_hdmi.c
@@ -0,0 +1,566 @@
1/*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Christian König.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Christian König
25 */
26#include "drmP.h"
27#include "radeon_drm.h"
28#include "radeon.h"
29#include "atom.h"
30
31/*
32 * HDMI color format
33 */
34enum r600_hdmi_color_format {
35 RGB = 0,
36 YCC_422 = 1,
37 YCC_444 = 2
38};
39
40/*
41 * IEC60958 status bits
42 */
43enum r600_hdmi_iec_status_bits {
44 AUDIO_STATUS_DIG_ENABLE = 0x01,
45 AUDIO_STATUS_V = 0x02,
46 AUDIO_STATUS_VCFG = 0x04,
47 AUDIO_STATUS_EMPHASIS = 0x08,
48 AUDIO_STATUS_COPYRIGHT = 0x10,
49 AUDIO_STATUS_NONAUDIO = 0x20,
50 AUDIO_STATUS_PROFESSIONAL = 0x40,
51 AUDIO_STATUS_LEVEL = 0x80
52};
53
54struct {
55 uint32_t Clock;
56
57 int N_32kHz;
58 int CTS_32kHz;
59
60 int N_44_1kHz;
61 int CTS_44_1kHz;
62
63 int N_48kHz;
64 int CTS_48kHz;
65
66} r600_hdmi_ACR[] = {
67 /* 32kHz 44.1kHz 48kHz */
68 /* Clock N CTS N CTS N CTS */
69 { 25174, 4576, 28125, 7007, 31250, 6864, 28125 }, /* 25.20/1.001 MHz */
70 { 25200, 4096, 25200, 6272, 28000, 6144, 25200 }, /* 25.20 MHz */
71 { 27000, 4096, 27000, 6272, 30000, 6144, 27000 }, /* 27.00 MHz */
72 { 27027, 4096, 27027, 6272, 30030, 6144, 27027 }, /* 27.00*1.001 MHz */
73 { 54000, 4096, 54000, 6272, 60000, 6144, 54000 }, /* 54.00 MHz */
74 { 54054, 4096, 54054, 6272, 60060, 6144, 54054 }, /* 54.00*1.001 MHz */
75 { 74175, 11648, 210937, 17836, 234375, 11648, 140625 }, /* 74.25/1.001 MHz */
76 { 74250, 4096, 74250, 6272, 82500, 6144, 74250 }, /* 74.25 MHz */
77 { 148351, 11648, 421875, 8918, 234375, 5824, 140625 }, /* 148.50/1.001 MHz */
78 { 148500, 4096, 148500, 6272, 165000, 6144, 148500 }, /* 148.50 MHz */
79 { 0, 4096, 0, 6272, 0, 6144, 0 } /* Other */
80};
81
82/*
83 * calculate CTS value if it's not found in the table
84 */
85static void r600_hdmi_calc_CTS(uint32_t clock, int *CTS, int N, int freq)
86{
87 if (*CTS == 0)
88 *CTS = clock * N / (128 * freq) * 1000;
89 DRM_DEBUG("Using ACR timing N=%d CTS=%d for frequency %d\n",
90 N, *CTS, freq);
91}
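For pixel clocks that fall through to the catch-all table entry, CTS comes from the formula above (mode->clock is in kHz). A quick check with a hypothetical 65000 kHz mode, using the default N values from the last table row; this is a stand-alone sketch, not driver code.

#include <stdio.h>

/* Fallback CTS computation as in r600_hdmi_calc_CTS() above, evaluated for
 * a made-up 65000 kHz pixel clock; N values are the table defaults. */
int main(void)
{
        int clock = 65000;

        printf("32 kHz:   CTS = %d\n", clock * 4096 / (128 * 32000) * 1000);
        printf("44.1 kHz: CTS = %d\n", clock * 6272 / (128 * 44100) * 1000);
        printf("48 kHz:   CTS = %d\n", clock * 6144 / (128 * 48000) * 1000);
        return 0;
}

Note that the integer division happens before the final * 1000, exactly as in the driver formula, so the 44.1 kHz result comes out as 72000 rather than 72222.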
92
93/*
94 * update the N and CTS parameters for a given pixel clock rate
95 */
96static void r600_hdmi_update_ACR(struct drm_encoder *encoder, uint32_t clock)
97{
98 struct drm_device *dev = encoder->dev;
99 struct radeon_device *rdev = dev->dev_private;
100 uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
101 int CTS;
102 int N;
103 int i;
104
105 for (i = 0; r600_hdmi_ACR[i].Clock != clock && r600_hdmi_ACR[i].Clock != 0; i++);
106
107 CTS = r600_hdmi_ACR[i].CTS_32kHz;
108 N = r600_hdmi_ACR[i].N_32kHz;
109 r600_hdmi_calc_CTS(clock, &CTS, N, 32000);
110 WREG32(offset+R600_HDMI_32kHz_CTS, CTS << 12);
111 WREG32(offset+R600_HDMI_32kHz_N, N);
112
113 CTS = r600_hdmi_ACR[i].CTS_44_1kHz;
114 N = r600_hdmi_ACR[i].N_44_1kHz;
115 r600_hdmi_calc_CTS(clock, &CTS, N, 44100);
116 WREG32(offset+R600_HDMI_44_1kHz_CTS, CTS << 12);
117 WREG32(offset+R600_HDMI_44_1kHz_N, N);
118
119 CTS = r600_hdmi_ACR[i].CTS_48kHz;
120 N = r600_hdmi_ACR[i].N_48kHz;
121 r600_hdmi_calc_CTS(clock, &CTS, N, 48000);
122 WREG32(offset+R600_HDMI_48kHz_CTS, CTS << 12);
123 WREG32(offset+R600_HDMI_48kHz_N, N);
124}
125
126/*
127 * calculate the checksum for a given info frame
128 */
129static void r600_hdmi_infoframe_checksum(uint8_t packetType,
130 uint8_t versionNumber,
131 uint8_t length,
132 uint8_t *frame)
133{
134 int i;
135 frame[0] = packetType + versionNumber + length;
136 for (i = 1; i <= length; i++)
137 frame[0] += frame[i];
138 frame[0] = 0x100 - frame[0];
139}
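The checksum above is chosen so that the packet type, version, length and payload bytes sum to zero modulo 256. A small self-check with a hypothetical 3-byte payload, written as a stand-alone sketch:

#include <stdio.h>

/* Same checksum rule as r600_hdmi_infoframe_checksum() above, applied to a
 * made-up payload and then verified: header bytes plus frame[] must sum to
 * a multiple of 256. */
int main(void)
{
        unsigned char frame[4] = { 0, 0x12, 0x34, 0x56 }; /* frame[0] = checksum */
        unsigned char type = 0x82, version = 0x02, length = 3;
        unsigned sum, i;

        frame[0] = type + version + length;
        for (i = 1; i <= length; i++)
                frame[0] += frame[i];
        frame[0] = 0x100 - frame[0];

        sum = type + version + length;
        for (i = 0; i <= length; i++)
                sum += frame[i];
        printf("checksum 0x%02X, (header + frame) %% 256 = %u\n",
               (unsigned)frame[0], sum & 0xff);
        return 0;
}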
140
141/*
142 * build a HDMI Video Info Frame
143 */
144static void r600_hdmi_videoinfoframe(
145 struct drm_encoder *encoder,
146 enum r600_hdmi_color_format color_format,
147 int active_information_present,
148 uint8_t active_format_aspect_ratio,
149 uint8_t scan_information,
150 uint8_t colorimetry,
151 uint8_t ex_colorimetry,
152 uint8_t quantization,
153 int ITC,
154 uint8_t picture_aspect_ratio,
155 uint8_t video_format_identification,
156 uint8_t pixel_repetition,
157 uint8_t non_uniform_picture_scaling,
158 uint8_t bar_info_data_valid,
159 uint16_t top_bar,
160 uint16_t bottom_bar,
161 uint16_t left_bar,
162 uint16_t right_bar
163)
164{
165 struct drm_device *dev = encoder->dev;
166 struct radeon_device *rdev = dev->dev_private;
167 uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
168
169 uint8_t frame[14];
170
171 frame[0x0] = 0;
172 frame[0x1] =
173 (scan_information & 0x3) |
174 ((bar_info_data_valid & 0x3) << 2) |
175 ((active_information_present & 0x1) << 4) |
176 ((color_format & 0x3) << 5);
177 frame[0x2] =
178 (active_format_aspect_ratio & 0xF) |
179 ((picture_aspect_ratio & 0x3) << 4) |
180 ((colorimetry & 0x3) << 6);
181 frame[0x3] =
182 (non_uniform_picture_scaling & 0x3) |
183 ((quantization & 0x3) << 2) |
184 ((ex_colorimetry & 0x7) << 4) |
185 ((ITC & 0x1) << 7);
186 frame[0x4] = (video_format_identification & 0x7F);
187 frame[0x5] = (pixel_repetition & 0xF);
188 frame[0x6] = (top_bar & 0xFF);
189 frame[0x7] = (top_bar >> 8);
190 frame[0x8] = (bottom_bar & 0xFF);
191 frame[0x9] = (bottom_bar >> 8);
192 frame[0xA] = (left_bar & 0xFF);
193 frame[0xB] = (left_bar >> 8);
194 frame[0xC] = (right_bar & 0xFF);
195 frame[0xD] = (right_bar >> 8);
196
197 r600_hdmi_infoframe_checksum(0x82, 0x02, 0x0D, frame);
198
199 WREG32(offset+R600_HDMI_VIDEOINFOFRAME_0,
200 frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24));
201 WREG32(offset+R600_HDMI_VIDEOINFOFRAME_1,
202 frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x7] << 24));
203 WREG32(offset+R600_HDMI_VIDEOINFOFRAME_2,
204 frame[0x8] | (frame[0x9] << 8) | (frame[0xA] << 16) | (frame[0xB] << 24));
205 WREG32(offset+R600_HDMI_VIDEOINFOFRAME_3,
206 frame[0xC] | (frame[0xD] << 8));
207}
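The AVI header byte built as frame[0x1] above uses fixed bit positions (scan information in bits 1:0, bar info valid in 3:2, active information present in bit 4, color format in bits 6:5). A tiny pack/unpack sketch with arbitrary example values:

#include <stdio.h>

/* Pack frame[0x1] the same way the function above does, then pull the
 * color format back out; the field values are arbitrary examples. */
int main(void)
{
        unsigned scan = 0x2, bar_valid = 0x3, active_present = 0x1, color_format = 0x1;
        unsigned byte = (scan & 0x3) | ((bar_valid & 0x3) << 2) |
                        ((active_present & 0x1) << 4) | ((color_format & 0x3) << 5);

        printf("packed 0x%02X, color format = %u\n", byte, (byte >> 5) & 0x3);
        return 0;
}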
208
209/*
210 * build a Audio Info Frame
211 */
212static void r600_hdmi_audioinfoframe(
213 struct drm_encoder *encoder,
214 uint8_t channel_count,
215 uint8_t coding_type,
216 uint8_t sample_size,
217 uint8_t sample_frequency,
218 uint8_t format,
219 uint8_t channel_allocation,
220 uint8_t level_shift,
221 int downmix_inhibit
222)
223{
224 struct drm_device *dev = encoder->dev;
225 struct radeon_device *rdev = dev->dev_private;
226 uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
227
228 uint8_t frame[11];
229
230 frame[0x0] = 0;
231 frame[0x1] = (channel_count & 0x7) | ((coding_type & 0xF) << 4);
232 frame[0x2] = (sample_size & 0x3) | ((sample_frequency & 0x7) << 2);
233 frame[0x3] = format;
234 frame[0x4] = channel_allocation;
235 frame[0x5] = ((level_shift & 0xF) << 3) | ((downmix_inhibit & 0x1) << 7);
236 frame[0x6] = 0;
237 frame[0x7] = 0;
238 frame[0x8] = 0;
239 frame[0x9] = 0;
240 frame[0xA] = 0;
241
242 r600_hdmi_infoframe_checksum(0x84, 0x01, 0x0A, frame);
243
244 WREG32(offset+R600_HDMI_AUDIOINFOFRAME_0,
245 frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24));
246 WREG32(offset+R600_HDMI_AUDIOINFOFRAME_1,
247 frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x7] << 24));
248}
249
250/*
251 * test if audio buffer is filled enough to start playing
252 */
253static int r600_hdmi_is_audio_buffer_filled(struct drm_encoder *encoder)
254{
255 struct drm_device *dev = encoder->dev;
256 struct radeon_device *rdev = dev->dev_private;
257 uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
258
259 return (RREG32(offset+R600_HDMI_STATUS) & 0x10) != 0;
260}
261
262/*
263 * have buffer status changed since last call?
264 */
265int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder)
266{
267 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
268 int status, result;
269
270 if (!radeon_encoder->hdmi_offset)
271 return 0;
272
273 status = r600_hdmi_is_audio_buffer_filled(encoder);
274 result = radeon_encoder->hdmi_buffer_status != status;
275 radeon_encoder->hdmi_buffer_status = status;
276
277 return result;
278}
279
280/*
281 * write the audio workaround status to the hardware
282 */
283void r600_hdmi_audio_workaround(struct drm_encoder *encoder)
284{
285 struct drm_device *dev = encoder->dev;
286 struct radeon_device *rdev = dev->dev_private;
287 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
288 uint32_t offset = radeon_encoder->hdmi_offset;
289
290 if (!offset)
291 return;
292
293 if (r600_hdmi_is_audio_buffer_filled(encoder)) {
294 /* disable the audio workaround and start delivering audio frames */
295 WREG32_P(offset+R600_HDMI_CNTL, 0x00000001, ~0x00001001);
296
297 } else if (radeon_encoder->hdmi_audio_workaround) {
298 /* enable the audio workaround and start delivering audio frames */
299 WREG32_P(offset+R600_HDMI_CNTL, 0x00001001, ~0x00001001);
300
301 } else {
302 /* disable the audio workaround and stop delivering audio frames */
303 WREG32_P(offset+R600_HDMI_CNTL, 0x00000000, ~0x00001001);
304 }
305}
306
307
308/*
309 * update the info frames with the data from the current display mode
310 */
311void r600_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mode)
312{
313 struct drm_device *dev = encoder->dev;
314 struct radeon_device *rdev = dev->dev_private;
315 uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
316
317 if (ASIC_IS_DCE4(rdev))
318 return;
319
320 if (!offset)
321 return;
322
323 r600_audio_set_clock(encoder, mode->clock);
324
325 WREG32(offset+R600_HDMI_UNKNOWN_0, 0x1000);
326 WREG32(offset+R600_HDMI_UNKNOWN_1, 0x0);
327 WREG32(offset+R600_HDMI_UNKNOWN_2, 0x1000);
328
329 r600_hdmi_update_ACR(encoder, mode->clock);
330
331 WREG32(offset+R600_HDMI_VIDEOCNTL, 0x13);
332
333 WREG32(offset+R600_HDMI_VERSION, 0x202);
334
335 r600_hdmi_videoinfoframe(encoder, RGB, 0, 0, 0, 0,
336 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
337
338 /* it's unknown what these bits do exactly, but they're indeed quite useful for debugging */
339 WREG32(offset+R600_HDMI_AUDIO_DEBUG_0, 0x00FFFFFF);
340 WREG32(offset+R600_HDMI_AUDIO_DEBUG_1, 0x007FFFFF);
341 WREG32(offset+R600_HDMI_AUDIO_DEBUG_2, 0x00000001);
342 WREG32(offset+R600_HDMI_AUDIO_DEBUG_3, 0x00000001);
343
344 r600_hdmi_audio_workaround(encoder);
345
346 /* audio packets per line, does anyone know how to calculate this? */
347 WREG32_P(offset+R600_HDMI_CNTL, 0x00040000, ~0x001F0000);
348
349 /* update? reset? don't really know */
350 WREG32_P(offset+R600_HDMI_CNTL, 0x14000000, ~0x14000000);
351}
352
353/*
354 * update settings with current parameters from audio engine
355 */
356void r600_hdmi_update_audio_settings(struct drm_encoder *encoder,
357 int channels,
358 int rate,
359 int bps,
360 uint8_t status_bits,
361 uint8_t category_code)
362{
363 struct drm_device *dev = encoder->dev;
364 struct radeon_device *rdev = dev->dev_private;
365 uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
366
367 uint32_t iec;
368
369 if (!offset)
370 return;
371
372 DRM_DEBUG("%s with %d channels, %d Hz sampling rate, %d bits per sample,\n",
373 r600_hdmi_is_audio_buffer_filled(encoder) ? "playing" : "stopped",
374 channels, rate, bps);
375 DRM_DEBUG("0x%02X IEC60958 status bits and 0x%02X category code\n",
376 (int)status_bits, (int)category_code);
377
378 iec = 0;
379 if (status_bits & AUDIO_STATUS_PROFESSIONAL)
380 iec |= 1 << 0;
381 if (status_bits & AUDIO_STATUS_NONAUDIO)
382 iec |= 1 << 1;
383 if (status_bits & AUDIO_STATUS_COPYRIGHT)
384 iec |= 1 << 2;
385 if (status_bits & AUDIO_STATUS_EMPHASIS)
386 iec |= 1 << 3;
387
388 iec |= category_code << 8;
389
390 switch (rate) {
391 case 32000: iec |= 0x3 << 24; break;
392 case 44100: iec |= 0x0 << 24; break;
393 case 88200: iec |= 0x8 << 24; break;
394 case 176400: iec |= 0xc << 24; break;
395 case 48000: iec |= 0x2 << 24; break;
396 case 96000: iec |= 0xa << 24; break;
397 case 192000: iec |= 0xe << 24; break;
398 }
399
400 WREG32(offset+R600_HDMI_IEC60958_1, iec);
401
402 iec = 0;
403 switch (bps) {
404 case 16: iec |= 0x2; break;
405 case 20: iec |= 0x3; break;
406 case 24: iec |= 0xb; break;
407 }
408 if (status_bits & AUDIO_STATUS_V)
409 iec |= 0x5 << 16;
410
411 WREG32_P(offset+R600_HDMI_IEC60958_2, iec, ~0x5000f);
412
413 /* 0x021 or 0x031 sets the audio frame length */
414 WREG32(offset+R600_HDMI_AUDIOCNTL, 0x31);
415 r600_hdmi_audioinfoframe(encoder, channels-1, 0, 0, 0, 0, 0, 0, 0);
416
417 r600_hdmi_audio_workaround(encoder);
418
419 /* update? reset? don't really know */
420 WREG32_P(offset+R600_HDMI_CNTL, 0x04000000, ~0x04000000);
421}
422
423static int r600_hdmi_find_free_block(struct drm_device *dev)
424{
425 struct radeon_device *rdev = dev->dev_private;
426 struct drm_encoder *encoder;
427 struct radeon_encoder *radeon_encoder;
428 bool free_blocks[3] = { true, true, true };
429
430 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
431 radeon_encoder = to_radeon_encoder(encoder);
432 switch (radeon_encoder->hdmi_offset) {
433 case R600_HDMI_BLOCK1:
434 free_blocks[0] = false;
435 break;
436 case R600_HDMI_BLOCK2:
437 free_blocks[1] = false;
438 break;
439 case R600_HDMI_BLOCK3:
440 free_blocks[2] = false;
441 break;
442 }
443 }
444
445 if (rdev->family == CHIP_RS600 || rdev->family == CHIP_RS690) {
446 return free_blocks[0] ? R600_HDMI_BLOCK1 : 0;
447 } else if (rdev->family >= CHIP_R600) {
448 if (free_blocks[0])
449 return R600_HDMI_BLOCK1;
450 else if (free_blocks[1])
451 return R600_HDMI_BLOCK2;
452 }
453 return 0;
454}
455
456static void r600_hdmi_assign_block(struct drm_encoder *encoder)
457{
458 struct drm_device *dev = encoder->dev;
459 struct radeon_device *rdev = dev->dev_private;
460 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
461 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
462
463 if (!dig) {
464 dev_err(rdev->dev, "Enabling HDMI on non-dig encoder\n");
465 return;
466 }
467
468 if (ASIC_IS_DCE4(rdev)) {
469 /* TODO */
470 } else if (ASIC_IS_DCE3(rdev)) {
471 radeon_encoder->hdmi_offset = dig->dig_encoder ?
472 R600_HDMI_BLOCK3 : R600_HDMI_BLOCK1;
473 if (ASIC_IS_DCE32(rdev))
474 radeon_encoder->hdmi_config_offset = dig->dig_encoder ?
475 R600_HDMI_CONFIG2 : R600_HDMI_CONFIG1;
476 } else if (rdev->family >= CHIP_R600) {
477 radeon_encoder->hdmi_offset = r600_hdmi_find_free_block(dev);
478 }
479}
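/*
 * Descriptive note on the assignment above: DCE3 parts tie the HDMI block to
 * the DIG encoder in use (second DIG -> BLOCK3, otherwise BLOCK1), DCE3.2
 * additionally selects the matching CONFIG register, and pre-DCE3 R600-class
 * parts take the first block not already claimed by another encoder; DCE4 is
 * still a TODO.
 */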
480
481/*
482 * enable the HDMI engine
483 */
484void r600_hdmi_enable(struct drm_encoder *encoder)
485{
486 struct drm_device *dev = encoder->dev;
487 struct radeon_device *rdev = dev->dev_private;
488 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
489
490 if (ASIC_IS_DCE4(rdev))
491 return;
492
493 if (!radeon_encoder->hdmi_offset) {
494 r600_hdmi_assign_block(encoder);
495 if (!radeon_encoder->hdmi_offset) {
496 dev_warn(rdev->dev, "Could not find HDMI block for "
497 "0x%x encoder\n", radeon_encoder->encoder_id);
498 return;
499 }
500 }
501
502 if (ASIC_IS_DCE32(rdev) && !ASIC_IS_DCE4(rdev)) {
503 WREG32_P(radeon_encoder->hdmi_config_offset + 0x4, 0x1, ~0x1);
504 } else if (rdev->family >= CHIP_R600 && !ASIC_IS_DCE3(rdev)) {
505 int offset = radeon_encoder->hdmi_offset;
506 switch (radeon_encoder->encoder_id) {
507 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
508 WREG32_P(AVIVO_TMDSA_CNTL, 0x4, ~0x4);
509 WREG32(offset + R600_HDMI_ENABLE, 0x101);
510 break;
511 case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
512 WREG32_P(AVIVO_LVTMA_CNTL, 0x4, ~0x4);
513 WREG32(offset + R600_HDMI_ENABLE, 0x105);
514 break;
515 default:
516 dev_err(rdev->dev, "Unknown HDMI output type\n");
517 break;
518 }
519 }
520
521 DRM_DEBUG("Enabling HDMI interface @ 0x%04X for encoder 0x%x\n",
522 radeon_encoder->hdmi_offset, radeon_encoder->encoder_id);
523}
524
525/*
526 * disable the HDMI engine
527 */
528void r600_hdmi_disable(struct drm_encoder *encoder)
529{
530 struct drm_device *dev = encoder->dev;
531 struct radeon_device *rdev = dev->dev_private;
532 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
533
534 if (ASIC_IS_DCE4(rdev))
535 return;
536
537 if (!radeon_encoder->hdmi_offset) {
538		dev_err(rdev->dev, "Disabling HDMI that is not enabled\n");
539 return;
540 }
541
542 DRM_DEBUG("Disabling HDMI interface @ 0x%04X for encoder 0x%x\n",
543 radeon_encoder->hdmi_offset, radeon_encoder->encoder_id);
544
545 if (ASIC_IS_DCE32(rdev) && !ASIC_IS_DCE4(rdev)) {
546 WREG32_P(radeon_encoder->hdmi_config_offset + 0x4, 0, ~0x1);
547 } else if (rdev->family >= CHIP_R600 && !ASIC_IS_DCE3(rdev)) {
548 int offset = radeon_encoder->hdmi_offset;
549 switch (radeon_encoder->encoder_id) {
550 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
551 WREG32_P(AVIVO_TMDSA_CNTL, 0, ~0x4);
552 WREG32(offset + R600_HDMI_ENABLE, 0);
553 break;
554 case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
555 WREG32_P(AVIVO_LVTMA_CNTL, 0, ~0x4);
556 WREG32(offset + R600_HDMI_ENABLE, 0);
557 break;
558 default:
559 dev_err(rdev->dev, "Unknown HDMI output type\n");
560 break;
561 }
562 }
563
564 radeon_encoder->hdmi_offset = 0;
565 radeon_encoder->hdmi_config_offset = 0;
566}
diff --git a/drivers/gpu/drm/radeon/r600_reg.h b/drivers/gpu/drm/radeon/r600_reg.h
index e2d1f5f33f7e..7b1d22370f6e 100644
--- a/drivers/gpu/drm/radeon/r600_reg.h
+++ b/drivers/gpu/drm/radeon/r600_reg.h
@@ -110,5 +110,83 @@
110#define R600_BIOS_6_SCRATCH 0x173c 110#define R600_BIOS_6_SCRATCH 0x173c
111#define R600_BIOS_7_SCRATCH 0x1740 111#define R600_BIOS_7_SCRATCH 0x1740
112 112
113/* Audio, these regs were reverse engineered,
114 * so the chance is high that the naming is wrong
115 * R6xx+ ??? */
116
117/* Audio clocks */
118#define R600_AUDIO_PLL1_MUL 0x0514
119#define R600_AUDIO_PLL1_DIV 0x0518
120#define R600_AUDIO_PLL2_MUL 0x0524
121#define R600_AUDIO_PLL2_DIV 0x0528
122#define R600_AUDIO_CLK_SRCSEL 0x0534
123
124/* Audio general */
125#define R600_AUDIO_ENABLE 0x7300
126#define R600_AUDIO_TIMING 0x7344
127
128/* Audio params */
129#define R600_AUDIO_VENDOR_ID 0x7380
130#define R600_AUDIO_REVISION_ID 0x7384
131#define R600_AUDIO_ROOT_NODE_COUNT 0x7388
132#define R600_AUDIO_NID1_NODE_COUNT 0x738c
133#define R600_AUDIO_NID1_TYPE 0x7390
134#define R600_AUDIO_SUPPORTED_SIZE_RATE 0x7394
135#define R600_AUDIO_SUPPORTED_CODEC 0x7398
136#define R600_AUDIO_SUPPORTED_POWER_STATES 0x739c
137#define R600_AUDIO_NID2_CAPS 0x73a0
138#define R600_AUDIO_NID3_CAPS 0x73a4
139#define R600_AUDIO_NID3_PIN_CAPS 0x73a8
140
141/* Audio conn list */
142#define R600_AUDIO_CONN_LIST_LEN 0x73ac
143#define R600_AUDIO_CONN_LIST 0x73b0
144
145/* Audio verbs */
146#define R600_AUDIO_RATE_BPS_CHANNEL 0x73c0
147#define R600_AUDIO_PLAYING 0x73c4
148#define R600_AUDIO_IMPLEMENTATION_ID 0x73c8
149#define R600_AUDIO_CONFIG_DEFAULT 0x73cc
150#define R600_AUDIO_PIN_SENSE 0x73d0
151#define R600_AUDIO_PIN_WIDGET_CNTL 0x73d4
152#define R600_AUDIO_STATUS_BITS 0x73d8
153
154/* HDMI base register addresses */
155#define R600_HDMI_BLOCK1 0x7400
156#define R600_HDMI_BLOCK2 0x7700
157#define R600_HDMI_BLOCK3 0x7800
158
159/* HDMI registers */
160#define R600_HDMI_ENABLE 0x00
161#define R600_HDMI_STATUS 0x04
162#define R600_HDMI_CNTL 0x08
163#define R600_HDMI_UNKNOWN_0 0x0C
164#define R600_HDMI_AUDIOCNTL 0x10
165#define R600_HDMI_VIDEOCNTL 0x14
166#define R600_HDMI_VERSION 0x18
167#define R600_HDMI_UNKNOWN_1 0x28
168#define R600_HDMI_VIDEOINFOFRAME_0 0x54
169#define R600_HDMI_VIDEOINFOFRAME_1 0x58
170#define R600_HDMI_VIDEOINFOFRAME_2 0x5c
171#define R600_HDMI_VIDEOINFOFRAME_3 0x60
172#define R600_HDMI_32kHz_CTS 0xac
173#define R600_HDMI_32kHz_N 0xb0
174#define R600_HDMI_44_1kHz_CTS 0xb4
175#define R600_HDMI_44_1kHz_N 0xb8
176#define R600_HDMI_48kHz_CTS 0xbc
177#define R600_HDMI_48kHz_N 0xc0
178#define R600_HDMI_AUDIOINFOFRAME_0 0xcc
179#define R600_HDMI_AUDIOINFOFRAME_1 0xd0
180#define R600_HDMI_IEC60958_1 0xd4
181#define R600_HDMI_IEC60958_2 0xd8
182#define R600_HDMI_UNKNOWN_2 0xdc
183#define R600_HDMI_AUDIO_DEBUG_0 0xe0
184#define R600_HDMI_AUDIO_DEBUG_1 0xe4
185#define R600_HDMI_AUDIO_DEBUG_2 0xe8
186#define R600_HDMI_AUDIO_DEBUG_3 0xec
187
188/* HDMI additional config base register addresses */
189#define R600_HDMI_CONFIG1 0x7600
190#define R600_HDMI_CONFIG2 0x7a00
113 191
114#endif 192#endif
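For reference, the HDMI registers above are offsets into one of the R600_HDMI_BLOCKn apertures; r600_hdmi.c adds them to the per-encoder hdmi_offset, roughly as in this sketch taken from the code earlier in the patch:

	uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;	/* one of R600_HDMI_BLOCK1/2/3 */
	WREG32(offset + R600_HDMI_VIDEOCNTL, 0x13);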
diff --git a/drivers/gpu/drm/radeon/r600d.h b/drivers/gpu/drm/radeon/r600d.h
index 27ab428b149b..59c1f8793e60 100644
--- a/drivers/gpu/drm/radeon/r600d.h
+++ b/drivers/gpu/drm/radeon/r600d.h
@@ -77,6 +77,55 @@
77#define CB_COLOR0_FRAG 0x280e0 77#define CB_COLOR0_FRAG 0x280e0
78#define CB_COLOR0_MASK 0x28100 78#define CB_COLOR0_MASK 0x28100
79 79
80#define SQ_ALU_CONST_CACHE_PS_0 0x28940
81#define SQ_ALU_CONST_CACHE_PS_1 0x28944
82#define SQ_ALU_CONST_CACHE_PS_2 0x28948
83#define SQ_ALU_CONST_CACHE_PS_3 0x2894c
84#define SQ_ALU_CONST_CACHE_PS_4 0x28950
85#define SQ_ALU_CONST_CACHE_PS_5 0x28954
86#define SQ_ALU_CONST_CACHE_PS_6 0x28958
87#define SQ_ALU_CONST_CACHE_PS_7 0x2895c
88#define SQ_ALU_CONST_CACHE_PS_8 0x28960
89#define SQ_ALU_CONST_CACHE_PS_9 0x28964
90#define SQ_ALU_CONST_CACHE_PS_10 0x28968
91#define SQ_ALU_CONST_CACHE_PS_11 0x2896c
92#define SQ_ALU_CONST_CACHE_PS_12 0x28970
93#define SQ_ALU_CONST_CACHE_PS_13 0x28974
94#define SQ_ALU_CONST_CACHE_PS_14 0x28978
95#define SQ_ALU_CONST_CACHE_PS_15 0x2897c
96#define SQ_ALU_CONST_CACHE_VS_0 0x28980
97#define SQ_ALU_CONST_CACHE_VS_1 0x28984
98#define SQ_ALU_CONST_CACHE_VS_2 0x28988
99#define SQ_ALU_CONST_CACHE_VS_3 0x2898c
100#define SQ_ALU_CONST_CACHE_VS_4 0x28990
101#define SQ_ALU_CONST_CACHE_VS_5 0x28994
102#define SQ_ALU_CONST_CACHE_VS_6 0x28998
103#define SQ_ALU_CONST_CACHE_VS_7 0x2899c
104#define SQ_ALU_CONST_CACHE_VS_8 0x289a0
105#define SQ_ALU_CONST_CACHE_VS_9 0x289a4
106#define SQ_ALU_CONST_CACHE_VS_10 0x289a8
107#define SQ_ALU_CONST_CACHE_VS_11 0x289ac
108#define SQ_ALU_CONST_CACHE_VS_12 0x289b0
109#define SQ_ALU_CONST_CACHE_VS_13 0x289b4
110#define SQ_ALU_CONST_CACHE_VS_14 0x289b8
111#define SQ_ALU_CONST_CACHE_VS_15 0x289bc
112#define SQ_ALU_CONST_CACHE_GS_0 0x289c0
113#define SQ_ALU_CONST_CACHE_GS_1 0x289c4
114#define SQ_ALU_CONST_CACHE_GS_2 0x289c8
115#define SQ_ALU_CONST_CACHE_GS_3 0x289cc
116#define SQ_ALU_CONST_CACHE_GS_4 0x289d0
117#define SQ_ALU_CONST_CACHE_GS_5 0x289d4
118#define SQ_ALU_CONST_CACHE_GS_6 0x289d8
119#define SQ_ALU_CONST_CACHE_GS_7 0x289dc
120#define SQ_ALU_CONST_CACHE_GS_8 0x289e0
121#define SQ_ALU_CONST_CACHE_GS_9 0x289e4
122#define SQ_ALU_CONST_CACHE_GS_10 0x289e8
123#define SQ_ALU_CONST_CACHE_GS_11 0x289ec
124#define SQ_ALU_CONST_CACHE_GS_12 0x289f0
125#define SQ_ALU_CONST_CACHE_GS_13 0x289f4
126#define SQ_ALU_CONST_CACHE_GS_14 0x289f8
127#define SQ_ALU_CONST_CACHE_GS_15 0x289fc
128
80#define CONFIG_MEMSIZE 0x5428 129#define CONFIG_MEMSIZE 0x5428
81#define CONFIG_CNTL 0x5424 130#define CONFIG_CNTL 0x5424
82#define CP_STAT 0x8680 131#define CP_STAT 0x8680
@@ -456,7 +505,215 @@
456#define WAIT_2D_IDLECLEAN_bit (1 << 16) 505#define WAIT_2D_IDLECLEAN_bit (1 << 16)
457#define WAIT_3D_IDLECLEAN_bit (1 << 17) 506#define WAIT_3D_IDLECLEAN_bit (1 << 17)
458 507
508#define IH_RB_CNTL 0x3e00
509# define IH_RB_ENABLE (1 << 0)
510# define IH_IB_SIZE(x) ((x) << 1) /* log2 */
511# define IH_RB_FULL_DRAIN_ENABLE (1 << 6)
512# define IH_WPTR_WRITEBACK_ENABLE (1 << 8)
513# define IH_WPTR_WRITEBACK_TIMER(x) ((x) << 9) /* log2 */
514# define IH_WPTR_OVERFLOW_ENABLE (1 << 16)
515# define IH_WPTR_OVERFLOW_CLEAR (1 << 31)
516#define IH_RB_BASE 0x3e04
517#define IH_RB_RPTR 0x3e08
518#define IH_RB_WPTR 0x3e0c
519# define RB_OVERFLOW (1 << 0)
520# define WPTR_OFFSET_MASK 0x3fffc
521#define IH_RB_WPTR_ADDR_HI 0x3e10
522#define IH_RB_WPTR_ADDR_LO 0x3e14
523#define IH_CNTL 0x3e18
524# define ENABLE_INTR (1 << 0)
525# define IH_MC_SWAP(x) ((x) << 2)
526# define IH_MC_SWAP_NONE 0
527# define IH_MC_SWAP_16BIT 1
528# define IH_MC_SWAP_32BIT 2
529# define IH_MC_SWAP_64BIT 3
530# define RPTR_REARM (1 << 4)
531# define MC_WRREQ_CREDIT(x) ((x) << 15)
532# define MC_WR_CLEAN_CNT(x) ((x) << 20)
533
534#define RLC_CNTL 0x3f00
535# define RLC_ENABLE (1 << 0)
536#define RLC_HB_BASE 0x3f10
537#define RLC_HB_CNTL 0x3f0c
538#define RLC_HB_RPTR 0x3f20
539#define RLC_HB_WPTR 0x3f1c
540#define RLC_HB_WPTR_LSB_ADDR 0x3f14
541#define RLC_HB_WPTR_MSB_ADDR 0x3f18
542#define RLC_MC_CNTL 0x3f44
543#define RLC_UCODE_CNTL 0x3f48
544#define RLC_UCODE_ADDR 0x3f2c
545#define RLC_UCODE_DATA 0x3f30
546
547#define SRBM_SOFT_RESET 0xe60
548# define SOFT_RESET_RLC (1 << 13)
549
550#define CP_INT_CNTL 0xc124
551# define CNTX_BUSY_INT_ENABLE (1 << 19)
552# define CNTX_EMPTY_INT_ENABLE (1 << 20)
553# define SCRATCH_INT_ENABLE (1 << 25)
554# define TIME_STAMP_INT_ENABLE (1 << 26)
555# define IB2_INT_ENABLE (1 << 29)
556# define IB1_INT_ENABLE (1 << 30)
557# define RB_INT_ENABLE (1 << 31)
558#define CP_INT_STATUS 0xc128
559# define SCRATCH_INT_STAT (1 << 25)
560# define TIME_STAMP_INT_STAT (1 << 26)
561# define IB2_INT_STAT (1 << 29)
562# define IB1_INT_STAT (1 << 30)
563# define RB_INT_STAT (1 << 31)
564
565#define GRBM_INT_CNTL 0x8060
566# define RDERR_INT_ENABLE (1 << 0)
567# define WAIT_COUNT_TIMEOUT_INT_ENABLE (1 << 1)
568# define GUI_IDLE_INT_ENABLE (1 << 19)
569
570#define INTERRUPT_CNTL 0x5468
571# define IH_DUMMY_RD_OVERRIDE (1 << 0)
572# define IH_DUMMY_RD_EN (1 << 1)
573# define IH_REQ_NONSNOOP_EN (1 << 3)
574# define GEN_IH_INT_EN (1 << 8)
575#define INTERRUPT_CNTL2 0x546c
576
577#define D1MODE_VBLANK_STATUS 0x6534
578#define D2MODE_VBLANK_STATUS 0x6d34
579# define DxMODE_VBLANK_OCCURRED (1 << 0)
580# define DxMODE_VBLANK_ACK (1 << 4)
581# define DxMODE_VBLANK_STAT (1 << 12)
582# define DxMODE_VBLANK_INTERRUPT (1 << 16)
583# define DxMODE_VBLANK_INTERRUPT_TYPE (1 << 17)
584#define D1MODE_VLINE_STATUS 0x653c
585#define D2MODE_VLINE_STATUS 0x6d3c
586# define DxMODE_VLINE_OCCURRED (1 << 0)
587# define DxMODE_VLINE_ACK (1 << 4)
588# define DxMODE_VLINE_STAT (1 << 12)
589# define DxMODE_VLINE_INTERRUPT (1 << 16)
590# define DxMODE_VLINE_INTERRUPT_TYPE (1 << 17)
591#define DxMODE_INT_MASK 0x6540
592# define D1MODE_VBLANK_INT_MASK (1 << 0)
593# define D1MODE_VLINE_INT_MASK (1 << 4)
594# define D2MODE_VBLANK_INT_MASK (1 << 8)
595# define D2MODE_VLINE_INT_MASK (1 << 12)
596#define DCE3_DISP_INTERRUPT_STATUS 0x7ddc
597# define DC_HPD1_INTERRUPT (1 << 18)
598# define DC_HPD2_INTERRUPT (1 << 19)
599#define DISP_INTERRUPT_STATUS 0x7edc
600# define LB_D1_VLINE_INTERRUPT (1 << 2)
601# define LB_D2_VLINE_INTERRUPT (1 << 3)
602# define LB_D1_VBLANK_INTERRUPT (1 << 4)
603# define LB_D2_VBLANK_INTERRUPT (1 << 5)
604# define DACA_AUTODETECT_INTERRUPT (1 << 16)
605# define DACB_AUTODETECT_INTERRUPT (1 << 17)
606# define DC_HOT_PLUG_DETECT1_INTERRUPT (1 << 18)
607# define DC_HOT_PLUG_DETECT2_INTERRUPT (1 << 19)
608# define DC_I2C_SW_DONE_INTERRUPT (1 << 20)
609# define DC_I2C_HW_DONE_INTERRUPT (1 << 21)
610#define DISP_INTERRUPT_STATUS_CONTINUE 0x7ee8
611#define DCE3_DISP_INTERRUPT_STATUS_CONTINUE 0x7de8
612# define DC_HPD4_INTERRUPT (1 << 14)
613# define DC_HPD4_RX_INTERRUPT (1 << 15)
614# define DC_HPD3_INTERRUPT (1 << 28)
615# define DC_HPD1_RX_INTERRUPT (1 << 29)
616# define DC_HPD2_RX_INTERRUPT (1 << 30)
617#define DCE3_DISP_INTERRUPT_STATUS_CONTINUE2 0x7dec
618# define DC_HPD3_RX_INTERRUPT (1 << 0)
619# define DIGA_DP_VID_STREAM_DISABLE_INTERRUPT (1 << 1)
620# define DIGA_DP_STEER_FIFO_OVERFLOW_INTERRUPT (1 << 2)
621# define DIGB_DP_VID_STREAM_DISABLE_INTERRUPT (1 << 3)
622# define DIGB_DP_STEER_FIFO_OVERFLOW_INTERRUPT (1 << 4)
623# define AUX1_SW_DONE_INTERRUPT (1 << 5)
624# define AUX1_LS_DONE_INTERRUPT (1 << 6)
625# define AUX2_SW_DONE_INTERRUPT (1 << 7)
626# define AUX2_LS_DONE_INTERRUPT (1 << 8)
627# define AUX3_SW_DONE_INTERRUPT (1 << 9)
628# define AUX3_LS_DONE_INTERRUPT (1 << 10)
629# define AUX4_SW_DONE_INTERRUPT (1 << 11)
630# define AUX4_LS_DONE_INTERRUPT (1 << 12)
631# define DIGA_DP_FAST_TRAINING_COMPLETE_INTERRUPT (1 << 13)
632# define DIGB_DP_FAST_TRAINING_COMPLETE_INTERRUPT (1 << 14)
633/* DCE 3.2 */
634# define AUX5_SW_DONE_INTERRUPT (1 << 15)
635# define AUX5_LS_DONE_INTERRUPT (1 << 16)
636# define AUX6_SW_DONE_INTERRUPT (1 << 17)
637# define AUX6_LS_DONE_INTERRUPT (1 << 18)
638# define DC_HPD5_INTERRUPT (1 << 19)
639# define DC_HPD5_RX_INTERRUPT (1 << 20)
640# define DC_HPD6_INTERRUPT (1 << 21)
641# define DC_HPD6_RX_INTERRUPT (1 << 22)
642
643#define DACA_AUTO_DETECT_CONTROL 0x7828
644#define DACB_AUTO_DETECT_CONTROL 0x7a28
645#define DCE3_DACA_AUTO_DETECT_CONTROL 0x7028
646#define DCE3_DACB_AUTO_DETECT_CONTROL 0x7128
647# define DACx_AUTODETECT_MODE(x) ((x) << 0)
648# define DACx_AUTODETECT_MODE_NONE 0
649# define DACx_AUTODETECT_MODE_CONNECT 1
650# define DACx_AUTODETECT_MODE_DISCONNECT 2
651# define DACx_AUTODETECT_FRAME_TIME_COUNTER(x) ((x) << 8)
652/* bit 18 = R/C, 17 = G/Y, 16 = B/Comp */
653# define DACx_AUTODETECT_CHECK_MASK(x) ((x) << 16)
459 654
655#define DCE3_DACA_AUTODETECT_INT_CONTROL 0x7038
656#define DCE3_DACB_AUTODETECT_INT_CONTROL 0x7138
657#define DACA_AUTODETECT_INT_CONTROL 0x7838
658#define DACB_AUTODETECT_INT_CONTROL 0x7a38
659# define DACx_AUTODETECT_ACK (1 << 0)
660# define DACx_AUTODETECT_INT_ENABLE (1 << 16)
661
662#define DC_HOT_PLUG_DETECT1_CONTROL 0x7d00
663#define DC_HOT_PLUG_DETECT2_CONTROL 0x7d10
664#define DC_HOT_PLUG_DETECT3_CONTROL 0x7d24
665# define DC_HOT_PLUG_DETECTx_EN (1 << 0)
666
667#define DC_HOT_PLUG_DETECT1_INT_STATUS 0x7d04
668#define DC_HOT_PLUG_DETECT2_INT_STATUS 0x7d14
669#define DC_HOT_PLUG_DETECT3_INT_STATUS 0x7d28
670# define DC_HOT_PLUG_DETECTx_INT_STATUS (1 << 0)
671# define DC_HOT_PLUG_DETECTx_SENSE (1 << 1)
672
673/* DCE 3.0 */
674#define DC_HPD1_INT_STATUS 0x7d00
675#define DC_HPD2_INT_STATUS 0x7d0c
676#define DC_HPD3_INT_STATUS 0x7d18
677#define DC_HPD4_INT_STATUS 0x7d24
678/* DCE 3.2 */
679#define DC_HPD5_INT_STATUS 0x7dc0
680#define DC_HPD6_INT_STATUS 0x7df4
681# define DC_HPDx_INT_STATUS (1 << 0)
682# define DC_HPDx_SENSE (1 << 1)
683# define DC_HPDx_RX_INT_STATUS (1 << 8)
684
685#define DC_HOT_PLUG_DETECT1_INT_CONTROL 0x7d08
686#define DC_HOT_PLUG_DETECT2_INT_CONTROL 0x7d18
687#define DC_HOT_PLUG_DETECT3_INT_CONTROL 0x7d2c
688# define DC_HOT_PLUG_DETECTx_INT_ACK (1 << 0)
689# define DC_HOT_PLUG_DETECTx_INT_POLARITY (1 << 8)
690# define DC_HOT_PLUG_DETECTx_INT_EN (1 << 16)
691/* DCE 3.0 */
692#define DC_HPD1_INT_CONTROL 0x7d04
693#define DC_HPD2_INT_CONTROL 0x7d10
694#define DC_HPD3_INT_CONTROL 0x7d1c
695#define DC_HPD4_INT_CONTROL 0x7d28
696/* DCE 3.2 */
697#define DC_HPD5_INT_CONTROL 0x7dc4
698#define DC_HPD6_INT_CONTROL 0x7df8
699# define DC_HPDx_INT_ACK (1 << 0)
700# define DC_HPDx_INT_POLARITY (1 << 8)
701# define DC_HPDx_INT_EN (1 << 16)
702# define DC_HPDx_RX_INT_ACK (1 << 20)
703# define DC_HPDx_RX_INT_EN (1 << 24)
704
705/* DCE 3.0 */
706#define DC_HPD1_CONTROL 0x7d08
707#define DC_HPD2_CONTROL 0x7d14
708#define DC_HPD3_CONTROL 0x7d20
709#define DC_HPD4_CONTROL 0x7d2c
710/* DCE 3.2 */
711#define DC_HPD5_CONTROL 0x7dc8
712#define DC_HPD6_CONTROL 0x7dfc
713# define DC_HPDx_CONNECTION_TIMER(x) ((x) << 0)
714# define DC_HPDx_RX_INT_TIMER(x) ((x) << 16)
715/* DCE 3.2 */
716# define DC_HPDx_EN (1 << 28)
460 717
461/* 718/*
462 * PM4 719 * PM4
@@ -500,7 +757,6 @@
500#define PACKET3_WAIT_REG_MEM 0x3C 757#define PACKET3_WAIT_REG_MEM 0x3C
501#define PACKET3_MEM_WRITE 0x3D 758#define PACKET3_MEM_WRITE 0x3D
502#define PACKET3_INDIRECT_BUFFER 0x32 759#define PACKET3_INDIRECT_BUFFER 0x32
503#define PACKET3_CP_INTERRUPT 0x40
504#define PACKET3_SURFACE_SYNC 0x43 760#define PACKET3_SURFACE_SYNC 0x43
505# define PACKET3_CB0_DEST_BASE_ENA (1 << 6) 761# define PACKET3_CB0_DEST_BASE_ENA (1 << 6)
506# define PACKET3_TC_ACTION_ENA (1 << 23) 762# define PACKET3_TC_ACTION_ENA (1 << 23)
@@ -674,4 +930,495 @@
674#define S_000E60_SOFT_RESET_TSC(x) (((x) & 1) << 16) 930#define S_000E60_SOFT_RESET_TSC(x) (((x) & 1) << 16)
675#define S_000E60_SOFT_RESET_VMC(x) (((x) & 1) << 17) 931#define S_000E60_SOFT_RESET_VMC(x) (((x) & 1) << 17)
676 932
933#define R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL 0x5480
934
935#define R_028C04_PA_SC_AA_CONFIG 0x028C04
936#define S_028C04_MSAA_NUM_SAMPLES(x) (((x) & 0x3) << 0)
937#define G_028C04_MSAA_NUM_SAMPLES(x) (((x) >> 0) & 0x3)
938#define C_028C04_MSAA_NUM_SAMPLES 0xFFFFFFFC
939#define S_028C04_AA_MASK_CENTROID_DTMN(x) (((x) & 0x1) << 4)
940#define G_028C04_AA_MASK_CENTROID_DTMN(x) (((x) >> 4) & 0x1)
941#define C_028C04_AA_MASK_CENTROID_DTMN 0xFFFFFFEF
942#define S_028C04_MAX_SAMPLE_DIST(x) (((x) & 0xF) << 13)
943#define G_028C04_MAX_SAMPLE_DIST(x) (((x) >> 13) & 0xF)
944#define C_028C04_MAX_SAMPLE_DIST 0xFFFE1FFF
945#define R_0280E0_CB_COLOR0_FRAG 0x0280E0
946#define S_0280E0_BASE_256B(x) (((x) & 0xFFFFFFFF) << 0)
947#define G_0280E0_BASE_256B(x) (((x) >> 0) & 0xFFFFFFFF)
948#define C_0280E0_BASE_256B 0x00000000
949#define R_0280E4_CB_COLOR1_FRAG 0x0280E4
950#define R_0280E8_CB_COLOR2_FRAG 0x0280E8
951#define R_0280EC_CB_COLOR3_FRAG 0x0280EC
952#define R_0280F0_CB_COLOR4_FRAG 0x0280F0
953#define R_0280F4_CB_COLOR5_FRAG 0x0280F4
954#define R_0280F8_CB_COLOR6_FRAG 0x0280F8
955#define R_0280FC_CB_COLOR7_FRAG 0x0280FC
956#define R_0280C0_CB_COLOR0_TILE 0x0280C0
957#define S_0280C0_BASE_256B(x) (((x) & 0xFFFFFFFF) << 0)
958#define G_0280C0_BASE_256B(x) (((x) >> 0) & 0xFFFFFFFF)
959#define C_0280C0_BASE_256B 0x00000000
960#define R_0280C4_CB_COLOR1_TILE 0x0280C4
961#define R_0280C8_CB_COLOR2_TILE 0x0280C8
962#define R_0280CC_CB_COLOR3_TILE 0x0280CC
963#define R_0280D0_CB_COLOR4_TILE 0x0280D0
964#define R_0280D4_CB_COLOR5_TILE 0x0280D4
965#define R_0280D8_CB_COLOR6_TILE 0x0280D8
966#define R_0280DC_CB_COLOR7_TILE 0x0280DC
967#define R_0280A0_CB_COLOR0_INFO 0x0280A0
968#define S_0280A0_ENDIAN(x) (((x) & 0x3) << 0)
969#define G_0280A0_ENDIAN(x) (((x) >> 0) & 0x3)
970#define C_0280A0_ENDIAN 0xFFFFFFFC
971#define S_0280A0_FORMAT(x) (((x) & 0x3F) << 2)
972#define G_0280A0_FORMAT(x) (((x) >> 2) & 0x3F)
973#define C_0280A0_FORMAT 0xFFFFFF03
974#define V_0280A0_COLOR_INVALID 0x00000000
975#define V_0280A0_COLOR_8 0x00000001
976#define V_0280A0_COLOR_4_4 0x00000002
977#define V_0280A0_COLOR_3_3_2 0x00000003
978#define V_0280A0_COLOR_16 0x00000005
979#define V_0280A0_COLOR_16_FLOAT 0x00000006
980#define V_0280A0_COLOR_8_8 0x00000007
981#define V_0280A0_COLOR_5_6_5 0x00000008
982#define V_0280A0_COLOR_6_5_5 0x00000009
983#define V_0280A0_COLOR_1_5_5_5 0x0000000A
984#define V_0280A0_COLOR_4_4_4_4 0x0000000B
985#define V_0280A0_COLOR_5_5_5_1 0x0000000C
986#define V_0280A0_COLOR_32 0x0000000D
987#define V_0280A0_COLOR_32_FLOAT 0x0000000E
988#define V_0280A0_COLOR_16_16 0x0000000F
989#define V_0280A0_COLOR_16_16_FLOAT 0x00000010
990#define V_0280A0_COLOR_8_24 0x00000011
991#define V_0280A0_COLOR_8_24_FLOAT 0x00000012
992#define V_0280A0_COLOR_24_8 0x00000013
993#define V_0280A0_COLOR_24_8_FLOAT 0x00000014
994#define V_0280A0_COLOR_10_11_11 0x00000015
995#define V_0280A0_COLOR_10_11_11_FLOAT 0x00000016
996#define V_0280A0_COLOR_11_11_10 0x00000017
997#define V_0280A0_COLOR_11_11_10_FLOAT 0x00000018
998#define V_0280A0_COLOR_2_10_10_10 0x00000019
999#define V_0280A0_COLOR_8_8_8_8 0x0000001A
1000#define V_0280A0_COLOR_10_10_10_2 0x0000001B
1001#define V_0280A0_COLOR_X24_8_32_FLOAT 0x0000001C
1002#define V_0280A0_COLOR_32_32 0x0000001D
1003#define V_0280A0_COLOR_32_32_FLOAT 0x0000001E
1004#define V_0280A0_COLOR_16_16_16_16 0x0000001F
1005#define V_0280A0_COLOR_16_16_16_16_FLOAT 0x00000020
1006#define V_0280A0_COLOR_32_32_32_32 0x00000022
1007#define V_0280A0_COLOR_32_32_32_32_FLOAT 0x00000023
1008#define S_0280A0_ARRAY_MODE(x) (((x) & 0xF) << 8)
1009#define G_0280A0_ARRAY_MODE(x) (((x) >> 8) & 0xF)
1010#define C_0280A0_ARRAY_MODE 0xFFFFF0FF
1011#define V_0280A0_ARRAY_LINEAR_GENERAL 0x00000000
1012#define V_0280A0_ARRAY_LINEAR_ALIGNED 0x00000001
1013#define V_0280A0_ARRAY_1D_TILED_THIN1 0x00000002
1014#define V_0280A0_ARRAY_2D_TILED_THIN1 0x00000004
1015#define S_0280A0_NUMBER_TYPE(x) (((x) & 0x7) << 12)
1016#define G_0280A0_NUMBER_TYPE(x) (((x) >> 12) & 0x7)
1017#define C_0280A0_NUMBER_TYPE 0xFFFF8FFF
1018#define S_0280A0_READ_SIZE(x) (((x) & 0x1) << 15)
1019#define G_0280A0_READ_SIZE(x) (((x) >> 15) & 0x1)
1020#define C_0280A0_READ_SIZE 0xFFFF7FFF
1021#define S_0280A0_COMP_SWAP(x) (((x) & 0x3) << 16)
1022#define G_0280A0_COMP_SWAP(x) (((x) >> 16) & 0x3)
1023#define C_0280A0_COMP_SWAP 0xFFFCFFFF
1024#define S_0280A0_TILE_MODE(x) (((x) & 0x3) << 18)
1025#define G_0280A0_TILE_MODE(x) (((x) >> 18) & 0x3)
1026#define C_0280A0_TILE_MODE 0xFFF3FFFF
1027#define S_0280A0_BLEND_CLAMP(x) (((x) & 0x1) << 20)
1028#define G_0280A0_BLEND_CLAMP(x) (((x) >> 20) & 0x1)
1029#define C_0280A0_BLEND_CLAMP 0xFFEFFFFF
1030#define S_0280A0_CLEAR_COLOR(x) (((x) & 0x1) << 21)
1031#define G_0280A0_CLEAR_COLOR(x) (((x) >> 21) & 0x1)
1032#define C_0280A0_CLEAR_COLOR 0xFFDFFFFF
1033#define S_0280A0_BLEND_BYPASS(x) (((x) & 0x1) << 22)
1034#define G_0280A0_BLEND_BYPASS(x) (((x) >> 22) & 0x1)
1035#define C_0280A0_BLEND_BYPASS 0xFFBFFFFF
1036#define S_0280A0_BLEND_FLOAT32(x) (((x) & 0x1) << 23)
1037#define G_0280A0_BLEND_FLOAT32(x) (((x) >> 23) & 0x1)
1038#define C_0280A0_BLEND_FLOAT32 0xFF7FFFFF
1039#define S_0280A0_SIMPLE_FLOAT(x) (((x) & 0x1) << 24)
1040#define G_0280A0_SIMPLE_FLOAT(x) (((x) >> 24) & 0x1)
1041#define C_0280A0_SIMPLE_FLOAT 0xFEFFFFFF
1042#define S_0280A0_ROUND_MODE(x) (((x) & 0x1) << 25)
1043#define G_0280A0_ROUND_MODE(x) (((x) >> 25) & 0x1)
1044#define C_0280A0_ROUND_MODE 0xFDFFFFFF
1045#define S_0280A0_TILE_COMPACT(x) (((x) & 0x1) << 26)
1046#define G_0280A0_TILE_COMPACT(x) (((x) >> 26) & 0x1)
1047#define C_0280A0_TILE_COMPACT 0xFBFFFFFF
1048#define S_0280A0_SOURCE_FORMAT(x) (((x) & 0x1) << 27)
1049#define G_0280A0_SOURCE_FORMAT(x) (((x) >> 27) & 0x1)
1050#define C_0280A0_SOURCE_FORMAT 0xF7FFFFFF
1051#define R_0280A4_CB_COLOR1_INFO 0x0280A4
1052#define R_0280A8_CB_COLOR2_INFO 0x0280A8
1053#define R_0280AC_CB_COLOR3_INFO 0x0280AC
1054#define R_0280B0_CB_COLOR4_INFO 0x0280B0
1055#define R_0280B4_CB_COLOR5_INFO 0x0280B4
1056#define R_0280B8_CB_COLOR6_INFO 0x0280B8
1057#define R_0280BC_CB_COLOR7_INFO 0x0280BC
1058#define R_028060_CB_COLOR0_SIZE 0x028060
1059#define S_028060_PITCH_TILE_MAX(x) (((x) & 0x3FF) << 0)
1060#define G_028060_PITCH_TILE_MAX(x) (((x) >> 0) & 0x3FF)
1061#define C_028060_PITCH_TILE_MAX 0xFFFFFC00
1062#define S_028060_SLICE_TILE_MAX(x) (((x) & 0xFFFFF) << 10)
1063#define G_028060_SLICE_TILE_MAX(x) (((x) >> 10) & 0xFFFFF)
1064#define C_028060_SLICE_TILE_MAX 0xC00003FF
1065#define R_028064_CB_COLOR1_SIZE 0x028064
1066#define R_028068_CB_COLOR2_SIZE 0x028068
1067#define R_02806C_CB_COLOR3_SIZE 0x02806C
1068#define R_028070_CB_COLOR4_SIZE 0x028070
1069#define R_028074_CB_COLOR5_SIZE 0x028074
1070#define R_028078_CB_COLOR6_SIZE 0x028078
1071#define R_02807C_CB_COLOR7_SIZE 0x02807C
1072#define R_028238_CB_TARGET_MASK 0x028238
1073#define S_028238_TARGET0_ENABLE(x) (((x) & 0xF) << 0)
1074#define G_028238_TARGET0_ENABLE(x) (((x) >> 0) & 0xF)
1075#define C_028238_TARGET0_ENABLE 0xFFFFFFF0
1076#define S_028238_TARGET1_ENABLE(x) (((x) & 0xF) << 4)
1077#define G_028238_TARGET1_ENABLE(x) (((x) >> 4) & 0xF)
1078#define C_028238_TARGET1_ENABLE 0xFFFFFF0F
1079#define S_028238_TARGET2_ENABLE(x) (((x) & 0xF) << 8)
1080#define G_028238_TARGET2_ENABLE(x) (((x) >> 8) & 0xF)
1081#define C_028238_TARGET2_ENABLE 0xFFFFF0FF
1082#define S_028238_TARGET3_ENABLE(x) (((x) & 0xF) << 12)
1083#define G_028238_TARGET3_ENABLE(x) (((x) >> 12) & 0xF)
1084#define C_028238_TARGET3_ENABLE 0xFFFF0FFF
1085#define S_028238_TARGET4_ENABLE(x) (((x) & 0xF) << 16)
1086#define G_028238_TARGET4_ENABLE(x) (((x) >> 16) & 0xF)
1087#define C_028238_TARGET4_ENABLE 0xFFF0FFFF
1088#define S_028238_TARGET5_ENABLE(x) (((x) & 0xF) << 20)
1089#define G_028238_TARGET5_ENABLE(x) (((x) >> 20) & 0xF)
1090#define C_028238_TARGET5_ENABLE 0xFF0FFFFF
1091#define S_028238_TARGET6_ENABLE(x) (((x) & 0xF) << 24)
1092#define G_028238_TARGET6_ENABLE(x) (((x) >> 24) & 0xF)
1093#define C_028238_TARGET6_ENABLE 0xF0FFFFFF
1094#define S_028238_TARGET7_ENABLE(x) (((x) & 0xF) << 28)
1095#define G_028238_TARGET7_ENABLE(x) (((x) >> 28) & 0xF)
1096#define C_028238_TARGET7_ENABLE 0x0FFFFFFF
1097#define R_02823C_CB_SHADER_MASK 0x02823C
1098#define S_02823C_OUTPUT0_ENABLE(x) (((x) & 0xF) << 0)
1099#define G_02823C_OUTPUT0_ENABLE(x) (((x) >> 0) & 0xF)
1100#define C_02823C_OUTPUT0_ENABLE 0xFFFFFFF0
1101#define S_02823C_OUTPUT1_ENABLE(x) (((x) & 0xF) << 4)
1102#define G_02823C_OUTPUT1_ENABLE(x) (((x) >> 4) & 0xF)
1103#define C_02823C_OUTPUT1_ENABLE 0xFFFFFF0F
1104#define S_02823C_OUTPUT2_ENABLE(x) (((x) & 0xF) << 8)
1105#define G_02823C_OUTPUT2_ENABLE(x) (((x) >> 8) & 0xF)
1106#define C_02823C_OUTPUT2_ENABLE 0xFFFFF0FF
1107#define S_02823C_OUTPUT3_ENABLE(x) (((x) & 0xF) << 12)
1108#define G_02823C_OUTPUT3_ENABLE(x) (((x) >> 12) & 0xF)
1109#define C_02823C_OUTPUT3_ENABLE 0xFFFF0FFF
1110#define S_02823C_OUTPUT4_ENABLE(x) (((x) & 0xF) << 16)
1111#define G_02823C_OUTPUT4_ENABLE(x) (((x) >> 16) & 0xF)
1112#define C_02823C_OUTPUT4_ENABLE 0xFFF0FFFF
1113#define S_02823C_OUTPUT5_ENABLE(x) (((x) & 0xF) << 20)
1114#define G_02823C_OUTPUT5_ENABLE(x) (((x) >> 20) & 0xF)
1115#define C_02823C_OUTPUT5_ENABLE 0xFF0FFFFF
1116#define S_02823C_OUTPUT6_ENABLE(x) (((x) & 0xF) << 24)
1117#define G_02823C_OUTPUT6_ENABLE(x) (((x) >> 24) & 0xF)
1118#define C_02823C_OUTPUT6_ENABLE 0xF0FFFFFF
1119#define S_02823C_OUTPUT7_ENABLE(x) (((x) & 0xF) << 28)
1120#define G_02823C_OUTPUT7_ENABLE(x) (((x) >> 28) & 0xF)
1121#define C_02823C_OUTPUT7_ENABLE 0x0FFFFFFF
1122#define R_028AB0_VGT_STRMOUT_EN 0x028AB0
1123#define S_028AB0_STREAMOUT(x) (((x) & 0x1) << 0)
1124#define G_028AB0_STREAMOUT(x) (((x) >> 0) & 0x1)
1125#define C_028AB0_STREAMOUT 0xFFFFFFFE
1126#define R_028B20_VGT_STRMOUT_BUFFER_EN 0x028B20
1127#define S_028B20_BUFFER_0_EN(x) (((x) & 0x1) << 0)
1128#define G_028B20_BUFFER_0_EN(x) (((x) >> 0) & 0x1)
1129#define C_028B20_BUFFER_0_EN 0xFFFFFFFE
1130#define S_028B20_BUFFER_1_EN(x) (((x) & 0x1) << 1)
1131#define G_028B20_BUFFER_1_EN(x) (((x) >> 1) & 0x1)
1132#define C_028B20_BUFFER_1_EN 0xFFFFFFFD
1133#define S_028B20_BUFFER_2_EN(x) (((x) & 0x1) << 2)
1134#define G_028B20_BUFFER_2_EN(x) (((x) >> 2) & 0x1)
1135#define C_028B20_BUFFER_2_EN 0xFFFFFFFB
1136#define S_028B20_BUFFER_3_EN(x) (((x) & 0x1) << 3)
1137#define G_028B20_BUFFER_3_EN(x) (((x) >> 3) & 0x1)
1138#define C_028B20_BUFFER_3_EN 0xFFFFFFF7
1139#define S_028B20_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1140#define G_028B20_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1141#define C_028B20_SIZE 0x00000000
1142#define R_038000_SQ_TEX_RESOURCE_WORD0_0 0x038000
1143#define S_038000_DIM(x) (((x) & 0x7) << 0)
1144#define G_038000_DIM(x) (((x) >> 0) & 0x7)
1145#define C_038000_DIM 0xFFFFFFF8
1146#define V_038000_SQ_TEX_DIM_1D 0x00000000
1147#define V_038000_SQ_TEX_DIM_2D 0x00000001
1148#define V_038000_SQ_TEX_DIM_3D 0x00000002
1149#define V_038000_SQ_TEX_DIM_CUBEMAP 0x00000003
1150#define V_038000_SQ_TEX_DIM_1D_ARRAY 0x00000004
1151#define V_038000_SQ_TEX_DIM_2D_ARRAY 0x00000005
1152#define V_038000_SQ_TEX_DIM_2D_MSAA 0x00000006
1153#define V_038000_SQ_TEX_DIM_2D_ARRAY_MSAA 0x00000007
1154#define S_038000_TILE_MODE(x) (((x) & 0xF) << 3)
1155#define G_038000_TILE_MODE(x) (((x) >> 3) & 0xF)
1156#define C_038000_TILE_MODE 0xFFFFFF87
1157#define S_038000_TILE_TYPE(x) (((x) & 0x1) << 7)
1158#define G_038000_TILE_TYPE(x) (((x) >> 7) & 0x1)
1159#define C_038000_TILE_TYPE 0xFFFFFF7F
1160#define S_038000_PITCH(x) (((x) & 0x7FF) << 8)
1161#define G_038000_PITCH(x) (((x) >> 8) & 0x7FF)
1162#define C_038000_PITCH 0xFFF800FF
1163#define S_038000_TEX_WIDTH(x) (((x) & 0x1FFF) << 19)
1164#define G_038000_TEX_WIDTH(x) (((x) >> 19) & 0x1FFF)
1165#define C_038000_TEX_WIDTH 0x0007FFFF
1166#define R_038004_SQ_TEX_RESOURCE_WORD1_0 0x038004
1167#define S_038004_TEX_HEIGHT(x) (((x) & 0x1FFF) << 0)
1168#define G_038004_TEX_HEIGHT(x) (((x) >> 0) & 0x1FFF)
1169#define C_038004_TEX_HEIGHT 0xFFFFE000
1170#define S_038004_TEX_DEPTH(x) (((x) & 0x1FFF) << 13)
1171#define G_038004_TEX_DEPTH(x) (((x) >> 13) & 0x1FFF)
1172#define C_038004_TEX_DEPTH 0xFC001FFF
1173#define S_038004_DATA_FORMAT(x) (((x) & 0x3F) << 26)
1174#define G_038004_DATA_FORMAT(x) (((x) >> 26) & 0x3F)
1175#define C_038004_DATA_FORMAT 0x03FFFFFF
1176#define V_038004_COLOR_INVALID 0x00000000
1177#define V_038004_COLOR_8 0x00000001
1178#define V_038004_COLOR_4_4 0x00000002
1179#define V_038004_COLOR_3_3_2 0x00000003
1180#define V_038004_COLOR_16 0x00000005
1181#define V_038004_COLOR_16_FLOAT 0x00000006
1182#define V_038004_COLOR_8_8 0x00000007
1183#define V_038004_COLOR_5_6_5 0x00000008
1184#define V_038004_COLOR_6_5_5 0x00000009
1185#define V_038004_COLOR_1_5_5_5 0x0000000A
1186#define V_038004_COLOR_4_4_4_4 0x0000000B
1187#define V_038004_COLOR_5_5_5_1 0x0000000C
1188#define V_038004_COLOR_32 0x0000000D
1189#define V_038004_COLOR_32_FLOAT 0x0000000E
1190#define V_038004_COLOR_16_16 0x0000000F
1191#define V_038004_COLOR_16_16_FLOAT 0x00000010
1192#define V_038004_COLOR_8_24 0x00000011
1193#define V_038004_COLOR_8_24_FLOAT 0x00000012
1194#define V_038004_COLOR_24_8 0x00000013
1195#define V_038004_COLOR_24_8_FLOAT 0x00000014
1196#define V_038004_COLOR_10_11_11 0x00000015
1197#define V_038004_COLOR_10_11_11_FLOAT 0x00000016
1198#define V_038004_COLOR_11_11_10 0x00000017
1199#define V_038004_COLOR_11_11_10_FLOAT 0x00000018
1200#define V_038004_COLOR_2_10_10_10 0x00000019
1201#define V_038004_COLOR_8_8_8_8 0x0000001A
1202#define V_038004_COLOR_10_10_10_2 0x0000001B
1203#define V_038004_COLOR_X24_8_32_FLOAT 0x0000001C
1204#define V_038004_COLOR_32_32 0x0000001D
1205#define V_038004_COLOR_32_32_FLOAT 0x0000001E
1206#define V_038004_COLOR_16_16_16_16 0x0000001F
1207#define V_038004_COLOR_16_16_16_16_FLOAT 0x00000020
1208#define V_038004_COLOR_32_32_32_32 0x00000022
1209#define V_038004_COLOR_32_32_32_32_FLOAT 0x00000023
1210#define V_038004_FMT_1 0x00000025
1211#define V_038004_FMT_GB_GR 0x00000027
1212#define V_038004_FMT_BG_RG 0x00000028
1213#define V_038004_FMT_32_AS_8 0x00000029
1214#define V_038004_FMT_32_AS_8_8 0x0000002A
1215#define V_038004_FMT_5_9_9_9_SHAREDEXP 0x0000002B
1216#define V_038004_FMT_8_8_8 0x0000002C
1217#define V_038004_FMT_16_16_16 0x0000002D
1218#define V_038004_FMT_16_16_16_FLOAT 0x0000002E
1219#define V_038004_FMT_32_32_32 0x0000002F
1220#define V_038004_FMT_32_32_32_FLOAT 0x00000030
1221#define R_038010_SQ_TEX_RESOURCE_WORD4_0 0x038010
1222#define S_038010_FORMAT_COMP_X(x) (((x) & 0x3) << 0)
1223#define G_038010_FORMAT_COMP_X(x) (((x) >> 0) & 0x3)
1224#define C_038010_FORMAT_COMP_X 0xFFFFFFFC
1225#define S_038010_FORMAT_COMP_Y(x) (((x) & 0x3) << 2)
1226#define G_038010_FORMAT_COMP_Y(x) (((x) >> 2) & 0x3)
1227#define C_038010_FORMAT_COMP_Y 0xFFFFFFF3
1228#define S_038010_FORMAT_COMP_Z(x) (((x) & 0x3) << 4)
1229#define G_038010_FORMAT_COMP_Z(x) (((x) >> 4) & 0x3)
1230#define C_038010_FORMAT_COMP_Z 0xFFFFFFCF
1231#define S_038010_FORMAT_COMP_W(x) (((x) & 0x3) << 6)
1232#define G_038010_FORMAT_COMP_W(x) (((x) >> 6) & 0x3)
1233#define C_038010_FORMAT_COMP_W 0xFFFFFF3F
1234#define S_038010_NUM_FORMAT_ALL(x) (((x) & 0x3) << 8)
1235#define G_038010_NUM_FORMAT_ALL(x) (((x) >> 8) & 0x3)
1236#define C_038010_NUM_FORMAT_ALL 0xFFFFFCFF
1237#define S_038010_SRF_MODE_ALL(x) (((x) & 0x1) << 10)
1238#define G_038010_SRF_MODE_ALL(x) (((x) >> 10) & 0x1)
1239#define C_038010_SRF_MODE_ALL 0xFFFFFBFF
1240#define S_038010_FORCE_DEGAMMA(x) (((x) & 0x1) << 11)
1241#define G_038010_FORCE_DEGAMMA(x) (((x) >> 11) & 0x1)
1242#define C_038010_FORCE_DEGAMMA 0xFFFFF7FF
1243#define S_038010_ENDIAN_SWAP(x) (((x) & 0x3) << 12)
1244#define G_038010_ENDIAN_SWAP(x) (((x) >> 12) & 0x3)
1245#define C_038010_ENDIAN_SWAP 0xFFFFCFFF
1246#define S_038010_REQUEST_SIZE(x) (((x) & 0x3) << 14)
1247#define G_038010_REQUEST_SIZE(x) (((x) >> 14) & 0x3)
1248#define C_038010_REQUEST_SIZE 0xFFFF3FFF
1249#define S_038010_DST_SEL_X(x) (((x) & 0x7) << 16)
1250#define G_038010_DST_SEL_X(x) (((x) >> 16) & 0x7)
1251#define C_038010_DST_SEL_X 0xFFF8FFFF
1252#define S_038010_DST_SEL_Y(x) (((x) & 0x7) << 19)
1253#define G_038010_DST_SEL_Y(x) (((x) >> 19) & 0x7)
1254#define C_038010_DST_SEL_Y 0xFFC7FFFF
1255#define S_038010_DST_SEL_Z(x) (((x) & 0x7) << 22)
1256#define G_038010_DST_SEL_Z(x) (((x) >> 22) & 0x7)
1257#define C_038010_DST_SEL_Z 0xFE3FFFFF
1258#define S_038010_DST_SEL_W(x) (((x) & 0x7) << 25)
1259#define G_038010_DST_SEL_W(x) (((x) >> 25) & 0x7)
1260#define C_038010_DST_SEL_W 0xF1FFFFFF
1261#define S_038010_BASE_LEVEL(x) (((x) & 0xF) << 28)
1262#define G_038010_BASE_LEVEL(x) (((x) >> 28) & 0xF)
1263#define C_038010_BASE_LEVEL 0x0FFFFFFF
1264#define R_038014_SQ_TEX_RESOURCE_WORD5_0 0x038014
1265#define S_038014_LAST_LEVEL(x) (((x) & 0xF) << 0)
1266#define G_038014_LAST_LEVEL(x) (((x) >> 0) & 0xF)
1267#define C_038014_LAST_LEVEL 0xFFFFFFF0
1268#define S_038014_BASE_ARRAY(x) (((x) & 0x1FFF) << 4)
1269#define G_038014_BASE_ARRAY(x) (((x) >> 4) & 0x1FFF)
1270#define C_038014_BASE_ARRAY 0xFFFE000F
1271#define S_038014_LAST_ARRAY(x) (((x) & 0x1FFF) << 17)
1272#define G_038014_LAST_ARRAY(x) (((x) >> 17) & 0x1FFF)
1273#define C_038014_LAST_ARRAY 0xC001FFFF
1274#define R_0288A8_SQ_ESGS_RING_ITEMSIZE 0x0288A8
1275#define S_0288A8_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1276#define G_0288A8_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1277#define C_0288A8_ITEMSIZE 0xFFFF8000
1278#define R_008C44_SQ_ESGS_RING_SIZE 0x008C44
1279#define S_008C44_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1280#define G_008C44_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1281#define C_008C44_MEM_SIZE 0x00000000
1282#define R_0288B0_SQ_ESTMP_RING_ITEMSIZE 0x0288B0
1283#define S_0288B0_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1284#define G_0288B0_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1285#define C_0288B0_ITEMSIZE 0xFFFF8000
1286#define R_008C54_SQ_ESTMP_RING_SIZE 0x008C54
1287#define S_008C54_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1288#define G_008C54_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1289#define C_008C54_MEM_SIZE 0x00000000
1290#define R_0288C0_SQ_FBUF_RING_ITEMSIZE 0x0288C0
1291#define S_0288C0_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1292#define G_0288C0_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1293#define C_0288C0_ITEMSIZE 0xFFFF8000
1294#define R_008C74_SQ_FBUF_RING_SIZE 0x008C74
1295#define S_008C74_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1296#define G_008C74_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1297#define C_008C74_MEM_SIZE 0x00000000
1298#define R_0288B4_SQ_GSTMP_RING_ITEMSIZE 0x0288B4
1299#define S_0288B4_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1300#define G_0288B4_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1301#define C_0288B4_ITEMSIZE 0xFFFF8000
1302#define R_008C5C_SQ_GSTMP_RING_SIZE 0x008C5C
1303#define S_008C5C_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1304#define G_008C5C_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1305#define C_008C5C_MEM_SIZE 0x00000000
1306#define R_0288AC_SQ_GSVS_RING_ITEMSIZE 0x0288AC
1307#define S_0288AC_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1308#define G_0288AC_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1309#define C_0288AC_ITEMSIZE 0xFFFF8000
1310#define R_008C4C_SQ_GSVS_RING_SIZE 0x008C4C
1311#define S_008C4C_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1312#define G_008C4C_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1313#define C_008C4C_MEM_SIZE 0x00000000
1314#define R_0288BC_SQ_PSTMP_RING_ITEMSIZE 0x0288BC
1315#define S_0288BC_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1316#define G_0288BC_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1317#define C_0288BC_ITEMSIZE 0xFFFF8000
1318#define R_008C6C_SQ_PSTMP_RING_SIZE 0x008C6C
1319#define S_008C6C_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1320#define G_008C6C_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1321#define C_008C6C_MEM_SIZE 0x00000000
1322#define R_0288C4_SQ_REDUC_RING_ITEMSIZE 0x0288C4
1323#define S_0288C4_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1324#define G_0288C4_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1325#define C_0288C4_ITEMSIZE 0xFFFF8000
1326#define R_008C7C_SQ_REDUC_RING_SIZE 0x008C7C
1327#define S_008C7C_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1328#define G_008C7C_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1329#define C_008C7C_MEM_SIZE 0x00000000
1330#define R_0288B8_SQ_VSTMP_RING_ITEMSIZE 0x0288B8
1331#define S_0288B8_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1332#define G_0288B8_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1333#define C_0288B8_ITEMSIZE 0xFFFF8000
1334#define R_008C64_SQ_VSTMP_RING_SIZE 0x008C64
1335#define S_008C64_MEM_SIZE(x) (((x) & 0xFFFFFFFF) << 0)
1336#define G_008C64_MEM_SIZE(x) (((x) >> 0) & 0xFFFFFFFF)
1337#define C_008C64_MEM_SIZE 0x00000000
1338#define R_0288C8_SQ_GS_VERT_ITEMSIZE 0x0288C8
1339#define S_0288C8_ITEMSIZE(x) (((x) & 0x7FFF) << 0)
1340#define G_0288C8_ITEMSIZE(x) (((x) >> 0) & 0x7FFF)
1341#define C_0288C8_ITEMSIZE 0xFFFF8000
1342#define R_028010_DB_DEPTH_INFO 0x028010
1343#define S_028010_FORMAT(x) (((x) & 0x7) << 0)
1344#define G_028010_FORMAT(x) (((x) >> 0) & 0x7)
1345#define C_028010_FORMAT 0xFFFFFFF8
1346#define V_028010_DEPTH_INVALID 0x00000000
1347#define V_028010_DEPTH_16 0x00000001
1348#define V_028010_DEPTH_X8_24 0x00000002
1349#define V_028010_DEPTH_8_24 0x00000003
1350#define V_028010_DEPTH_X8_24_FLOAT 0x00000004
1351#define V_028010_DEPTH_8_24_FLOAT 0x00000005
1352#define V_028010_DEPTH_32_FLOAT 0x00000006
1353#define V_028010_DEPTH_X24_8_32_FLOAT 0x00000007
1354#define S_028010_READ_SIZE(x) (((x) & 0x1) << 3)
1355#define G_028010_READ_SIZE(x) (((x) >> 3) & 0x1)
1356#define C_028010_READ_SIZE 0xFFFFFFF7
1357#define S_028010_ARRAY_MODE(x) (((x) & 0xF) << 15)
1358#define G_028010_ARRAY_MODE(x) (((x) >> 15) & 0xF)
1359#define C_028010_ARRAY_MODE 0xFFF87FFF
1360#define S_028010_TILE_SURFACE_ENABLE(x) (((x) & 0x1) << 25)
1361#define G_028010_TILE_SURFACE_ENABLE(x) (((x) >> 25) & 0x1)
1362#define C_028010_TILE_SURFACE_ENABLE 0xFDFFFFFF
1363#define S_028010_TILE_COMPACT(x) (((x) & 0x1) << 26)
1364#define G_028010_TILE_COMPACT(x) (((x) >> 26) & 0x1)
1365#define C_028010_TILE_COMPACT 0xFBFFFFFF
1366#define S_028010_ZRANGE_PRECISION(x) (((x) & 0x1) << 31)
1367#define G_028010_ZRANGE_PRECISION(x) (((x) >> 31) & 0x1)
1368#define C_028010_ZRANGE_PRECISION 0x7FFFFFFF
1369#define R_028000_DB_DEPTH_SIZE 0x028000
1370#define S_028000_PITCH_TILE_MAX(x) (((x) & 0x3FF) << 0)
1371#define G_028000_PITCH_TILE_MAX(x) (((x) >> 0) & 0x3FF)
1372#define C_028000_PITCH_TILE_MAX 0xFFFFFC00
1373#define S_028000_SLICE_TILE_MAX(x) (((x) & 0xFFFFF) << 10)
1374#define G_028000_SLICE_TILE_MAX(x) (((x) >> 10) & 0xFFFFF)
1375#define C_028000_SLICE_TILE_MAX 0xC00003FF
1376#define R_028004_DB_DEPTH_VIEW 0x028004
1377#define S_028004_SLICE_START(x) (((x) & 0x7FF) << 0)
1378#define G_028004_SLICE_START(x) (((x) >> 0) & 0x7FF)
1379#define C_028004_SLICE_START 0xFFFFF800
1380#define S_028004_SLICE_MAX(x) (((x) & 0x7FF) << 13)
1381#define G_028004_SLICE_MAX(x) (((x) >> 13) & 0x7FF)
1382#define C_028004_SLICE_MAX 0xFF001FFF
1383#define R_028800_DB_DEPTH_CONTROL 0x028800
1384#define S_028800_STENCIL_ENABLE(x) (((x) & 0x1) << 0)
1385#define G_028800_STENCIL_ENABLE(x) (((x) >> 0) & 0x1)
1386#define C_028800_STENCIL_ENABLE 0xFFFFFFFE
1387#define S_028800_Z_ENABLE(x) (((x) & 0x1) << 1)
1388#define G_028800_Z_ENABLE(x) (((x) >> 1) & 0x1)
1389#define C_028800_Z_ENABLE 0xFFFFFFFD
1390#define S_028800_Z_WRITE_ENABLE(x) (((x) & 0x1) << 2)
1391#define G_028800_Z_WRITE_ENABLE(x) (((x) >> 2) & 0x1)
1392#define C_028800_Z_WRITE_ENABLE 0xFFFFFFFB
1393#define S_028800_ZFUNC(x) (((x) & 0x7) << 4)
1394#define G_028800_ZFUNC(x) (((x) >> 4) & 0x7)
1395#define C_028800_ZFUNC 0xFFFFFF8F
1396#define S_028800_BACKFACE_ENABLE(x) (((x) & 0x1) << 7)
1397#define G_028800_BACKFACE_ENABLE(x) (((x) >> 7) & 0x1)
1398#define C_028800_BACKFACE_ENABLE 0xFFFFFF7F
1399#define S_028800_STENCILFUNC(x) (((x) & 0x7) << 8)
1400#define G_028800_STENCILFUNC(x) (((x) >> 8) & 0x7)
1401#define C_028800_STENCILFUNC 0xFFFFF8FF
1402#define S_028800_STENCILFAIL(x) (((x) & 0x7) << 11)
1403#define G_028800_STENCILFAIL(x) (((x) >> 11) & 0x7)
1404#define C_028800_STENCILFAIL 0xFFFFC7FF
1405#define S_028800_STENCILZPASS(x) (((x) & 0x7) << 14)
1406#define G_028800_STENCILZPASS(x) (((x) >> 14) & 0x7)
1407#define C_028800_STENCILZPASS 0xFFFE3FFF
1408#define S_028800_STENCILZFAIL(x) (((x) & 0x7) << 17)
1409#define G_028800_STENCILZFAIL(x) (((x) >> 17) & 0x7)
1410#define C_028800_STENCILZFAIL 0xFFF1FFFF
1411#define S_028800_STENCILFUNC_BF(x) (((x) & 0x7) << 20)
1412#define G_028800_STENCILFUNC_BF(x) (((x) >> 20) & 0x7)
1413#define C_028800_STENCILFUNC_BF 0xFF8FFFFF
1414#define S_028800_STENCILFAIL_BF(x) (((x) & 0x7) << 23)
1415#define G_028800_STENCILFAIL_BF(x) (((x) >> 23) & 0x7)
1416#define C_028800_STENCILFAIL_BF 0xFC7FFFFF
1417#define S_028800_STENCILZPASS_BF(x) (((x) & 0x7) << 26)
1418#define G_028800_STENCILZPASS_BF(x) (((x) >> 26) & 0x7)
1419#define C_028800_STENCILZPASS_BF 0xE3FFFFFF
1420#define S_028800_STENCILZFAIL_BF(x) (((x) & 0x7) << 29)
1421#define G_028800_STENCILZFAIL_BF(x) (((x) >> 29) & 0x7)
1422#define C_028800_STENCILZFAIL_BF 0x1FFFFFFF
1423
677#endif 1424#endif
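The S_/G_/C_ macros added above follow the driver's usual bitfield convention: S_* shifts a value into its field, G_* extracts it, and C_* is the AND-mask that clears it. A minimal sketch of composing a register value with the DB_DEPTH_CONTROL fields defined above (illustrative only, not part of the patch):

	u32 val = 0;
	unsigned zfunc;

	val |= S_028800_Z_ENABLE(1);                      /* enable depth testing */
	val |= S_028800_Z_WRITE_ENABLE(1);                /* allow depth writes */
	val = (val & C_028800_ZFUNC) | S_028800_ZFUNC(3); /* replace only the ZFUNC field */
	zfunc = G_028800_ZFUNC(val);                      /* reads back 3 */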
diff --git a/drivers/gpu/drm/radeon/radeon.h b/drivers/gpu/drm/radeon/radeon.h
index 224506a2f7b1..034218c3dbbb 100644
--- a/drivers/gpu/drm/radeon/radeon.h
+++ b/drivers/gpu/drm/radeon/radeon.h
@@ -28,8 +28,6 @@
28#ifndef __RADEON_H__ 28#ifndef __RADEON_H__
29#define __RADEON_H__ 29#define __RADEON_H__
30 30
31#include "radeon_object.h"
32
33/* TODO: Here are things that need to be done:	31/* TODO: Here are things that need to be done:
34 * - surface allocator & initializer: (a bit like scratch reg) should	32 * - surface allocator & initializer: (a bit like scratch reg) should
35 * initialize HDP_ stuff on RS600, R600, R700 hw, well anything	33 * initialize HDP_ stuff on RS600, R600, R700 hw, well anything
@@ -67,6 +65,11 @@
67#include <linux/list.h> 65#include <linux/list.h>
68#include <linux/kref.h> 66#include <linux/kref.h>
69 67
68#include <ttm/ttm_bo_api.h>
69#include <ttm/ttm_bo_driver.h>
70#include <ttm/ttm_placement.h>
71#include <ttm/ttm_module.h>
72
70#include "radeon_family.h" 73#include "radeon_family.h"
71#include "radeon_mode.h" 74#include "radeon_mode.h"
72#include "radeon_reg.h" 75#include "radeon_reg.h"
@@ -85,12 +88,18 @@ extern int radeon_benchmarking;
85extern int radeon_testing; 88extern int radeon_testing;
86extern int radeon_connector_table; 89extern int radeon_connector_table;
87extern int radeon_tv; 90extern int radeon_tv;
91extern int radeon_new_pll;
92extern int radeon_dynpm;
93extern int radeon_audio;
94extern int radeon_disp_priority;
95extern int radeon_hw_i2c;
88 96
89/* 97/*
90 * Copy from radeon_drv.h so we don't have to include both and have conflicting 98 * Copy from radeon_drv.h so we don't have to include both and have conflicting
91 * symbols;	99 * symbols;
92 */ 100 */
93#define RADEON_MAX_USEC_TIMEOUT 100000 /* 100 ms */ 101#define RADEON_MAX_USEC_TIMEOUT 100000 /* 100 ms */
102/* RADEON_IB_POOL_SIZE must be a power of 2 */
94#define RADEON_IB_POOL_SIZE 16 103#define RADEON_IB_POOL_SIZE 16
95#define RADEON_DEBUGFS_MAX_NUM_FILES 32 104#define RADEON_DEBUGFS_MAX_NUM_FILES 32
96#define RADEONFB_CONN_LIMIT 4 105#define RADEONFB_CONN_LIMIT 4
@@ -112,6 +121,21 @@ struct radeon_device;
112/* 121/*
113 * BIOS. 122 * BIOS.
114 */ 123 */
124#define ATRM_BIOS_PAGE 4096
125
126#if defined(CONFIG_VGA_SWITCHEROO)
127bool radeon_atrm_supported(struct pci_dev *pdev);
128int radeon_atrm_get_bios_chunk(uint8_t *bios, int offset, int len);
129#else
130static inline bool radeon_atrm_supported(struct pci_dev *pdev)
131{
132 return false;
133}
134
135static inline int radeon_atrm_get_bios_chunk(uint8_t *bios, int offset, int len){
136 return -EINVAL;
137}
138#endif
115bool radeon_get_bios(struct radeon_device *rdev); 139bool radeon_get_bios(struct radeon_device *rdev);
116 140
117 141
@@ -132,17 +156,24 @@ void radeon_dummy_page_fini(struct radeon_device *rdev);
132struct radeon_clock { 156struct radeon_clock {
133 struct radeon_pll p1pll; 157 struct radeon_pll p1pll;
134 struct radeon_pll p2pll; 158 struct radeon_pll p2pll;
159 struct radeon_pll dcpll;
135 struct radeon_pll spll; 160 struct radeon_pll spll;
136 struct radeon_pll mpll; 161 struct radeon_pll mpll;
137	/* 10 kHz units */	162	/* 10 kHz units */
138 uint32_t default_mclk; 163 uint32_t default_mclk;
139 uint32_t default_sclk; 164 uint32_t default_sclk;
165 uint32_t default_dispclk;
166 uint32_t dp_extclk;
140}; 167};
141 168
142/* 169/*
143 * Power management 170 * Power management
144 */ 171 */
145int radeon_pm_init(struct radeon_device *rdev); 172int radeon_pm_init(struct radeon_device *rdev);
173void radeon_pm_fini(struct radeon_device *rdev);
174void radeon_pm_compute_clocks(struct radeon_device *rdev);
175void radeon_combios_get_power_modes(struct radeon_device *rdev);
176void radeon_atombios_get_power_modes(struct radeon_device *rdev);
146 177
147/* 178/*
148 * Fences. 179 * Fences.
@@ -157,6 +188,7 @@ struct radeon_fence_driver {
157 struct list_head created; 188 struct list_head created;
158 struct list_head emited; 189 struct list_head emited;
159 struct list_head signaled; 190 struct list_head signaled;
191 bool initialized;
160}; 192};
161 193
162struct radeon_fence { 194struct radeon_fence {
@@ -186,76 +218,63 @@ void radeon_fence_unref(struct radeon_fence **fence);
186 * Tiling registers 218 * Tiling registers
187 */ 219 */
188struct radeon_surface_reg { 220struct radeon_surface_reg {
189 struct radeon_object *robj; 221 struct radeon_bo *bo;
190}; 222};
191 223
192#define RADEON_GEM_MAX_SURFACES 8 224#define RADEON_GEM_MAX_SURFACES 8
193 225
194/* 226/*
195 * Radeon buffer. 227 * TTM.
196 */ 228 */
197struct radeon_object; 229struct radeon_mman {
230 struct ttm_bo_global_ref bo_global_ref;
231 struct ttm_global_reference mem_global_ref;
232 struct ttm_bo_device bdev;
233 bool mem_global_referenced;
234 bool initialized;
235};
236
237struct radeon_bo {
238 /* Protected by gem.mutex */
239 struct list_head list;
240 /* Protected by tbo.reserved */
241 u32 placements[3];
242 struct ttm_placement placement;
243 struct ttm_buffer_object tbo;
244 struct ttm_bo_kmap_obj kmap;
245 unsigned pin_count;
246 void *kptr;
247 u32 tiling_flags;
248 u32 pitch;
249 int surface_reg;
250 /* Constant after initialization */
251 struct radeon_device *rdev;
252 struct drm_gem_object *gobj;
253};
198 254
199struct radeon_object_list { 255struct radeon_bo_list {
200 struct list_head list; 256 struct list_head list;
201 struct radeon_object *robj; 257 struct radeon_bo *bo;
202 uint64_t gpu_offset; 258 uint64_t gpu_offset;
203 unsigned rdomain; 259 unsigned rdomain;
204 unsigned wdomain; 260 unsigned wdomain;
205 uint32_t tiling_flags; 261 u32 tiling_flags;
206}; 262};
207 263
208int radeon_object_init(struct radeon_device *rdev);
209void radeon_object_fini(struct radeon_device *rdev);
210int radeon_object_create(struct radeon_device *rdev,
211 struct drm_gem_object *gobj,
212 unsigned long size,
213 bool kernel,
214 uint32_t domain,
215 bool interruptible,
216 struct radeon_object **robj_ptr);
217int radeon_object_kmap(struct radeon_object *robj, void **ptr);
218void radeon_object_kunmap(struct radeon_object *robj);
219void radeon_object_unref(struct radeon_object **robj);
220int radeon_object_pin(struct radeon_object *robj, uint32_t domain,
221 uint64_t *gpu_addr);
222void radeon_object_unpin(struct radeon_object *robj);
223int radeon_object_wait(struct radeon_object *robj);
224int radeon_object_busy_domain(struct radeon_object *robj, uint32_t *cur_placement);
225int radeon_object_evict_vram(struct radeon_device *rdev);
226int radeon_object_mmap(struct radeon_object *robj, uint64_t *offset);
227void radeon_object_force_delete(struct radeon_device *rdev);
228void radeon_object_list_add_object(struct radeon_object_list *lobj,
229 struct list_head *head);
230int radeon_object_list_validate(struct list_head *head, void *fence);
231void radeon_object_list_unvalidate(struct list_head *head);
232void radeon_object_list_clean(struct list_head *head);
233int radeon_object_fbdev_mmap(struct radeon_object *robj,
234 struct vm_area_struct *vma);
235unsigned long radeon_object_size(struct radeon_object *robj);
236void radeon_object_clear_surface_reg(struct radeon_object *robj);
237int radeon_object_check_tiling(struct radeon_object *robj, bool has_moved,
238 bool force_drop);
239void radeon_object_set_tiling_flags(struct radeon_object *robj,
240 uint32_t tiling_flags, uint32_t pitch);
241void radeon_object_get_tiling_flags(struct radeon_object *robj, uint32_t *tiling_flags, uint32_t *pitch);
242void radeon_bo_move_notify(struct ttm_buffer_object *bo,
243 struct ttm_mem_reg *mem);
244void radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo);
245/* 264/*
246 * GEM objects. 265 * GEM objects.
247 */ 266 */
248struct radeon_gem { 267struct radeon_gem {
268 struct mutex mutex;
249 struct list_head objects; 269 struct list_head objects;
250}; 270};
251 271
252int radeon_gem_init(struct radeon_device *rdev); 272int radeon_gem_init(struct radeon_device *rdev);
253void radeon_gem_fini(struct radeon_device *rdev); 273void radeon_gem_fini(struct radeon_device *rdev);
254int radeon_gem_object_create(struct radeon_device *rdev, int size, 274int radeon_gem_object_create(struct radeon_device *rdev, int size,
255 int alignment, int initial_domain, 275 int alignment, int initial_domain,
256 bool discardable, bool kernel, 276 bool discardable, bool kernel,
257 bool interruptible, 277 struct drm_gem_object **obj);
258 struct drm_gem_object **obj);
259int radeon_gem_object_pin(struct drm_gem_object *obj, uint32_t pin_domain, 278int radeon_gem_object_pin(struct drm_gem_object *obj, uint32_t pin_domain,
260 uint64_t *gpu_addr); 279 uint64_t *gpu_addr);
261void radeon_gem_object_unpin(struct drm_gem_object *obj); 280void radeon_gem_object_unpin(struct drm_gem_object *obj);
@@ -271,7 +290,7 @@ struct radeon_gart_table_ram {
271}; 290};
272 291
273struct radeon_gart_table_vram { 292struct radeon_gart_table_vram {
274 struct radeon_object *robj; 293 struct radeon_bo *robj;
275 volatile uint32_t *ptr; 294 volatile uint32_t *ptr;
276}; 295};
277 296
@@ -281,6 +300,7 @@ union radeon_gart_table {
281}; 300};
282 301
283#define RADEON_GPU_PAGE_SIZE 4096 302#define RADEON_GPU_PAGE_SIZE 4096
303#define RADEON_GPU_PAGE_MASK (RADEON_GPU_PAGE_SIZE - 1)
284 304
285struct radeon_gart { 305struct radeon_gart {
286 dma_addr_t table_addr; 306 dma_addr_t table_addr;
@@ -315,21 +335,21 @@ struct radeon_mc {
315 /* for some chips with <= 32MB we need to lie 335 /* for some chips with <= 32MB we need to lie
316 * about vram size near mc fb location */ 336 * about vram size near mc fb location */
317 u64 mc_vram_size; 337 u64 mc_vram_size;
318 u64 gtt_location; 338 u64 visible_vram_size;
319 u64 gtt_size; 339 u64 gtt_size;
320 u64 gtt_start; 340 u64 gtt_start;
321 u64 gtt_end; 341 u64 gtt_end;
322 u64 vram_location;
323 u64 vram_start; 342 u64 vram_start;
324 u64 vram_end; 343 u64 vram_end;
325 unsigned vram_width; 344 unsigned vram_width;
326 u64 real_vram_size; 345 u64 real_vram_size;
327 int vram_mtrr; 346 int vram_mtrr;
328 bool vram_is_ddr; 347 bool vram_is_ddr;
348 bool igp_sideport_enabled;
329}; 349};
330 350
331int radeon_mc_setup(struct radeon_device *rdev); 351bool radeon_combios_sideport_present(struct radeon_device *rdev);
332 352bool radeon_atombios_sideport_present(struct radeon_device *rdev);
333 353
334/* 354/*
335 * GPU scratch registers structures, functions & helpers 355 * GPU scratch registers structures, functions & helpers
@@ -352,22 +372,29 @@ struct radeon_irq {
352 bool sw_int; 372 bool sw_int;
353 /* FIXME: use a define max crtc rather than hardcode it */ 373 /* FIXME: use a define max crtc rather than hardcode it */
354 bool crtc_vblank_int[2]; 374 bool crtc_vblank_int[2];
375 wait_queue_head_t vblank_queue;
376 /* FIXME: use defines for max hpd/dacs */
377 bool hpd[6];
378 spinlock_t sw_lock;
379 int sw_refcount;
355}; 380};
356 381
357int radeon_irq_kms_init(struct radeon_device *rdev); 382int radeon_irq_kms_init(struct radeon_device *rdev);
358void radeon_irq_kms_fini(struct radeon_device *rdev); 383void radeon_irq_kms_fini(struct radeon_device *rdev);
359 384void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev);
385void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev);
360 386
361/* 387/*
362 * CP & ring. 388 * CP & ring.
363 */ 389 */
364struct radeon_ib { 390struct radeon_ib {
365 struct list_head list; 391 struct list_head list;
366 unsigned long idx; 392 unsigned idx;
367 uint64_t gpu_addr; 393 uint64_t gpu_addr;
368 struct radeon_fence *fence; 394 struct radeon_fence *fence;
369 uint32_t *ptr; 395 uint32_t *ptr;
370 uint32_t length_dw; 396 uint32_t length_dw;
397 bool free;
371}; 398};
372 399
373/* 400/*
@@ -376,15 +403,15 @@ struct radeon_ib {
376 */ 403 */
377struct radeon_ib_pool { 404struct radeon_ib_pool {
378 struct mutex mutex; 405 struct mutex mutex;
379 struct radeon_object *robj; 406 struct radeon_bo *robj;
380 struct list_head scheduled_ibs; 407 struct list_head bogus_ib;
381 struct radeon_ib ibs[RADEON_IB_POOL_SIZE]; 408 struct radeon_ib ibs[RADEON_IB_POOL_SIZE];
382 bool ready; 409 bool ready;
383 DECLARE_BITMAP(alloc_bm, RADEON_IB_POOL_SIZE); 410 unsigned head_id;
384}; 411};
385 412
386struct radeon_cp { 413struct radeon_cp {
387 struct radeon_object *ring_obj; 414 struct radeon_bo *ring_obj;
388 volatile uint32_t *ring; 415 volatile uint32_t *ring;
389 unsigned rptr; 416 unsigned rptr;
390 unsigned wptr; 417 unsigned wptr;
@@ -399,8 +426,25 @@ struct radeon_cp {
399 bool ready; 426 bool ready;
400}; 427};
401 428
429/*
430 * R6xx+ IH ring
431 */
432struct r600_ih {
433 struct radeon_bo *ring_obj;
434 volatile uint32_t *ring;
435 unsigned rptr;
436 unsigned wptr;
437 unsigned wptr_old;
438 unsigned ring_size;
439 uint64_t gpu_addr;
440 uint32_t ptr_mask;
441 spinlock_t lock;
442 bool enabled;
443};
444
402struct r600_blit { 445struct r600_blit {
403 struct radeon_object *shader_obj; 446 struct mutex mutex;
447 struct radeon_bo *shader_obj;
404 u64 shader_gpu_addr; 448 u64 shader_gpu_addr;
405 u32 vs_offset, ps_offset; 449 u32 vs_offset, ps_offset;
406 u32 state_offset; 450 u32 state_offset;
@@ -415,6 +459,7 @@ int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib);
415int radeon_ib_pool_init(struct radeon_device *rdev); 459int radeon_ib_pool_init(struct radeon_device *rdev);
416void radeon_ib_pool_fini(struct radeon_device *rdev); 460void radeon_ib_pool_fini(struct radeon_device *rdev);
417int radeon_ib_test(struct radeon_device *rdev); 461int radeon_ib_test(struct radeon_device *rdev);
462extern void radeon_ib_bogus_add(struct radeon_device *rdev, struct radeon_ib *ib);
418/* Ring access between begin & end cannot sleep */ 463/* Ring access between begin & end cannot sleep */
419void radeon_ring_free_size(struct radeon_device *rdev); 464void radeon_ring_free_size(struct radeon_device *rdev);
420int radeon_ring_lock(struct radeon_device *rdev, unsigned ndw); 465int radeon_ring_lock(struct radeon_device *rdev, unsigned ndw);
@@ -430,8 +475,8 @@ void radeon_ring_fini(struct radeon_device *rdev);
430 */ 475 */
431struct radeon_cs_reloc { 476struct radeon_cs_reloc {
432 struct drm_gem_object *gobj; 477 struct drm_gem_object *gobj;
433 struct radeon_object *robj; 478 struct radeon_bo *robj;
434 struct radeon_object_list lobj; 479 struct radeon_bo_list lobj;
435 uint32_t handle; 480 uint32_t handle;
436 uint32_t flags; 481 uint32_t flags;
437}; 482};
@@ -448,6 +493,7 @@ struct radeon_cs_chunk {
448}; 493};
449 494
450struct radeon_cs_parser { 495struct radeon_cs_parser {
496 struct device *dev;
451 struct radeon_device *rdev; 497 struct radeon_device *rdev;
452 struct drm_file *filp; 498 struct drm_file *filp;
453 /* chunks */ 499 /* chunks */
@@ -527,7 +573,7 @@ void radeon_agp_fini(struct radeon_device *rdev);
527 * Writeback 573 * Writeback
528 */ 574 */
529struct radeon_wb { 575struct radeon_wb {
530 struct radeon_object *wb_obj; 576 struct radeon_bo *wb_obj;
531 volatile uint32_t *wb; 577 volatile uint32_t *wb;
532 uint64_t gpu_addr; 578 uint64_t gpu_addr;
533}; 579};
@@ -551,7 +597,100 @@ struct radeon_wb {
551 * Equation between gpu/memory clock and available bandwidth is hw dependent 597 * Equation between gpu/memory clock and available bandwidth is hw dependent
552 * (type of memory, bus size, efficiency, ...) 598 * (type of memory, bus size, efficiency, ...)
553 */ 599 */
600enum radeon_pm_state {
601 PM_STATE_DISABLED,
602 PM_STATE_MINIMUM,
603 PM_STATE_PAUSED,
604 PM_STATE_ACTIVE
605};
606enum radeon_pm_action {
607 PM_ACTION_NONE,
608 PM_ACTION_MINIMUM,
609 PM_ACTION_DOWNCLOCK,
610 PM_ACTION_UPCLOCK
611};
612
613enum radeon_voltage_type {
614 VOLTAGE_NONE = 0,
615 VOLTAGE_GPIO,
616 VOLTAGE_VDDC,
617 VOLTAGE_SW
618};
619
620enum radeon_pm_state_type {
621 POWER_STATE_TYPE_DEFAULT,
622 POWER_STATE_TYPE_POWERSAVE,
623 POWER_STATE_TYPE_BATTERY,
624 POWER_STATE_TYPE_BALANCED,
625 POWER_STATE_TYPE_PERFORMANCE,
626};
627
628enum radeon_pm_clock_mode_type {
629 POWER_MODE_TYPE_DEFAULT,
630 POWER_MODE_TYPE_LOW,
631 POWER_MODE_TYPE_MID,
632 POWER_MODE_TYPE_HIGH,
633};
634
635struct radeon_voltage {
636 enum radeon_voltage_type type;
637 /* gpio voltage */
638 struct radeon_gpio_rec gpio;
639 u32 delay; /* delay in usec from voltage drop to sclk change */
640 bool active_high; /* voltage drop is active when bit is high */
641 /* VDDC voltage */
642 u8 vddc_id; /* index into vddc voltage table */
643 u8 vddci_id; /* index into vddci voltage table */
644 bool vddci_enabled;
645 /* r6xx+ sw */
646 u32 voltage;
647};
648
649struct radeon_pm_non_clock_info {
650 /* pcie lanes */
651 int pcie_lanes;
652 /* standardized non-clock flags */
653 u32 flags;
654};
655
656struct radeon_pm_clock_info {
657 /* memory clock */
658 u32 mclk;
659 /* engine clock */
660 u32 sclk;
661 /* voltage info */
662 struct radeon_voltage voltage;
663 /* standardized clock flags - not sure we'll need these */
664 u32 flags;
665};
666
667struct radeon_power_state {
668 enum radeon_pm_state_type type;
669 /* XXX: use a define for num clock modes */
670 struct radeon_pm_clock_info clock_info[8];
671 /* number of valid clock modes in this power state */
672 int num_clock_modes;
673 struct radeon_pm_clock_info *default_clock_mode;
674 /* non clock info about this state */
675 struct radeon_pm_non_clock_info non_clock_info;
676 bool voltage_drop_active;
677};
678
679/*
 680 * Some modes are overclocked by a very small amount; accept them
681 */
682#define RADEON_MODE_OVERCLOCK_MARGIN 500 /* 5 MHz */
683
554struct radeon_pm { 684struct radeon_pm {
685 struct mutex mutex;
686 struct delayed_work idle_work;
687 enum radeon_pm_state state;
688 enum radeon_pm_action planned_action;
689 unsigned long action_timeout;
690 bool downclocked;
691 int active_crtcs;
692 int req_vblank;
693 bool vblank_sync;
555 fixed20_12 max_bandwidth; 694 fixed20_12 max_bandwidth;
556 fixed20_12 igp_sideport_mclk; 695 fixed20_12 igp_sideport_mclk;
557 fixed20_12 igp_system_mclk; 696 fixed20_12 igp_system_mclk;
@@ -562,7 +701,18 @@ struct radeon_pm {
562 fixed20_12 ht_bandwidth; 701 fixed20_12 ht_bandwidth;
563 fixed20_12 core_bandwidth; 702 fixed20_12 core_bandwidth;
564 fixed20_12 sclk; 703 fixed20_12 sclk;
704 fixed20_12 mclk;
565 fixed20_12 needed_bandwidth; 705 fixed20_12 needed_bandwidth;
706 /* XXX: use a define for num power modes */
707 struct radeon_power_state power_state[8];
708 /* number of valid power states */
709 int num_power_states;
710 struct radeon_power_state *current_power_state;
711 struct radeon_pm_clock_info *current_clock_mode;
712 struct radeon_power_state *requested_power_state;
713 struct radeon_pm_clock_info *requested_clock_mode;
714 struct radeon_power_state *default_power_state;
715 struct radeon_i2c_chan *i2c_bus;
566}; 716};
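A hedged sketch of how the requested_*/current_* pointers might be resolved into actual clock programming through the asic wrappers; the helper below is illustrative and not part of this patch:

	static void radeon_pm_apply_requested(struct radeon_device *rdev)
	{
		struct radeon_power_state *ps = rdev->pm.requested_power_state;
		struct radeon_pm_clock_info *ci = rdev->pm.requested_clock_mode;

		if (!ps || !ci)
			return;
		/* program engine/memory clocks via the per-asic callbacks */
		radeon_set_engine_clock(rdev, ci->sclk);
		if (rdev->asic->set_memory_clock)
			radeon_set_memory_clock(rdev, ci->mclk);
		rdev->pm.current_power_state = ps;
		rdev->pm.current_clock_mode = ci;
	}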
567 717
568 718
@@ -585,8 +735,6 @@ int radeon_debugfs_add_files(struct radeon_device *rdev,
585 struct drm_info_list *files, 735 struct drm_info_list *files,
586 unsigned nfiles); 736 unsigned nfiles);
587int radeon_debugfs_fence_init(struct radeon_device *rdev); 737int radeon_debugfs_fence_init(struct radeon_device *rdev);
588int r100_debugfs_rbbm_init(struct radeon_device *rdev);
589int r100_debugfs_cp_init(struct radeon_device *rdev);
590 738
591 739
592/* 740/*
@@ -632,13 +780,25 @@ struct radeon_asic {
632 void (*set_engine_clock)(struct radeon_device *rdev, uint32_t eng_clock); 780 void (*set_engine_clock)(struct radeon_device *rdev, uint32_t eng_clock);
633 uint32_t (*get_memory_clock)(struct radeon_device *rdev); 781 uint32_t (*get_memory_clock)(struct radeon_device *rdev);
634 void (*set_memory_clock)(struct radeon_device *rdev, uint32_t mem_clock); 782 void (*set_memory_clock)(struct radeon_device *rdev, uint32_t mem_clock);
783 int (*get_pcie_lanes)(struct radeon_device *rdev);
635 void (*set_pcie_lanes)(struct radeon_device *rdev, int lanes); 784 void (*set_pcie_lanes)(struct radeon_device *rdev, int lanes);
636 void (*set_clock_gating)(struct radeon_device *rdev, int enable); 785 void (*set_clock_gating)(struct radeon_device *rdev, int enable);
637 int (*set_surface_reg)(struct radeon_device *rdev, int reg, 786 int (*set_surface_reg)(struct radeon_device *rdev, int reg,
638 uint32_t tiling_flags, uint32_t pitch, 787 uint32_t tiling_flags, uint32_t pitch,
639 uint32_t offset, uint32_t obj_size); 788 uint32_t offset, uint32_t obj_size);
640 int (*clear_surface_reg)(struct radeon_device *rdev, int reg); 789 void (*clear_surface_reg)(struct radeon_device *rdev, int reg);
641 void (*bandwidth_update)(struct radeon_device *rdev); 790 void (*bandwidth_update)(struct radeon_device *rdev);
791 void (*hpd_init)(struct radeon_device *rdev);
792 void (*hpd_fini)(struct radeon_device *rdev);
793 bool (*hpd_sense)(struct radeon_device *rdev, enum radeon_hpd_id hpd);
794 void (*hpd_set_polarity)(struct radeon_device *rdev, enum radeon_hpd_id hpd);
 795	/* ioctl hw specific callback. Some hw might want to perform a special
 796	 * operation on a specific ioctl. For instance, on wait idle some hw
 797	 * might want to perform an HDP flush through MMIO, as it seems that
 798	 * some R6XX/R7XX hw doesn't take HDP flush into account if programmed
 799	 * through the ring.
800 */
801 void (*ioctl_wait_idle)(struct radeon_device *rdev, struct radeon_bo *bo);
642}; 802};
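A hedged sketch of how a caller honours the new optional ioctl_wait_idle hook; only the NULL-checked callback invocation is the point, and radeon_bo_wait is assumed here as the fence-wait helper:

	/* after waiting on the BO, give the asic a chance to do the MMIO HDP
	 * flush that a ring-programmed flush may have missed on R6XX/R7XX */
	r = radeon_bo_wait(robj, NULL, false);
	if (!r && rdev->asic->ioctl_wait_idle)
		rdev->asic->ioctl_wait_idle(rdev, robj);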
643 803
644/* 804/*
@@ -647,11 +807,14 @@ struct radeon_asic {
647struct r100_asic { 807struct r100_asic {
648 const unsigned *reg_safe_bm; 808 const unsigned *reg_safe_bm;
649 unsigned reg_safe_bm_size; 809 unsigned reg_safe_bm_size;
810 u32 hdp_cntl;
650}; 811};
651 812
652struct r300_asic { 813struct r300_asic {
653 const unsigned *reg_safe_bm; 814 const unsigned *reg_safe_bm;
654 unsigned reg_safe_bm_size; 815 unsigned reg_safe_bm_size;
816 u32 resync_scratch;
817 u32 hdp_cntl;
655}; 818};
656 819
657struct r600_asic { 820struct r600_asic {
@@ -668,6 +831,9 @@ struct r600_asic {
668 unsigned sx_max_export_pos_size; 831 unsigned sx_max_export_pos_size;
669 unsigned sx_max_export_smx_size; 832 unsigned sx_max_export_smx_size;
670 unsigned sq_num_cf_insts; 833 unsigned sq_num_cf_insts;
834 unsigned tiling_nbanks;
835 unsigned tiling_npipes;
836 unsigned tiling_group_size;
671}; 837};
672 838
673struct rv770_asic { 839struct rv770_asic {
@@ -688,6 +854,9 @@ struct rv770_asic {
688 unsigned sc_prim_fifo_size; 854 unsigned sc_prim_fifo_size;
689 unsigned sc_hiz_tile_fifo_size; 855 unsigned sc_hiz_tile_fifo_size;
690 unsigned sc_earlyz_tile_fifo_fize; 856 unsigned sc_earlyz_tile_fifo_fize;
857 unsigned tiling_nbanks;
858 unsigned tiling_npipes;
859 unsigned tiling_group_size;
691}; 860};
692 861
693union radeon_asic_config { 862union radeon_asic_config {
@@ -697,6 +866,12 @@ union radeon_asic_config {
697 struct rv770_asic rv770; 866 struct rv770_asic rv770;
698}; 867};
699 868
869/*
 870 * asic initialization from radeon_asic.c
871 */
872void radeon_agp_disable(struct radeon_device *rdev);
873int radeon_asic_init(struct radeon_device *rdev);
874
700 875
701/* 876/*
702 * IOCTL. 877 * IOCTL.
@@ -751,9 +926,9 @@ struct radeon_device {
751 uint8_t *bios; 926 uint8_t *bios;
752 bool is_atom_bios; 927 bool is_atom_bios;
753 uint16_t bios_header_start; 928 uint16_t bios_header_start;
754 struct radeon_object *stollen_vga_memory; 929 struct radeon_bo *stollen_vga_memory;
755 struct fb_info *fbdev_info; 930 struct fb_info *fbdev_info;
756 struct radeon_object *fbdev_robj; 931 struct radeon_bo *fbdev_rbo;
757 struct radeon_framebuffer *fbdev_rfb; 932 struct radeon_framebuffer *fbdev_rfb;
758 /* Register mmio */ 933 /* Register mmio */
759 resource_size_t rmmio_base; 934 resource_size_t rmmio_base;
@@ -791,8 +966,24 @@ struct radeon_device {
791 struct radeon_surface_reg surface_regs[RADEON_GEM_MAX_SURFACES]; 966 struct radeon_surface_reg surface_regs[RADEON_GEM_MAX_SURFACES];
792 const struct firmware *me_fw; /* all family ME firmware */ 967 const struct firmware *me_fw; /* all family ME firmware */
793 const struct firmware *pfp_fw; /* r6/700 PFP firmware */ 968 const struct firmware *pfp_fw; /* r6/700 PFP firmware */
969 const struct firmware *rlc_fw; /* r6/700 RLC firmware */
794 struct r600_blit r600_blit; 970 struct r600_blit r600_blit;
795 int msi_enabled; /* msi enabled */ 971 int msi_enabled; /* msi enabled */
972 struct r600_ih ih; /* r6/700 interrupt ring */
973 struct workqueue_struct *wq;
974 struct work_struct hotplug_work;
975 int num_crtc; /* number of crtcs */
976 struct mutex dc_hw_i2c_mutex; /* display controller hw i2c mutex */
977
978 /* audio stuff */
979 struct timer_list audio_timer;
980 int audio_channels;
981 int audio_rate;
982 int audio_bits_per_sample;
983 uint8_t audio_status_bits;
984 uint8_t audio_category_code;
985
986 bool powered_down;
796}; 987};
797 988
798int radeon_device_init(struct radeon_device *rdev, 989int radeon_device_init(struct radeon_device *rdev,
@@ -811,7 +1002,7 @@ void r600_kms_blit_copy(struct radeon_device *rdev,
811 1002
812static inline uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg) 1003static inline uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg)
813{ 1004{
814 if (reg < 0x10000) 1005 if (reg < rdev->rmmio_size)
815 return readl(((void __iomem *)rdev->rmmio) + reg); 1006 return readl(((void __iomem *)rdev->rmmio) + reg);
816 else { 1007 else {
817 writel(reg, ((void __iomem *)rdev->rmmio) + RADEON_MM_INDEX); 1008 writel(reg, ((void __iomem *)rdev->rmmio) + RADEON_MM_INDEX);
@@ -821,7 +1012,7 @@ static inline uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg)
821 1012
822static inline void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) 1013static inline void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
823{ 1014{
824 if (reg < 0x10000) 1015 if (reg < rdev->rmmio_size)
825 writel(v, ((void __iomem *)rdev->rmmio) + reg); 1016 writel(v, ((void __iomem *)rdev->rmmio) + reg);
826 else { 1017 else {
827 writel(reg, ((void __iomem *)rdev->rmmio) + RADEON_MM_INDEX); 1018 writel(reg, ((void __iomem *)rdev->rmmio) + RADEON_MM_INDEX);
@@ -829,6 +1020,10 @@ static inline void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32
829 } 1020 }
830} 1021}
831 1022
1023/*
1024 * Cast helper
1025 */
1026#define to_radeon_fence(p) ((struct radeon_fence *)(p))
832 1027
833/* 1028/*
834 * Registers read & write functions. 1029 * Registers read & write functions.
@@ -846,6 +1041,8 @@ static inline void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32
846#define WREG32_MC(reg, v) rdev->mc_wreg(rdev, (reg), (v)) 1041#define WREG32_MC(reg, v) rdev->mc_wreg(rdev, (reg), (v))
847#define RREG32_PCIE(reg) rv370_pcie_rreg(rdev, (reg)) 1042#define RREG32_PCIE(reg) rv370_pcie_rreg(rdev, (reg))
848#define WREG32_PCIE(reg, v) rv370_pcie_wreg(rdev, (reg), (v)) 1043#define WREG32_PCIE(reg, v) rv370_pcie_wreg(rdev, (reg), (v))
1044#define RREG32_PCIE_P(reg) rdev->pciep_rreg(rdev, (reg))
1045#define WREG32_PCIE_P(reg, v) rdev->pciep_wreg(rdev, (reg), (v))
849#define WREG32_P(reg, val, mask) \ 1046#define WREG32_P(reg, val, mask) \
850 do { \ 1047 do { \
851 uint32_t tmp_ = RREG32(reg); \ 1048 uint32_t tmp_ = RREG32(reg); \
@@ -907,7 +1104,7 @@ void r100_pll_errata_after_index(struct radeon_device *rdev);
907#define ASIC_IS_AVIVO(rdev) ((rdev->family >= CHIP_RS600)) 1104#define ASIC_IS_AVIVO(rdev) ((rdev->family >= CHIP_RS600))
908#define ASIC_IS_DCE3(rdev) ((rdev->family >= CHIP_RV620)) 1105#define ASIC_IS_DCE3(rdev) ((rdev->family >= CHIP_RV620))
909#define ASIC_IS_DCE32(rdev) ((rdev->family >= CHIP_RV730)) 1106#define ASIC_IS_DCE32(rdev) ((rdev->family >= CHIP_RV730))
910 1107#define ASIC_IS_DCE4(rdev) ((rdev->family >= CHIP_CEDAR))
911 1108
912/* 1109/*
913 * BIOS helpers. 1110 * BIOS helpers.
@@ -965,18 +1162,29 @@ static inline void radeon_ring_write(struct radeon_device *rdev, uint32_t v)
965#define radeon_get_engine_clock(rdev) (rdev)->asic->get_engine_clock((rdev)) 1162#define radeon_get_engine_clock(rdev) (rdev)->asic->get_engine_clock((rdev))
966#define radeon_set_engine_clock(rdev, e) (rdev)->asic->set_engine_clock((rdev), (e)) 1163#define radeon_set_engine_clock(rdev, e) (rdev)->asic->set_engine_clock((rdev), (e))
967#define radeon_get_memory_clock(rdev) (rdev)->asic->get_memory_clock((rdev)) 1164#define radeon_get_memory_clock(rdev) (rdev)->asic->get_memory_clock((rdev))
968#define radeon_set_memory_clock(rdev, e) (rdev)->asic->set_engine_clock((rdev), (e)) 1165#define radeon_set_memory_clock(rdev, e) (rdev)->asic->set_memory_clock((rdev), (e))
1166#define radeon_get_pcie_lanes(rdev) (rdev)->asic->get_pcie_lanes((rdev))
969#define radeon_set_pcie_lanes(rdev, l) (rdev)->asic->set_pcie_lanes((rdev), (l)) 1167#define radeon_set_pcie_lanes(rdev, l) (rdev)->asic->set_pcie_lanes((rdev), (l))
970#define radeon_set_clock_gating(rdev, e) (rdev)->asic->set_clock_gating((rdev), (e)) 1168#define radeon_set_clock_gating(rdev, e) (rdev)->asic->set_clock_gating((rdev), (e))
971#define radeon_set_surface_reg(rdev, r, f, p, o, s) ((rdev)->asic->set_surface_reg((rdev), (r), (f), (p), (o), (s))) 1169#define radeon_set_surface_reg(rdev, r, f, p, o, s) ((rdev)->asic->set_surface_reg((rdev), (r), (f), (p), (o), (s)))
972#define radeon_clear_surface_reg(rdev, r) ((rdev)->asic->clear_surface_reg((rdev), (r))) 1170#define radeon_clear_surface_reg(rdev, r) ((rdev)->asic->clear_surface_reg((rdev), (r)))
973#define radeon_bandwidth_update(rdev) (rdev)->asic->bandwidth_update((rdev)) 1171#define radeon_bandwidth_update(rdev) (rdev)->asic->bandwidth_update((rdev))
1172#define radeon_hpd_init(rdev) (rdev)->asic->hpd_init((rdev))
1173#define radeon_hpd_fini(rdev) (rdev)->asic->hpd_fini((rdev))
1174#define radeon_hpd_sense(rdev, hpd) (rdev)->asic->hpd_sense((rdev), (hpd))
1175#define radeon_hpd_set_polarity(rdev, hpd) (rdev)->asic->hpd_set_polarity((rdev), (hpd))
974 1176
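The hpd_* wrappers pair with the new hotplug_work/wq members added to struct radeon_device above; a rough sketch of the glue between them, assuming the DRM hotplug helper, with the body simplified rather than lifted from the patch:

	static void radeon_hotplug_work_func(struct work_struct *work)
	{
		struct radeon_device *rdev = container_of(work, struct radeon_device,
							  hotplug_work);

		/* re-arm the hpd pins, then let DRM re-probe the connectors */
		radeon_hpd_init(rdev);
		drm_helper_hpd_irq_event(rdev->ddev);
	}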
975/* Common functions */ 1177/* Common functions */
1178/* AGP */
1179extern void radeon_agp_disable(struct radeon_device *rdev);
976extern int radeon_gart_table_vram_pin(struct radeon_device *rdev); 1180extern int radeon_gart_table_vram_pin(struct radeon_device *rdev);
1181extern void radeon_gart_restore(struct radeon_device *rdev);
977extern int radeon_modeset_init(struct radeon_device *rdev); 1182extern int radeon_modeset_init(struct radeon_device *rdev);
978extern void radeon_modeset_fini(struct radeon_device *rdev); 1183extern void radeon_modeset_fini(struct radeon_device *rdev);
979extern bool radeon_card_posted(struct radeon_device *rdev); 1184extern bool radeon_card_posted(struct radeon_device *rdev);
1185extern void radeon_update_bandwidth_info(struct radeon_device *rdev);
1186extern void radeon_update_display_priority(struct radeon_device *rdev);
1187extern bool radeon_boot_test_post_card(struct radeon_device *rdev);
980extern int radeon_clocks_init(struct radeon_device *rdev); 1188extern int radeon_clocks_init(struct radeon_device *rdev);
981extern void radeon_clocks_fini(struct radeon_device *rdev); 1189extern void radeon_clocks_fini(struct radeon_device *rdev);
982extern void radeon_scratch_init(struct radeon_device *rdev); 1190extern void radeon_scratch_init(struct radeon_device *rdev);
@@ -984,51 +1192,14 @@ extern void radeon_surface_init(struct radeon_device *rdev);
984extern int radeon_cs_parser_init(struct radeon_cs_parser *p, void *data); 1192extern int radeon_cs_parser_init(struct radeon_cs_parser *p, void *data);
985extern void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); 1193extern void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
986extern void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); 1194extern void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
1195extern void radeon_ttm_placement_from_domain(struct radeon_bo *rbo, u32 domain);
1196extern bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo);
1197extern void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base);
1198extern void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
1199extern int radeon_resume_kms(struct drm_device *dev);
1200extern int radeon_suspend_kms(struct drm_device *dev, pm_message_t state);
987 1201
988/* r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 */ 1202/* r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 */
989struct r100_mc_save {
990 u32 GENMO_WT;
991 u32 CRTC_EXT_CNTL;
992 u32 CRTC_GEN_CNTL;
993 u32 CRTC2_GEN_CNTL;
994 u32 CUR_OFFSET;
995 u32 CUR2_OFFSET;
996};
997extern void r100_cp_disable(struct radeon_device *rdev);
998extern int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
999extern void r100_cp_fini(struct radeon_device *rdev);
1000extern void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
1001extern int r100_pci_gart_init(struct radeon_device *rdev);
1002extern void r100_pci_gart_fini(struct radeon_device *rdev);
1003extern int r100_pci_gart_enable(struct radeon_device *rdev);
1004extern void r100_pci_gart_disable(struct radeon_device *rdev);
1005extern int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
1006extern int r100_debugfs_mc_info_init(struct radeon_device *rdev);
1007extern int r100_gui_wait_for_idle(struct radeon_device *rdev);
1008extern void r100_ib_fini(struct radeon_device *rdev);
1009extern int r100_ib_init(struct radeon_device *rdev);
1010extern void r100_irq_disable(struct radeon_device *rdev);
1011extern int r100_irq_set(struct radeon_device *rdev);
1012extern void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
1013extern void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
1014extern void r100_vram_init_sizes(struct radeon_device *rdev);
1015extern void r100_wb_disable(struct radeon_device *rdev);
1016extern void r100_wb_fini(struct radeon_device *rdev);
1017extern int r100_wb_init(struct radeon_device *rdev);
1018extern void r100_hdp_reset(struct radeon_device *rdev);
1019extern int r100_rb2d_reset(struct radeon_device *rdev);
1020extern int r100_cp_reset(struct radeon_device *rdev);
1021extern void r100_vga_render_disable(struct radeon_device *rdev);
1022extern int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
1023 struct radeon_cs_packet *pkt,
1024 struct radeon_object *robj);
1025extern int r100_cs_parse_packet0(struct radeon_cs_parser *p,
1026 struct radeon_cs_packet *pkt,
1027 const unsigned *auth, unsigned n,
1028 radeon_packet0_check_t check);
1029extern int r100_cs_packet_parse(struct radeon_cs_parser *p,
1030 struct radeon_cs_packet *pkt,
1031 unsigned idx);
1032 1203
1033/* rv200,rv250,rv280 */ 1204/* rv200,rv250,rv280 */
1034extern void r200_set_safe_registers(struct radeon_device *rdev); 1205extern void r200_set_safe_registers(struct radeon_device *rdev);
@@ -1036,7 +1207,7 @@ extern void r200_set_safe_registers(struct radeon_device *rdev);
1036/* r300,r350,rv350,rv370,rv380 */ 1207/* r300,r350,rv350,rv370,rv380 */
1037extern void r300_set_reg_safe(struct radeon_device *rdev); 1208extern void r300_set_reg_safe(struct radeon_device *rdev);
1038extern void r300_mc_program(struct radeon_device *rdev); 1209extern void r300_mc_program(struct radeon_device *rdev);
1039extern void r300_vram_info(struct radeon_device *rdev); 1210extern void r300_mc_init(struct radeon_device *rdev);
1040extern void r300_clock_startup(struct radeon_device *rdev); 1211extern void r300_clock_startup(struct radeon_device *rdev);
1041extern int r300_mc_wait_for_idle(struct radeon_device *rdev); 1212extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
1042extern int rv370_pcie_gart_init(struct radeon_device *rdev); 1213extern int rv370_pcie_gart_init(struct radeon_device *rdev);
@@ -1045,7 +1216,6 @@ extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
1045extern void rv370_pcie_gart_disable(struct radeon_device *rdev); 1216extern void rv370_pcie_gart_disable(struct radeon_device *rdev);
1046 1217
1047/* r420,r423,rv410 */ 1218/* r420,r423,rv410 */
1048extern int r420_mc_init(struct radeon_device *rdev);
1049extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg); 1219extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
1050extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v); 1220extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
1051extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev); 1221extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
@@ -1087,12 +1257,13 @@ extern void rs690_line_buffer_adjust(struct radeon_device *rdev,
1087 struct drm_display_mode *mode2); 1257 struct drm_display_mode *mode2);
1088 1258
1089/* r600, rv610, rv630, rv620, rv635, rv670, rs780, rs880 */ 1259/* r600, rv610, rv630, rv620, rv635, rv670, rs780, rs880 */
1260extern void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
1090extern bool r600_card_posted(struct radeon_device *rdev); 1261extern bool r600_card_posted(struct radeon_device *rdev);
1091extern void r600_cp_stop(struct radeon_device *rdev); 1262extern void r600_cp_stop(struct radeon_device *rdev);
1092extern void r600_ring_init(struct radeon_device *rdev, unsigned ring_size); 1263extern void r600_ring_init(struct radeon_device *rdev, unsigned ring_size);
1093extern int r600_cp_resume(struct radeon_device *rdev); 1264extern int r600_cp_resume(struct radeon_device *rdev);
1265extern void r600_cp_fini(struct radeon_device *rdev);
1094extern int r600_count_pipe_bits(uint32_t val); 1266extern int r600_count_pipe_bits(uint32_t val);
1095extern int r600_gart_clear_page(struct radeon_device *rdev, int i);
1096extern int r600_mc_wait_for_idle(struct radeon_device *rdev); 1267extern int r600_mc_wait_for_idle(struct radeon_device *rdev);
1097extern int r600_pcie_gart_init(struct radeon_device *rdev); 1268extern int r600_pcie_gart_init(struct radeon_device *rdev);
1098extern void r600_pcie_gart_tlb_flush(struct radeon_device *rdev); 1269extern void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
@@ -1104,7 +1275,39 @@ extern void r600_wb_disable(struct radeon_device *rdev);
1104extern void r600_scratch_init(struct radeon_device *rdev); 1275extern void r600_scratch_init(struct radeon_device *rdev);
1105extern int r600_blit_init(struct radeon_device *rdev); 1276extern int r600_blit_init(struct radeon_device *rdev);
1106extern void r600_blit_fini(struct radeon_device *rdev); 1277extern void r600_blit_fini(struct radeon_device *rdev);
1107extern int r600_cp_init_microcode(struct radeon_device *rdev); 1278extern int r600_init_microcode(struct radeon_device *rdev);
1108extern int r600_gpu_reset(struct radeon_device *rdev); 1279extern int r600_gpu_reset(struct radeon_device *rdev);
1280/* r600 irq */
1281extern int r600_irq_init(struct radeon_device *rdev);
1282extern void r600_irq_fini(struct radeon_device *rdev);
1283extern void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
1284extern int r600_irq_set(struct radeon_device *rdev);
1285extern void r600_irq_suspend(struct radeon_device *rdev);
1286/* r600 audio */
1287extern int r600_audio_init(struct radeon_device *rdev);
1288extern int r600_audio_tmds_index(struct drm_encoder *encoder);
1289extern void r600_audio_set_clock(struct drm_encoder *encoder, int clock);
1290extern void r600_audio_fini(struct radeon_device *rdev);
1291extern void r600_hdmi_init(struct drm_encoder *encoder);
1292extern void r600_hdmi_enable(struct drm_encoder *encoder);
1293extern void r600_hdmi_disable(struct drm_encoder *encoder);
1294extern void r600_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mode);
1295extern int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
1296extern void r600_hdmi_update_audio_settings(struct drm_encoder *encoder,
1297 int channels,
1298 int rate,
1299 int bps,
1300 uint8_t status_bits,
1301 uint8_t category_code);
1302
1303/* evergreen */
1304struct evergreen_mc_save {
1305 u32 vga_control[6];
1306 u32 vga_render_control;
1307 u32 vga_hdp_control;
1308 u32 crtc_control[6];
1309};
1310
1311#include "radeon_object.h"
1109 1312
1110#endif 1313#endif
diff --git a/drivers/gpu/drm/radeon/radeon_agp.c b/drivers/gpu/drm/radeon/radeon_agp.c
index 54bf49a6d676..28e473f1f56f 100644
--- a/drivers/gpu/drm/radeon/radeon_agp.c
+++ b/drivers/gpu/drm/radeon/radeon_agp.c
@@ -134,19 +134,27 @@ int radeon_agp_init(struct radeon_device *rdev)
134 int ret; 134 int ret;
135 135
136 /* Acquire AGP. */ 136 /* Acquire AGP. */
137 if (!rdev->ddev->agp->acquired) { 137 ret = drm_agp_acquire(rdev->ddev);
138 ret = drm_agp_acquire(rdev->ddev); 138 if (ret) {
139 if (ret) { 139 DRM_ERROR("Unable to acquire AGP: %d\n", ret);
140 DRM_ERROR("Unable to acquire AGP: %d\n", ret); 140 return ret;
141 return ret;
142 }
143 } 141 }
144 142
145 ret = drm_agp_info(rdev->ddev, &info); 143 ret = drm_agp_info(rdev->ddev, &info);
146 if (ret) { 144 if (ret) {
145 drm_agp_release(rdev->ddev);
147 DRM_ERROR("Unable to get AGP info: %d\n", ret); 146 DRM_ERROR("Unable to get AGP info: %d\n", ret);
148 return ret; 147 return ret;
149 } 148 }
149
150 if (rdev->ddev->agp->agp_info.aper_size < 32) {
151 drm_agp_release(rdev->ddev);
152 dev_warn(rdev->dev, "AGP aperture too small (%zuM) "
153 "need at least 32M, disabling AGP\n",
154 rdev->ddev->agp->agp_info.aper_size);
155 return -EINVAL;
156 }
157
150 mode.mode = info.mode; 158 mode.mode = info.mode;
151 agp_status = (RREG32(RADEON_AGP_STATUS) | RADEON_AGPv3_MODE) & mode.mode; 159 agp_status = (RREG32(RADEON_AGP_STATUS) | RADEON_AGPv3_MODE) & mode.mode;
152 is_v3 = !!(agp_status & RADEON_AGPv3_MODE); 160 is_v3 = !!(agp_status & RADEON_AGPv3_MODE);
@@ -221,11 +229,16 @@ int radeon_agp_init(struct radeon_device *rdev)
221 ret = drm_agp_enable(rdev->ddev, mode); 229 ret = drm_agp_enable(rdev->ddev, mode);
222 if (ret) { 230 if (ret) {
223 DRM_ERROR("Unable to enable AGP (mode = 0x%lx)\n", mode.mode); 231 DRM_ERROR("Unable to enable AGP (mode = 0x%lx)\n", mode.mode);
232 drm_agp_release(rdev->ddev);
224 return ret; 233 return ret;
225 } 234 }
226 235
227 rdev->mc.agp_base = rdev->ddev->agp->agp_info.aper_base; 236 rdev->mc.agp_base = rdev->ddev->agp->agp_info.aper_base;
228 rdev->mc.gtt_size = rdev->ddev->agp->agp_info.aper_size << 20; 237 rdev->mc.gtt_size = rdev->ddev->agp->agp_info.aper_size << 20;
238 rdev->mc.gtt_start = rdev->mc.agp_base;
239 rdev->mc.gtt_end = rdev->mc.gtt_start + rdev->mc.gtt_size - 1;
240 dev_info(rdev->dev, "GTT: %lluM 0x%08llX - 0x%08llX\n",
241 rdev->mc.gtt_size >> 20, rdev->mc.gtt_start, rdev->mc.gtt_end);
229 242
230 /* workaround some hw issues */ 243 /* workaround some hw issues */
231 if (rdev->family < CHIP_R200) { 244 if (rdev->family < CHIP_R200) {
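As a purely hypothetical example of the new GTT bookkeeping: a 256 MB aperture based at 0xD0000000 gives gtt_size = 256 << 20 = 0x10000000, gtt_start = 0xD0000000 and gtt_end = 0xD0000000 + 0x10000000 - 1 = 0xDFFFFFFF, which is exactly the range the new dev_info line reports.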
@@ -252,10 +265,8 @@ void radeon_agp_resume(struct radeon_device *rdev)
252void radeon_agp_fini(struct radeon_device *rdev) 265void radeon_agp_fini(struct radeon_device *rdev)
253{ 266{
254#if __OS_HAS_AGP 267#if __OS_HAS_AGP
255 if (rdev->flags & RADEON_IS_AGP) { 268 if (rdev->ddev->agp && rdev->ddev->agp->acquired) {
256 if (rdev->ddev->agp && rdev->ddev->agp->acquired) { 269 drm_agp_release(rdev->ddev);
257 drm_agp_release(rdev->ddev);
258 }
259 } 270 }
260#endif 271#endif
261} 272}
diff --git a/drivers/gpu/drm/radeon/radeon_asic.c b/drivers/gpu/drm/radeon/radeon_asic.c
new file mode 100644
index 000000000000..a4b4bc9fa322
--- /dev/null
+++ b/drivers/gpu/drm/radeon/radeon_asic.c
@@ -0,0 +1,772 @@
1/*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Jerome Glisse.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Dave Airlie
25 * Alex Deucher
26 * Jerome Glisse
27 */
28
29#include <linux/console.h>
30#include <drm/drmP.h>
31#include <drm/drm_crtc_helper.h>
32#include <drm/radeon_drm.h>
33#include <linux/vgaarb.h>
34#include <linux/vga_switcheroo.h>
35#include "radeon_reg.h"
36#include "radeon.h"
37#include "radeon_asic.h"
38#include "atom.h"
39
40/*
 41 * Register accessor functions.
42 */
43static uint32_t radeon_invalid_rreg(struct radeon_device *rdev, uint32_t reg)
44{
45 DRM_ERROR("Invalid callback to read register 0x%04X\n", reg);
46 BUG_ON(1);
47 return 0;
48}
49
50static void radeon_invalid_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
51{
52 DRM_ERROR("Invalid callback to write register 0x%04X with 0x%08X\n",
53 reg, v);
54 BUG_ON(1);
55}
56
57static void radeon_register_accessor_init(struct radeon_device *rdev)
58{
59 rdev->mc_rreg = &radeon_invalid_rreg;
60 rdev->mc_wreg = &radeon_invalid_wreg;
61 rdev->pll_rreg = &radeon_invalid_rreg;
62 rdev->pll_wreg = &radeon_invalid_wreg;
63 rdev->pciep_rreg = &radeon_invalid_rreg;
64 rdev->pciep_wreg = &radeon_invalid_wreg;
65
 66	/* Don't change order as we are overriding accessors. */
67 if (rdev->family < CHIP_RV515) {
68 rdev->pcie_reg_mask = 0xff;
69 } else {
70 rdev->pcie_reg_mask = 0x7ff;
71 }
72 /* FIXME: not sure here */
73 if (rdev->family <= CHIP_R580) {
74 rdev->pll_rreg = &r100_pll_rreg;
75 rdev->pll_wreg = &r100_pll_wreg;
76 }
77 if (rdev->family >= CHIP_R420) {
78 rdev->mc_rreg = &r420_mc_rreg;
79 rdev->mc_wreg = &r420_mc_wreg;
80 }
81 if (rdev->family >= CHIP_RV515) {
82 rdev->mc_rreg = &rv515_mc_rreg;
83 rdev->mc_wreg = &rv515_mc_wreg;
84 }
85 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480) {
86 rdev->mc_rreg = &rs400_mc_rreg;
87 rdev->mc_wreg = &rs400_mc_wreg;
88 }
89 if (rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) {
90 rdev->mc_rreg = &rs690_mc_rreg;
91 rdev->mc_wreg = &rs690_mc_wreg;
92 }
93 if (rdev->family == CHIP_RS600) {
94 rdev->mc_rreg = &rs600_mc_rreg;
95 rdev->mc_wreg = &rs600_mc_wreg;
96 }
97 if ((rdev->family >= CHIP_R600) && (rdev->family <= CHIP_RV740)) {
98 rdev->pciep_rreg = &r600_pciep_rreg;
99 rdev->pciep_wreg = &r600_pciep_wreg;
100 }
101}
102
103
104/* helper to disable agp */
105void radeon_agp_disable(struct radeon_device *rdev)
106{
107 rdev->flags &= ~RADEON_IS_AGP;
108 if (rdev->family >= CHIP_R600) {
109 DRM_INFO("Forcing AGP to PCIE mode\n");
110 rdev->flags |= RADEON_IS_PCIE;
111 } else if (rdev->family >= CHIP_RV515 ||
112 rdev->family == CHIP_RV380 ||
113 rdev->family == CHIP_RV410 ||
114 rdev->family == CHIP_R423) {
115 DRM_INFO("Forcing AGP to PCIE mode\n");
116 rdev->flags |= RADEON_IS_PCIE;
117 rdev->asic->gart_tlb_flush = &rv370_pcie_gart_tlb_flush;
118 rdev->asic->gart_set_page = &rv370_pcie_gart_set_page;
119 } else {
120 DRM_INFO("Forcing AGP to PCI mode\n");
121 rdev->flags |= RADEON_IS_PCI;
122 rdev->asic->gart_tlb_flush = &r100_pci_gart_tlb_flush;
123 rdev->asic->gart_set_page = &r100_pci_gart_set_page;
124 }
125 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
126}
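Callers are expected to use this helper as a fallback when AGP setup fails; a hedged example of the calling pattern, with error handling trimmed:

	r = radeon_agp_init(rdev);
	if (r) {
		/* fall back to the PCI/PCIE GART callbacks installed above */
		radeon_agp_disable(rdev);
	}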
127
128/*
129 * ASIC
130 */
131static struct radeon_asic r100_asic = {
132 .init = &r100_init,
133 .fini = &r100_fini,
134 .suspend = &r100_suspend,
135 .resume = &r100_resume,
136 .vga_set_state = &r100_vga_set_state,
137 .gpu_reset = &r100_gpu_reset,
138 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
139 .gart_set_page = &r100_pci_gart_set_page,
140 .cp_commit = &r100_cp_commit,
141 .ring_start = &r100_ring_start,
142 .ring_test = &r100_ring_test,
143 .ring_ib_execute = &r100_ring_ib_execute,
144 .irq_set = &r100_irq_set,
145 .irq_process = &r100_irq_process,
146 .get_vblank_counter = &r100_get_vblank_counter,
147 .fence_ring_emit = &r100_fence_ring_emit,
148 .cs_parse = &r100_cs_parse,
149 .copy_blit = &r100_copy_blit,
150 .copy_dma = NULL,
151 .copy = &r100_copy_blit,
152 .get_engine_clock = &radeon_legacy_get_engine_clock,
153 .set_engine_clock = &radeon_legacy_set_engine_clock,
154 .get_memory_clock = &radeon_legacy_get_memory_clock,
155 .set_memory_clock = NULL,
156 .get_pcie_lanes = NULL,
157 .set_pcie_lanes = NULL,
158 .set_clock_gating = &radeon_legacy_set_clock_gating,
159 .set_surface_reg = r100_set_surface_reg,
160 .clear_surface_reg = r100_clear_surface_reg,
161 .bandwidth_update = &r100_bandwidth_update,
162 .hpd_init = &r100_hpd_init,
163 .hpd_fini = &r100_hpd_fini,
164 .hpd_sense = &r100_hpd_sense,
165 .hpd_set_polarity = &r100_hpd_set_polarity,
166 .ioctl_wait_idle = NULL,
167};
168
169static struct radeon_asic r200_asic = {
170 .init = &r100_init,
171 .fini = &r100_fini,
172 .suspend = &r100_suspend,
173 .resume = &r100_resume,
174 .vga_set_state = &r100_vga_set_state,
175 .gpu_reset = &r100_gpu_reset,
176 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
177 .gart_set_page = &r100_pci_gart_set_page,
178 .cp_commit = &r100_cp_commit,
179 .ring_start = &r100_ring_start,
180 .ring_test = &r100_ring_test,
181 .ring_ib_execute = &r100_ring_ib_execute,
182 .irq_set = &r100_irq_set,
183 .irq_process = &r100_irq_process,
184 .get_vblank_counter = &r100_get_vblank_counter,
185 .fence_ring_emit = &r100_fence_ring_emit,
186 .cs_parse = &r100_cs_parse,
187 .copy_blit = &r100_copy_blit,
188 .copy_dma = &r200_copy_dma,
189 .copy = &r100_copy_blit,
190 .get_engine_clock = &radeon_legacy_get_engine_clock,
191 .set_engine_clock = &radeon_legacy_set_engine_clock,
192 .get_memory_clock = &radeon_legacy_get_memory_clock,
193 .set_memory_clock = NULL,
194 .set_pcie_lanes = NULL,
195 .set_clock_gating = &radeon_legacy_set_clock_gating,
196 .set_surface_reg = r100_set_surface_reg,
197 .clear_surface_reg = r100_clear_surface_reg,
198 .bandwidth_update = &r100_bandwidth_update,
199 .hpd_init = &r100_hpd_init,
200 .hpd_fini = &r100_hpd_fini,
201 .hpd_sense = &r100_hpd_sense,
202 .hpd_set_polarity = &r100_hpd_set_polarity,
203 .ioctl_wait_idle = NULL,
204};
205
206static struct radeon_asic r300_asic = {
207 .init = &r300_init,
208 .fini = &r300_fini,
209 .suspend = &r300_suspend,
210 .resume = &r300_resume,
211 .vga_set_state = &r100_vga_set_state,
212 .gpu_reset = &r300_gpu_reset,
213 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
214 .gart_set_page = &r100_pci_gart_set_page,
215 .cp_commit = &r100_cp_commit,
216 .ring_start = &r300_ring_start,
217 .ring_test = &r100_ring_test,
218 .ring_ib_execute = &r100_ring_ib_execute,
219 .irq_set = &r100_irq_set,
220 .irq_process = &r100_irq_process,
221 .get_vblank_counter = &r100_get_vblank_counter,
222 .fence_ring_emit = &r300_fence_ring_emit,
223 .cs_parse = &r300_cs_parse,
224 .copy_blit = &r100_copy_blit,
225 .copy_dma = &r200_copy_dma,
226 .copy = &r100_copy_blit,
227 .get_engine_clock = &radeon_legacy_get_engine_clock,
228 .set_engine_clock = &radeon_legacy_set_engine_clock,
229 .get_memory_clock = &radeon_legacy_get_memory_clock,
230 .set_memory_clock = NULL,
231 .get_pcie_lanes = &rv370_get_pcie_lanes,
232 .set_pcie_lanes = &rv370_set_pcie_lanes,
233 .set_clock_gating = &radeon_legacy_set_clock_gating,
234 .set_surface_reg = r100_set_surface_reg,
235 .clear_surface_reg = r100_clear_surface_reg,
236 .bandwidth_update = &r100_bandwidth_update,
237 .hpd_init = &r100_hpd_init,
238 .hpd_fini = &r100_hpd_fini,
239 .hpd_sense = &r100_hpd_sense,
240 .hpd_set_polarity = &r100_hpd_set_polarity,
241 .ioctl_wait_idle = NULL,
242};
243
244static struct radeon_asic r300_asic_pcie = {
245 .init = &r300_init,
246 .fini = &r300_fini,
247 .suspend = &r300_suspend,
248 .resume = &r300_resume,
249 .vga_set_state = &r100_vga_set_state,
250 .gpu_reset = &r300_gpu_reset,
251 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
252 .gart_set_page = &rv370_pcie_gart_set_page,
253 .cp_commit = &r100_cp_commit,
254 .ring_start = &r300_ring_start,
255 .ring_test = &r100_ring_test,
256 .ring_ib_execute = &r100_ring_ib_execute,
257 .irq_set = &r100_irq_set,
258 .irq_process = &r100_irq_process,
259 .get_vblank_counter = &r100_get_vblank_counter,
260 .fence_ring_emit = &r300_fence_ring_emit,
261 .cs_parse = &r300_cs_parse,
262 .copy_blit = &r100_copy_blit,
263 .copy_dma = &r200_copy_dma,
264 .copy = &r100_copy_blit,
265 .get_engine_clock = &radeon_legacy_get_engine_clock,
266 .set_engine_clock = &radeon_legacy_set_engine_clock,
267 .get_memory_clock = &radeon_legacy_get_memory_clock,
268 .set_memory_clock = NULL,
269 .set_pcie_lanes = &rv370_set_pcie_lanes,
270 .set_clock_gating = &radeon_legacy_set_clock_gating,
271 .set_surface_reg = r100_set_surface_reg,
272 .clear_surface_reg = r100_clear_surface_reg,
273 .bandwidth_update = &r100_bandwidth_update,
274 .hpd_init = &r100_hpd_init,
275 .hpd_fini = &r100_hpd_fini,
276 .hpd_sense = &r100_hpd_sense,
277 .hpd_set_polarity = &r100_hpd_set_polarity,
278 .ioctl_wait_idle = NULL,
279};
280
281static struct radeon_asic r420_asic = {
282 .init = &r420_init,
283 .fini = &r420_fini,
284 .suspend = &r420_suspend,
285 .resume = &r420_resume,
286 .vga_set_state = &r100_vga_set_state,
287 .gpu_reset = &r300_gpu_reset,
288 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
289 .gart_set_page = &rv370_pcie_gart_set_page,
290 .cp_commit = &r100_cp_commit,
291 .ring_start = &r300_ring_start,
292 .ring_test = &r100_ring_test,
293 .ring_ib_execute = &r100_ring_ib_execute,
294 .irq_set = &r100_irq_set,
295 .irq_process = &r100_irq_process,
296 .get_vblank_counter = &r100_get_vblank_counter,
297 .fence_ring_emit = &r300_fence_ring_emit,
298 .cs_parse = &r300_cs_parse,
299 .copy_blit = &r100_copy_blit,
300 .copy_dma = &r200_copy_dma,
301 .copy = &r100_copy_blit,
302 .get_engine_clock = &radeon_atom_get_engine_clock,
303 .set_engine_clock = &radeon_atom_set_engine_clock,
304 .get_memory_clock = &radeon_atom_get_memory_clock,
305 .set_memory_clock = &radeon_atom_set_memory_clock,
306 .get_pcie_lanes = &rv370_get_pcie_lanes,
307 .set_pcie_lanes = &rv370_set_pcie_lanes,
308 .set_clock_gating = &radeon_atom_set_clock_gating,
309 .set_surface_reg = r100_set_surface_reg,
310 .clear_surface_reg = r100_clear_surface_reg,
311 .bandwidth_update = &r100_bandwidth_update,
312 .hpd_init = &r100_hpd_init,
313 .hpd_fini = &r100_hpd_fini,
314 .hpd_sense = &r100_hpd_sense,
315 .hpd_set_polarity = &r100_hpd_set_polarity,
316 .ioctl_wait_idle = NULL,
317};
318
319static struct radeon_asic rs400_asic = {
320 .init = &rs400_init,
321 .fini = &rs400_fini,
322 .suspend = &rs400_suspend,
323 .resume = &rs400_resume,
324 .vga_set_state = &r100_vga_set_state,
325 .gpu_reset = &r300_gpu_reset,
326 .gart_tlb_flush = &rs400_gart_tlb_flush,
327 .gart_set_page = &rs400_gart_set_page,
328 .cp_commit = &r100_cp_commit,
329 .ring_start = &r300_ring_start,
330 .ring_test = &r100_ring_test,
331 .ring_ib_execute = &r100_ring_ib_execute,
332 .irq_set = &r100_irq_set,
333 .irq_process = &r100_irq_process,
334 .get_vblank_counter = &r100_get_vblank_counter,
335 .fence_ring_emit = &r300_fence_ring_emit,
336 .cs_parse = &r300_cs_parse,
337 .copy_blit = &r100_copy_blit,
338 .copy_dma = &r200_copy_dma,
339 .copy = &r100_copy_blit,
340 .get_engine_clock = &radeon_legacy_get_engine_clock,
341 .set_engine_clock = &radeon_legacy_set_engine_clock,
342 .get_memory_clock = &radeon_legacy_get_memory_clock,
343 .set_memory_clock = NULL,
344 .get_pcie_lanes = NULL,
345 .set_pcie_lanes = NULL,
346 .set_clock_gating = &radeon_legacy_set_clock_gating,
347 .set_surface_reg = r100_set_surface_reg,
348 .clear_surface_reg = r100_clear_surface_reg,
349 .bandwidth_update = &r100_bandwidth_update,
350 .hpd_init = &r100_hpd_init,
351 .hpd_fini = &r100_hpd_fini,
352 .hpd_sense = &r100_hpd_sense,
353 .hpd_set_polarity = &r100_hpd_set_polarity,
354 .ioctl_wait_idle = NULL,
355};
356
357static struct radeon_asic rs600_asic = {
358 .init = &rs600_init,
359 .fini = &rs600_fini,
360 .suspend = &rs600_suspend,
361 .resume = &rs600_resume,
362 .vga_set_state = &r100_vga_set_state,
363 .gpu_reset = &r300_gpu_reset,
364 .gart_tlb_flush = &rs600_gart_tlb_flush,
365 .gart_set_page = &rs600_gart_set_page,
366 .cp_commit = &r100_cp_commit,
367 .ring_start = &r300_ring_start,
368 .ring_test = &r100_ring_test,
369 .ring_ib_execute = &r100_ring_ib_execute,
370 .irq_set = &rs600_irq_set,
371 .irq_process = &rs600_irq_process,
372 .get_vblank_counter = &rs600_get_vblank_counter,
373 .fence_ring_emit = &r300_fence_ring_emit,
374 .cs_parse = &r300_cs_parse,
375 .copy_blit = &r100_copy_blit,
376 .copy_dma = &r200_copy_dma,
377 .copy = &r100_copy_blit,
378 .get_engine_clock = &radeon_atom_get_engine_clock,
379 .set_engine_clock = &radeon_atom_set_engine_clock,
380 .get_memory_clock = &radeon_atom_get_memory_clock,
381 .set_memory_clock = &radeon_atom_set_memory_clock,
382 .get_pcie_lanes = NULL,
383 .set_pcie_lanes = NULL,
384 .set_clock_gating = &radeon_atom_set_clock_gating,
385 .set_surface_reg = r100_set_surface_reg,
386 .clear_surface_reg = r100_clear_surface_reg,
387 .bandwidth_update = &rs600_bandwidth_update,
388 .hpd_init = &rs600_hpd_init,
389 .hpd_fini = &rs600_hpd_fini,
390 .hpd_sense = &rs600_hpd_sense,
391 .hpd_set_polarity = &rs600_hpd_set_polarity,
392 .ioctl_wait_idle = NULL,
393};
394
395static struct radeon_asic rs690_asic = {
396 .init = &rs690_init,
397 .fini = &rs690_fini,
398 .suspend = &rs690_suspend,
399 .resume = &rs690_resume,
400 .vga_set_state = &r100_vga_set_state,
401 .gpu_reset = &r300_gpu_reset,
402 .gart_tlb_flush = &rs400_gart_tlb_flush,
403 .gart_set_page = &rs400_gart_set_page,
404 .cp_commit = &r100_cp_commit,
405 .ring_start = &r300_ring_start,
406 .ring_test = &r100_ring_test,
407 .ring_ib_execute = &r100_ring_ib_execute,
408 .irq_set = &rs600_irq_set,
409 .irq_process = &rs600_irq_process,
410 .get_vblank_counter = &rs600_get_vblank_counter,
411 .fence_ring_emit = &r300_fence_ring_emit,
412 .cs_parse = &r300_cs_parse,
413 .copy_blit = &r100_copy_blit,
414 .copy_dma = &r200_copy_dma,
415 .copy = &r200_copy_dma,
416 .get_engine_clock = &radeon_atom_get_engine_clock,
417 .set_engine_clock = &radeon_atom_set_engine_clock,
418 .get_memory_clock = &radeon_atom_get_memory_clock,
419 .set_memory_clock = &radeon_atom_set_memory_clock,
420 .get_pcie_lanes = NULL,
421 .set_pcie_lanes = NULL,
422 .set_clock_gating = &radeon_atom_set_clock_gating,
423 .set_surface_reg = r100_set_surface_reg,
424 .clear_surface_reg = r100_clear_surface_reg,
425 .bandwidth_update = &rs690_bandwidth_update,
426 .hpd_init = &rs600_hpd_init,
427 .hpd_fini = &rs600_hpd_fini,
428 .hpd_sense = &rs600_hpd_sense,
429 .hpd_set_polarity = &rs600_hpd_set_polarity,
430 .ioctl_wait_idle = NULL,
431};
432
433static struct radeon_asic rv515_asic = {
434 .init = &rv515_init,
435 .fini = &rv515_fini,
436 .suspend = &rv515_suspend,
437 .resume = &rv515_resume,
438 .vga_set_state = &r100_vga_set_state,
439 .gpu_reset = &rv515_gpu_reset,
440 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
441 .gart_set_page = &rv370_pcie_gart_set_page,
442 .cp_commit = &r100_cp_commit,
443 .ring_start = &rv515_ring_start,
444 .ring_test = &r100_ring_test,
445 .ring_ib_execute = &r100_ring_ib_execute,
446 .irq_set = &rs600_irq_set,
447 .irq_process = &rs600_irq_process,
448 .get_vblank_counter = &rs600_get_vblank_counter,
449 .fence_ring_emit = &r300_fence_ring_emit,
450 .cs_parse = &r300_cs_parse,
451 .copy_blit = &r100_copy_blit,
452 .copy_dma = &r200_copy_dma,
453 .copy = &r100_copy_blit,
454 .get_engine_clock = &radeon_atom_get_engine_clock,
455 .set_engine_clock = &radeon_atom_set_engine_clock,
456 .get_memory_clock = &radeon_atom_get_memory_clock,
457 .set_memory_clock = &radeon_atom_set_memory_clock,
458 .get_pcie_lanes = &rv370_get_pcie_lanes,
459 .set_pcie_lanes = &rv370_set_pcie_lanes,
460 .set_clock_gating = &radeon_atom_set_clock_gating,
461 .set_surface_reg = r100_set_surface_reg,
462 .clear_surface_reg = r100_clear_surface_reg,
463 .bandwidth_update = &rv515_bandwidth_update,
464 .hpd_init = &rs600_hpd_init,
465 .hpd_fini = &rs600_hpd_fini,
466 .hpd_sense = &rs600_hpd_sense,
467 .hpd_set_polarity = &rs600_hpd_set_polarity,
468 .ioctl_wait_idle = NULL,
469};
470
471static struct radeon_asic r520_asic = {
472 .init = &r520_init,
473 .fini = &rv515_fini,
474 .suspend = &rv515_suspend,
475 .resume = &r520_resume,
476 .vga_set_state = &r100_vga_set_state,
477 .gpu_reset = &rv515_gpu_reset,
478 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
479 .gart_set_page = &rv370_pcie_gart_set_page,
480 .cp_commit = &r100_cp_commit,
481 .ring_start = &rv515_ring_start,
482 .ring_test = &r100_ring_test,
483 .ring_ib_execute = &r100_ring_ib_execute,
484 .irq_set = &rs600_irq_set,
485 .irq_process = &rs600_irq_process,
486 .get_vblank_counter = &rs600_get_vblank_counter,
487 .fence_ring_emit = &r300_fence_ring_emit,
488 .cs_parse = &r300_cs_parse,
489 .copy_blit = &r100_copy_blit,
490 .copy_dma = &r200_copy_dma,
491 .copy = &r100_copy_blit,
492 .get_engine_clock = &radeon_atom_get_engine_clock,
493 .set_engine_clock = &radeon_atom_set_engine_clock,
494 .get_memory_clock = &radeon_atom_get_memory_clock,
495 .set_memory_clock = &radeon_atom_set_memory_clock,
496 .get_pcie_lanes = &rv370_get_pcie_lanes,
497 .set_pcie_lanes = &rv370_set_pcie_lanes,
498 .set_clock_gating = &radeon_atom_set_clock_gating,
499 .set_surface_reg = r100_set_surface_reg,
500 .clear_surface_reg = r100_clear_surface_reg,
501 .bandwidth_update = &rv515_bandwidth_update,
502 .hpd_init = &rs600_hpd_init,
503 .hpd_fini = &rs600_hpd_fini,
504 .hpd_sense = &rs600_hpd_sense,
505 .hpd_set_polarity = &rs600_hpd_set_polarity,
506 .ioctl_wait_idle = NULL,
507};
508
509static struct radeon_asic r600_asic = {
510 .init = &r600_init,
511 .fini = &r600_fini,
512 .suspend = &r600_suspend,
513 .resume = &r600_resume,
514 .cp_commit = &r600_cp_commit,
515 .vga_set_state = &r600_vga_set_state,
516 .gpu_reset = &r600_gpu_reset,
517 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
518 .gart_set_page = &rs600_gart_set_page,
519 .ring_test = &r600_ring_test,
520 .ring_ib_execute = &r600_ring_ib_execute,
521 .irq_set = &r600_irq_set,
522 .irq_process = &r600_irq_process,
523 .get_vblank_counter = &rs600_get_vblank_counter,
524 .fence_ring_emit = &r600_fence_ring_emit,
525 .cs_parse = &r600_cs_parse,
526 .copy_blit = &r600_copy_blit,
527 .copy_dma = &r600_copy_blit,
528 .copy = &r600_copy_blit,
529 .get_engine_clock = &radeon_atom_get_engine_clock,
530 .set_engine_clock = &radeon_atom_set_engine_clock,
531 .get_memory_clock = &radeon_atom_get_memory_clock,
532 .set_memory_clock = &radeon_atom_set_memory_clock,
533 .get_pcie_lanes = &rv370_get_pcie_lanes,
534 .set_pcie_lanes = NULL,
535 .set_clock_gating = NULL,
536 .set_surface_reg = r600_set_surface_reg,
537 .clear_surface_reg = r600_clear_surface_reg,
538 .bandwidth_update = &rv515_bandwidth_update,
539 .hpd_init = &r600_hpd_init,
540 .hpd_fini = &r600_hpd_fini,
541 .hpd_sense = &r600_hpd_sense,
542 .hpd_set_polarity = &r600_hpd_set_polarity,
543 .ioctl_wait_idle = r600_ioctl_wait_idle,
544};
545
546static struct radeon_asic rs780_asic = {
547 .init = &r600_init,
548 .fini = &r600_fini,
549 .suspend = &r600_suspend,
550 .resume = &r600_resume,
551 .cp_commit = &r600_cp_commit,
552 .vga_set_state = &r600_vga_set_state,
553 .gpu_reset = &r600_gpu_reset,
554 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
555 .gart_set_page = &rs600_gart_set_page,
556 .ring_test = &r600_ring_test,
557 .ring_ib_execute = &r600_ring_ib_execute,
558 .irq_set = &r600_irq_set,
559 .irq_process = &r600_irq_process,
560 .get_vblank_counter = &rs600_get_vblank_counter,
561 .fence_ring_emit = &r600_fence_ring_emit,
562 .cs_parse = &r600_cs_parse,
563 .copy_blit = &r600_copy_blit,
564 .copy_dma = &r600_copy_blit,
565 .copy = &r600_copy_blit,
566 .get_engine_clock = &radeon_atom_get_engine_clock,
567 .set_engine_clock = &radeon_atom_set_engine_clock,
568 .get_memory_clock = NULL,
569 .set_memory_clock = NULL,
570 .get_pcie_lanes = NULL,
571 .set_pcie_lanes = NULL,
572 .set_clock_gating = NULL,
573 .set_surface_reg = r600_set_surface_reg,
574 .clear_surface_reg = r600_clear_surface_reg,
575 .bandwidth_update = &rs690_bandwidth_update,
576 .hpd_init = &r600_hpd_init,
577 .hpd_fini = &r600_hpd_fini,
578 .hpd_sense = &r600_hpd_sense,
579 .hpd_set_polarity = &r600_hpd_set_polarity,
580 .ioctl_wait_idle = r600_ioctl_wait_idle,
581};
582
583static struct radeon_asic rv770_asic = {
584 .init = &rv770_init,
585 .fini = &rv770_fini,
586 .suspend = &rv770_suspend,
587 .resume = &rv770_resume,
588 .cp_commit = &r600_cp_commit,
589 .gpu_reset = &rv770_gpu_reset,
590 .vga_set_state = &r600_vga_set_state,
591 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
592 .gart_set_page = &rs600_gart_set_page,
593 .ring_test = &r600_ring_test,
594 .ring_ib_execute = &r600_ring_ib_execute,
595 .irq_set = &r600_irq_set,
596 .irq_process = &r600_irq_process,
597 .get_vblank_counter = &rs600_get_vblank_counter,
598 .fence_ring_emit = &r600_fence_ring_emit,
599 .cs_parse = &r600_cs_parse,
600 .copy_blit = &r600_copy_blit,
601 .copy_dma = &r600_copy_blit,
602 .copy = &r600_copy_blit,
603 .get_engine_clock = &radeon_atom_get_engine_clock,
604 .set_engine_clock = &radeon_atom_set_engine_clock,
605 .get_memory_clock = &radeon_atom_get_memory_clock,
606 .set_memory_clock = &radeon_atom_set_memory_clock,
607 .get_pcie_lanes = &rv370_get_pcie_lanes,
608 .set_pcie_lanes = NULL,
609 .set_clock_gating = &radeon_atom_set_clock_gating,
610 .set_surface_reg = r600_set_surface_reg,
611 .clear_surface_reg = r600_clear_surface_reg,
612 .bandwidth_update = &rv515_bandwidth_update,
613 .hpd_init = &r600_hpd_init,
614 .hpd_fini = &r600_hpd_fini,
615 .hpd_sense = &r600_hpd_sense,
616 .hpd_set_polarity = &r600_hpd_set_polarity,
617 .ioctl_wait_idle = r600_ioctl_wait_idle,
618};
619
620static struct radeon_asic evergreen_asic = {
621 .init = &evergreen_init,
622 .fini = &evergreen_fini,
623 .suspend = &evergreen_suspend,
624 .resume = &evergreen_resume,
625 .cp_commit = NULL,
626 .gpu_reset = &evergreen_gpu_reset,
627 .vga_set_state = &r600_vga_set_state,
628 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
629 .gart_set_page = &rs600_gart_set_page,
630 .ring_test = NULL,
631 .ring_ib_execute = NULL,
632 .irq_set = NULL,
633 .irq_process = NULL,
634 .get_vblank_counter = NULL,
635 .fence_ring_emit = NULL,
636 .cs_parse = NULL,
637 .copy_blit = NULL,
638 .copy_dma = NULL,
639 .copy = NULL,
640 .get_engine_clock = &radeon_atom_get_engine_clock,
641 .set_engine_clock = &radeon_atom_set_engine_clock,
642 .get_memory_clock = &radeon_atom_get_memory_clock,
643 .set_memory_clock = &radeon_atom_set_memory_clock,
644 .set_pcie_lanes = NULL,
645 .set_clock_gating = NULL,
646 .set_surface_reg = r600_set_surface_reg,
647 .clear_surface_reg = r600_clear_surface_reg,
648 .bandwidth_update = &evergreen_bandwidth_update,
649 .hpd_init = &evergreen_hpd_init,
650 .hpd_fini = &evergreen_hpd_fini,
651 .hpd_sense = &evergreen_hpd_sense,
652 .hpd_set_polarity = &evergreen_hpd_set_polarity,
653};
654
655int radeon_asic_init(struct radeon_device *rdev)
656{
657 radeon_register_accessor_init(rdev);
658 switch (rdev->family) {
659 case CHIP_R100:
660 case CHIP_RV100:
661 case CHIP_RS100:
662 case CHIP_RV200:
663 case CHIP_RS200:
664 rdev->asic = &r100_asic;
665 break;
666 case CHIP_R200:
667 case CHIP_RV250:
668 case CHIP_RS300:
669 case CHIP_RV280:
670 rdev->asic = &r200_asic;
671 break;
672 case CHIP_R300:
673 case CHIP_R350:
674 case CHIP_RV350:
675 case CHIP_RV380:
676 if (rdev->flags & RADEON_IS_PCIE)
677 rdev->asic = &r300_asic_pcie;
678 else
679 rdev->asic = &r300_asic;
680 break;
681 case CHIP_R420:
682 case CHIP_R423:
683 case CHIP_RV410:
684 rdev->asic = &r420_asic;
685 break;
686 case CHIP_RS400:
687 case CHIP_RS480:
688 rdev->asic = &rs400_asic;
689 break;
690 case CHIP_RS600:
691 rdev->asic = &rs600_asic;
692 break;
693 case CHIP_RS690:
694 case CHIP_RS740:
695 rdev->asic = &rs690_asic;
696 break;
697 case CHIP_RV515:
698 rdev->asic = &rv515_asic;
699 break;
700 case CHIP_R520:
701 case CHIP_RV530:
702 case CHIP_RV560:
703 case CHIP_RV570:
704 case CHIP_R580:
705 rdev->asic = &r520_asic;
706 break;
707 case CHIP_R600:
708 case CHIP_RV610:
709 case CHIP_RV630:
710 case CHIP_RV620:
711 case CHIP_RV635:
712 case CHIP_RV670:
713 rdev->asic = &r600_asic;
714 break;
715 case CHIP_RS780:
716 case CHIP_RS880:
717 rdev->asic = &rs780_asic;
718 break;
719 case CHIP_RV770:
720 case CHIP_RV730:
721 case CHIP_RV710:
722 case CHIP_RV740:
723 rdev->asic = &rv770_asic;
724 break;
725 case CHIP_CEDAR:
726 case CHIP_REDWOOD:
727 case CHIP_JUNIPER:
728 case CHIP_CYPRESS:
729 case CHIP_HEMLOCK:
730 rdev->asic = &evergreen_asic;
731 break;
732 default:
733 /* FIXME: not supported yet */
734 return -EINVAL;
735 }
736
737 if (rdev->flags & RADEON_IS_IGP) {
738 rdev->asic->get_memory_clock = NULL;
739 rdev->asic->set_memory_clock = NULL;
740 }
741
742 /* set the number of crtcs */
743 if (rdev->flags & RADEON_SINGLE_CRTC)
744 rdev->num_crtc = 1;
745 else {
746 if (ASIC_IS_DCE4(rdev))
747 rdev->num_crtc = 6;
748 else
749 rdev->num_crtc = 2;
750 }
751
752 return 0;
753}
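Once radeon_asic_init() has installed the per-family table, the rest of the driver dispatches through rdev->asic, usually via the wrapper macros in radeon.h; a minimal hedged sketch of that pattern during bring-up, with ordering simplified:

	r = radeon_asic_init(rdev);
	if (r)
		return r; /* unknown family */
	/* every hardware touch after this goes through the callback table */
	radeon_bandwidth_update(rdev);
	radeon_hpd_init(rdev);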
754
755/*
756 * Wrapper around modesetting bits. Move to radeon_clocks.c?
757 */
758int radeon_clocks_init(struct radeon_device *rdev)
759{
760 int r;
761
762 r = radeon_static_clocks_init(rdev->ddev);
763 if (r) {
764 return r;
765 }
766 DRM_INFO("Clocks initialized !\n");
767 return 0;
768}
769
770void radeon_clocks_fini(struct radeon_device *rdev)
771{
772}
diff --git a/drivers/gpu/drm/radeon/radeon_asic.h b/drivers/gpu/drm/radeon/radeon_asic.h
index c18fbee387d7..a0b8280663d1 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.h
+++ b/drivers/gpu/drm/radeon/radeon_asic.h
@@ -33,6 +33,7 @@
33 */ 33 */
34uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev); 34uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
35void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); 35void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
36uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
36void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); 37void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
37 38
38uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev); 39uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
@@ -42,12 +43,20 @@ void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock
42void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); 43void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
43 44
44/* 45/*
45 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 46 * r100,rv100,rs100,rv200,rs200
46 */ 47 */
47extern int r100_init(struct radeon_device *rdev); 48struct r100_mc_save {
48extern void r100_fini(struct radeon_device *rdev); 49 u32 GENMO_WT;
49extern int r100_suspend(struct radeon_device *rdev); 50 u32 CRTC_EXT_CNTL;
50extern int r100_resume(struct radeon_device *rdev); 51 u32 CRTC_GEN_CNTL;
52 u32 CRTC2_GEN_CNTL;
53 u32 CUR_OFFSET;
54 u32 CUR2_OFFSET;
55};
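The new struct r100_mc_save above holds just enough display-controller state (VGA mode, CRTC controls, cursor offsets) for r100_mc_stop() to quiesce scanout while the memory controller is reprogrammed, and for r100_mc_resume() to restore it afterwards. A self-contained sketch of that save/stop, reprogram, resume pattern; the register indices, bit values and accessors are made-up stand-ins, not the radeon register interface:

#include <stdint.h>
#include <stdio.h>

struct mc_save {                             /* trimmed analogue of struct r100_mc_save */
        uint32_t crtc_gen_cntl;
        uint32_t crtc2_gen_cntl;
};

static uint32_t fake_regs[2] = { 0x00c0ffee, 0x00bada55 };   /* pretend MMIO space */
static uint32_t rreg(int i)             { return fake_regs[i]; }
static void     wreg(int i, uint32_t v) { fake_regs[i] = v; }

static void mc_stop(struct mc_save *save)
{
        save->crtc_gen_cntl  = rreg(0);      /* remember current CRTC state */
        save->crtc2_gen_cntl = rreg(1);
        wreg(0, save->crtc_gen_cntl  | 0x400);   /* blank the CRTCs (made-up bit) */
        wreg(1, save->crtc2_gen_cntl | 0x400);
}

static void mc_resume(const struct mc_save *save)
{
        wreg(0, save->crtc_gen_cntl);        /* restore exactly what was saved */
        wreg(1, save->crtc2_gen_cntl);
}

int main(void)
{
        struct mc_save save;

        mc_stop(&save);
        /* ... the memory controller would be reprogrammed here, scanout is quiet ... */
        mc_resume(&save);
        printf("restored: %#x %#x\n", (unsigned)fake_regs[0], (unsigned)fake_regs[1]);
        return 0;
}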
56int r100_init(struct radeon_device *rdev);
57void r100_fini(struct radeon_device *rdev);
58int r100_suspend(struct radeon_device *rdev);
59int r100_resume(struct radeon_device *rdev);
51uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); 60uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
52void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 61void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
53void r100_vga_set_state(struct radeon_device *rdev, bool state); 62void r100_vga_set_state(struct radeon_device *rdev, bool state);
@@ -72,43 +81,60 @@ int r100_copy_blit(struct radeon_device *rdev,
72int r100_set_surface_reg(struct radeon_device *rdev, int reg, 81int r100_set_surface_reg(struct radeon_device *rdev, int reg,
73 uint32_t tiling_flags, uint32_t pitch, 82 uint32_t tiling_flags, uint32_t pitch,
74 uint32_t offset, uint32_t obj_size); 83 uint32_t offset, uint32_t obj_size);
75int r100_clear_surface_reg(struct radeon_device *rdev, int reg); 84void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
76void r100_bandwidth_update(struct radeon_device *rdev); 85void r100_bandwidth_update(struct radeon_device *rdev);
77void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); 86void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
78int r100_ring_test(struct radeon_device *rdev); 87int r100_ring_test(struct radeon_device *rdev);
88void r100_hpd_init(struct radeon_device *rdev);
89void r100_hpd_fini(struct radeon_device *rdev);
90bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
91void r100_hpd_set_polarity(struct radeon_device *rdev,
92 enum radeon_hpd_id hpd);
93int r100_debugfs_rbbm_init(struct radeon_device *rdev);
94int r100_debugfs_cp_init(struct radeon_device *rdev);
95void r100_cp_disable(struct radeon_device *rdev);
96int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
97void r100_cp_fini(struct radeon_device *rdev);
98int r100_pci_gart_init(struct radeon_device *rdev);
99void r100_pci_gart_fini(struct radeon_device *rdev);
100int r100_pci_gart_enable(struct radeon_device *rdev);
101void r100_pci_gart_disable(struct radeon_device *rdev);
102int r100_debugfs_mc_info_init(struct radeon_device *rdev);
103int r100_gui_wait_for_idle(struct radeon_device *rdev);
104void r100_ib_fini(struct radeon_device *rdev);
105int r100_ib_init(struct radeon_device *rdev);
106void r100_irq_disable(struct radeon_device *rdev);
107void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
108void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
109void r100_vram_init_sizes(struct radeon_device *rdev);
110void r100_wb_disable(struct radeon_device *rdev);
111void r100_wb_fini(struct radeon_device *rdev);
112int r100_wb_init(struct radeon_device *rdev);
113void r100_hdp_reset(struct radeon_device *rdev);
114int r100_rb2d_reset(struct radeon_device *rdev);
115int r100_cp_reset(struct radeon_device *rdev);
116void r100_vga_render_disable(struct radeon_device *rdev);
117int r100_cs_track_check_pkt3_indx_buffer(struct radeon_cs_parser *p,
118 struct radeon_cs_packet *pkt,
119 struct radeon_bo *robj);
120int r100_cs_parse_packet0(struct radeon_cs_parser *p,
121 struct radeon_cs_packet *pkt,
122 const unsigned *auth, unsigned n,
123 radeon_packet0_check_t check);
124int r100_cs_packet_parse(struct radeon_cs_parser *p,
125 struct radeon_cs_packet *pkt,
126 unsigned idx);
127void r100_enable_bm(struct radeon_device *rdev);
128void r100_set_common_regs(struct radeon_device *rdev);
79 129
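The r100_hpd_* prototypes above are the legacy-chip implementations of the new hot-plug-detect hooks (.hpd_init, .hpd_sense, .hpd_set_polarity) that the asic tables now carry. Callers do not use them directly; they dispatch through the per-device table, roughly as in this self-contained sketch (the connector handling and types are simplified assumptions, not the real DRM structures):

#include <stdbool.h>
#include <stdio.h>

enum hpd_id { HPD_NONE = -1, HPD_1, HPD_2 };

struct asic_hpd {                            /* subset of the asic callback table */
        bool (*hpd_sense)(enum hpd_id hpd);
        void (*hpd_set_polarity)(enum hpd_id hpd);
};

/* Stand-ins for r100_hpd_sense()/r100_hpd_set_polarity(). */
static bool fake_hpd_sense(enum hpd_id hpd)        { return hpd == HPD_1; }
static void fake_hpd_set_polarity(enum hpd_id hpd) { (void)hpd; }

static const struct asic_hpd asic = { fake_hpd_sense, fake_hpd_set_polarity };

/* Generic hotplug check: ask the chip-specific hook whether a sink is present,
 * then reprogram the polarity so the next transition raises an interrupt. */
static bool connector_connected(enum hpd_id hpd)
{
        bool connected;

        if (hpd == HPD_NONE)
                return false;
        connected = asic.hpd_sense(hpd);
        asic.hpd_set_polarity(hpd);
        return connected;
}

int main(void)
{
        printf("HPD1 connected: %d\n", connector_connected(HPD_1));
        printf("HPD2 connected: %d\n", connector_connected(HPD_2));
        return 0;
}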
80static struct radeon_asic r100_asic = { 130/*
81 .init = &r100_init, 131 * r200,rv250,rs300,rv280
82 .fini = &r100_fini, 132 */
83 .suspend = &r100_suspend, 133extern int r200_copy_dma(struct radeon_device *rdev,
84 .resume = &r100_resume, 134 uint64_t src_offset,
85 .vga_set_state = &r100_vga_set_state, 135 uint64_t dst_offset,
86 .gpu_reset = &r100_gpu_reset, 136 unsigned num_pages,
87 .gart_tlb_flush = &r100_pci_gart_tlb_flush, 137 struct radeon_fence *fence);
88 .gart_set_page = &r100_pci_gart_set_page,
89 .cp_commit = &r100_cp_commit,
90 .ring_start = &r100_ring_start,
91 .ring_test = &r100_ring_test,
92 .ring_ib_execute = &r100_ring_ib_execute,
93 .irq_set = &r100_irq_set,
94 .irq_process = &r100_irq_process,
95 .get_vblank_counter = &r100_get_vblank_counter,
96 .fence_ring_emit = &r100_fence_ring_emit,
97 .cs_parse = &r100_cs_parse,
98 .copy_blit = &r100_copy_blit,
99 .copy_dma = NULL,
100 .copy = &r100_copy_blit,
101 .get_engine_clock = &radeon_legacy_get_engine_clock,
102 .set_engine_clock = &radeon_legacy_set_engine_clock,
103 .get_memory_clock = NULL,
104 .set_memory_clock = NULL,
105 .set_pcie_lanes = NULL,
106 .set_clock_gating = &radeon_legacy_set_clock_gating,
107 .set_surface_reg = r100_set_surface_reg,
108 .clear_surface_reg = r100_clear_surface_reg,
109 .bandwidth_update = &r100_bandwidth_update,
110};
111
112 138
113/* 139/*
114 * r300,r350,rv350,rv380 140 * r300,r350,rv350,rv380
@@ -127,42 +153,7 @@ extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t
127extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg); 153extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
128extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 154extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
129extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes); 155extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
130extern int r300_copy_dma(struct radeon_device *rdev, 156extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
131 uint64_t src_offset,
132 uint64_t dst_offset,
133 unsigned num_pages,
134 struct radeon_fence *fence);
135static struct radeon_asic r300_asic = {
136 .init = &r300_init,
137 .fini = &r300_fini,
138 .suspend = &r300_suspend,
139 .resume = &r300_resume,
140 .vga_set_state = &r100_vga_set_state,
141 .gpu_reset = &r300_gpu_reset,
142 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
143 .gart_set_page = &r100_pci_gart_set_page,
144 .cp_commit = &r100_cp_commit,
145 .ring_start = &r300_ring_start,
146 .ring_test = &r100_ring_test,
147 .ring_ib_execute = &r100_ring_ib_execute,
148 .irq_set = &r100_irq_set,
149 .irq_process = &r100_irq_process,
150 .get_vblank_counter = &r100_get_vblank_counter,
151 .fence_ring_emit = &r300_fence_ring_emit,
152 .cs_parse = &r300_cs_parse,
153 .copy_blit = &r100_copy_blit,
154 .copy_dma = &r300_copy_dma,
155 .copy = &r100_copy_blit,
156 .get_engine_clock = &radeon_legacy_get_engine_clock,
157 .set_engine_clock = &radeon_legacy_set_engine_clock,
158 .get_memory_clock = NULL,
159 .set_memory_clock = NULL,
160 .set_pcie_lanes = &rv370_set_pcie_lanes,
161 .set_clock_gating = &radeon_legacy_set_clock_gating,
162 .set_surface_reg = r100_set_surface_reg,
163 .clear_surface_reg = r100_clear_surface_reg,
164 .bandwidth_update = &r100_bandwidth_update,
165};
166 157
167/* 158/*
168 * r420,r423,rv410 159 * r420,r423,rv410
@@ -171,38 +162,6 @@ extern int r420_init(struct radeon_device *rdev);
171extern void r420_fini(struct radeon_device *rdev); 162extern void r420_fini(struct radeon_device *rdev);
172extern int r420_suspend(struct radeon_device *rdev); 163extern int r420_suspend(struct radeon_device *rdev);
173extern int r420_resume(struct radeon_device *rdev); 164extern int r420_resume(struct radeon_device *rdev);
174static struct radeon_asic r420_asic = {
175 .init = &r420_init,
176 .fini = &r420_fini,
177 .suspend = &r420_suspend,
178 .resume = &r420_resume,
179 .vga_set_state = &r100_vga_set_state,
180 .gpu_reset = &r300_gpu_reset,
181 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
182 .gart_set_page = &rv370_pcie_gart_set_page,
183 .cp_commit = &r100_cp_commit,
184 .ring_start = &r300_ring_start,
185 .ring_test = &r100_ring_test,
186 .ring_ib_execute = &r100_ring_ib_execute,
187 .irq_set = &r100_irq_set,
188 .irq_process = &r100_irq_process,
189 .get_vblank_counter = &r100_get_vblank_counter,
190 .fence_ring_emit = &r300_fence_ring_emit,
191 .cs_parse = &r300_cs_parse,
192 .copy_blit = &r100_copy_blit,
193 .copy_dma = &r300_copy_dma,
194 .copy = &r100_copy_blit,
195 .get_engine_clock = &radeon_atom_get_engine_clock,
196 .set_engine_clock = &radeon_atom_set_engine_clock,
197 .get_memory_clock = &radeon_atom_get_memory_clock,
198 .set_memory_clock = &radeon_atom_set_memory_clock,
199 .set_pcie_lanes = &rv370_set_pcie_lanes,
200 .set_clock_gating = &radeon_atom_set_clock_gating,
201 .set_surface_reg = r100_set_surface_reg,
202 .clear_surface_reg = r100_clear_surface_reg,
203 .bandwidth_update = &r100_bandwidth_update,
204};
205
206 165
207/* 166/*
208 * rs400,rs480 167 * rs400,rs480
@@ -215,38 +174,6 @@ void rs400_gart_tlb_flush(struct radeon_device *rdev);
215int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); 174int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
216uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg); 175uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
217void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 176void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
218static struct radeon_asic rs400_asic = {
219 .init = &rs400_init,
220 .fini = &rs400_fini,
221 .suspend = &rs400_suspend,
222 .resume = &rs400_resume,
223 .vga_set_state = &r100_vga_set_state,
224 .gpu_reset = &r300_gpu_reset,
225 .gart_tlb_flush = &rs400_gart_tlb_flush,
226 .gart_set_page = &rs400_gart_set_page,
227 .cp_commit = &r100_cp_commit,
228 .ring_start = &r300_ring_start,
229 .ring_test = &r100_ring_test,
230 .ring_ib_execute = &r100_ring_ib_execute,
231 .irq_set = &r100_irq_set,
232 .irq_process = &r100_irq_process,
233 .get_vblank_counter = &r100_get_vblank_counter,
234 .fence_ring_emit = &r300_fence_ring_emit,
235 .cs_parse = &r300_cs_parse,
236 .copy_blit = &r100_copy_blit,
237 .copy_dma = &r300_copy_dma,
238 .copy = &r100_copy_blit,
239 .get_engine_clock = &radeon_legacy_get_engine_clock,
240 .set_engine_clock = &radeon_legacy_set_engine_clock,
241 .get_memory_clock = NULL,
242 .set_memory_clock = NULL,
243 .set_pcie_lanes = NULL,
244 .set_clock_gating = &radeon_legacy_set_clock_gating,
245 .set_surface_reg = r100_set_surface_reg,
246 .clear_surface_reg = r100_clear_surface_reg,
247 .bandwidth_update = &r100_bandwidth_update,
248};
249
250 177
251/* 178/*
252 * rs600. 179 * rs600.
@@ -263,36 +190,11 @@ int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
263uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); 190uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
264void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 191void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
265void rs600_bandwidth_update(struct radeon_device *rdev); 192void rs600_bandwidth_update(struct radeon_device *rdev);
266static struct radeon_asic rs600_asic = { 193void rs600_hpd_init(struct radeon_device *rdev);
267 .init = &rs600_init, 194void rs600_hpd_fini(struct radeon_device *rdev);
268 .fini = &rs600_fini, 195bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
269 .suspend = &rs600_suspend, 196void rs600_hpd_set_polarity(struct radeon_device *rdev,
270 .resume = &rs600_resume, 197 enum radeon_hpd_id hpd);
271 .vga_set_state = &r100_vga_set_state,
272 .gpu_reset = &r300_gpu_reset,
273 .gart_tlb_flush = &rs600_gart_tlb_flush,
274 .gart_set_page = &rs600_gart_set_page,
275 .cp_commit = &r100_cp_commit,
276 .ring_start = &r300_ring_start,
277 .ring_test = &r100_ring_test,
278 .ring_ib_execute = &r100_ring_ib_execute,
279 .irq_set = &rs600_irq_set,
280 .irq_process = &rs600_irq_process,
281 .get_vblank_counter = &rs600_get_vblank_counter,
282 .fence_ring_emit = &r300_fence_ring_emit,
283 .cs_parse = &r300_cs_parse,
284 .copy_blit = &r100_copy_blit,
285 .copy_dma = &r300_copy_dma,
286 .copy = &r100_copy_blit,
287 .get_engine_clock = &radeon_atom_get_engine_clock,
288 .set_engine_clock = &radeon_atom_set_engine_clock,
289 .get_memory_clock = &radeon_atom_get_memory_clock,
290 .set_memory_clock = &radeon_atom_set_memory_clock,
291 .set_pcie_lanes = NULL,
292 .set_clock_gating = &radeon_atom_set_clock_gating,
293 .bandwidth_update = &rs600_bandwidth_update,
294};
295
296 198
297/* 199/*
298 * rs690,rs740 200 * rs690,rs740
@@ -304,38 +206,6 @@ int rs690_suspend(struct radeon_device *rdev);
304uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg); 206uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
305void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); 207void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
306void rs690_bandwidth_update(struct radeon_device *rdev); 208void rs690_bandwidth_update(struct radeon_device *rdev);
307static struct radeon_asic rs690_asic = {
308 .init = &rs690_init,
309 .fini = &rs690_fini,
310 .suspend = &rs690_suspend,
311 .resume = &rs690_resume,
312 .vga_set_state = &r100_vga_set_state,
313 .gpu_reset = &r300_gpu_reset,
314 .gart_tlb_flush = &rs400_gart_tlb_flush,
315 .gart_set_page = &rs400_gart_set_page,
316 .cp_commit = &r100_cp_commit,
317 .ring_start = &r300_ring_start,
318 .ring_test = &r100_ring_test,
319 .ring_ib_execute = &r100_ring_ib_execute,
320 .irq_set = &rs600_irq_set,
321 .irq_process = &rs600_irq_process,
322 .get_vblank_counter = &rs600_get_vblank_counter,
323 .fence_ring_emit = &r300_fence_ring_emit,
324 .cs_parse = &r300_cs_parse,
325 .copy_blit = &r100_copy_blit,
326 .copy_dma = &r300_copy_dma,
327 .copy = &r300_copy_dma,
328 .get_engine_clock = &radeon_atom_get_engine_clock,
329 .set_engine_clock = &radeon_atom_set_engine_clock,
330 .get_memory_clock = &radeon_atom_get_memory_clock,
331 .set_memory_clock = &radeon_atom_set_memory_clock,
332 .set_pcie_lanes = NULL,
333 .set_clock_gating = &radeon_atom_set_clock_gating,
334 .set_surface_reg = r100_set_surface_reg,
335 .clear_surface_reg = r100_clear_surface_reg,
336 .bandwidth_update = &rs690_bandwidth_update,
337};
338
339 209
340/* 210/*
341 * rv515 211 * rv515
@@ -351,75 +221,12 @@ void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
351void rv515_bandwidth_update(struct radeon_device *rdev); 221void rv515_bandwidth_update(struct radeon_device *rdev);
352int rv515_resume(struct radeon_device *rdev); 222int rv515_resume(struct radeon_device *rdev);
353int rv515_suspend(struct radeon_device *rdev); 223int rv515_suspend(struct radeon_device *rdev);
354static struct radeon_asic rv515_asic = {
355 .init = &rv515_init,
356 .fini = &rv515_fini,
357 .suspend = &rv515_suspend,
358 .resume = &rv515_resume,
359 .vga_set_state = &r100_vga_set_state,
360 .gpu_reset = &rv515_gpu_reset,
361 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
362 .gart_set_page = &rv370_pcie_gart_set_page,
363 .cp_commit = &r100_cp_commit,
364 .ring_start = &rv515_ring_start,
365 .ring_test = &r100_ring_test,
366 .ring_ib_execute = &r100_ring_ib_execute,
367 .irq_set = &rs600_irq_set,
368 .irq_process = &rs600_irq_process,
369 .get_vblank_counter = &rs600_get_vblank_counter,
370 .fence_ring_emit = &r300_fence_ring_emit,
371 .cs_parse = &r300_cs_parse,
372 .copy_blit = &r100_copy_blit,
373 .copy_dma = &r300_copy_dma,
374 .copy = &r100_copy_blit,
375 .get_engine_clock = &radeon_atom_get_engine_clock,
376 .set_engine_clock = &radeon_atom_set_engine_clock,
377 .get_memory_clock = &radeon_atom_get_memory_clock,
378 .set_memory_clock = &radeon_atom_set_memory_clock,
379 .set_pcie_lanes = &rv370_set_pcie_lanes,
380 .set_clock_gating = &radeon_atom_set_clock_gating,
381 .set_surface_reg = r100_set_surface_reg,
382 .clear_surface_reg = r100_clear_surface_reg,
383 .bandwidth_update = &rv515_bandwidth_update,
384};
385
386 224
387/* 225/*
388 * r520,rv530,rv560,rv570,r580 226 * r520,rv530,rv560,rv570,r580
389 */ 227 */
390int r520_init(struct radeon_device *rdev); 228int r520_init(struct radeon_device *rdev);
391int r520_resume(struct radeon_device *rdev); 229int r520_resume(struct radeon_device *rdev);
392static struct radeon_asic r520_asic = {
393 .init = &r520_init,
394 .fini = &rv515_fini,
395 .suspend = &rv515_suspend,
396 .resume = &r520_resume,
397 .vga_set_state = &r100_vga_set_state,
398 .gpu_reset = &rv515_gpu_reset,
399 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
400 .gart_set_page = &rv370_pcie_gart_set_page,
401 .cp_commit = &r100_cp_commit,
402 .ring_start = &rv515_ring_start,
403 .ring_test = &r100_ring_test,
404 .ring_ib_execute = &r100_ring_ib_execute,
405 .irq_set = &rs600_irq_set,
406 .irq_process = &rs600_irq_process,
407 .get_vblank_counter = &rs600_get_vblank_counter,
408 .fence_ring_emit = &r300_fence_ring_emit,
409 .cs_parse = &r300_cs_parse,
410 .copy_blit = &r100_copy_blit,
411 .copy_dma = &r300_copy_dma,
412 .copy = &r100_copy_blit,
413 .get_engine_clock = &radeon_atom_get_engine_clock,
414 .set_engine_clock = &radeon_atom_set_engine_clock,
415 .get_memory_clock = &radeon_atom_get_memory_clock,
416 .set_memory_clock = &radeon_atom_set_memory_clock,
417 .set_pcie_lanes = &rv370_set_pcie_lanes,
418 .set_clock_gating = &radeon_atom_set_clock_gating,
419 .set_surface_reg = r100_set_surface_reg,
420 .clear_surface_reg = r100_clear_surface_reg,
421 .bandwidth_update = &rv515_bandwidth_update,
422};
423 230
424/* 231/*
425 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880 232 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
@@ -449,42 +256,18 @@ int r600_gpu_reset(struct radeon_device *rdev);
449int r600_set_surface_reg(struct radeon_device *rdev, int reg, 256int r600_set_surface_reg(struct radeon_device *rdev, int reg,
450 uint32_t tiling_flags, uint32_t pitch, 257 uint32_t tiling_flags, uint32_t pitch,
451 uint32_t offset, uint32_t obj_size); 258 uint32_t offset, uint32_t obj_size);
452int r600_clear_surface_reg(struct radeon_device *rdev, int reg); 259void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
453void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); 260void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
454int r600_ring_test(struct radeon_device *rdev); 261int r600_ring_test(struct radeon_device *rdev);
455int r600_copy_blit(struct radeon_device *rdev, 262int r600_copy_blit(struct radeon_device *rdev,
456 uint64_t src_offset, uint64_t dst_offset, 263 uint64_t src_offset, uint64_t dst_offset,
457 unsigned num_pages, struct radeon_fence *fence); 264 unsigned num_pages, struct radeon_fence *fence);
458 265void r600_hpd_init(struct radeon_device *rdev);
459static struct radeon_asic r600_asic = { 266void r600_hpd_fini(struct radeon_device *rdev);
460 .init = &r600_init, 267bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
461 .fini = &r600_fini, 268void r600_hpd_set_polarity(struct radeon_device *rdev,
462 .suspend = &r600_suspend, 269 enum radeon_hpd_id hpd);
463 .resume = &r600_resume, 270extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
464 .cp_commit = &r600_cp_commit,
465 .vga_set_state = &r600_vga_set_state,
466 .gpu_reset = &r600_gpu_reset,
467 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
468 .gart_set_page = &rs600_gart_set_page,
469 .ring_test = &r600_ring_test,
470 .ring_ib_execute = &r600_ring_ib_execute,
471 .irq_set = &r600_irq_set,
472 .irq_process = &r600_irq_process,
473 .fence_ring_emit = &r600_fence_ring_emit,
474 .cs_parse = &r600_cs_parse,
475 .copy_blit = &r600_copy_blit,
476 .copy_dma = &r600_copy_blit,
477 .copy = &r600_copy_blit,
478 .get_engine_clock = &radeon_atom_get_engine_clock,
479 .set_engine_clock = &radeon_atom_set_engine_clock,
480 .get_memory_clock = &radeon_atom_get_memory_clock,
481 .set_memory_clock = &radeon_atom_set_memory_clock,
482 .set_pcie_lanes = NULL,
483 .set_clock_gating = &radeon_atom_set_clock_gating,
484 .set_surface_reg = r600_set_surface_reg,
485 .clear_surface_reg = r600_clear_surface_reg,
486 .bandwidth_update = &rv515_bandwidth_update,
487};
488 271
489/* 272/*
490 * rv770,rv730,rv710,rv740 273 * rv770,rv730,rv710,rv740
@@ -495,34 +278,18 @@ int rv770_suspend(struct radeon_device *rdev);
495int rv770_resume(struct radeon_device *rdev); 278int rv770_resume(struct radeon_device *rdev);
496int rv770_gpu_reset(struct radeon_device *rdev); 279int rv770_gpu_reset(struct radeon_device *rdev);
497 280
498static struct radeon_asic rv770_asic = { 281/*
499 .init = &rv770_init, 282 * evergreen
500 .fini = &rv770_fini, 283 */
501 .suspend = &rv770_suspend, 284int evergreen_init(struct radeon_device *rdev);
502 .resume = &rv770_resume, 285void evergreen_fini(struct radeon_device *rdev);
503 .cp_commit = &r600_cp_commit, 286int evergreen_suspend(struct radeon_device *rdev);
504 .gpu_reset = &rv770_gpu_reset, 287int evergreen_resume(struct radeon_device *rdev);
505 .vga_set_state = &r600_vga_set_state, 288int evergreen_gpu_reset(struct radeon_device *rdev);
506 .gart_tlb_flush = &r600_pcie_gart_tlb_flush, 289void evergreen_bandwidth_update(struct radeon_device *rdev);
507 .gart_set_page = &rs600_gart_set_page, 290void evergreen_hpd_init(struct radeon_device *rdev);
508 .ring_test = &r600_ring_test, 291void evergreen_hpd_fini(struct radeon_device *rdev);
509 .ring_ib_execute = &r600_ring_ib_execute, 292bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
510 .irq_set = &r600_irq_set, 293void evergreen_hpd_set_polarity(struct radeon_device *rdev,
511 .irq_process = &r600_irq_process, 294 enum radeon_hpd_id hpd);
512 .fence_ring_emit = &r600_fence_ring_emit,
513 .cs_parse = &r600_cs_parse,
514 .copy_blit = &r600_copy_blit,
515 .copy_dma = &r600_copy_blit,
516 .copy = &r600_copy_blit,
517 .get_engine_clock = &radeon_atom_get_engine_clock,
518 .set_engine_clock = &radeon_atom_set_engine_clock,
519 .get_memory_clock = &radeon_atom_get_memory_clock,
520 .set_memory_clock = &radeon_atom_set_memory_clock,
521 .set_pcie_lanes = NULL,
522 .set_clock_gating = &radeon_atom_set_clock_gating,
523 .set_surface_reg = r600_set_surface_reg,
524 .clear_surface_reg = r600_clear_surface_reg,
525 .bandwidth_update = &rv515_bandwidth_update,
526};
527
528#endif 295#endif
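Several hooks declared in this header stay optional: the tables leave .copy_dma, .set_pcie_lanes or .get_memory_clock NULL on chips without the feature, and radeon_asic_init() clears the memory-clock hooks again for IGPs. Call sites therefore test the pointer before dispatching and fall back where a substitute exists, for example blit copy when no DMA copy is wired up. A short self-contained sketch of that guarded-dispatch idiom (simplified types and names, not the driver's real signatures):

#include <stdio.h>

struct copy_hooks {                          /* optional callbacks, as in the asic tables */
        int (*copy_blit)(unsigned pages);
        int (*copy_dma)(unsigned pages);     /* may be NULL when no DMA copy path exists */
};

static int blit_copy(unsigned pages) { printf("blit %u pages\n", pages); return 0; }

static const struct copy_hooks r100_like = { blit_copy, NULL };

static int do_copy(const struct copy_hooks *h, unsigned pages)
{
        if (h->copy_dma)                     /* prefer DMA when the table provides it */
                return h->copy_dma(pages);
        if (h->copy_blit)                    /* otherwise fall back to the blitter */
                return h->copy_blit(pages);
        return -1;                           /* no copy method at all */
}

int main(void)
{
        return do_copy(&r100_like, 16);
}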
diff --git a/drivers/gpu/drm/radeon/radeon_atombios.c b/drivers/gpu/drm/radeon/radeon_atombios.c
index 2ed88a820935..9916d825401c 100644
--- a/drivers/gpu/drm/radeon/radeon_atombios.c
+++ b/drivers/gpu/drm/radeon/radeon_atombios.c
@@ -47,7 +47,8 @@ radeon_add_atom_connector(struct drm_device *dev,
47 int connector_type, 47 int connector_type,
48 struct radeon_i2c_bus_rec *i2c_bus, 48 struct radeon_i2c_bus_rec *i2c_bus,
49 bool linkb, uint32_t igp_lane_info, 49 bool linkb, uint32_t igp_lane_info,
50 uint16_t connector_object_id); 50 uint16_t connector_object_id,
51 struct radeon_hpd *hpd);
51 52
52/* from radeon_legacy_encoder.c */ 53/* from radeon_legacy_encoder.c */
53extern void 54extern void
@@ -60,52 +61,150 @@ union atom_supported_devices {
60 struct _ATOM_SUPPORTED_DEVICES_INFO_2d1 info_2d1; 61 struct _ATOM_SUPPORTED_DEVICES_INFO_2d1 info_2d1;
61}; 62};
62 63
63static inline struct radeon_i2c_bus_rec radeon_lookup_gpio(struct drm_device 64static inline struct radeon_i2c_bus_rec radeon_lookup_i2c_gpio(struct radeon_device *rdev,
64 *dev, uint8_t id) 65 uint8_t id)
65{ 66{
66 struct radeon_device *rdev = dev->dev_private;
67 struct atom_context *ctx = rdev->mode_info.atom_context; 67 struct atom_context *ctx = rdev->mode_info.atom_context;
68 ATOM_GPIO_I2C_ASSIGMENT gpio; 68 ATOM_GPIO_I2C_ASSIGMENT *gpio;
69 struct radeon_i2c_bus_rec i2c; 69 struct radeon_i2c_bus_rec i2c;
70 int index = GetIndexIntoMasterTable(DATA, GPIO_I2C_Info); 70 int index = GetIndexIntoMasterTable(DATA, GPIO_I2C_Info);
71 struct _ATOM_GPIO_I2C_INFO *i2c_info; 71 struct _ATOM_GPIO_I2C_INFO *i2c_info;
72 uint16_t data_offset; 72 uint16_t data_offset, size;
73 int i, num_indices;
73 74
74 memset(&i2c, 0, sizeof(struct radeon_i2c_bus_rec)); 75 memset(&i2c, 0, sizeof(struct radeon_i2c_bus_rec));
75 i2c.valid = false; 76 i2c.valid = false;
76 77
77 atom_parse_data_header(ctx, index, NULL, NULL, NULL, &data_offset); 78 if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
78 79 i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
79 i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset); 80
80 81 num_indices = (size - sizeof(ATOM_COMMON_TABLE_HEADER)) /
81 gpio = i2c_info->asGPIO_Info[id]; 82 sizeof(ATOM_GPIO_I2C_ASSIGMENT);
82 83
83 i2c.mask_clk_reg = le16_to_cpu(gpio.usClkMaskRegisterIndex) * 4; 84 for (i = 0; i < num_indices; i++) {
84 i2c.mask_data_reg = le16_to_cpu(gpio.usDataMaskRegisterIndex) * 4; 85 gpio = &i2c_info->asGPIO_Info[i];
85 i2c.put_clk_reg = le16_to_cpu(gpio.usClkEnRegisterIndex) * 4; 86
86 i2c.put_data_reg = le16_to_cpu(gpio.usDataEnRegisterIndex) * 4; 87 if (gpio->sucI2cId.ucAccess == id) {
87 i2c.get_clk_reg = le16_to_cpu(gpio.usClkY_RegisterIndex) * 4; 88 i2c.mask_clk_reg = le16_to_cpu(gpio->usClkMaskRegisterIndex) * 4;
88 i2c.get_data_reg = le16_to_cpu(gpio.usDataY_RegisterIndex) * 4; 89 i2c.mask_data_reg = le16_to_cpu(gpio->usDataMaskRegisterIndex) * 4;
89 i2c.a_clk_reg = le16_to_cpu(gpio.usClkA_RegisterIndex) * 4; 90 i2c.en_clk_reg = le16_to_cpu(gpio->usClkEnRegisterIndex) * 4;
90 i2c.a_data_reg = le16_to_cpu(gpio.usDataA_RegisterIndex) * 4; 91 i2c.en_data_reg = le16_to_cpu(gpio->usDataEnRegisterIndex) * 4;
91 i2c.mask_clk_mask = (1 << gpio.ucClkMaskShift); 92 i2c.y_clk_reg = le16_to_cpu(gpio->usClkY_RegisterIndex) * 4;
92 i2c.mask_data_mask = (1 << gpio.ucDataMaskShift); 93 i2c.y_data_reg = le16_to_cpu(gpio->usDataY_RegisterIndex) * 4;
93 i2c.put_clk_mask = (1 << gpio.ucClkEnShift); 94 i2c.a_clk_reg = le16_to_cpu(gpio->usClkA_RegisterIndex) * 4;
94 i2c.put_data_mask = (1 << gpio.ucDataEnShift); 95 i2c.a_data_reg = le16_to_cpu(gpio->usDataA_RegisterIndex) * 4;
95 i2c.get_clk_mask = (1 << gpio.ucClkY_Shift); 96 i2c.mask_clk_mask = (1 << gpio->ucClkMaskShift);
96 i2c.get_data_mask = (1 << gpio.ucDataY_Shift); 97 i2c.mask_data_mask = (1 << gpio->ucDataMaskShift);
97 i2c.a_clk_mask = (1 << gpio.ucClkA_Shift); 98 i2c.en_clk_mask = (1 << gpio->ucClkEnShift);
98 i2c.a_data_mask = (1 << gpio.ucDataA_Shift); 99 i2c.en_data_mask = (1 << gpio->ucDataEnShift);
99 i2c.valid = true; 100 i2c.y_clk_mask = (1 << gpio->ucClkY_Shift);
101 i2c.y_data_mask = (1 << gpio->ucDataY_Shift);
102 i2c.a_clk_mask = (1 << gpio->ucClkA_Shift);
103 i2c.a_data_mask = (1 << gpio->ucDataA_Shift);
104
105 if (gpio->sucI2cId.sbfAccess.bfHW_Capable)
106 i2c.hw_capable = true;
107 else
108 i2c.hw_capable = false;
109
110 if (gpio->sucI2cId.ucAccess == 0xa0)
111 i2c.mm_i2c = true;
112 else
113 i2c.mm_i2c = false;
114
115 i2c.i2c_id = gpio->sucI2cId.ucAccess;
116
117 i2c.valid = true;
118 break;
119 }
120 }
121 }
100 122
101 return i2c; 123 return i2c;
102} 124}
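The rewritten radeon_lookup_i2c_gpio() above no longer indexes asGPIO_Info[] directly with the caller's line-mux value: it takes the table size from atom_parse_data_header(), derives the entry count from it, and scans for a matching sucI2cId.ucAccess. radeon_lookup_gpio() below reuses the same size-driven iteration for the GPIO pin LUT. A stand-alone sketch of that pattern, using a fake in-memory table instead of a real ATOM BIOS image (the names and field layout are invented for the example):

#include <stdint.h>
#include <stdio.h>

struct table_header { uint16_t size; };      /* stand-in for ATOM_COMMON_TABLE_HEADER */
struct gpio_entry   { uint8_t id; uint16_t clk_reg; };

struct fake_table {                          /* shape of one BIOS data table in this sketch */
        struct table_header hdr;
        struct gpio_entry entries[2];
};

static int lookup_gpio(const struct table_header *hdr, uint8_t wanted,
                       struct gpio_entry *out)
{
        const struct gpio_entry *entries =
                (const struct gpio_entry *)((const uint8_t *)hdr + sizeof(*hdr));
        /* entry count comes from the table size, never from a hard-coded index */
        int num = (hdr->size - sizeof(*hdr)) / sizeof(struct gpio_entry);
        int i;

        for (i = 0; i < num; i++) {
                if (entries[i].id == wanted) {   /* match by id, as the new lookup does */
                        *out = entries[i];
                        return 0;
                }
        }
        return -1;                               /* id not present in this table */
}

int main(void)
{
        struct fake_table t = {
                .hdr     = { .size = sizeof(struct fake_table) },
                .entries = { { 0x90, 0x10 }, { 0x93, 0x14 } },
        };
        struct gpio_entry found;

        if (lookup_gpio(&t.hdr, 0x93, &found) == 0)
                printf("id 0x93 -> clk reg 0x%x\n", (unsigned)found.clk_reg);
        return 0;
}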
103 125
126static inline struct radeon_gpio_rec radeon_lookup_gpio(struct radeon_device *rdev,
127 u8 id)
128{
129 struct atom_context *ctx = rdev->mode_info.atom_context;
130 struct radeon_gpio_rec gpio;
131 int index = GetIndexIntoMasterTable(DATA, GPIO_Pin_LUT);
132 struct _ATOM_GPIO_PIN_LUT *gpio_info;
133 ATOM_GPIO_PIN_ASSIGNMENT *pin;
134 u16 data_offset, size;
135 int i, num_indices;
136
137 memset(&gpio, 0, sizeof(struct radeon_gpio_rec));
138 gpio.valid = false;
139
140 if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
141 gpio_info = (struct _ATOM_GPIO_PIN_LUT *)(ctx->bios + data_offset);
142
143 num_indices = (size - sizeof(ATOM_COMMON_TABLE_HEADER)) /
144 sizeof(ATOM_GPIO_PIN_ASSIGNMENT);
145
146 for (i = 0; i < num_indices; i++) {
147 pin = &gpio_info->asGPIO_Pin[i];
148 if (id == pin->ucGPIO_ID) {
149 gpio.id = pin->ucGPIO_ID;
150 gpio.reg = pin->usGpioPin_AIndex * 4;
151 gpio.mask = (1 << pin->ucGpioPinBitShift);
152 gpio.valid = true;
153 break;
154 }
155 }
156 }
157
158 return gpio;
159}
160
161static struct radeon_hpd radeon_atom_get_hpd_info_from_gpio(struct radeon_device *rdev,
162 struct radeon_gpio_rec *gpio)
163{
164 struct radeon_hpd hpd;
165 u32 reg;
166
167 if (ASIC_IS_DCE4(rdev))
168 reg = EVERGREEN_DC_GPIO_HPD_A;
169 else
170 reg = AVIVO_DC_GPIO_HPD_A;
171
172 hpd.gpio = *gpio;
173 if (gpio->reg == reg) {
174 switch(gpio->mask) {
175 case (1 << 0):
176 hpd.hpd = RADEON_HPD_1;
177 break;
178 case (1 << 8):
179 hpd.hpd = RADEON_HPD_2;
180 break;
181 case (1 << 16):
182 hpd.hpd = RADEON_HPD_3;
183 break;
184 case (1 << 24):
185 hpd.hpd = RADEON_HPD_4;
186 break;
187 case (1 << 26):
188 hpd.hpd = RADEON_HPD_5;
189 break;
190 case (1 << 28):
191 hpd.hpd = RADEON_HPD_6;
192 break;
193 default:
194 hpd.hpd = RADEON_HPD_NONE;
195 break;
196 }
197 } else
198 hpd.hpd = RADEON_HPD_NONE;
199 return hpd;
200}
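The switch above encodes which bit of DC_GPIO_HPD_A belongs to which hot-plug pin: bit 0 maps to HPD_1, bit 8 to HPD_2, bit 16 to HPD_3, bit 24 to HPD_4, bit 26 to HPD_5 and bit 28 to HPD_6; any other GPIO yields RADEON_HPD_NONE. The same mapping can also be kept as a small lookup table, which restates the assignment compactly. A self-contained sketch with simplified enum values, not the driver's radeon_hpd_id definitions:

#include <stdint.h>
#include <stdio.h>

enum hpd_pin { HPD_NONE = 0, HPD_1, HPD_2, HPD_3, HPD_4, HPD_5, HPD_6 };

static const struct { uint32_t mask; enum hpd_pin pin; } hpd_map[] = {
        { 1u << 0,  HPD_1 },
        { 1u << 8,  HPD_2 },
        { 1u << 16, HPD_3 },
        { 1u << 24, HPD_4 },
        { 1u << 26, HPD_5 },
        { 1u << 28, HPD_6 },
};

static enum hpd_pin hpd_from_gpio_mask(uint32_t mask)
{
        unsigned int i;

        for (i = 0; i < sizeof(hpd_map) / sizeof(hpd_map[0]); i++)
                if (hpd_map[i].mask == mask)
                        return hpd_map[i].pin;
        return HPD_NONE;                     /* GPIO is not one of the HPD pins */
}

int main(void)
{
        printf("mask 1<<16 -> HPD_%d\n", hpd_from_gpio_mask(1u << 16));
        printf("mask 1<<3  -> %d (none)\n", hpd_from_gpio_mask(1u << 3));
        return 0;
}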
201
104static bool radeon_atom_apply_quirks(struct drm_device *dev, 202static bool radeon_atom_apply_quirks(struct drm_device *dev,
105 uint32_t supported_device, 203 uint32_t supported_device,
106 int *connector_type, 204 int *connector_type,
107 struct radeon_i2c_bus_rec *i2c_bus, 205 struct radeon_i2c_bus_rec *i2c_bus,
108 uint16_t *line_mux) 206 uint16_t *line_mux,
207 struct radeon_hpd *hpd)
109{ 208{
110 209
111 /* Asus M2A-VM HDMI board lists the DVI port as HDMI */ 210 /* Asus M2A-VM HDMI board lists the DVI port as HDMI */
@@ -117,6 +216,15 @@ static bool radeon_atom_apply_quirks(struct drm_device *dev,
117 *connector_type = DRM_MODE_CONNECTOR_DVID; 216 *connector_type = DRM_MODE_CONNECTOR_DVID;
118 } 217 }
119 218
219 /* Asrock RS600 board lists the DVI port as HDMI */
220 if ((dev->pdev->device == 0x7941) &&
221 (dev->pdev->subsystem_vendor == 0x1849) &&
222 (dev->pdev->subsystem_device == 0x7941)) {
223 if ((*connector_type == DRM_MODE_CONNECTOR_HDMIA) &&
224 (supported_device == ATOM_DEVICE_DFP3_SUPPORT))
225 *connector_type = DRM_MODE_CONNECTOR_DVID;
226 }
227
120 /* a-bit f-i90hd - ciaranm on #radeonhd - this board has no DVI */ 228 /* a-bit f-i90hd - ciaranm on #radeonhd - this board has no DVI */
121 if ((dev->pdev->device == 0x7941) && 229 if ((dev->pdev->device == 0x7941) &&
122 (dev->pdev->subsystem_vendor == 0x147b) && 230 (dev->pdev->subsystem_vendor == 0x147b) &&
@@ -135,6 +243,23 @@ static bool radeon_atom_apply_quirks(struct drm_device *dev,
135 } 243 }
136 } 244 }
137 245
246 /* HIS X1300 is DVI+VGA, not DVI+DVI */
247 if ((dev->pdev->device == 0x7146) &&
248 (dev->pdev->subsystem_vendor == 0x17af) &&
249 (dev->pdev->subsystem_device == 0x2058)) {
250 if (supported_device == ATOM_DEVICE_DFP1_SUPPORT)
251 return false;
252 }
253
254 /* Gigabyte X1300 is DVI+VGA, not DVI+DVI */
255 if ((dev->pdev->device == 0x7142) &&
256 (dev->pdev->subsystem_vendor == 0x1458) &&
257 (dev->pdev->subsystem_device == 0x2134)) {
258 if (supported_device == ATOM_DEVICE_DFP1_SUPPORT)
259 return false;
260 }
261
262
138 /* Funky macbooks */ 263 /* Funky macbooks */
139 if ((dev->pdev->device == 0x71C5) && 264 if ((dev->pdev->device == 0x71C5) &&
140 (dev->pdev->subsystem_vendor == 0x106b) && 265 (dev->pdev->subsystem_vendor == 0x106b) &&
@@ -142,6 +267,8 @@ static bool radeon_atom_apply_quirks(struct drm_device *dev,
142 if ((supported_device == ATOM_DEVICE_CRT1_SUPPORT) || 267 if ((supported_device == ATOM_DEVICE_CRT1_SUPPORT) ||
143 (supported_device == ATOM_DEVICE_DFP2_SUPPORT)) 268 (supported_device == ATOM_DEVICE_DFP2_SUPPORT))
144 return false; 269 return false;
270 if (supported_device == ATOM_DEVICE_CRT2_SUPPORT)
271 *line_mux = 0x90;
145 } 272 }
146 273
147 /* ASUS HD 3600 XT board lists the DVI port as HDMI */ 274 /* ASUS HD 3600 XT board lists the DVI port as HDMI */
@@ -172,6 +299,24 @@ static bool radeon_atom_apply_quirks(struct drm_device *dev,
172 } 299 }
173 } 300 }
174 301
302 /* Acer laptop reports DVI-D as DVI-I */
303 if ((dev->pdev->device == 0x95c4) &&
304 (dev->pdev->subsystem_vendor == 0x1025) &&
305 (dev->pdev->subsystem_device == 0x013c)) {
306 if ((*connector_type == DRM_MODE_CONNECTOR_DVII) &&
307 (supported_device == ATOM_DEVICE_DFP1_SUPPORT))
308 *connector_type = DRM_MODE_CONNECTOR_DVID;
309 }
310
311 /* XFX Pine Group device rv730 reports no VGA DDC lines
312 * even though they are wired up to record 0x93
313 */
314 if ((dev->pdev->device == 0x9498) &&
315 (dev->pdev->subsystem_vendor == 0x1682) &&
316 (dev->pdev->subsystem_device == 0x2452)) {
317 struct radeon_device *rdev = dev->dev_private;
318 *i2c_bus = radeon_lookup_i2c_gpio(rdev, 0x93);
319 }
175 return true; 320 return true;
176} 321}
177 322
@@ -231,7 +376,9 @@ const int object_connector_convert[] = {
231 DRM_MODE_CONNECTOR_Unknown, 376 DRM_MODE_CONNECTOR_Unknown,
232 DRM_MODE_CONNECTOR_Unknown, 377 DRM_MODE_CONNECTOR_Unknown,
233 DRM_MODE_CONNECTOR_Unknown, 378 DRM_MODE_CONNECTOR_Unknown,
234 DRM_MODE_CONNECTOR_DisplayPort 379 DRM_MODE_CONNECTOR_DisplayPort,
380 DRM_MODE_CONNECTOR_eDP,
381 DRM_MODE_CONNECTOR_Unknown
235}; 382};
236 383
237bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev) 384bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
@@ -240,20 +387,20 @@ bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
240 struct radeon_mode_info *mode_info = &rdev->mode_info; 387 struct radeon_mode_info *mode_info = &rdev->mode_info;
241 struct atom_context *ctx = mode_info->atom_context; 388 struct atom_context *ctx = mode_info->atom_context;
242 int index = GetIndexIntoMasterTable(DATA, Object_Header); 389 int index = GetIndexIntoMasterTable(DATA, Object_Header);
243 uint16_t size, data_offset; 390 u16 size, data_offset;
244 uint8_t frev, crev, line_mux = 0; 391 u8 frev, crev;
245 ATOM_CONNECTOR_OBJECT_TABLE *con_obj; 392 ATOM_CONNECTOR_OBJECT_TABLE *con_obj;
246 ATOM_DISPLAY_OBJECT_PATH_TABLE *path_obj; 393 ATOM_DISPLAY_OBJECT_PATH_TABLE *path_obj;
247 ATOM_OBJECT_HEADER *obj_header; 394 ATOM_OBJECT_HEADER *obj_header;
248 int i, j, path_size, device_support; 395 int i, j, path_size, device_support;
249 int connector_type; 396 int connector_type;
250 uint16_t igp_lane_info, conn_id, connector_object_id; 397 u16 igp_lane_info, conn_id, connector_object_id;
251 bool linkb; 398 bool linkb;
252 struct radeon_i2c_bus_rec ddc_bus; 399 struct radeon_i2c_bus_rec ddc_bus;
400 struct radeon_gpio_rec gpio;
401 struct radeon_hpd hpd;
253 402
254 atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset); 403 if (!atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset))
255
256 if (data_offset == 0)
257 return false; 404 return false;
258 405
259 if (crev < 2) 406 if (crev < 2)
@@ -276,7 +423,6 @@ bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
276 path = (ATOM_DISPLAY_OBJECT_PATH *) addr; 423 path = (ATOM_DISPLAY_OBJECT_PATH *) addr;
277 path_size += le16_to_cpu(path->usSize); 424 path_size += le16_to_cpu(path->usSize);
278 linkb = false; 425 linkb = false;
279
280 if (device_support & le16_to_cpu(path->usDeviceTag)) { 426 if (device_support & le16_to_cpu(path->usDeviceTag)) {
281 uint8_t con_obj_id, con_obj_num, con_obj_type; 427 uint8_t con_obj_id, con_obj_num, con_obj_type;
282 428
@@ -306,37 +452,43 @@ bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
306 GetIndexIntoMasterTable(DATA, 452 GetIndexIntoMasterTable(DATA,
307 IntegratedSystemInfo); 453 IntegratedSystemInfo);
308 454
309 atom_parse_data_header(ctx, index, &size, &frev, 455 if (atom_parse_data_header(ctx, index, &size, &frev,
310 &crev, &igp_offset); 456 &crev, &igp_offset)) {
311 457
312 if (crev >= 2) { 458 if (crev >= 2) {
313 igp_obj = 459 igp_obj =
314 (ATOM_INTEGRATED_SYSTEM_INFO_V2 460 (ATOM_INTEGRATED_SYSTEM_INFO_V2
315 *) (ctx->bios + igp_offset); 461 *) (ctx->bios + igp_offset);
316 462
317 if (igp_obj) { 463 if (igp_obj) {
318 uint32_t slot_config, ct; 464 uint32_t slot_config, ct;
319 465
320 if (con_obj_num == 1) 466 if (con_obj_num == 1)
321 slot_config = 467 slot_config =
322 igp_obj-> 468 igp_obj->
323 ulDDISlot1Config; 469 ulDDISlot1Config;
324 else 470 else
325 slot_config = 471 slot_config =
326 igp_obj-> 472 igp_obj->
327 ulDDISlot2Config; 473 ulDDISlot2Config;
328 474
329 ct = (slot_config >> 16) & 0xff; 475 ct = (slot_config >> 16) & 0xff;
330 connector_type = 476 connector_type =
331 object_connector_convert 477 object_connector_convert
332 [ct]; 478 [ct];
333 connector_object_id = ct; 479 connector_object_id = ct;
334 igp_lane_info = 480 igp_lane_info =
335 slot_config & 0xffff; 481 slot_config & 0xffff;
482 } else
483 continue;
336 } else 484 } else
337 continue; 485 continue;
338 } else 486 } else {
339 continue; 487 igp_lane_info = 0;
488 connector_type =
489 object_connector_convert[con_obj_id];
490 connector_object_id = con_obj_id;
491 }
340 } else { 492 } else {
341 igp_lane_info = 0; 493 igp_lane_info = 0;
342 connector_type = 494 connector_type =
@@ -377,10 +529,9 @@ bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
377 } 529 }
378 } 530 }
379 531
380 /* look up gpio for ddc */ 532 /* look up gpio for ddc, hpd */
381 if ((le16_to_cpu(path->usDeviceTag) & 533 if ((le16_to_cpu(path->usDeviceTag) &
382 (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT)) 534 (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT)) == 0) {
383 == 0) {
384 for (j = 0; j < con_obj->ucNumberOfObjects; j++) { 535 for (j = 0; j < con_obj->ucNumberOfObjects; j++) {
385 if (le16_to_cpu(path->usConnObjectId) == 536 if (le16_to_cpu(path->usConnObjectId) ==
386 le16_to_cpu(con_obj->asObjects[j]. 537 le16_to_cpu(con_obj->asObjects[j].
@@ -394,21 +545,34 @@ bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
394 asObjects[j]. 545 asObjects[j].
395 usRecordOffset)); 546 usRecordOffset));
396 ATOM_I2C_RECORD *i2c_record; 547 ATOM_I2C_RECORD *i2c_record;
548 ATOM_HPD_INT_RECORD *hpd_record;
549 ATOM_I2C_ID_CONFIG_ACCESS *i2c_config;
550 hpd.hpd = RADEON_HPD_NONE;
397 551
398 while (record->ucRecordType > 0 552 while (record->ucRecordType > 0
399 && record-> 553 && record->
400 ucRecordType <= 554 ucRecordType <=
401 ATOM_MAX_OBJECT_RECORD_NUMBER) { 555 ATOM_MAX_OBJECT_RECORD_NUMBER) {
402 switch (record-> 556 switch (record->ucRecordType) {
403 ucRecordType) {
404 case ATOM_I2C_RECORD_TYPE: 557 case ATOM_I2C_RECORD_TYPE:
405 i2c_record = 558 i2c_record =
406 (ATOM_I2C_RECORD 559 (ATOM_I2C_RECORD *)
407 *) record; 560 record;
408 line_mux = 561 i2c_config =
409 i2c_record-> 562 (ATOM_I2C_ID_CONFIG_ACCESS *)
410 sucI2cId. 563 &i2c_record->sucI2cId;
411 bfI2C_LineMux; 564 ddc_bus = radeon_lookup_i2c_gpio(rdev,
565 i2c_config->
566 ucAccess);
567 break;
568 case ATOM_HPD_INT_RECORD_TYPE:
569 hpd_record =
570 (ATOM_HPD_INT_RECORD *)
571 record;
572 gpio = radeon_lookup_gpio(rdev,
573 hpd_record->ucHPDIntGPIOID);
574 hpd = radeon_atom_get_hpd_info_from_gpio(rdev, &gpio);
575 hpd.plugged_state = hpd_record->ucPlugged_PinState;
412 break; 576 break;
413 } 577 }
414 record = 578 record =
@@ -421,24 +585,19 @@ bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
421 break; 585 break;
422 } 586 }
423 } 587 }
424 } else 588 } else {
425 line_mux = 0; 589 hpd.hpd = RADEON_HPD_NONE;
426
427 if ((le16_to_cpu(path->usDeviceTag) ==
428 ATOM_DEVICE_TV1_SUPPORT)
429 || (le16_to_cpu(path->usDeviceTag) ==
430 ATOM_DEVICE_TV2_SUPPORT)
431 || (le16_to_cpu(path->usDeviceTag) ==
432 ATOM_DEVICE_CV_SUPPORT))
433 ddc_bus.valid = false; 590 ddc_bus.valid = false;
434 else 591 }
435 ddc_bus = radeon_lookup_gpio(dev, line_mux); 592
593 /* needed for aux chan transactions */
594 ddc_bus.hpd_id = hpd.hpd ? (hpd.hpd - 1) : 0;
436 595
437 conn_id = le16_to_cpu(path->usConnObjectId); 596 conn_id = le16_to_cpu(path->usConnObjectId);
438 597
439 if (!radeon_atom_apply_quirks 598 if (!radeon_atom_apply_quirks
440 (dev, le16_to_cpu(path->usDeviceTag), &connector_type, 599 (dev, le16_to_cpu(path->usDeviceTag), &connector_type,
441 &ddc_bus, &conn_id)) 600 &ddc_bus, &conn_id, &hpd))
442 continue; 601 continue;
443 602
444 radeon_add_atom_connector(dev, 603 radeon_add_atom_connector(dev,
@@ -447,7 +606,8 @@ bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev)
447 usDeviceTag), 606 usDeviceTag),
448 connector_type, &ddc_bus, 607 connector_type, &ddc_bus,
449 linkb, igp_lane_info, 608 linkb, igp_lane_info,
450 connector_object_id); 609 connector_object_id,
610 &hpd);
451 611
452 } 612 }
453 } 613 }
@@ -476,20 +636,23 @@ static uint16_t atombios_get_connector_object_id(struct drm_device *dev,
476 uint8_t frev, crev; 636 uint8_t frev, crev;
477 ATOM_XTMDS_INFO *xtmds; 637 ATOM_XTMDS_INFO *xtmds;
478 638
479 atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset); 639 if (atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset)) {
480 xtmds = (ATOM_XTMDS_INFO *)(ctx->bios + data_offset); 640 xtmds = (ATOM_XTMDS_INFO *)(ctx->bios + data_offset);
481 641
482 if (xtmds->ucSupportedLink & ATOM_XTMDS_SUPPORTED_DUALLINK) { 642 if (xtmds->ucSupportedLink & ATOM_XTMDS_SUPPORTED_DUALLINK) {
483 if (connector_type == DRM_MODE_CONNECTOR_DVII) 643 if (connector_type == DRM_MODE_CONNECTOR_DVII)
484 return CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I; 644 return CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I;
485 else 645 else
486 return CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D; 646 return CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D;
487 } else { 647 } else {
488 if (connector_type == DRM_MODE_CONNECTOR_DVII) 648 if (connector_type == DRM_MODE_CONNECTOR_DVII)
489 return CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I; 649 return CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I;
490 else 650 else
491 return CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D; 651 return CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D;
492 } 652 }
653 } else
654 return supported_devices_connector_object_id_convert
655 [connector_type];
493 } else { 656 } else {
494 return supported_devices_connector_object_id_convert 657 return supported_devices_connector_object_id_convert
495 [connector_type]; 658 [connector_type];
@@ -502,6 +665,7 @@ struct bios_connector {
502 uint16_t devices; 665 uint16_t devices;
503 int connector_type; 666 int connector_type;
504 struct radeon_i2c_bus_rec ddc_bus; 667 struct radeon_i2c_bus_rec ddc_bus;
668 struct radeon_hpd hpd;
505}; 669};
506 670
507bool radeon_get_atom_connector_info_from_supported_devices_table(struct 671bool radeon_get_atom_connector_info_from_supported_devices_table(struct
@@ -517,17 +681,23 @@ bool radeon_get_atom_connector_info_from_supported_devices_table(struct
517 uint16_t device_support; 681 uint16_t device_support;
518 uint8_t dac; 682 uint8_t dac;
519 union atom_supported_devices *supported_devices; 683 union atom_supported_devices *supported_devices;
520 int i, j; 684 int i, j, max_device;
521 struct bios_connector bios_connectors[ATOM_MAX_SUPPORTED_DEVICE]; 685 struct bios_connector bios_connectors[ATOM_MAX_SUPPORTED_DEVICE];
522 686
523 atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset); 687 if (!atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset))
688 return false;
524 689
525 supported_devices = 690 supported_devices =
526 (union atom_supported_devices *)(ctx->bios + data_offset); 691 (union atom_supported_devices *)(ctx->bios + data_offset);
527 692
528 device_support = le16_to_cpu(supported_devices->info.usDeviceSupport); 693 device_support = le16_to_cpu(supported_devices->info.usDeviceSupport);
529 694
530 for (i = 0; i < ATOM_MAX_SUPPORTED_DEVICE; i++) { 695 if (frev > 1)
696 max_device = ATOM_MAX_SUPPORTED_DEVICE;
697 else
698 max_device = ATOM_MAX_SUPPORTED_DEVICE_INFO;
699
700 for (i = 0; i < max_device; i++) {
531 ATOM_CONNECTOR_INFO_I2C ci = 701 ATOM_CONNECTOR_INFO_I2C ci =
532 supported_devices->info.asConnInfo[i]; 702 supported_devices->info.asConnInfo[i];
533 703
@@ -553,22 +723,8 @@ bool radeon_get_atom_connector_info_from_supported_devices_table(struct
553 723
554 dac = ci.sucConnectorInfo.sbfAccess.bfAssociatedDAC; 724 dac = ci.sucConnectorInfo.sbfAccess.bfAssociatedDAC;
555 725
556 if ((rdev->family == CHIP_RS690) || 726 bios_connectors[i].line_mux =
557 (rdev->family == CHIP_RS740)) { 727 ci.sucI2cId.ucAccess;
558 if ((i == ATOM_DEVICE_DFP2_INDEX)
559 && (ci.sucI2cId.sbfAccess.bfI2C_LineMux == 2))
560 bios_connectors[i].line_mux =
561 ci.sucI2cId.sbfAccess.bfI2C_LineMux + 1;
562 else if ((i == ATOM_DEVICE_DFP3_INDEX)
563 && (ci.sucI2cId.sbfAccess.bfI2C_LineMux == 1))
564 bios_connectors[i].line_mux =
565 ci.sucI2cId.sbfAccess.bfI2C_LineMux + 1;
566 else
567 bios_connectors[i].line_mux =
568 ci.sucI2cId.sbfAccess.bfI2C_LineMux;
569 } else
570 bios_connectors[i].line_mux =
571 ci.sucI2cId.sbfAccess.bfI2C_LineMux;
572 728
573 /* give tv unique connector ids */ 729 /* give tv unique connector ids */
574 if (i == ATOM_DEVICE_TV1_INDEX) { 730 if (i == ATOM_DEVICE_TV1_INDEX) {
@@ -582,8 +738,30 @@ bool radeon_get_atom_connector_info_from_supported_devices_table(struct
582 bios_connectors[i].line_mux = 52; 738 bios_connectors[i].line_mux = 52;
583 } else 739 } else
584 bios_connectors[i].ddc_bus = 740 bios_connectors[i].ddc_bus =
585 radeon_lookup_gpio(dev, 741 radeon_lookup_i2c_gpio(rdev,
586 bios_connectors[i].line_mux); 742 bios_connectors[i].line_mux);
743
744 if ((crev > 1) && (frev > 1)) {
745 u8 isb = supported_devices->info_2d1.asIntSrcInfo[i].ucIntSrcBitmap;
746 switch (isb) {
747 case 0x4:
748 bios_connectors[i].hpd.hpd = RADEON_HPD_1;
749 break;
750 case 0xa:
751 bios_connectors[i].hpd.hpd = RADEON_HPD_2;
752 break;
753 default:
754 bios_connectors[i].hpd.hpd = RADEON_HPD_NONE;
755 break;
756 }
757 } else {
758 if (i == ATOM_DEVICE_DFP1_INDEX)
759 bios_connectors[i].hpd.hpd = RADEON_HPD_1;
760 else if (i == ATOM_DEVICE_DFP2_INDEX)
761 bios_connectors[i].hpd.hpd = RADEON_HPD_2;
762 else
763 bios_connectors[i].hpd.hpd = RADEON_HPD_NONE;
764 }
587 765
588 /* Always set the connector type to VGA for CRT1/CRT2. if they are 766 /* Always set the connector type to VGA for CRT1/CRT2. if they are
589 * shared with a DVI port, we'll pick up the DVI connector when we 767 * shared with a DVI port, we'll pick up the DVI connector when we
@@ -595,7 +773,8 @@ bool radeon_get_atom_connector_info_from_supported_devices_table(struct
595 773
596 if (!radeon_atom_apply_quirks 774 if (!radeon_atom_apply_quirks
597 (dev, (1 << i), &bios_connectors[i].connector_type, 775 (dev, (1 << i), &bios_connectors[i].connector_type,
598 &bios_connectors[i].ddc_bus, &bios_connectors[i].line_mux)) 776 &bios_connectors[i].ddc_bus, &bios_connectors[i].line_mux,
777 &bios_connectors[i].hpd))
599 continue; 778 continue;
600 779
601 bios_connectors[i].valid = true; 780 bios_connectors[i].valid = true;
@@ -610,41 +789,42 @@ bool radeon_get_atom_connector_info_from_supported_devices_table(struct
610 else 789 else
611 radeon_add_legacy_encoder(dev, 790 radeon_add_legacy_encoder(dev,
612 radeon_get_encoder_id(dev, 791 radeon_get_encoder_id(dev,
613 (1 << 792 (1 << i),
614 i),
615 dac), 793 dac),
616 (1 << i)); 794 (1 << i));
617 } 795 }
618 796
619 /* combine shared connectors */ 797 /* combine shared connectors */
620 for (i = 0; i < ATOM_MAX_SUPPORTED_DEVICE; i++) { 798 for (i = 0; i < max_device; i++) {
621 if (bios_connectors[i].valid) { 799 if (bios_connectors[i].valid) {
622 for (j = 0; j < ATOM_MAX_SUPPORTED_DEVICE; j++) { 800 for (j = 0; j < max_device; j++) {
623 if (bios_connectors[j].valid && (i != j)) { 801 if (bios_connectors[j].valid && (i != j)) {
624 if (bios_connectors[i].line_mux == 802 if (bios_connectors[i].line_mux ==
625 bios_connectors[j].line_mux) { 803 bios_connectors[j].line_mux) {
626 if (((bios_connectors[i]. 804 /* make sure not to combine LVDS */
627 devices & 805 if (bios_connectors[i].devices & (ATOM_DEVICE_LCD_SUPPORT)) {
628 (ATOM_DEVICE_DFP_SUPPORT)) 806 bios_connectors[i].line_mux = 53;
629 && (bios_connectors[j]. 807 bios_connectors[i].ddc_bus.valid = false;
630 devices & 808 continue;
631 (ATOM_DEVICE_CRT_SUPPORT))) 809 }
632 || 810 if (bios_connectors[j].devices & (ATOM_DEVICE_LCD_SUPPORT)) {
633 ((bios_connectors[j]. 811 bios_connectors[j].line_mux = 53;
634 devices & 812 bios_connectors[j].ddc_bus.valid = false;
635 (ATOM_DEVICE_DFP_SUPPORT)) 813 continue;
636 && (bios_connectors[i]. 814 }
637 devices & 815 /* combine analog and digital for DVI-I */
638 (ATOM_DEVICE_CRT_SUPPORT)))) { 816 if (((bios_connectors[i].devices & (ATOM_DEVICE_DFP_SUPPORT)) &&
639 bios_connectors[i]. 817 (bios_connectors[j].devices & (ATOM_DEVICE_CRT_SUPPORT))) ||
640 devices |= 818 ((bios_connectors[j].devices & (ATOM_DEVICE_DFP_SUPPORT)) &&
641 bios_connectors[j]. 819 (bios_connectors[i].devices & (ATOM_DEVICE_CRT_SUPPORT)))) {
642 devices; 820 bios_connectors[i].devices |=
643 bios_connectors[i]. 821 bios_connectors[j].devices;
644 connector_type = 822 bios_connectors[i].connector_type =
645 DRM_MODE_CONNECTOR_DVII; 823 DRM_MODE_CONNECTOR_DVII;
646 bios_connectors[j]. 824 if (bios_connectors[j].devices & (ATOM_DEVICE_DFP_SUPPORT))
647 valid = false; 825 bios_connectors[i].hpd =
826 bios_connectors[j].hpd;
827 bios_connectors[j].valid = false;
648 } 828 }
649 } 829 }
650 } 830 }
@@ -653,7 +833,7 @@ bool radeon_get_atom_connector_info_from_supported_devices_table(struct
653 } 833 }
654 834
655 /* add the connectors */ 835 /* add the connectors */
656 for (i = 0; i < ATOM_MAX_SUPPORTED_DEVICE; i++) { 836 for (i = 0; i < max_device; i++) {
657 if (bios_connectors[i].valid) { 837 if (bios_connectors[i].valid) {
658 uint16_t connector_object_id = 838 uint16_t connector_object_id =
659 atombios_get_connector_object_id(dev, 839 atombios_get_connector_object_id(dev,
@@ -666,7 +846,8 @@ bool radeon_get_atom_connector_info_from_supported_devices_table(struct
666 connector_type, 846 connector_type,
667 &bios_connectors[i].ddc_bus, 847 &bios_connectors[i].ddc_bus,
668 false, 0, 848 false, 0,
669 connector_object_id); 849 connector_object_id,
850 &bios_connectors[i].hpd);
670 } 851 }
671 } 852 }
672 853
@@ -680,6 +861,7 @@ union firmware_info {
680 ATOM_FIRMWARE_INFO_V1_2 info_12; 861 ATOM_FIRMWARE_INFO_V1_2 info_12;
681 ATOM_FIRMWARE_INFO_V1_3 info_13; 862 ATOM_FIRMWARE_INFO_V1_3 info_13;
682 ATOM_FIRMWARE_INFO_V1_4 info_14; 863 ATOM_FIRMWARE_INFO_V1_4 info_14;
864 ATOM_FIRMWARE_INFO_V2_1 info_21;
683}; 865};
684 866
685bool radeon_atom_get_clock_info(struct drm_device *dev) 867bool radeon_atom_get_clock_info(struct drm_device *dev)
@@ -691,18 +873,16 @@ bool radeon_atom_get_clock_info(struct drm_device *dev)
691 uint8_t frev, crev; 873 uint8_t frev, crev;
692 struct radeon_pll *p1pll = &rdev->clock.p1pll; 874 struct radeon_pll *p1pll = &rdev->clock.p1pll;
693 struct radeon_pll *p2pll = &rdev->clock.p2pll; 875 struct radeon_pll *p2pll = &rdev->clock.p2pll;
876 struct radeon_pll *dcpll = &rdev->clock.dcpll;
694 struct radeon_pll *spll = &rdev->clock.spll; 877 struct radeon_pll *spll = &rdev->clock.spll;
695 struct radeon_pll *mpll = &rdev->clock.mpll; 878 struct radeon_pll *mpll = &rdev->clock.mpll;
696 uint16_t data_offset; 879 uint16_t data_offset;
697 880
698 atom_parse_data_header(mode_info->atom_context, index, NULL, &frev, 881 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
699 &crev, &data_offset); 882 &frev, &crev, &data_offset)) {
700 883 firmware_info =
701 firmware_info = 884 (union firmware_info *)(mode_info->atom_context->bios +
702 (union firmware_info *)(mode_info->atom_context->bios + 885 data_offset);
703 data_offset);
704
705 if (firmware_info) {
706 /* pixel clocks */ 886 /* pixel clocks */
707 p1pll->reference_freq = 887 p1pll->reference_freq =
708 le16_to_cpu(firmware_info->info.usReferenceClock); 888 le16_to_cpu(firmware_info->info.usReferenceClock);
@@ -717,6 +897,20 @@ bool radeon_atom_get_clock_info(struct drm_device *dev)
717 p1pll->pll_out_max = 897 p1pll->pll_out_max =
718 le32_to_cpu(firmware_info->info.ulMaxPixelClockPLL_Output); 898 le32_to_cpu(firmware_info->info.ulMaxPixelClockPLL_Output);
719 899
900 if (crev >= 4) {
901 p1pll->lcd_pll_out_min =
902 le16_to_cpu(firmware_info->info_14.usLcdMinPixelClockPLL_Output) * 100;
903 if (p1pll->lcd_pll_out_min == 0)
904 p1pll->lcd_pll_out_min = p1pll->pll_out_min;
905 p1pll->lcd_pll_out_max =
906 le16_to_cpu(firmware_info->info_14.usLcdMaxPixelClockPLL_Output) * 100;
907 if (p1pll->lcd_pll_out_max == 0)
908 p1pll->lcd_pll_out_max = p1pll->pll_out_max;
909 } else {
910 p1pll->lcd_pll_out_min = p1pll->pll_out_min;
911 p1pll->lcd_pll_out_max = p1pll->pll_out_max;
912 }
913
720 if (p1pll->pll_out_min == 0) { 914 if (p1pll->pll_out_min == 0) {
721 if (ASIC_IS_AVIVO(rdev)) 915 if (ASIC_IS_AVIVO(rdev))
722 p1pll->pll_out_min = 64800; 916 p1pll->pll_out_min = 64800;
@@ -731,7 +925,8 @@ bool radeon_atom_get_clock_info(struct drm_device *dev)
731 * pre-DCE 3.0 r6xx hardware. This might need to be adjusted per 925 * pre-DCE 3.0 r6xx hardware. This might need to be adjusted per
732 * family. 926 * family.
733 */ 927 */
734 p1pll->pll_out_min = 64800; 928 if (!radeon_new_pll)
929 p1pll->pll_out_min = 64800;
735 } 930 }
736 931
737 p1pll->pll_in_min = 932 p1pll->pll_in_min =
@@ -792,8 +987,53 @@ bool radeon_atom_get_clock_info(struct drm_device *dev)
792 rdev->clock.default_mclk = 987 rdev->clock.default_mclk =
793 le32_to_cpu(firmware_info->info.ulDefaultMemoryClock); 988 le32_to_cpu(firmware_info->info.ulDefaultMemoryClock);
794 989
990 if (ASIC_IS_DCE4(rdev)) {
991 rdev->clock.default_dispclk =
992 le32_to_cpu(firmware_info->info_21.ulDefaultDispEngineClkFreq);
993 if (rdev->clock.default_dispclk == 0)
994 rdev->clock.default_dispclk = 60000; /* 600 Mhz */
995 rdev->clock.dp_extclk =
996 le16_to_cpu(firmware_info->info_21.usUniphyDPModeExtClkFreq);
997 }
998 *dcpll = *p1pll;
999
795 return true; 1000 return true;
796 } 1001 }
1002
1003 return false;
1004}
1005
1006union igp_info {
1007 struct _ATOM_INTEGRATED_SYSTEM_INFO info;
1008 struct _ATOM_INTEGRATED_SYSTEM_INFO_V2 info_2;
1009};
1010
1011bool radeon_atombios_sideport_present(struct radeon_device *rdev)
1012{
1013 struct radeon_mode_info *mode_info = &rdev->mode_info;
1014 int index = GetIndexIntoMasterTable(DATA, IntegratedSystemInfo);
1015 union igp_info *igp_info;
1016 u8 frev, crev;
1017 u16 data_offset;
1018
1019 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
1020 &frev, &crev, &data_offset)) {
1021 igp_info = (union igp_info *)(mode_info->atom_context->bios +
1022 data_offset);
1023 switch (crev) {
1024 case 1:
1025 if (igp_info->info.ucMemoryType & 0xf0)
1026 return true;
1027 break;
1028 case 2:
1029 if (igp_info->info_2.ucMemoryType & 0x0f)
1030 return true;
1031 break;
1032 default:
1033 DRM_ERROR("Unsupported IGP table: %d %d\n", frev, crev);
1034 break;
1035 }
1036 }
797 return false; 1037 return false;
798} 1038}
799 1039
@@ -810,14 +1050,12 @@ bool radeon_atombios_get_tmds_info(struct radeon_encoder *encoder,
810 uint16_t maxfreq; 1050 uint16_t maxfreq;
811 int i; 1051 int i;
812 1052
813 atom_parse_data_header(mode_info->atom_context, index, NULL, &frev, 1053 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
814 &crev, &data_offset); 1054 &frev, &crev, &data_offset)) {
1055 tmds_info =
1056 (struct _ATOM_TMDS_INFO *)(mode_info->atom_context->bios +
1057 data_offset);
815 1058
816 tmds_info =
817 (struct _ATOM_TMDS_INFO *)(mode_info->atom_context->bios +
818 data_offset);
819
820 if (tmds_info) {
821 maxfreq = le16_to_cpu(tmds_info->usMaxFrequency); 1059 maxfreq = le16_to_cpu(tmds_info->usMaxFrequency);
822 for (i = 0; i < 4; i++) { 1060 for (i = 0; i < 4; i++) {
823 tmds->tmds_pll[i].freq = 1061 tmds->tmds_pll[i].freq =
@@ -861,29 +1099,34 @@ static struct radeon_atom_ss *radeon_atombios_get_ss_info(struct
861 struct _ATOM_SPREAD_SPECTRUM_INFO *ss_info; 1099 struct _ATOM_SPREAD_SPECTRUM_INFO *ss_info;
862 uint8_t frev, crev; 1100 uint8_t frev, crev;
863 struct radeon_atom_ss *ss = NULL; 1101 struct radeon_atom_ss *ss = NULL;
1102 int i;
864 1103
865 if (id > ATOM_MAX_SS_ENTRY) 1104 if (id > ATOM_MAX_SS_ENTRY)
866 return NULL; 1105 return NULL;
867 1106
868 atom_parse_data_header(mode_info->atom_context, index, NULL, &frev, 1107 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
869 &crev, &data_offset); 1108 &frev, &crev, &data_offset)) {
870 1109 ss_info =
871 ss_info = 1110 (struct _ATOM_SPREAD_SPECTRUM_INFO *)(mode_info->atom_context->bios + data_offset);
872 (struct _ATOM_SPREAD_SPECTRUM_INFO *)(mode_info->atom_context->bios + data_offset);
873 1111
874 if (ss_info) {
875 ss = 1112 ss =
876 kzalloc(sizeof(struct radeon_atom_ss), GFP_KERNEL); 1113 kzalloc(sizeof(struct radeon_atom_ss), GFP_KERNEL);
877 1114
878 if (!ss) 1115 if (!ss)
879 return NULL; 1116 return NULL;
880 1117
881 ss->percentage = le16_to_cpu(ss_info->asSS_Info[id].usSpreadSpectrumPercentage); 1118 for (i = 0; i < ATOM_MAX_SS_ENTRY; i++) {
882 ss->type = ss_info->asSS_Info[id].ucSpreadSpectrumType; 1119 if (ss_info->asSS_Info[i].ucSS_Id == id) {
883 ss->step = ss_info->asSS_Info[id].ucSS_Step; 1120 ss->percentage =
884 ss->delay = ss_info->asSS_Info[id].ucSS_Delay; 1121 le16_to_cpu(ss_info->asSS_Info[i].usSpreadSpectrumPercentage);
885 ss->range = ss_info->asSS_Info[id].ucSS_Range; 1122 ss->type = ss_info->asSS_Info[i].ucSpreadSpectrumType;
886 ss->refdiv = ss_info->asSS_Info[id].ucRecommendedRef_Div; 1123 ss->step = ss_info->asSS_Info[i].ucSS_Step;
1124 ss->delay = ss_info->asSS_Info[i].ucSS_Delay;
1125 ss->range = ss_info->asSS_Info[i].ucSS_Range;
1126 ss->refdiv = ss_info->asSS_Info[i].ucRecommendedRef_Div;
1127 break;
1128 }
1129 }
887 } 1130 }
888 return ss; 1131 return ss;
889} 1132}
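
The spread-spectrum hunk stops using the requested id as a direct array index and instead walks the table for an entry whose ucSS_Id matches, copying its parameters only on a hit. A simplified lookup-by-id sketch over a plain array, with made-up field and type names:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct ss_entry {
            uint8_t id;             /* identifier stored in the table entry */
            uint16_t percentage;
    };

    /* Return the entry whose id field matches, or NULL; the id is not an
     * index into the array. */
    static const struct ss_entry *ss_lookup(const struct ss_entry *tab,
                                            size_t n, uint8_t id)
    {
            size_t i;

            for (i = 0; i < n; i++)
                    if (tab[i].id == id)
                            return &tab[i];
            return NULL;
    }

    int main(void)
    {
            const struct ss_entry tab[] = { { 3, 25 }, { 7, 50 } };
            const struct ss_entry *e = ss_lookup(tab, 2, 7);

            printf("%u\n", e ? e->percentage : 0);  /* prints 50 */
            return 0;
    }
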
@@ -901,18 +1144,15 @@ struct radeon_encoder_atom_dig *radeon_atombios_get_lvds_info(struct
901 struct radeon_device *rdev = dev->dev_private; 1144 struct radeon_device *rdev = dev->dev_private;
902 struct radeon_mode_info *mode_info = &rdev->mode_info; 1145 struct radeon_mode_info *mode_info = &rdev->mode_info;
903 int index = GetIndexIntoMasterTable(DATA, LVDS_Info); 1146 int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
904 uint16_t data_offset; 1147 uint16_t data_offset, misc;
905 union lvds_info *lvds_info; 1148 union lvds_info *lvds_info;
906 uint8_t frev, crev; 1149 uint8_t frev, crev;
907 struct radeon_encoder_atom_dig *lvds = NULL; 1150 struct radeon_encoder_atom_dig *lvds = NULL;
908 1151
909 atom_parse_data_header(mode_info->atom_context, index, NULL, &frev, 1152 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
910 &crev, &data_offset); 1153 &frev, &crev, &data_offset)) {
911 1154 lvds_info =
912 lvds_info = 1155 (union lvds_info *)(mode_info->atom_context->bios + data_offset);
913 (union lvds_info *)(mode_info->atom_context->bios + data_offset);
914
915 if (lvds_info) {
916 lvds = 1156 lvds =
917 kzalloc(sizeof(struct radeon_encoder_atom_dig), GFP_KERNEL); 1157 kzalloc(sizeof(struct radeon_encoder_atom_dig), GFP_KERNEL);
918 1158
@@ -940,11 +1180,36 @@ struct radeon_encoder_atom_dig *radeon_atombios_get_lvds_info(struct
940 lvds->panel_pwr_delay = 1180 lvds->panel_pwr_delay =
941 le16_to_cpu(lvds_info->info.usOffDelayInMs); 1181 le16_to_cpu(lvds_info->info.usOffDelayInMs);
942 lvds->lvds_misc = lvds_info->info.ucLVDS_Misc; 1182 lvds->lvds_misc = lvds_info->info.ucLVDS_Misc;
1183
1184 misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
1185 if (misc & ATOM_VSYNC_POLARITY)
1186 lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
1187 if (misc & ATOM_HSYNC_POLARITY)
1188 lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
1189 if (misc & ATOM_COMPOSITESYNC)
1190 lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
1191 if (misc & ATOM_INTERLACE)
1192 lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
1193 if (misc & ATOM_DOUBLE_CLOCK_MODE)
1194 lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
1195
943 /* set crtc values */ 1196 /* set crtc values */
944 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V); 1197 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
945 1198
946 lvds->ss = radeon_atombios_get_ss_info(encoder, lvds_info->info.ucSS_Id); 1199 lvds->ss = radeon_atombios_get_ss_info(encoder, lvds_info->info.ucSS_Id);
947 1200
1201 if (ASIC_IS_AVIVO(rdev)) {
1202 if (radeon_new_pll == 0)
1203 lvds->pll_algo = PLL_ALGO_LEGACY;
1204 else
1205 lvds->pll_algo = PLL_ALGO_NEW;
1206 } else {
1207 if (radeon_new_pll == 1)
1208 lvds->pll_algo = PLL_ALGO_NEW;
1209 else
1210 lvds->pll_algo = PLL_ALGO_LEGACY;
1211 }
1212
948 encoder->native_mode = lvds->native_mode; 1213 encoder->native_mode = lvds->native_mode;
949 } 1214 }
950 return lvds; 1215 return lvds;
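
The added LVDS code translates sync-polarity, interlace and double-clock bits from the LCD timing's misc word into the corresponding DRM mode flags, then picks a PLL algorithm from the radeon_new_pll parameter. A standalone sketch of the bit-to-flag translation only; the constants below are placeholders, not the real ATOM_* or DRM_MODE_FLAG_* values:

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder bit values; the real ATOM_* and DRM_MODE_FLAG_* constants
     * come from atombios.h and the DRM headers. */
    #define MISC_VSYNC_POLARITY  (1 << 0)
    #define MISC_HSYNC_POLARITY  (1 << 1)
    #define MISC_INTERLACE       (1 << 2)

    #define MODE_FLAG_NVSYNC     (1 << 0)
    #define MODE_FLAG_NHSYNC     (1 << 1)
    #define MODE_FLAG_INTERLACE  (1 << 2)

    static uint32_t misc_to_mode_flags(uint16_t misc)
    {
            uint32_t flags = 0;

            if (misc & MISC_VSYNC_POLARITY)
                    flags |= MODE_FLAG_NVSYNC;
            if (misc & MISC_HSYNC_POLARITY)
                    flags |= MODE_FLAG_NHSYNC;
            if (misc & MISC_INTERLACE)
                    flags |= MODE_FLAG_INTERLACE;
            return flags;
    }

    int main(void)
    {
            printf("0x%x\n", misc_to_mode_flags(MISC_HSYNC_POLARITY | MISC_INTERLACE));
            return 0;       /* prints 0x6 */
    }
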
@@ -963,11 +1228,11 @@ radeon_atombios_get_primary_dac_info(struct radeon_encoder *encoder)
963 uint8_t bg, dac; 1228 uint8_t bg, dac;
964 struct radeon_encoder_primary_dac *p_dac = NULL; 1229 struct radeon_encoder_primary_dac *p_dac = NULL;
965 1230
966 atom_parse_data_header(mode_info->atom_context, index, NULL, &frev, &crev, &data_offset); 1231 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
967 1232 &frev, &crev, &data_offset)) {
968 dac_info = (struct _COMPASSIONATE_DATA *)(mode_info->atom_context->bios + data_offset); 1233 dac_info = (struct _COMPASSIONATE_DATA *)
1234 (mode_info->atom_context->bios + data_offset);
969 1235
970 if (dac_info) {
971 p_dac = kzalloc(sizeof(struct radeon_encoder_primary_dac), GFP_KERNEL); 1236 p_dac = kzalloc(sizeof(struct radeon_encoder_primary_dac), GFP_KERNEL);
972 1237
973 if (!p_dac) 1238 if (!p_dac)
@@ -992,12 +1257,14 @@ bool radeon_atom_get_tv_timings(struct radeon_device *rdev, int index,
992 u8 frev, crev; 1257 u8 frev, crev;
993 u16 data_offset, misc; 1258 u16 data_offset, misc;
994 1259
995 atom_parse_data_header(mode_info->atom_context, data_index, NULL, &frev, &crev, &data_offset); 1260 if (!atom_parse_data_header(mode_info->atom_context, data_index, NULL,
1261 &frev, &crev, &data_offset))
1262 return false;
996 1263
997 switch (crev) { 1264 switch (crev) {
998 case 1: 1265 case 1:
999 tv_info = (ATOM_ANALOG_TV_INFO *)(mode_info->atom_context->bios + data_offset); 1266 tv_info = (ATOM_ANALOG_TV_INFO *)(mode_info->atom_context->bios + data_offset);
1000 if (index > MAX_SUPPORTED_TV_TIMING) 1267 if (index >= MAX_SUPPORTED_TV_TIMING)
1001 return false; 1268 return false;
1002 1269
1003 mode->crtc_htotal = le16_to_cpu(tv_info->aModeTimings[index].usCRTC_H_Total); 1270 mode->crtc_htotal = le16_to_cpu(tv_info->aModeTimings[index].usCRTC_H_Total);
@@ -1035,7 +1302,7 @@ bool radeon_atom_get_tv_timings(struct radeon_device *rdev, int index,
1035 break; 1302 break;
1036 case 2: 1303 case 2:
1037 tv_info_v1_2 = (ATOM_ANALOG_TV_INFO_V1_2 *)(mode_info->atom_context->bios + data_offset); 1304 tv_info_v1_2 = (ATOM_ANALOG_TV_INFO_V1_2 *)(mode_info->atom_context->bios + data_offset);
1038 if (index > MAX_SUPPORTED_TV_TIMING_V1_2) 1305 if (index >= MAX_SUPPORTED_TV_TIMING_V1_2)
1039 return false; 1306 return false;
1040 1307
1041 dtd_timings = &tv_info_v1_2->aModeTimings[index]; 1308 dtd_timings = &tv_info_v1_2->aModeTimings[index];
@@ -1074,6 +1341,64 @@ bool radeon_atom_get_tv_timings(struct radeon_device *rdev, int index,
1074 return true; 1341 return true;
1075} 1342}
1076 1343
1344enum radeon_tv_std
1345radeon_atombios_get_tv_info(struct radeon_device *rdev)
1346{
1347 struct radeon_mode_info *mode_info = &rdev->mode_info;
1348 int index = GetIndexIntoMasterTable(DATA, AnalogTV_Info);
1349 uint16_t data_offset;
1350 uint8_t frev, crev;
1351 struct _ATOM_ANALOG_TV_INFO *tv_info;
1352 enum radeon_tv_std tv_std = TV_STD_NTSC;
1353
1354 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
1355 &frev, &crev, &data_offset)) {
1356
1357 tv_info = (struct _ATOM_ANALOG_TV_INFO *)
1358 (mode_info->atom_context->bios + data_offset);
1359
1360 switch (tv_info->ucTV_BootUpDefaultStandard) {
1361 case ATOM_TV_NTSC:
1362 tv_std = TV_STD_NTSC;
1363 DRM_INFO("Default TV standard: NTSC\n");
1364 break;
1365 case ATOM_TV_NTSCJ:
1366 tv_std = TV_STD_NTSC_J;
1367 DRM_INFO("Default TV standard: NTSC-J\n");
1368 break;
1369 case ATOM_TV_PAL:
1370 tv_std = TV_STD_PAL;
1371 DRM_INFO("Default TV standard: PAL\n");
1372 break;
1373 case ATOM_TV_PALM:
1374 tv_std = TV_STD_PAL_M;
1375 DRM_INFO("Default TV standard: PAL-M\n");
1376 break;
1377 case ATOM_TV_PALN:
1378 tv_std = TV_STD_PAL_N;
1379 DRM_INFO("Default TV standard: PAL-N\n");
1380 break;
1381 case ATOM_TV_PALCN:
1382 tv_std = TV_STD_PAL_CN;
1383 DRM_INFO("Default TV standard: PAL-CN\n");
1384 break;
1385 case ATOM_TV_PAL60:
1386 tv_std = TV_STD_PAL_60;
1387 DRM_INFO("Default TV standard: PAL-60\n");
1388 break;
1389 case ATOM_TV_SECAM:
1390 tv_std = TV_STD_SECAM;
1391 DRM_INFO("Default TV standard: SECAM\n");
1392 break;
1393 default:
1394 tv_std = TV_STD_NTSC;
1395 DRM_INFO("Unknown TV standard; defaulting to NTSC\n");
1396 break;
1397 }
1398 }
1399 return tv_std;
1400}
1401
1077struct radeon_encoder_tv_dac * 1402struct radeon_encoder_tv_dac *
1078radeon_atombios_get_tv_dac_info(struct radeon_encoder *encoder) 1403radeon_atombios_get_tv_dac_info(struct radeon_encoder *encoder)
1079{ 1404{
@@ -1087,11 +1412,12 @@ radeon_atombios_get_tv_dac_info(struct radeon_encoder *encoder)
1087 uint8_t bg, dac; 1412 uint8_t bg, dac;
1088 struct radeon_encoder_tv_dac *tv_dac = NULL; 1413 struct radeon_encoder_tv_dac *tv_dac = NULL;
1089 1414
1090 atom_parse_data_header(mode_info->atom_context, index, NULL, &frev, &crev, &data_offset); 1415 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
1416 &frev, &crev, &data_offset)) {
1091 1417
1092 dac_info = (struct _COMPASSIONATE_DATA *)(mode_info->atom_context->bios + data_offset); 1418 dac_info = (struct _COMPASSIONATE_DATA *)
1419 (mode_info->atom_context->bios + data_offset);
1093 1420
1094 if (dac_info) {
1095 tv_dac = kzalloc(sizeof(struct radeon_encoder_tv_dac), GFP_KERNEL); 1421 tv_dac = kzalloc(sizeof(struct radeon_encoder_tv_dac), GFP_KERNEL);
1096 1422
1097 if (!tv_dac) 1423 if (!tv_dac)
@@ -1109,24 +1435,429 @@ radeon_atombios_get_tv_dac_info(struct radeon_encoder *encoder)
1109 dac = dac_info->ucDAC2_NTSC_DAC_Adjustment; 1435 dac = dac_info->ucDAC2_NTSC_DAC_Adjustment;
1110 tv_dac->ntsc_tvdac_adj = (bg << 16) | (dac << 20); 1436 tv_dac->ntsc_tvdac_adj = (bg << 16) | (dac << 20);
1111 1437
1438 tv_dac->tv_std = radeon_atombios_get_tv_info(rdev);
1112 } 1439 }
1113 return tv_dac; 1440 return tv_dac;
1114} 1441}
1115 1442
1116void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable) 1443static const char *thermal_controller_names[] = {
1444 "NONE",
1445 "LM63",
1446 "ADM1032",
1447 "ADM1030",
1448 "MUA6649",
1449 "LM64",
1450 "F75375",
1451 "ASC7512",
1452};
1453
1454static const char *pp_lib_thermal_controller_names[] = {
1455 "NONE",
1456 "LM63",
1457 "ADM1032",
1458 "ADM1030",
1459 "MUA6649",
1460 "LM64",
1461 "F75375",
1462 "RV6xx",
1463 "RV770",
1464 "ADT7473",
1465};
1466
1467union power_info {
1468 struct _ATOM_POWERPLAY_INFO info;
1469 struct _ATOM_POWERPLAY_INFO_V2 info_2;
1470 struct _ATOM_POWERPLAY_INFO_V3 info_3;
1471 struct _ATOM_PPLIB_POWERPLAYTABLE info_4;
1472};
1473
1474void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1117{ 1475{
1118 DYNAMIC_CLOCK_GATING_PS_ALLOCATION args; 1476 struct radeon_mode_info *mode_info = &rdev->mode_info;
1119 int index = GetIndexIntoMasterTable(COMMAND, DynamicClockGating); 1477 int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
1478 u16 data_offset;
1479 u8 frev, crev;
1480 u32 misc, misc2 = 0, sclk, mclk;
1481 union power_info *power_info;
1482 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
1483 struct _ATOM_PPLIB_STATE *power_state;
1484 int num_modes = 0, i, j;
1485 int state_index = 0, mode_index = 0;
1486 struct radeon_i2c_bus_rec i2c_bus;
1487
1488 rdev->pm.default_power_state = NULL;
1489
1490 if (atom_parse_data_header(mode_info->atom_context, index, NULL,
1491 &frev, &crev, &data_offset)) {
1492 power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
1493 if (frev < 4) {
1494 /* add the i2c bus for thermal/fan chip */
1495 if (power_info->info.ucOverdriveThermalController > 0) {
1496 DRM_INFO("Possible %s thermal controller at 0x%02x\n",
1497 thermal_controller_names[power_info->info.ucOverdriveThermalController],
1498 power_info->info.ucOverdriveControllerAddress >> 1);
1499 i2c_bus = radeon_lookup_i2c_gpio(rdev, power_info->info.ucOverdriveI2cLine);
1500 rdev->pm.i2c_bus = radeon_i2c_create(rdev->ddev, &i2c_bus, "Thermal");
1501 }
1502 num_modes = power_info->info.ucNumOfPowerModeEntries;
1503 if (num_modes > ATOM_MAX_NUMBEROF_POWER_BLOCK)
1504 num_modes = ATOM_MAX_NUMBEROF_POWER_BLOCK;
1505 for (i = 0; i < num_modes; i++) {
1506 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE;
1507 switch (frev) {
1508 case 1:
1509 rdev->pm.power_state[state_index].num_clock_modes = 1;
1510 rdev->pm.power_state[state_index].clock_info[0].mclk =
1511 le16_to_cpu(power_info->info.asPowerPlayInfo[i].usMemoryClock);
1512 rdev->pm.power_state[state_index].clock_info[0].sclk =
1513 le16_to_cpu(power_info->info.asPowerPlayInfo[i].usEngineClock);
1514 /* skip invalid modes */
1515 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) ||
1516 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0))
1517 continue;
1518 /* skip overclock modes for now */
1519 if ((rdev->pm.power_state[state_index].clock_info[0].mclk >
1520 rdev->clock.default_mclk + RADEON_MODE_OVERCLOCK_MARGIN) ||
1521 (rdev->pm.power_state[state_index].clock_info[0].sclk >
1522 rdev->clock.default_sclk + RADEON_MODE_OVERCLOCK_MARGIN))
1523 continue;
1524 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes =
1525 power_info->info.asPowerPlayInfo[i].ucNumPciELanes;
1526 misc = le32_to_cpu(power_info->info.asPowerPlayInfo[i].ulMiscInfo);
1527 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) {
1528 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1529 VOLTAGE_GPIO;
1530 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio =
1531 radeon_lookup_gpio(rdev,
1532 power_info->info.asPowerPlayInfo[i].ucVoltageDropIndex);
1533 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_ACTIVE_HIGH)
1534 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
1535 true;
1536 else
1537 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
1538 false;
1539 } else if (misc & ATOM_PM_MISCINFO_PROGRAM_VOLTAGE) {
1540 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1541 VOLTAGE_VDDC;
1542 rdev->pm.power_state[state_index].clock_info[0].voltage.vddc_id =
1543 power_info->info.asPowerPlayInfo[i].ucVoltageDropIndex;
1544 }
1545 /* order matters! */
1546 if (misc & ATOM_PM_MISCINFO_POWER_SAVING_MODE)
1547 rdev->pm.power_state[state_index].type =
1548 POWER_STATE_TYPE_POWERSAVE;
1549 if (misc & ATOM_PM_MISCINFO_DEFAULT_DC_STATE_ENTRY_TRUE)
1550 rdev->pm.power_state[state_index].type =
1551 POWER_STATE_TYPE_BATTERY;
1552 if (misc & ATOM_PM_MISCINFO_DEFAULT_LOW_DC_STATE_ENTRY_TRUE)
1553 rdev->pm.power_state[state_index].type =
1554 POWER_STATE_TYPE_BATTERY;
1555 if (misc & ATOM_PM_MISCINFO_LOAD_BALANCE_EN)
1556 rdev->pm.power_state[state_index].type =
1557 POWER_STATE_TYPE_BALANCED;
1558 if (misc & ATOM_PM_MISCINFO_3D_ACCELERATION_EN)
1559 rdev->pm.power_state[state_index].type =
1560 POWER_STATE_TYPE_PERFORMANCE;
1561 if (misc & ATOM_PM_MISCINFO_DRIVER_DEFAULT_MODE) {
1562 rdev->pm.power_state[state_index].type =
1563 POWER_STATE_TYPE_DEFAULT;
1564 rdev->pm.default_power_state = &rdev->pm.power_state[state_index];
1565 rdev->pm.power_state[state_index].default_clock_mode =
1566 &rdev->pm.power_state[state_index].clock_info[0];
1567 }
1568 state_index++;
1569 break;
1570 case 2:
1571 rdev->pm.power_state[state_index].num_clock_modes = 1;
1572 rdev->pm.power_state[state_index].clock_info[0].mclk =
1573 le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMemoryClock);
1574 rdev->pm.power_state[state_index].clock_info[0].sclk =
1575 le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulEngineClock);
1576 /* skip invalid modes */
1577 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) ||
1578 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0))
1579 continue;
1580 /* skip overclock modes for now */
1581 if ((rdev->pm.power_state[state_index].clock_info[0].mclk >
1582 rdev->clock.default_mclk + RADEON_MODE_OVERCLOCK_MARGIN) ||
1583 (rdev->pm.power_state[state_index].clock_info[0].sclk >
1584 rdev->clock.default_sclk + RADEON_MODE_OVERCLOCK_MARGIN))
1585 continue;
1586 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes =
1587 power_info->info_2.asPowerPlayInfo[i].ucNumPciELanes;
1588 misc = le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMiscInfo);
1589 misc2 = le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMiscInfo2);
1590 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) {
1591 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1592 VOLTAGE_GPIO;
1593 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio =
1594 radeon_lookup_gpio(rdev,
1595 power_info->info_2.asPowerPlayInfo[i].ucVoltageDropIndex);
1596 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_ACTIVE_HIGH)
1597 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
1598 true;
1599 else
1600 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
1601 false;
1602 } else if (misc & ATOM_PM_MISCINFO_PROGRAM_VOLTAGE) {
1603 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1604 VOLTAGE_VDDC;
1605 rdev->pm.power_state[state_index].clock_info[0].voltage.vddc_id =
1606 power_info->info_2.asPowerPlayInfo[i].ucVoltageDropIndex;
1607 }
1608 /* order matters! */
1609 if (misc & ATOM_PM_MISCINFO_POWER_SAVING_MODE)
1610 rdev->pm.power_state[state_index].type =
1611 POWER_STATE_TYPE_POWERSAVE;
1612 if (misc & ATOM_PM_MISCINFO_DEFAULT_DC_STATE_ENTRY_TRUE)
1613 rdev->pm.power_state[state_index].type =
1614 POWER_STATE_TYPE_BATTERY;
1615 if (misc & ATOM_PM_MISCINFO_DEFAULT_LOW_DC_STATE_ENTRY_TRUE)
1616 rdev->pm.power_state[state_index].type =
1617 POWER_STATE_TYPE_BATTERY;
1618 if (misc & ATOM_PM_MISCINFO_LOAD_BALANCE_EN)
1619 rdev->pm.power_state[state_index].type =
1620 POWER_STATE_TYPE_BALANCED;
1621 if (misc & ATOM_PM_MISCINFO_3D_ACCELERATION_EN)
1622 rdev->pm.power_state[state_index].type =
1623 POWER_STATE_TYPE_PERFORMANCE;
1624 if (misc2 & ATOM_PM_MISCINFO2_SYSTEM_AC_LITE_MODE)
1625 rdev->pm.power_state[state_index].type =
1626 POWER_STATE_TYPE_BALANCED;
1627 if (misc & ATOM_PM_MISCINFO_DRIVER_DEFAULT_MODE) {
1628 rdev->pm.power_state[state_index].type =
1629 POWER_STATE_TYPE_DEFAULT;
1630 rdev->pm.default_power_state = &rdev->pm.power_state[state_index];
1631 rdev->pm.power_state[state_index].default_clock_mode =
1632 &rdev->pm.power_state[state_index].clock_info[0];
1633 }
1634 state_index++;
1635 break;
1636 case 3:
1637 rdev->pm.power_state[state_index].num_clock_modes = 1;
1638 rdev->pm.power_state[state_index].clock_info[0].mclk =
1639 le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMemoryClock);
1640 rdev->pm.power_state[state_index].clock_info[0].sclk =
1641 le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulEngineClock);
1642 /* skip invalid modes */
1643 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) ||
1644 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0))
1645 continue;
1646 /* skip overclock modes for now */
1647 if ((rdev->pm.power_state[state_index].clock_info[0].mclk >
1648 rdev->clock.default_mclk + RADEON_MODE_OVERCLOCK_MARGIN) ||
1649 (rdev->pm.power_state[state_index].clock_info[0].sclk >
1650 rdev->clock.default_sclk + RADEON_MODE_OVERCLOCK_MARGIN))
1651 continue;
1652 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes =
1653 power_info->info_3.asPowerPlayInfo[i].ucNumPciELanes;
1654 misc = le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMiscInfo);
1655 misc2 = le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMiscInfo2);
1656 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) {
1657 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1658 VOLTAGE_GPIO;
1659 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio =
1660 radeon_lookup_gpio(rdev,
1661 power_info->info_3.asPowerPlayInfo[i].ucVoltageDropIndex);
1662 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_ACTIVE_HIGH)
1663 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
1664 true;
1665 else
1666 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
1667 false;
1668 } else if (misc & ATOM_PM_MISCINFO_PROGRAM_VOLTAGE) {
1669 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1670 VOLTAGE_VDDC;
1671 rdev->pm.power_state[state_index].clock_info[0].voltage.vddc_id =
1672 power_info->info_3.asPowerPlayInfo[i].ucVoltageDropIndex;
1673 if (misc2 & ATOM_PM_MISCINFO2_VDDCI_DYNAMIC_VOLTAGE_EN) {
1674 rdev->pm.power_state[state_index].clock_info[0].voltage.vddci_enabled =
1675 true;
1676 rdev->pm.power_state[state_index].clock_info[0].voltage.vddci_id =
1677 power_info->info_3.asPowerPlayInfo[i].ucVDDCI_VoltageDropIndex;
1678 }
1679 }
1680 /* order matters! */
1681 if (misc & ATOM_PM_MISCINFO_POWER_SAVING_MODE)
1682 rdev->pm.power_state[state_index].type =
1683 POWER_STATE_TYPE_POWERSAVE;
1684 if (misc & ATOM_PM_MISCINFO_DEFAULT_DC_STATE_ENTRY_TRUE)
1685 rdev->pm.power_state[state_index].type =
1686 POWER_STATE_TYPE_BATTERY;
1687 if (misc & ATOM_PM_MISCINFO_DEFAULT_LOW_DC_STATE_ENTRY_TRUE)
1688 rdev->pm.power_state[state_index].type =
1689 POWER_STATE_TYPE_BATTERY;
1690 if (misc & ATOM_PM_MISCINFO_LOAD_BALANCE_EN)
1691 rdev->pm.power_state[state_index].type =
1692 POWER_STATE_TYPE_BALANCED;
1693 if (misc & ATOM_PM_MISCINFO_3D_ACCELERATION_EN)
1694 rdev->pm.power_state[state_index].type =
1695 POWER_STATE_TYPE_PERFORMANCE;
1696 if (misc2 & ATOM_PM_MISCINFO2_SYSTEM_AC_LITE_MODE)
1697 rdev->pm.power_state[state_index].type =
1698 POWER_STATE_TYPE_BALANCED;
1699 if (misc & ATOM_PM_MISCINFO_DRIVER_DEFAULT_MODE) {
1700 rdev->pm.power_state[state_index].type =
1701 POWER_STATE_TYPE_DEFAULT;
1702 rdev->pm.default_power_state = &rdev->pm.power_state[state_index];
1703 rdev->pm.power_state[state_index].default_clock_mode =
1704 &rdev->pm.power_state[state_index].clock_info[0];
1705 }
1706 state_index++;
1707 break;
1708 }
1709 }
1710 } else if (frev == 4) {
1711 /* add the i2c bus for thermal/fan chip */
1712 /* no support for internal controller yet */
1713 if (power_info->info_4.sThermalController.ucType > 0) {
1714 if ((power_info->info_4.sThermalController.ucType == ATOM_PP_THERMALCONTROLLER_RV6xx) ||
1715 (power_info->info_4.sThermalController.ucType == ATOM_PP_THERMALCONTROLLER_RV770)) {
1716 DRM_INFO("Internal thermal controller %s fan control\n",
1717 (power_info->info_4.sThermalController.ucFanParameters &
1718 ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
1719 } else {
1720 DRM_INFO("Possible %s thermal controller at 0x%02x %s fan control\n",
1721 pp_lib_thermal_controller_names[power_info->info_4.sThermalController.ucType],
1722 power_info->info_4.sThermalController.ucI2cAddress >> 1,
1723 (power_info->info_4.sThermalController.ucFanParameters &
1724 ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
1725 i2c_bus = radeon_lookup_i2c_gpio(rdev, power_info->info_4.sThermalController.ucI2cLine);
1726 rdev->pm.i2c_bus = radeon_i2c_create(rdev->ddev, &i2c_bus, "Thermal");
1727 }
1728 }
1729 for (i = 0; i < power_info->info_4.ucNumStates; i++) {
1730 mode_index = 0;
1731 power_state = (struct _ATOM_PPLIB_STATE *)
1732 (mode_info->atom_context->bios +
1733 data_offset +
1734 le16_to_cpu(power_info->info_4.usStateArrayOffset) +
1735 i * power_info->info_4.ucStateEntrySize);
1736 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
1737 (mode_info->atom_context->bios +
1738 data_offset +
1739 le16_to_cpu(power_info->info_4.usNonClockInfoArrayOffset) +
1740 (power_state->ucNonClockStateIndex *
1741 power_info->info_4.ucNonClockSize));
1742 for (j = 0; j < (power_info->info_4.ucStateEntrySize - 1); j++) {
1743 if (rdev->flags & RADEON_IS_IGP) {
1744 struct _ATOM_PPLIB_RS780_CLOCK_INFO *clock_info =
1745 (struct _ATOM_PPLIB_RS780_CLOCK_INFO *)
1746 (mode_info->atom_context->bios +
1747 data_offset +
1748 le16_to_cpu(power_info->info_4.usClockInfoArrayOffset) +
1749 (power_state->ucClockStateIndices[j] *
1750 power_info->info_4.ucClockInfoSize));
1751 sclk = le16_to_cpu(clock_info->usLowEngineClockLow);
1752 sclk |= clock_info->ucLowEngineClockHigh << 16;
1753 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk;
1754 /* skip invalid modes */
1755 if (rdev->pm.power_state[state_index].clock_info[mode_index].sclk == 0)
1756 continue;
1757 /* skip overclock modes for now */
1758 if (rdev->pm.power_state[state_index].clock_info[mode_index].sclk >
1759 rdev->clock.default_sclk + RADEON_MODE_OVERCLOCK_MARGIN)
1760 continue;
1761 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type =
1762 VOLTAGE_SW;
1763 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage =
1764 clock_info->usVDDC;
1765 mode_index++;
1766 } else {
1767 struct _ATOM_PPLIB_R600_CLOCK_INFO *clock_info =
1768 (struct _ATOM_PPLIB_R600_CLOCK_INFO *)
1769 (mode_info->atom_context->bios +
1770 data_offset +
1771 le16_to_cpu(power_info->info_4.usClockInfoArrayOffset) +
1772 (power_state->ucClockStateIndices[j] *
1773 power_info->info_4.ucClockInfoSize));
1774 sclk = le16_to_cpu(clock_info->usEngineClockLow);
1775 sclk |= clock_info->ucEngineClockHigh << 16;
1776 mclk = le16_to_cpu(clock_info->usMemoryClockLow);
1777 mclk |= clock_info->ucMemoryClockHigh << 16;
1778 rdev->pm.power_state[state_index].clock_info[mode_index].mclk = mclk;
1779 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk;
1780 /* skip invalid modes */
1781 if ((rdev->pm.power_state[state_index].clock_info[mode_index].mclk == 0) ||
1782 (rdev->pm.power_state[state_index].clock_info[mode_index].sclk == 0))
1783 continue;
1784 /* skip overclock modes for now */
1785 if ((rdev->pm.power_state[state_index].clock_info[mode_index].mclk >
1786 rdev->clock.default_mclk + RADEON_MODE_OVERCLOCK_MARGIN) ||
1787 (rdev->pm.power_state[state_index].clock_info[mode_index].sclk >
1788 rdev->clock.default_sclk + RADEON_MODE_OVERCLOCK_MARGIN))
1789 continue;
1790 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type =
1791 VOLTAGE_SW;
1792 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage =
1793 clock_info->usVDDC;
1794 mode_index++;
1795 }
1796 }
1797 rdev->pm.power_state[state_index].num_clock_modes = mode_index;
1798 if (mode_index) {
1799 misc = le32_to_cpu(non_clock_info->ulCapsAndSettings);
1800 misc2 = le16_to_cpu(non_clock_info->usClassification);
1801 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes =
1802 ((misc & ATOM_PPLIB_PCIE_LINK_WIDTH_MASK) >>
1803 ATOM_PPLIB_PCIE_LINK_WIDTH_SHIFT) + 1;
1804 switch (misc2 & ATOM_PPLIB_CLASSIFICATION_UI_MASK) {
1805 case ATOM_PPLIB_CLASSIFICATION_UI_BATTERY:
1806 rdev->pm.power_state[state_index].type =
1807 POWER_STATE_TYPE_BATTERY;
1808 break;
1809 case ATOM_PPLIB_CLASSIFICATION_UI_BALANCED:
1810 rdev->pm.power_state[state_index].type =
1811 POWER_STATE_TYPE_BALANCED;
1812 break;
1813 case ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE:
1814 rdev->pm.power_state[state_index].type =
1815 POWER_STATE_TYPE_PERFORMANCE;
1816 break;
1817 }
1818 if (misc2 & ATOM_PPLIB_CLASSIFICATION_BOOT) {
1819 rdev->pm.power_state[state_index].type =
1820 POWER_STATE_TYPE_DEFAULT;
1821 rdev->pm.default_power_state = &rdev->pm.power_state[state_index];
1822 rdev->pm.power_state[state_index].default_clock_mode =
1823 &rdev->pm.power_state[state_index].clock_info[mode_index - 1];
1824 }
1825 state_index++;
1826 }
1827 }
1828 }
1829 } else {
1830 /* XXX figure out some good default low power mode for cards w/out power tables */
1831 }
1120 1832
1121 args.ucEnable = enable; 1833 if (rdev->pm.default_power_state == NULL) {
1834 /* add the default mode */
1835 rdev->pm.power_state[state_index].type =
1836 POWER_STATE_TYPE_DEFAULT;
1837 rdev->pm.power_state[state_index].num_clock_modes = 1;
1838 rdev->pm.power_state[state_index].clock_info[0].mclk = rdev->clock.default_mclk;
1839 rdev->pm.power_state[state_index].clock_info[0].sclk = rdev->clock.default_sclk;
1840 rdev->pm.power_state[state_index].default_clock_mode =
1841 &rdev->pm.power_state[state_index].clock_info[0];
1842 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE;
1843 if (rdev->asic->get_pcie_lanes)
1844 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes = radeon_get_pcie_lanes(rdev);
1845 else
1846 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes = 16;
1847 rdev->pm.default_power_state = &rdev->pm.power_state[state_index];
1848 state_index++;
1849 }
1850 rdev->pm.num_power_states = state_index;
1122 1851
1123 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 1852 rdev->pm.current_power_state = rdev->pm.default_power_state;
1853 rdev->pm.current_clock_mode =
1854 rdev->pm.default_power_state->default_clock_mode;
1124} 1855}
1125 1856
1126void radeon_atom_static_pwrmgt_setup(struct radeon_device *rdev, int enable) 1857void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable)
1127{ 1858{
1128 ENABLE_ASIC_STATIC_PWR_MGT_PS_ALLOCATION args; 1859 DYNAMIC_CLOCK_GATING_PS_ALLOCATION args;
1129 int index = GetIndexIntoMasterTable(COMMAND, EnableASIC_StaticPwrMgt); 1860 int index = GetIndexIntoMasterTable(COMMAND, DynamicClockGating);
1130 1861
1131 args.ucEnable = enable; 1862 args.ucEnable = enable;
1132 1863
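
Every power-state branch in the new radeon_atombios_get_power_modes() applies the same filter before keeping a clock mode: entries with a zero engine or memory clock are skipped, as is (for now) anything clocked above the BIOS defaults plus an overclock margin. A compact standalone sketch of that filter; the margin value and struct layout here are invented, not the driver's:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define OVERCLOCK_MARGIN 500    /* stand-in for RADEON_MODE_OVERCLOCK_MARGIN */

    struct clock_mode {
            uint32_t sclk;  /* engine clock */
            uint32_t mclk;  /* memory clock */
    };

    /* Accept a mode only if both clocks are non-zero and neither exceeds
     * its default clock by more than the margin. */
    static bool mode_is_usable(const struct clock_mode *m,
                               uint32_t default_sclk, uint32_t default_mclk)
    {
            if (m->sclk == 0 || m->mclk == 0)
                    return false;
            if (m->sclk > default_sclk + OVERCLOCK_MARGIN)
                    return false;
            if (m->mclk > default_mclk + OVERCLOCK_MARGIN)
                    return false;
            return true;
    }

    int main(void)
    {
            struct clock_mode ok = { 60000, 80000 }, oc = { 90000, 80000 };

            printf("%d %d\n", mode_is_usable(&ok, 60000, 80000),
                   mode_is_usable(&oc, 60000, 80000));  /* prints 1 0 */
            return 0;
    }
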
diff --git a/drivers/gpu/drm/radeon/radeon_atpx_handler.c b/drivers/gpu/drm/radeon/radeon_atpx_handler.c
new file mode 100644
index 000000000000..ed5dfe58f29c
--- /dev/null
+++ b/drivers/gpu/drm/radeon/radeon_atpx_handler.c
@@ -0,0 +1,258 @@
1/*
2 * Copyright (c) 2010 Red Hat Inc.
3 * Author : Dave Airlie <airlied@redhat.com>
4 *
5 * Licensed under GPLv2
6 *
7 * ATPX support for both Intel/ATI
8 */
9#include <linux/vga_switcheroo.h>
10#include <linux/slab.h>
11#include <acpi/acpi.h>
12#include <acpi/acpi_bus.h>
13#include <linux/pci.h>
14
15#define ATPX_VERSION 0
16#define ATPX_GPU_PWR 2
17#define ATPX_MUX_SELECT 3
18
19#define ATPX_INTEGRATED 0
20#define ATPX_DISCRETE 1
21
22#define ATPX_MUX_IGD 0
23#define ATPX_MUX_DISCRETE 1
24
25static struct radeon_atpx_priv {
26 bool atpx_detected;
27 /* handle for device - and atpx */
28 acpi_handle dhandle;
29 acpi_handle atpx_handle;
30 acpi_handle atrm_handle;
31} radeon_atpx_priv;
32
33/* retrieve the ROM in 4k blocks */
34static int radeon_atrm_call(acpi_handle atrm_handle, uint8_t *bios,
35 int offset, int len)
36{
37 acpi_status status;
38 union acpi_object atrm_arg_elements[2], *obj;
39 struct acpi_object_list atrm_arg;
40 struct acpi_buffer buffer = { ACPI_ALLOCATE_BUFFER, NULL};
41
42 atrm_arg.count = 2;
43 atrm_arg.pointer = &atrm_arg_elements[0];
44
45 atrm_arg_elements[0].type = ACPI_TYPE_INTEGER;
46 atrm_arg_elements[0].integer.value = offset;
47
48 atrm_arg_elements[1].type = ACPI_TYPE_INTEGER;
49 atrm_arg_elements[1].integer.value = len;
50
51 status = acpi_evaluate_object(atrm_handle, NULL, &atrm_arg, &buffer);
52 if (ACPI_FAILURE(status)) {
  53		printk("failed to evaluate ATRM, got %s\n", acpi_format_exception(status));
54 return -ENODEV;
55 }
56
57 obj = (union acpi_object *)buffer.pointer;
58 memcpy(bios+offset, obj->buffer.pointer, len);
59 kfree(buffer.pointer);
60 return len;
61}
62
63bool radeon_atrm_supported(struct pci_dev *pdev)
64{
65 /* get the discrete ROM only via ATRM */
66 if (!radeon_atpx_priv.atpx_detected)
67 return false;
68
69 if (radeon_atpx_priv.dhandle == DEVICE_ACPI_HANDLE(&pdev->dev))
70 return false;
71 return true;
72}
73
74
75int radeon_atrm_get_bios_chunk(uint8_t *bios, int offset, int len)
76{
77 return radeon_atrm_call(radeon_atpx_priv.atrm_handle, bios, offset, len);
78}
79
80static int radeon_atpx_get_version(acpi_handle handle)
81{
82 acpi_status status;
83 union acpi_object atpx_arg_elements[2], *obj;
84 struct acpi_object_list atpx_arg;
85 struct acpi_buffer buffer = { ACPI_ALLOCATE_BUFFER, NULL };
86
87 atpx_arg.count = 2;
88 atpx_arg.pointer = &atpx_arg_elements[0];
89
90 atpx_arg_elements[0].type = ACPI_TYPE_INTEGER;
91 atpx_arg_elements[0].integer.value = ATPX_VERSION;
92
93 atpx_arg_elements[1].type = ACPI_TYPE_INTEGER;
94 atpx_arg_elements[1].integer.value = ATPX_VERSION;
95
96 status = acpi_evaluate_object(handle, NULL, &atpx_arg, &buffer);
97 if (ACPI_FAILURE(status)) {
98 printk("%s: failed to call ATPX: %s\n", __func__, acpi_format_exception(status));
99 return -ENOSYS;
100 }
101 obj = (union acpi_object *)buffer.pointer;
102 if (obj && (obj->type == ACPI_TYPE_BUFFER))
103 printk(KERN_INFO "radeon atpx: version is %d\n", *((u8 *)(obj->buffer.pointer) + 2));
104 kfree(buffer.pointer);
105 return 0;
106}
107
108static int radeon_atpx_execute(acpi_handle handle, int cmd_id, u16 value)
109{
110 acpi_status status;
111 union acpi_object atpx_arg_elements[2];
112 struct acpi_object_list atpx_arg;
113 struct acpi_buffer buffer = { ACPI_ALLOCATE_BUFFER, NULL };
114 uint8_t buf[4] = {0};
115
116 if (!handle)
117 return -EINVAL;
118
119 atpx_arg.count = 2;
120 atpx_arg.pointer = &atpx_arg_elements[0];
121
122 atpx_arg_elements[0].type = ACPI_TYPE_INTEGER;
123 atpx_arg_elements[0].integer.value = cmd_id;
124
125 buf[2] = value & 0xff;
126 buf[3] = (value >> 8) & 0xff;
127
128 atpx_arg_elements[1].type = ACPI_TYPE_BUFFER;
129 atpx_arg_elements[1].buffer.length = 4;
130 atpx_arg_elements[1].buffer.pointer = buf;
131
132 status = acpi_evaluate_object(handle, NULL, &atpx_arg, &buffer);
133 if (ACPI_FAILURE(status)) {
134 printk("%s: failed to call ATPX: %s\n", __func__, acpi_format_exception(status));
135 return -ENOSYS;
136 }
137 kfree(buffer.pointer);
138
139 return 0;
140}
141
142static int radeon_atpx_set_discrete_state(acpi_handle handle, int state)
143{
144 return radeon_atpx_execute(handle, ATPX_GPU_PWR, state);
145}
146
147static int radeon_atpx_switch_mux(acpi_handle handle, int mux_id)
148{
149 return radeon_atpx_execute(handle, ATPX_MUX_SELECT, mux_id);
150}
151
152
153static int radeon_atpx_switchto(enum vga_switcheroo_client_id id)
154{
155 if (id == VGA_SWITCHEROO_IGD)
156 radeon_atpx_switch_mux(radeon_atpx_priv.atpx_handle, 0);
157 else
158 radeon_atpx_switch_mux(radeon_atpx_priv.atpx_handle, 1);
159 return 0;
160}
161
162static int radeon_atpx_power_state(enum vga_switcheroo_client_id id,
163 enum vga_switcheroo_state state)
164{
 165	/* on the W500, ACPI can't change the Intel GPU state */
166 if (id == VGA_SWITCHEROO_IGD)
167 return 0;
168
169 radeon_atpx_set_discrete_state(radeon_atpx_priv.atpx_handle, state);
170 return 0;
171}
172
173static bool radeon_atpx_pci_probe_handle(struct pci_dev *pdev)
174{
175 acpi_handle dhandle, atpx_handle, atrm_handle;
176 acpi_status status;
177
178 dhandle = DEVICE_ACPI_HANDLE(&pdev->dev);
179 if (!dhandle)
180 return false;
181
182 status = acpi_get_handle(dhandle, "ATPX", &atpx_handle);
183 if (ACPI_FAILURE(status))
184 return false;
185
186 status = acpi_get_handle(dhandle, "ATRM", &atrm_handle);
187 if (ACPI_FAILURE(status))
188 return false;
189
190 radeon_atpx_priv.dhandle = dhandle;
191 radeon_atpx_priv.atpx_handle = atpx_handle;
192 radeon_atpx_priv.atrm_handle = atrm_handle;
193 return true;
194}
195
196static int radeon_atpx_init(void)
197{
198 /* set up the ATPX handle */
199
200 radeon_atpx_get_version(radeon_atpx_priv.atpx_handle);
201 return 0;
202}
203
204static int radeon_atpx_get_client_id(struct pci_dev *pdev)
205{
206 if (radeon_atpx_priv.dhandle == DEVICE_ACPI_HANDLE(&pdev->dev))
207 return VGA_SWITCHEROO_IGD;
208 else
209 return VGA_SWITCHEROO_DIS;
210}
211
212static struct vga_switcheroo_handler radeon_atpx_handler = {
213 .switchto = radeon_atpx_switchto,
214 .power_state = radeon_atpx_power_state,
215 .init = radeon_atpx_init,
216 .get_client_id = radeon_atpx_get_client_id,
217};
218
219static bool radeon_atpx_detect(void)
220{
221 char acpi_method_name[255] = { 0 };
222 struct acpi_buffer buffer = {sizeof(acpi_method_name), acpi_method_name};
223 struct pci_dev *pdev = NULL;
224 bool has_atpx = false;
225 int vga_count = 0;
226
227 while ((pdev = pci_get_class(PCI_CLASS_DISPLAY_VGA << 8, pdev)) != NULL) {
228 vga_count++;
229
230 has_atpx |= (radeon_atpx_pci_probe_handle(pdev) == true);
231 }
232
233 if (has_atpx && vga_count == 2) {
234 acpi_get_name(radeon_atpx_priv.atpx_handle, ACPI_FULL_PATHNAME, &buffer);
235 printk(KERN_INFO "VGA switcheroo: detected switching method %s handle\n",
236 acpi_method_name);
237 radeon_atpx_priv.atpx_detected = true;
238 return true;
239 }
240 return false;
241}
242
243void radeon_register_atpx_handler(void)
244{
245 bool r;
246
247 /* detect if we have any ATPX + 2 VGA in the system */
248 r = radeon_atpx_detect();
249 if (!r)
250 return;
251
252 vga_switcheroo_register_handler(&radeon_atpx_handler);
253}
254
255void radeon_unregister_atpx_handler(void)
256{
257 vga_switcheroo_unregister_handler();
258}
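
radeon_atpx_execute() hands ATPX a 4-byte buffer with the 16-bit parameter stored little-endian at bytes 2 and 3. A standalone sketch of just that packing step, with the ACPI call itself left out:

    #include <stdint.h>
    #include <stdio.h>

    /* Pack a 16-bit ATPX parameter into the 4-byte argument buffer the way
     * the handler does: low byte at offset 2, high byte at offset 3. */
    static void atpx_pack_arg(uint8_t buf[4], uint16_t value)
    {
            buf[0] = 0;
            buf[1] = 0;
            buf[2] = value & 0xff;
            buf[3] = (value >> 8) & 0xff;
    }

    int main(void)
    {
            uint8_t buf[4];

            atpx_pack_arg(buf, 0x0102);
            printf("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]);
            return 0;       /* prints "00 00 02 01" */
    }
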
diff --git a/drivers/gpu/drm/radeon/radeon_benchmark.c b/drivers/gpu/drm/radeon/radeon_benchmark.c
index 10bd50a7db87..7932dc4d6b90 100644
--- a/drivers/gpu/drm/radeon/radeon_benchmark.c
+++ b/drivers/gpu/drm/radeon/radeon_benchmark.c
@@ -29,8 +29,8 @@
29void radeon_benchmark_move(struct radeon_device *rdev, unsigned bsize, 29void radeon_benchmark_move(struct radeon_device *rdev, unsigned bsize,
30 unsigned sdomain, unsigned ddomain) 30 unsigned sdomain, unsigned ddomain)
31{ 31{
32 struct radeon_object *dobj = NULL; 32 struct radeon_bo *dobj = NULL;
33 struct radeon_object *sobj = NULL; 33 struct radeon_bo *sobj = NULL;
34 struct radeon_fence *fence = NULL; 34 struct radeon_fence *fence = NULL;
35 uint64_t saddr, daddr; 35 uint64_t saddr, daddr;
36 unsigned long start_jiffies; 36 unsigned long start_jiffies;
@@ -41,47 +41,66 @@ void radeon_benchmark_move(struct radeon_device *rdev, unsigned bsize,
41 41
42 size = bsize; 42 size = bsize;
43 n = 1024; 43 n = 1024;
44 r = radeon_object_create(rdev, NULL, size, true, sdomain, false, &sobj); 44 r = radeon_bo_create(rdev, NULL, size, true, sdomain, &sobj);
45 if (r) { 45 if (r) {
46 goto out_cleanup; 46 goto out_cleanup;
47 } 47 }
48 r = radeon_object_pin(sobj, sdomain, &saddr); 48 r = radeon_bo_reserve(sobj, false);
49 if (unlikely(r != 0))
50 goto out_cleanup;
51 r = radeon_bo_pin(sobj, sdomain, &saddr);
52 radeon_bo_unreserve(sobj);
49 if (r) { 53 if (r) {
50 goto out_cleanup; 54 goto out_cleanup;
51 } 55 }
52 r = radeon_object_create(rdev, NULL, size, true, ddomain, false, &dobj); 56 r = radeon_bo_create(rdev, NULL, size, true, ddomain, &dobj);
53 if (r) { 57 if (r) {
54 goto out_cleanup; 58 goto out_cleanup;
55 } 59 }
56 r = radeon_object_pin(dobj, ddomain, &daddr); 60 r = radeon_bo_reserve(dobj, false);
61 if (unlikely(r != 0))
62 goto out_cleanup;
63 r = radeon_bo_pin(dobj, ddomain, &daddr);
64 radeon_bo_unreserve(dobj);
57 if (r) { 65 if (r) {
58 goto out_cleanup; 66 goto out_cleanup;
59 } 67 }
60 start_jiffies = jiffies; 68
61 for (i = 0; i < n; i++) { 69 /* r100 doesn't have dma engine so skip the test */
62 r = radeon_fence_create(rdev, &fence); 70 if (rdev->asic->copy_dma) {
63 if (r) { 71
64 goto out_cleanup; 72 start_jiffies = jiffies;
73 for (i = 0; i < n; i++) {
74 r = radeon_fence_create(rdev, &fence);
75 if (r) {
76 goto out_cleanup;
77 }
78
79 r = radeon_copy_dma(rdev, saddr, daddr,
80 size / RADEON_GPU_PAGE_SIZE, fence);
81
82 if (r) {
83 goto out_cleanup;
84 }
85 r = radeon_fence_wait(fence, false);
86 if (r) {
87 goto out_cleanup;
88 }
89 radeon_fence_unref(&fence);
65 } 90 }
66 r = radeon_copy_dma(rdev, saddr, daddr, size / RADEON_GPU_PAGE_SIZE, fence); 91 end_jiffies = jiffies;
67 if (r) { 92 time = end_jiffies - start_jiffies;
68 goto out_cleanup; 93 time = jiffies_to_msecs(time);
94 if (time > 0) {
95 i = ((n * size) >> 10) / time;
96 printk(KERN_INFO "radeon: dma %u bo moves of %ukb from"
97 " %d to %d in %lums (%ukb/ms %ukb/s %uM/s)\n",
98 n, size >> 10,
99 sdomain, ddomain, time,
100 i, i * 1000, (i * 1000) / 1024);
69 } 101 }
70 r = radeon_fence_wait(fence, false);
71 if (r) {
72 goto out_cleanup;
73 }
74 radeon_fence_unref(&fence);
75 }
76 end_jiffies = jiffies;
77 time = end_jiffies - start_jiffies;
78 time = jiffies_to_msecs(time);
79 if (time > 0) {
80 i = ((n * size) >> 10) / time;
81 printk(KERN_INFO "radeon: dma %u bo moves of %ukb from %d to %d"
82 " in %lums (%ukb/ms %ukb/s %uM/s)\n", n, size >> 10,
83 sdomain, ddomain, time, i, i * 1000, (i * 1000) / 1024);
84 } 102 }
103
85 start_jiffies = jiffies; 104 start_jiffies = jiffies;
86 for (i = 0; i < n; i++) { 105 for (i = 0; i < n; i++) {
87 r = radeon_fence_create(rdev, &fence); 106 r = radeon_fence_create(rdev, &fence);
@@ -109,12 +128,20 @@ void radeon_benchmark_move(struct radeon_device *rdev, unsigned bsize,
109 } 128 }
110out_cleanup: 129out_cleanup:
111 if (sobj) { 130 if (sobj) {
112 radeon_object_unpin(sobj); 131 r = radeon_bo_reserve(sobj, false);
113 radeon_object_unref(&sobj); 132 if (likely(r == 0)) {
133 radeon_bo_unpin(sobj);
134 radeon_bo_unreserve(sobj);
135 }
136 radeon_bo_unref(&sobj);
114 } 137 }
115 if (dobj) { 138 if (dobj) {
116 radeon_object_unpin(dobj); 139 r = radeon_bo_reserve(dobj, false);
117 radeon_object_unref(&dobj); 140 if (likely(r == 0)) {
141 radeon_bo_unpin(dobj);
142 radeon_bo_unreserve(dobj);
143 }
144 radeon_bo_unref(&dobj);
118 } 145 }
119 if (fence) { 146 if (fence) {
120 radeon_fence_unref(&fence); 147 radeon_fence_unref(&fence);
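
The benchmark's reporting line computes throughput as ((n * size) >> 10) / time: total bytes moved converted to KB, divided by elapsed milliseconds, then scaled to KB/s and MB/s. The same arithmetic as a standalone sketch with example numbers:

    #include <stdio.h>

    int main(void)
    {
            unsigned n = 1024;              /* number of copies */
            unsigned size = 1024 * 1024;    /* bytes per copy */
            unsigned long time_ms = 512;    /* elapsed time */
            unsigned kb_per_ms, kb_per_s, mb_per_s;

            /* total KB moved divided by elapsed ms, as in the benchmark printk */
            kb_per_ms = (unsigned)((((unsigned long long)n * size) >> 10) / time_ms);
            kb_per_s = kb_per_ms * 1000;
            mb_per_s = kb_per_s / 1024;

            /* 1024 copies of 1 MiB = 1048576 KB; over 512 ms that is 2048 KB/ms */
            printf("%u KB/ms, %u KB/s, %u MB/s\n", kb_per_ms, kb_per_s, mb_per_s);
            return 0;
    }
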
diff --git a/drivers/gpu/drm/radeon/radeon_bios.c b/drivers/gpu/drm/radeon/radeon_bios.c
index 906921740c60..8ad71f701316 100644
--- a/drivers/gpu/drm/radeon/radeon_bios.c
+++ b/drivers/gpu/drm/radeon/radeon_bios.c
@@ -30,6 +30,8 @@
30#include "radeon.h" 30#include "radeon.h"
31#include "atom.h" 31#include "atom.h"
32 32
33#include <linux/vga_switcheroo.h>
34#include <linux/slab.h>
33/* 35/*
34 * BIOS. 36 * BIOS.
35 */ 37 */
@@ -62,7 +64,7 @@ static bool igp_read_bios_from_vram(struct radeon_device *rdev)
62 iounmap(bios); 64 iounmap(bios);
63 return false; 65 return false;
64 } 66 }
65 memcpy(rdev->bios, bios, size); 67 memcpy_fromio(rdev->bios, bios, size);
66 iounmap(bios); 68 iounmap(bios);
67 return true; 69 return true;
68} 70}
@@ -93,6 +95,38 @@ static bool radeon_read_bios(struct radeon_device *rdev)
93 return true; 95 return true;
94} 96}
95 97
98/* ATRM is used to get the BIOS on the discrete cards in
99 * dual-gpu systems.
100 */
101static bool radeon_atrm_get_bios(struct radeon_device *rdev)
102{
103 int ret;
104 int size = 64 * 1024;
105 int i;
106
107 if (!radeon_atrm_supported(rdev->pdev))
108 return false;
109
110 rdev->bios = kmalloc(size, GFP_KERNEL);
111 if (!rdev->bios) {
112 DRM_ERROR("Unable to allocate bios\n");
113 return false;
114 }
115
116 for (i = 0; i < size / ATRM_BIOS_PAGE; i++) {
117 ret = radeon_atrm_get_bios_chunk(rdev->bios,
118 (i * ATRM_BIOS_PAGE),
119 ATRM_BIOS_PAGE);
120 if (ret <= 0)
121 break;
122 }
123
124 if (i == 0 || rdev->bios[0] != 0x55 || rdev->bios[1] != 0xaa) {
125 kfree(rdev->bios);
126 return false;
127 }
128 return true;
129}
96static bool r700_read_disabled_bios(struct radeon_device *rdev) 130static bool r700_read_disabled_bios(struct radeon_device *rdev)
97{ 131{
98 uint32_t viph_control; 132 uint32_t viph_control;
@@ -388,16 +422,16 @@ static bool radeon_read_disabled_bios(struct radeon_device *rdev)
388 return legacy_read_disabled_bios(rdev); 422 return legacy_read_disabled_bios(rdev);
389} 423}
390 424
425
391bool radeon_get_bios(struct radeon_device *rdev) 426bool radeon_get_bios(struct radeon_device *rdev)
392{ 427{
393 bool r; 428 bool r;
394 uint16_t tmp; 429 uint16_t tmp;
395 430
396 if (rdev->flags & RADEON_IS_IGP) { 431 r = radeon_atrm_get_bios(rdev);
432 if (r == false)
397 r = igp_read_bios_from_vram(rdev); 433 r = igp_read_bios_from_vram(rdev);
398 if (r == false) 434 if (r == false)
399 r = radeon_read_bios(rdev);
400 } else
401 r = radeon_read_bios(rdev); 435 r = radeon_read_bios(rdev);
402 if (r == false) { 436 if (r == false) {
403 r = radeon_read_disabled_bios(rdev); 437 r = radeon_read_disabled_bios(rdev);
@@ -408,6 +442,13 @@ bool radeon_get_bios(struct radeon_device *rdev)
408 return false; 442 return false;
409 } 443 }
410 if (rdev->bios[0] != 0x55 || rdev->bios[1] != 0xaa) { 444 if (rdev->bios[0] != 0x55 || rdev->bios[1] != 0xaa) {
445 printk("BIOS signature incorrect %x %x\n", rdev->bios[0], rdev->bios[1]);
446 goto free_bios;
447 }
448
449 tmp = RBIOS16(0x18);
450 if (RBIOS8(tmp + 0x14) != 0x0) {
451 DRM_INFO("Not an x86 BIOS ROM, not using.\n");
411 goto free_bios; 452 goto free_bios;
412 } 453 }
413 454
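
The checks added to radeon_get_bios() reject a ROM unless it begins with the 0x55 0xAA option-ROM signature and the PCI data structure, located via the 16-bit pointer at offset 0x18, reports code type 0 (an x86 image). A standalone sketch of those two tests on a raw byte buffer, assuming the same offsets:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Read a little-endian 16-bit word out of the ROM image. */
    static uint16_t rom16(const uint8_t *rom, size_t off)
    {
            return rom[off] | (rom[off + 1] << 8);
    }

    static bool rom_looks_usable(const uint8_t *rom, size_t len)
    {
            uint16_t pcir;

            if (len < 0x1a || rom[0] != 0x55 || rom[1] != 0xaa)
                    return false;           /* missing option-ROM signature */
            pcir = rom16(rom, 0x18);        /* offset of the PCI data structure */
            if (pcir + 0x15 > len)
                    return false;
            return rom[pcir + 0x14] == 0x0; /* code type 0 = x86 image */
    }

    int main(void)
    {
            uint8_t rom[0x60];

            memset(rom, 0, sizeof(rom));
            rom[0] = 0x55;
            rom[1] = 0xaa;
            rom[0x18] = 0x40;               /* PCI data structure at 0x40 */
            rom[0x40 + 0x14] = 0x00;        /* x86 code type */
            printf("%d\n", rom_looks_usable(rom, sizeof(rom)));     /* prints 1 */
            return 0;
    }
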
diff --git a/drivers/gpu/drm/radeon/radeon_clocks.c b/drivers/gpu/drm/radeon/radeon_clocks.c
index a81354167621..f64936cc4dd9 100644
--- a/drivers/gpu/drm/radeon/radeon_clocks.c
+++ b/drivers/gpu/drm/radeon/radeon_clocks.c
@@ -44,6 +44,10 @@ uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev)
44 44
45 ref_div = 45 ref_div =
46 RREG32_PLL(RADEON_M_SPLL_REF_FB_DIV) & RADEON_M_SPLL_REF_DIV_MASK; 46 RREG32_PLL(RADEON_M_SPLL_REF_FB_DIV) & RADEON_M_SPLL_REF_DIV_MASK;
47
48 if (ref_div == 0)
49 return 0;
50
47 sclk = fb_div / ref_div; 51 sclk = fb_div / ref_div;
48 52
49 post_div = RREG32_PLL(RADEON_SCLK_CNTL) & RADEON_SCLK_SRC_SEL_MASK; 53 post_div = RREG32_PLL(RADEON_SCLK_CNTL) & RADEON_SCLK_SRC_SEL_MASK;
@@ -52,13 +56,13 @@ uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev)
52 else if (post_div == 3) 56 else if (post_div == 3)
53 sclk >>= 2; 57 sclk >>= 2;
54 else if (post_div == 4) 58 else if (post_div == 4)
55 sclk >>= 4; 59 sclk >>= 3;
56 60
57 return sclk; 61 return sclk;
58} 62}
59 63
60/* 10 khz */ 64/* 10 khz */
61static uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev) 65uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev)
62{ 66{
63 struct radeon_pll *mpll = &rdev->clock.mpll; 67 struct radeon_pll *mpll = &rdev->clock.mpll;
64 uint32_t fb_div, ref_div, post_div, mclk; 68 uint32_t fb_div, ref_div, post_div, mclk;
@@ -70,6 +74,10 @@ static uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev)
70 74
71 ref_div = 75 ref_div =
72 RREG32_PLL(RADEON_M_SPLL_REF_FB_DIV) & RADEON_M_SPLL_REF_DIV_MASK; 76 RREG32_PLL(RADEON_M_SPLL_REF_FB_DIV) & RADEON_M_SPLL_REF_DIV_MASK;
77
78 if (ref_div == 0)
79 return 0;
80
73 mclk = fb_div / ref_div; 81 mclk = fb_div / ref_div;
74 82
75 post_div = RREG32_PLL(RADEON_MCLK_CNTL) & 0x7; 83 post_div = RREG32_PLL(RADEON_MCLK_CNTL) & 0x7;
@@ -78,7 +86,7 @@ static uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev)
78 else if (post_div == 3) 86 else if (post_div == 3)
79 mclk >>= 2; 87 mclk >>= 2;
80 else if (post_div == 4) 88 else if (post_div == 4)
81 mclk >>= 4; 89 mclk >>= 3;
82 90
83 return mclk; 91 return mclk;
84} 92}
@@ -88,6 +96,7 @@ void radeon_get_clock_info(struct drm_device *dev)
88 struct radeon_device *rdev = dev->dev_private; 96 struct radeon_device *rdev = dev->dev_private;
89 struct radeon_pll *p1pll = &rdev->clock.p1pll; 97 struct radeon_pll *p1pll = &rdev->clock.p1pll;
90 struct radeon_pll *p2pll = &rdev->clock.p2pll; 98 struct radeon_pll *p2pll = &rdev->clock.p2pll;
99 struct radeon_pll *dcpll = &rdev->clock.dcpll;
91 struct radeon_pll *spll = &rdev->clock.spll; 100 struct radeon_pll *spll = &rdev->clock.spll;
92 struct radeon_pll *mpll = &rdev->clock.mpll; 101 struct radeon_pll *mpll = &rdev->clock.mpll;
93 int ret; 102 int ret;
@@ -98,8 +107,19 @@ void radeon_get_clock_info(struct drm_device *dev)
98 ret = radeon_combios_get_clock_info(dev); 107 ret = radeon_combios_get_clock_info(dev);
99 108
100 if (ret) { 109 if (ret) {
101 if (p1pll->reference_div < 2) 110 if (p1pll->reference_div < 2) {
102 p1pll->reference_div = 12; 111 if (!ASIC_IS_AVIVO(rdev)) {
112 u32 tmp = RREG32_PLL(RADEON_PPLL_REF_DIV);
113 if (ASIC_IS_R300(rdev))
114 p1pll->reference_div =
115 (tmp & R300_PPLL_REF_DIV_ACC_MASK) >> R300_PPLL_REF_DIV_ACC_SHIFT;
116 else
117 p1pll->reference_div = tmp & RADEON_PPLL_REF_DIV_MASK;
118 if (p1pll->reference_div < 2)
119 p1pll->reference_div = 12;
120 } else
121 p1pll->reference_div = 12;
122 }
103 if (p2pll->reference_div < 2) 123 if (p2pll->reference_div < 2)
104 p2pll->reference_div = 12; 124 p2pll->reference_div = 12;
105 if (rdev->family < CHIP_RS600) { 125 if (rdev->family < CHIP_RS600) {
@@ -185,6 +205,17 @@ void radeon_get_clock_info(struct drm_device *dev)
185 p2pll->max_frac_feedback_div = 0; 205 p2pll->max_frac_feedback_div = 0;
186 } 206 }
187 207
208 /* dcpll is DCE4 only */
209 dcpll->min_post_div = 2;
210 dcpll->max_post_div = 0x7f;
211 dcpll->min_frac_feedback_div = 0;
212 dcpll->max_frac_feedback_div = 9;
213 dcpll->min_ref_div = 2;
214 dcpll->max_ref_div = 0x3ff;
215 dcpll->min_feedback_div = 4;
216 dcpll->max_feedback_div = 0xfff;
217 dcpll->best_vco = 0;
218
188 p1pll->min_ref_div = 2; 219 p1pll->min_ref_div = 2;
189 p1pll->max_ref_div = 0x3ff; 220 p1pll->max_ref_div = 0x3ff;
190 p1pll->min_feedback_div = 4; 221 p1pll->min_feedback_div = 4;
@@ -827,8 +858,10 @@ int radeon_static_clocks_init(struct drm_device *dev)
827 /* XXX make sure engine is idle */ 858 /* XXX make sure engine is idle */
828 859
829 if (radeon_dynclks != -1) { 860 if (radeon_dynclks != -1) {
830 if (radeon_dynclks) 861 if (radeon_dynclks) {
831 radeon_set_clock_gating(rdev, 1); 862 if (rdev->asic->set_clock_gating)
863 radeon_set_clock_gating(rdev, 1);
864 }
832 } 865 }
833 radeon_apply_clock_quirks(rdev); 866 radeon_apply_clock_quirks(rdev);
834 return 0; 867 return 0;
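
radeon_clocks.c gains two fixes in the legacy clock readback: a zero reference divider now returns early instead of dividing by zero, and the post-divider case for selector 4 shifts by 3 (divide by 8) rather than 4. A standalone sketch of the corrected decode; the helper and its parameters are hypothetical, standing in for the PLL register reads:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t legacy_decode_clock(uint32_t fb_div, uint32_t ref_div,
                                        uint32_t post_div_sel)
    {
            uint32_t clk;

            if (ref_div == 0)
                    return 0;       /* guard the divide-by-zero the patch avoids */

            clk = fb_div / ref_div;

            switch (post_div_sel) {
            case 2:
                    clk >>= 1;      /* /2 */
                    break;
            case 3:
                    clk >>= 2;      /* /4 */
                    break;
            case 4:
                    clk >>= 3;      /* /8 (was >>= 4 before the fix) */
                    break;
            default:
                    break;
            }
            return clk;
    }

    int main(void)
    {
            printf("%u\n", legacy_decode_clock(40000, 4, 4));       /* 40000/4/8 = 1250 */
            return 0;
    }
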
diff --git a/drivers/gpu/drm/radeon/radeon_combios.c b/drivers/gpu/drm/radeon/radeon_combios.c
index 5253cbf6db1f..37db8adb2748 100644
--- a/drivers/gpu/drm/radeon/radeon_combios.c
+++ b/drivers/gpu/drm/radeon/radeon_combios.c
@@ -50,7 +50,8 @@ radeon_add_legacy_connector(struct drm_device *dev,
50 uint32_t supported_device, 50 uint32_t supported_device,
51 int connector_type, 51 int connector_type,
52 struct radeon_i2c_bus_rec *i2c_bus, 52 struct radeon_i2c_bus_rec *i2c_bus,
53 uint16_t connector_object_id); 53 uint16_t connector_object_id,
54 struct radeon_hpd *hpd);
54 55
55/* from radeon_legacy_encoder.c */ 56/* from radeon_legacy_encoder.c */
56extern void 57extern void
@@ -149,6 +150,9 @@ static uint16_t combios_get_table_offset(struct drm_device *dev,
149 int rev; 150 int rev;
150 uint16_t offset = 0, check_offset; 151 uint16_t offset = 0, check_offset;
151 152
153 if (!rdev->bios)
154 return 0;
155
152 switch (table) { 156 switch (table) {
153 /* absolute offset tables */ 157 /* absolute offset tables */
154 case COMBIOS_ASIC_INIT_1_TABLE: 158 case COMBIOS_ASIC_INIT_1_TABLE:
@@ -442,39 +446,162 @@ static uint16_t combios_get_table_offset(struct drm_device *dev,
442 446
443} 447}
444 448
445struct radeon_i2c_bus_rec combios_setup_i2c_bus(int ddc_line) 449bool radeon_combios_check_hardcoded_edid(struct radeon_device *rdev)
450{
451 int edid_info;
452 struct edid *edid;
453 edid_info = combios_get_table_offset(rdev->ddev, COMBIOS_HARDCODED_EDID_TABLE);
454 if (!edid_info)
455 return false;
456
457 edid = kmalloc(EDID_LENGTH * (DRM_MAX_EDID_EXT_NUM + 1),
458 GFP_KERNEL);
459 if (edid == NULL)
460 return false;
461
462 memcpy((unsigned char *)edid,
463 (unsigned char *)(rdev->bios + edid_info), EDID_LENGTH);
464
465 if (!drm_edid_is_valid(edid)) {
466 kfree(edid);
467 return false;
468 }
469
470 rdev->mode_info.bios_hardcoded_edid = edid;
471 return true;
472}
473
474struct edid *
475radeon_combios_get_hardcoded_edid(struct radeon_device *rdev)
476{
477 if (rdev->mode_info.bios_hardcoded_edid)
478 return rdev->mode_info.bios_hardcoded_edid;
479 return NULL;
480}
481
482static struct radeon_i2c_bus_rec combios_setup_i2c_bus(struct radeon_device *rdev,
483 int ddc_line)
446{ 484{
447 struct radeon_i2c_bus_rec i2c; 485 struct radeon_i2c_bus_rec i2c;
448 486
449 i2c.mask_clk_mask = RADEON_GPIO_EN_1; 487 if (ddc_line == RADEON_GPIOPAD_MASK) {
450 i2c.mask_data_mask = RADEON_GPIO_EN_0; 488 i2c.mask_clk_reg = RADEON_GPIOPAD_MASK;
451 i2c.a_clk_mask = RADEON_GPIO_A_1; 489 i2c.mask_data_reg = RADEON_GPIOPAD_MASK;
452 i2c.a_data_mask = RADEON_GPIO_A_0; 490 i2c.a_clk_reg = RADEON_GPIOPAD_A;
453 i2c.put_clk_mask = RADEON_GPIO_EN_1; 491 i2c.a_data_reg = RADEON_GPIOPAD_A;
454 i2c.put_data_mask = RADEON_GPIO_EN_0; 492 i2c.en_clk_reg = RADEON_GPIOPAD_EN;
455 i2c.get_clk_mask = RADEON_GPIO_Y_1; 493 i2c.en_data_reg = RADEON_GPIOPAD_EN;
456 i2c.get_data_mask = RADEON_GPIO_Y_0; 494 i2c.y_clk_reg = RADEON_GPIOPAD_Y;
457 if ((ddc_line == RADEON_LCD_GPIO_MASK) || 495 i2c.y_data_reg = RADEON_GPIOPAD_Y;
458 (ddc_line == RADEON_MDGPIO_EN_REG)) { 496 } else if (ddc_line == RADEON_MDGPIO_MASK) {
459 i2c.mask_clk_reg = ddc_line; 497 i2c.mask_clk_reg = RADEON_MDGPIO_MASK;
460 i2c.mask_data_reg = ddc_line; 498 i2c.mask_data_reg = RADEON_MDGPIO_MASK;
461 i2c.a_clk_reg = ddc_line; 499 i2c.a_clk_reg = RADEON_MDGPIO_A;
462 i2c.a_data_reg = ddc_line; 500 i2c.a_data_reg = RADEON_MDGPIO_A;
463 i2c.put_clk_reg = ddc_line; 501 i2c.en_clk_reg = RADEON_MDGPIO_EN;
464 i2c.put_data_reg = ddc_line; 502 i2c.en_data_reg = RADEON_MDGPIO_EN;
465 i2c.get_clk_reg = ddc_line + 4; 503 i2c.y_clk_reg = RADEON_MDGPIO_Y;
466 i2c.get_data_reg = ddc_line + 4; 504 i2c.y_data_reg = RADEON_MDGPIO_Y;
467 } else { 505 } else {
506 i2c.mask_clk_mask = RADEON_GPIO_EN_1;
507 i2c.mask_data_mask = RADEON_GPIO_EN_0;
508 i2c.a_clk_mask = RADEON_GPIO_A_1;
509 i2c.a_data_mask = RADEON_GPIO_A_0;
510 i2c.en_clk_mask = RADEON_GPIO_EN_1;
511 i2c.en_data_mask = RADEON_GPIO_EN_0;
512 i2c.y_clk_mask = RADEON_GPIO_Y_1;
513 i2c.y_data_mask = RADEON_GPIO_Y_0;
514
468 i2c.mask_clk_reg = ddc_line; 515 i2c.mask_clk_reg = ddc_line;
469 i2c.mask_data_reg = ddc_line; 516 i2c.mask_data_reg = ddc_line;
470 i2c.a_clk_reg = ddc_line; 517 i2c.a_clk_reg = ddc_line;
471 i2c.a_data_reg = ddc_line; 518 i2c.a_data_reg = ddc_line;
472 i2c.put_clk_reg = ddc_line; 519 i2c.en_clk_reg = ddc_line;
473 i2c.put_data_reg = ddc_line; 520 i2c.en_data_reg = ddc_line;
474 i2c.get_clk_reg = ddc_line; 521 i2c.y_clk_reg = ddc_line;
475 i2c.get_data_reg = ddc_line; 522 i2c.y_data_reg = ddc_line;
476 } 523 }
477 524
525 switch (rdev->family) {
526 case CHIP_R100:
527 case CHIP_RV100:
528 case CHIP_RS100:
529 case CHIP_RV200:
530 case CHIP_RS200:
531 case CHIP_RS300:
532 switch (ddc_line) {
533 case RADEON_GPIO_DVI_DDC:
534 i2c.hw_capable = true;
535 break;
536 default:
537 i2c.hw_capable = false;
538 break;
539 }
540 break;
541 case CHIP_R200:
542 switch (ddc_line) {
543 case RADEON_GPIO_DVI_DDC:
544 case RADEON_GPIO_MONID:
545 i2c.hw_capable = true;
546 break;
547 default:
548 i2c.hw_capable = false;
549 break;
550 }
551 break;
552 case CHIP_RV250:
553 case CHIP_RV280:
554 switch (ddc_line) {
555 case RADEON_GPIO_VGA_DDC:
556 case RADEON_GPIO_DVI_DDC:
557 case RADEON_GPIO_CRT2_DDC:
558 i2c.hw_capable = true;
559 break;
560 default:
561 i2c.hw_capable = false;
562 break;
563 }
564 break;
565 case CHIP_R300:
566 case CHIP_R350:
567 switch (ddc_line) {
568 case RADEON_GPIO_VGA_DDC:
569 case RADEON_GPIO_DVI_DDC:
570 i2c.hw_capable = true;
571 break;
572 default:
573 i2c.hw_capable = false;
574 break;
575 }
576 break;
577 case CHIP_RV350:
578 case CHIP_RV380:
579 case CHIP_RS400:
580 case CHIP_RS480:
581 switch (ddc_line) {
582 case RADEON_GPIO_VGA_DDC:
583 case RADEON_GPIO_DVI_DDC:
584 i2c.hw_capable = true;
585 break;
586 case RADEON_GPIO_MONID:
587 /* hw i2c on RADEON_GPIO_MONID doesn't seem to work
588 * reliably on some pre-r4xx hardware; not sure why.
589 */
590 i2c.hw_capable = false;
591 break;
592 default:
593 i2c.hw_capable = false;
594 break;
595 }
596 break;
597 default:
598 i2c.hw_capable = false;
599 break;
600 }
601 i2c.mm_i2c = false;
602 i2c.i2c_id = 0;
603 i2c.hpd_id = 0;
604
478 if (ddc_line) 605 if (ddc_line)
479 i2c.valid = true; 606 i2c.valid = true;
480 else 607 else
@@ -494,9 +621,6 @@ bool radeon_combios_get_clock_info(struct drm_device *dev)
494 int8_t rev; 621 int8_t rev;
495 uint16_t sclk, mclk; 622 uint16_t sclk, mclk;
496 623
497 if (rdev->bios == NULL)
498 return NULL;
499
500 pll_info = combios_get_table_offset(dev, COMBIOS_PLL_INFO_TABLE); 624 pll_info = combios_get_table_offset(dev, COMBIOS_PLL_INFO_TABLE);
501 if (pll_info) { 625 if (pll_info) {
502 rev = RBIOS8(pll_info); 626 rev = RBIOS8(pll_info);
@@ -506,6 +630,8 @@ bool radeon_combios_get_clock_info(struct drm_device *dev)
506 p1pll->reference_div = RBIOS16(pll_info + 0x10); 630 p1pll->reference_div = RBIOS16(pll_info + 0x10);
507 p1pll->pll_out_min = RBIOS32(pll_info + 0x12); 631 p1pll->pll_out_min = RBIOS32(pll_info + 0x12);
508 p1pll->pll_out_max = RBIOS32(pll_info + 0x16); 632 p1pll->pll_out_max = RBIOS32(pll_info + 0x16);
633 p1pll->lcd_pll_out_min = p1pll->pll_out_min;
634 p1pll->lcd_pll_out_max = p1pll->pll_out_max;
509 635
510 if (rev > 9) { 636 if (rev > 9) {
511 p1pll->pll_in_min = RBIOS32(pll_info + 0x36); 637 p1pll->pll_in_min = RBIOS32(pll_info + 0x36);
@@ -562,6 +688,48 @@ bool radeon_combios_get_clock_info(struct drm_device *dev)
562 return false; 688 return false;
563} 689}
564 690
691bool radeon_combios_sideport_present(struct radeon_device *rdev)
692{
693 struct drm_device *dev = rdev->ddev;
694 u16 igp_info;
695
696 igp_info = combios_get_table_offset(dev, COMBIOS_INTEGRATED_SYSTEM_INFO_TABLE);
697
698 if (igp_info) {
699 if (RBIOS16(igp_info + 0x4))
700 return true;
701 }
702 return false;
703}
704
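
radeon_combios_sideport_present() above is typical of how these helpers consume the COMBIOS image: locate a table offset, then read little-endian fields relative to it with the RBIOS8/16/32 accessors. A stand-alone sketch of such accessors over an in-memory BIOS copy (the buffer and bounds handling are illustrative, not the kernel's macros):

#include <stdint.h>
#include <stdio.h>

/* Illustrative little-endian readers over a BIOS image held in memory. */
static uint8_t rbios8(const uint8_t *bios, uint16_t off)
{
	return bios[off];
}

static uint16_t rbios16(const uint8_t *bios, uint16_t off)
{
	return (uint16_t)(rbios8(bios, off) | (rbios8(bios, off + 1) << 8));
}

static uint32_t rbios32(const uint8_t *bios, uint16_t off)
{
	return rbios16(bios, off) | ((uint32_t)rbios16(bios, off + 2) << 16);
}

int main(void)
{
	/* Fake fragment of an integrated-system-info table: a non-zero
	 * 16-bit word at offset +0x4 would mean sideport memory is present. */
	uint8_t table[8] = { 0x01, 0x10, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00 };

	printf("sideport word: 0x%04x -> %s\n", rbios16(table, 0x4),
	       rbios16(table, 0x4) ? "present" : "absent");
	printf("32-bit read at 0: 0x%08x\n", rbios32(table, 0));
	return 0;
}
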
705static const uint32_t default_primarydac_adj[CHIP_LAST] = {
706 0x00000808, /* r100 */
707 0x00000808, /* rv100 */
708 0x00000808, /* rs100 */
709 0x00000808, /* rv200 */
710 0x00000808, /* rs200 */
711 0x00000808, /* r200 */
712 0x00000808, /* rv250 */
713 0x00000000, /* rs300 */
714 0x00000808, /* rv280 */
715 0x00000808, /* r300 */
716 0x00000808, /* r350 */
717 0x00000808, /* rv350 */
718 0x00000808, /* rv380 */
719 0x00000808, /* r420 */
720 0x00000808, /* r423 */
721 0x00000808, /* rv410 */
722 0x00000000, /* rs400 */
723 0x00000000, /* rs480 */
724};
725
726static void radeon_legacy_get_primary_dac_info_from_table(struct radeon_device *rdev,
727 struct radeon_encoder_primary_dac *p_dac)
728{
729 p_dac->ps2_pdac_adj = default_primarydac_adj[rdev->family];
730 return;
731}
732
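
For the primary DAC, the CRT info table supplies a background level and a DAC output level packed as (bg << 8) | dac; with this patch, an all-zero result now falls back to the per-family default table above instead of being accepted. A small sketch of the packing and fallback, with one made-up default value:

#include <stdint.h>
#include <stdio.h>

#define DEFAULT_PDAC_ADJ 0x00000808u /* per-family default, as in the table above */

/* bg and dac are 4-bit fields read from the CRT info table. */
static uint32_t pack_pdac_adj(uint8_t bg, uint8_t dac)
{
	return ((uint32_t)(bg & 0xf) << 8) | (dac & 0xf);
}

static uint32_t pdac_adj_or_default(uint8_t bg, uint8_t dac)
{
	uint32_t adj = pack_pdac_adj(bg, dac);

	/* "if the values are all zeros, use the table" */
	return adj ? adj : DEFAULT_PDAC_ADJ;
}

int main(void)
{
	printf("0x%08x\n", pdac_adj_or_default(0x8, 0x8)); /* from BIOS: 0x00000808 */
	printf("0x%08x\n", pdac_adj_or_default(0x0, 0x0)); /* falls back to default */
	return 0;
}
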
565struct radeon_encoder_primary_dac *radeon_combios_get_primary_dac_info(struct 733struct radeon_encoder_primary_dac *radeon_combios_get_primary_dac_info(struct
566 radeon_encoder 734 radeon_encoder
567 *encoder) 735 *encoder)
@@ -571,20 +739,17 @@ struct radeon_encoder_primary_dac *radeon_combios_get_primary_dac_info(struct
571 uint16_t dac_info; 739 uint16_t dac_info;
572 uint8_t rev, bg, dac; 740 uint8_t rev, bg, dac;
573 struct radeon_encoder_primary_dac *p_dac = NULL; 741 struct radeon_encoder_primary_dac *p_dac = NULL;
742 int found = 0;
574 743
575 if (rdev->bios == NULL) 744 p_dac = kzalloc(sizeof(struct radeon_encoder_primary_dac),
745 GFP_KERNEL);
746
747 if (!p_dac)
576 return NULL; 748 return NULL;
577 749
578 /* check CRT table */ 750 /* check CRT table */
579 dac_info = combios_get_table_offset(dev, COMBIOS_CRT_INFO_TABLE); 751 dac_info = combios_get_table_offset(dev, COMBIOS_CRT_INFO_TABLE);
580 if (dac_info) { 752 if (dac_info) {
581 p_dac =
582 kzalloc(sizeof(struct radeon_encoder_primary_dac),
583 GFP_KERNEL);
584
585 if (!p_dac)
586 return NULL;
587
588 rev = RBIOS8(dac_info) & 0x3; 753 rev = RBIOS8(dac_info) & 0x3;
589 if (rev < 2) { 754 if (rev < 2) {
590 bg = RBIOS8(dac_info + 0x2) & 0xf; 755 bg = RBIOS8(dac_info + 0x2) & 0xf;
@@ -595,17 +760,21 @@ struct radeon_encoder_primary_dac *radeon_combios_get_primary_dac_info(struct
595 dac = RBIOS8(dac_info + 0x3) & 0xf; 760 dac = RBIOS8(dac_info + 0x3) & 0xf;
596 p_dac->ps2_pdac_adj = (bg << 8) | (dac); 761 p_dac->ps2_pdac_adj = (bg << 8) | (dac);
597 } 762 }
598 763 /* if the values are all zeros, use the table */
764 if (p_dac->ps2_pdac_adj)
765 found = 1;
599 } 766 }
600 767
768 if (!found) /* fallback to defaults */
769 radeon_legacy_get_primary_dac_info_from_table(rdev, p_dac);
770
601 return p_dac; 771 return p_dac;
602} 772}
603 773
604static enum radeon_tv_std 774enum radeon_tv_std
605radeon_combios_get_tv_info(struct radeon_encoder *encoder) 775radeon_combios_get_tv_info(struct radeon_device *rdev)
606{ 776{
607 struct drm_device *dev = encoder->base.dev; 777 struct drm_device *dev = rdev->ddev;
608 struct radeon_device *rdev = dev->dev_private;
609 uint16_t tv_info; 778 uint16_t tv_info;
610 enum radeon_tv_std tv_std = TV_STD_NTSC; 779 enum radeon_tv_std tv_std = TV_STD_NTSC;
611 780
@@ -712,9 +881,6 @@ struct radeon_encoder_tv_dac *radeon_combios_get_tv_dac_info(struct
712 if (!tv_dac) 881 if (!tv_dac)
713 return NULL; 882 return NULL;
714 883
715 if (rdev->bios == NULL)
716 goto out;
717
718 /* first check TV table */ 884 /* first check TV table */
719 dac_info = combios_get_table_offset(dev, COMBIOS_TV_INFO_TABLE); 885 dac_info = combios_get_table_offset(dev, COMBIOS_TV_INFO_TABLE);
720 if (dac_info) { 886 if (dac_info) {
@@ -731,7 +897,9 @@ struct radeon_encoder_tv_dac *radeon_combios_get_tv_dac_info(struct
731 bg = RBIOS8(dac_info + 0x10) & 0xf; 897 bg = RBIOS8(dac_info + 0x10) & 0xf;
732 dac = RBIOS8(dac_info + 0x11) & 0xf; 898 dac = RBIOS8(dac_info + 0x11) & 0xf;
733 tv_dac->ntsc_tvdac_adj = (bg << 16) | (dac << 20); 899 tv_dac->ntsc_tvdac_adj = (bg << 16) | (dac << 20);
734 found = 1; 900 /* if the values are all zeros, use the table */
901 if (tv_dac->ps2_tvdac_adj)
902 found = 1;
735 } else if (rev > 1) { 903 } else if (rev > 1) {
736 bg = RBIOS8(dac_info + 0xc) & 0xf; 904 bg = RBIOS8(dac_info + 0xc) & 0xf;
737 dac = (RBIOS8(dac_info + 0xc) >> 4) & 0xf; 905 dac = (RBIOS8(dac_info + 0xc) >> 4) & 0xf;
@@ -744,9 +912,11 @@ struct radeon_encoder_tv_dac *radeon_combios_get_tv_dac_info(struct
744 bg = RBIOS8(dac_info + 0xe) & 0xf; 912 bg = RBIOS8(dac_info + 0xe) & 0xf;
745 dac = (RBIOS8(dac_info + 0xe) >> 4) & 0xf; 913 dac = (RBIOS8(dac_info + 0xe) >> 4) & 0xf;
746 tv_dac->ntsc_tvdac_adj = (bg << 16) | (dac << 20); 914 tv_dac->ntsc_tvdac_adj = (bg << 16) | (dac << 20);
747 found = 1; 915 /* if the values are all zeros, use the table */
916 if (tv_dac->ps2_tvdac_adj)
917 found = 1;
748 } 918 }
749 tv_dac->tv_std = radeon_combios_get_tv_info(encoder); 919 tv_dac->tv_std = radeon_combios_get_tv_info(rdev);
750 } 920 }
751 if (!found) { 921 if (!found) {
752 /* then check CRT table */ 922 /* then check CRT table */
@@ -761,7 +931,9 @@ struct radeon_encoder_tv_dac *radeon_combios_get_tv_dac_info(struct
761 (bg << 16) | (dac << 20); 931 (bg << 16) | (dac << 20);
762 tv_dac->pal_tvdac_adj = tv_dac->ps2_tvdac_adj; 932 tv_dac->pal_tvdac_adj = tv_dac->ps2_tvdac_adj;
763 tv_dac->ntsc_tvdac_adj = tv_dac->ps2_tvdac_adj; 933 tv_dac->ntsc_tvdac_adj = tv_dac->ps2_tvdac_adj;
764 found = 1; 934 /* if the values are all zeros, use the table */
935 if (tv_dac->ps2_tvdac_adj)
936 found = 1;
765 } else { 937 } else {
766 bg = RBIOS8(dac_info + 0x4) & 0xf; 938 bg = RBIOS8(dac_info + 0x4) & 0xf;
767 dac = RBIOS8(dac_info + 0x5) & 0xf; 939 dac = RBIOS8(dac_info + 0x5) & 0xf;
@@ -769,14 +941,15 @@ struct radeon_encoder_tv_dac *radeon_combios_get_tv_dac_info(struct
769 (bg << 16) | (dac << 20); 941 (bg << 16) | (dac << 20);
770 tv_dac->pal_tvdac_adj = tv_dac->ps2_tvdac_adj; 942 tv_dac->pal_tvdac_adj = tv_dac->ps2_tvdac_adj;
771 tv_dac->ntsc_tvdac_adj = tv_dac->ps2_tvdac_adj; 943 tv_dac->ntsc_tvdac_adj = tv_dac->ps2_tvdac_adj;
772 found = 1; 944 /* if the values are all zeros, use the table */
945 if (tv_dac->ps2_tvdac_adj)
946 found = 1;
773 } 947 }
774 } else { 948 } else {
775 DRM_INFO("No TV DAC info found in BIOS\n"); 949 DRM_INFO("No TV DAC info found in BIOS\n");
776 } 950 }
777 } 951 }
778 952
779out:
780 if (!found) /* fallback to defaults */ 953 if (!found) /* fallback to defaults */
781 radeon_legacy_get_tv_dac_info_from_table(rdev, tv_dac); 954 radeon_legacy_get_tv_dac_info_from_table(rdev, tv_dac);
782 955
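
The TV DAC variant packs its background/output levels into bits 16-23 rather than the low byte pair, and every branch above gains the same "all zeros means fall back to the table" rule. A sketch of that packing, with illustrative field values:

#include <stdint.h>
#include <stdio.h>

/* TV DAC adjustment: 4-bit bg level in bits 16-19, 4-bit dac level in 20-23,
 * matching the (bg << 16) | (dac << 20) expressions in the hunks above. */
static uint32_t pack_tvdac_adj(uint8_t bg, uint8_t dac)
{
	return ((uint32_t)(bg & 0xf) << 16) | ((uint32_t)(dac & 0xf) << 20);
}

int main(void)
{
	printf("ps2_tvdac_adj = 0x%08x\n", pack_tvdac_adj(0x6, 0x9));
	/* A zero result would now trigger the table-based defaults instead of
	 * being accepted as a valid BIOS value. */
	printf("all-zero case falls back: %s\n",
	       pack_tvdac_adj(0, 0) ? "no" : "yes");
	return 0;
}
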
@@ -864,11 +1037,6 @@ struct radeon_encoder_lvds *radeon_combios_get_lvds_info(struct radeon_encoder
864 int tmp, i; 1037 int tmp, i;
865 struct radeon_encoder_lvds *lvds = NULL; 1038 struct radeon_encoder_lvds *lvds = NULL;
866 1039
867 if (rdev->bios == NULL) {
868 lvds = radeon_legacy_get_lvds_info_from_regs(rdev);
869 goto out;
870 }
871
872 lcd_info = combios_get_table_offset(dev, COMBIOS_LCD_INFO_TABLE); 1040 lcd_info = combios_get_table_offset(dev, COMBIOS_LCD_INFO_TABLE);
873 1041
874 if (lcd_info) { 1042 if (lcd_info) {
@@ -890,8 +1058,7 @@ struct radeon_encoder_lvds *radeon_combios_get_lvds_info(struct radeon_encoder
890 lvds->native_mode.vdisplay); 1058 lvds->native_mode.vdisplay);
891 1059
892 lvds->panel_vcc_delay = RBIOS16(lcd_info + 0x2c); 1060 lvds->panel_vcc_delay = RBIOS16(lcd_info + 0x2c);
893 if (lvds->panel_vcc_delay > 2000 || lvds->panel_vcc_delay < 0) 1061 lvds->panel_vcc_delay = min_t(u16, lvds->panel_vcc_delay, 2000);
894 lvds->panel_vcc_delay = 2000;
895 1062
896 lvds->panel_pwr_delay = RBIOS8(lcd_info + 0x24); 1063 lvds->panel_pwr_delay = RBIOS8(lcd_info + 0x24);
897 lvds->panel_digon_delay = RBIOS16(lcd_info + 0x38) & 0xf; 1064 lvds->panel_digon_delay = RBIOS16(lcd_info + 0x38) & 0xf;
@@ -970,7 +1137,7 @@ struct radeon_encoder_lvds *radeon_combios_get_lvds_info(struct radeon_encoder
970 DRM_INFO("No panel info found in BIOS\n"); 1137 DRM_INFO("No panel info found in BIOS\n");
971 lvds = radeon_legacy_get_lvds_info_from_regs(rdev); 1138 lvds = radeon_legacy_get_lvds_info_from_regs(rdev);
972 } 1139 }
973out: 1140
974 if (lvds) 1141 if (lvds)
975 encoder->native_mode = lvds->native_mode; 1142 encoder->native_mode = lvds->native_mode;
976 return lvds; 1143 return lvds;
@@ -993,8 +1160,8 @@ static const struct radeon_tmds_pll default_tmds_pll[CHIP_LAST][4] = {
993 {{0xffffffff, 0xb01cb}, {0, 0}, {0, 0}, {0, 0}}, /* CHIP_R420 */ 1160 {{0xffffffff, 0xb01cb}, {0, 0}, {0, 0}, {0, 0}}, /* CHIP_R420 */
994 {{0xffffffff, 0xb01cb}, {0, 0}, {0, 0}, {0, 0}}, /* CHIP_R423 */ 1161 {{0xffffffff, 0xb01cb}, {0, 0}, {0, 0}, {0, 0}}, /* CHIP_R423 */
995 {{0xffffffff, 0xb01cb}, {0, 0}, {0, 0}, {0, 0}}, /* CHIP_RV410 */ 1162 {{0xffffffff, 0xb01cb}, {0, 0}, {0, 0}, {0, 0}}, /* CHIP_RV410 */
996 {{15000, 0xb0155}, {0xffffffff, 0xb01cb}, {0, 0}, {0, 0}}, /* CHIP_RS400 */ 1163 { {0, 0}, {0, 0}, {0, 0}, {0, 0} }, /* CHIP_RS400 */
997 {{15000, 0xb0155}, {0xffffffff, 0xb01cb}, {0, 0}, {0, 0}}, /* CHIP_RS480 */ 1164 { {0, 0}, {0, 0}, {0, 0}, {0, 0} }, /* CHIP_RS480 */
998}; 1165};
999 1166
1000bool radeon_legacy_get_tmds_info_from_table(struct radeon_encoder *encoder, 1167bool radeon_legacy_get_tmds_info_from_table(struct radeon_encoder *encoder,
@@ -1022,13 +1189,9 @@ bool radeon_legacy_get_tmds_info_from_combios(struct radeon_encoder *encoder,
1022 int i, n; 1189 int i, n;
1023 uint8_t ver; 1190 uint8_t ver;
1024 1191
1025 if (rdev->bios == NULL)
1026 return false;
1027
1028 tmds_info = combios_get_table_offset(dev, COMBIOS_DFP_INFO_TABLE); 1192 tmds_info = combios_get_table_offset(dev, COMBIOS_DFP_INFO_TABLE);
1029 1193
1030 if (tmds_info) { 1194 if (tmds_info) {
1031
1032 ver = RBIOS8(tmds_info); 1195 ver = RBIOS8(tmds_info);
1033 DRM_INFO("DFP table revision: %d\n", ver); 1196 DRM_INFO("DFP table revision: %d\n", ver);
1034 if (ver == 3) { 1197 if (ver == 3) {
@@ -1063,96 +1226,184 @@ bool radeon_legacy_get_tmds_info_from_combios(struct radeon_encoder *encoder,
1063 tmds->tmds_pll[i].value); 1226 tmds->tmds_pll[i].value);
1064 } 1227 }
1065 } 1228 }
1066 } else 1229 } else {
1067 DRM_INFO("No TMDS info found in BIOS\n"); 1230 DRM_INFO("No TMDS info found in BIOS\n");
1231 return false;
1232 }
1068 return true; 1233 return true;
1069} 1234}
1070 1235
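
Each internal-TMDS record is a small array of {frequency limit, PLL value} pairs, whether it comes from the default table above or from the DFP table parsed here. At mode-set time the driver essentially walks that array and takes the first entry whose limit covers the requested pixel clock; the following is a hedged sketch of that lookup with invented limits and simplified units, not the kernel's mode-set path:

#include <stdint.h>
#include <stdio.h>

struct tmds_pll {
	uint32_t freq;  /* upper pixel-clock limit for this setting; 0 = unused */
	uint32_t value; /* PLL register value to program */
};

/* Illustrative lookup: return the PLL value for the first entry whose
 * frequency limit is at or above the requested clock. */
static uint32_t tmds_pll_lookup(const struct tmds_pll pll[4], uint32_t clock)
{
	int i;

	for (i = 0; i < 4; i++) {
		if (pll[i].freq == 0)
			break;
		if (clock <= pll[i].freq)
			return pll[i].value;
	}
	return 0; /* no match: a caller would fall back to a default */
}

int main(void)
{
	const struct tmds_pll pll[4] = {
		{ 15000, 0xb0155 },      /* lower-clock setting (example limit) */
		{ 0xffffffff, 0xb01cb }, /* everything else */
		{ 0, 0 }, { 0, 0 },
	};

	printf("0x%x\n", tmds_pll_lookup(pll, 10800)); /* -> 0xb0155 */
	printf("0x%x\n", tmds_pll_lookup(pll, 20000)); /* -> 0xb01cb */
	return 0;
}
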
1071struct radeon_encoder_int_tmds *radeon_combios_get_tmds_info(struct radeon_encoder *encoder) 1236bool radeon_legacy_get_ext_tmds_info_from_table(struct radeon_encoder *encoder,
1237 struct radeon_encoder_ext_tmds *tmds)
1072{ 1238{
1073 struct radeon_encoder_int_tmds *tmds = NULL; 1239 struct drm_device *dev = encoder->base.dev;
1074 bool ret; 1240 struct radeon_device *rdev = dev->dev_private;
1075 1241 struct radeon_i2c_bus_rec i2c_bus;
1076 tmds = kzalloc(sizeof(struct radeon_encoder_int_tmds), GFP_KERNEL);
1077 1242
1078 if (!tmds) 1243 /* default for macs */
1079 return NULL; 1244 i2c_bus = combios_setup_i2c_bus(rdev, RADEON_GPIO_MONID);
1245 tmds->i2c_bus = radeon_i2c_create(dev, &i2c_bus, "DVO");
1080 1246
1081 ret = radeon_legacy_get_tmds_info_from_combios(encoder, tmds); 1247 /* XXX some macs have duallink chips */
1082 if (ret == false) 1248 switch (rdev->mode_info.connector_table) {
1083 radeon_legacy_get_tmds_info_from_table(encoder, tmds); 1249 case CT_POWERBOOK_EXTERNAL:
1250 case CT_MINI_EXTERNAL:
1251 default:
1252 tmds->dvo_chip = DVO_SIL164;
1253 tmds->slave_addr = 0x70 >> 1; /* 7 bit addressing */
1254 break;
1255 }
1084 1256
1085 return tmds; 1257 return true;
1086} 1258}
1087 1259
1088void radeon_combios_get_ext_tmds_info(struct radeon_encoder *encoder) 1260bool radeon_legacy_get_ext_tmds_info_from_combios(struct radeon_encoder *encoder,
1261 struct radeon_encoder_ext_tmds *tmds)
1089{ 1262{
1090 struct drm_device *dev = encoder->base.dev; 1263 struct drm_device *dev = encoder->base.dev;
1091 struct radeon_device *rdev = dev->dev_private; 1264 struct radeon_device *rdev = dev->dev_private;
1092 uint16_t ext_tmds_info; 1265 uint16_t offset;
1093 uint8_t ver; 1266 uint8_t ver, id, blocks, clk, data;
1267 int i;
1268 enum radeon_combios_ddc gpio;
1269 struct radeon_i2c_bus_rec i2c_bus;
1094 1270
1095 if (rdev->bios == NULL) 1271 tmds->i2c_bus = NULL;
1096 return; 1272 if (rdev->flags & RADEON_IS_IGP) {
1273 offset = combios_get_table_offset(dev, COMBIOS_I2C_INFO_TABLE);
1274 if (offset) {
1275 ver = RBIOS8(offset);
1276 DRM_INFO("GPIO Table revision: %d\n", ver);
1277 blocks = RBIOS8(offset + 2);
1278 for (i = 0; i < blocks; i++) {
1279 id = RBIOS8(offset + 3 + (i * 5) + 0);
1280 if (id == 136) {
1281 clk = RBIOS8(offset + 3 + (i * 5) + 3);
1282 data = RBIOS8(offset + 3 + (i * 5) + 4);
1283 i2c_bus.valid = true;
1284 i2c_bus.mask_clk_mask = (1 << clk);
1285 i2c_bus.mask_data_mask = (1 << data);
1286 i2c_bus.a_clk_mask = (1 << clk);
1287 i2c_bus.a_data_mask = (1 << data);
1288 i2c_bus.en_clk_mask = (1 << clk);
1289 i2c_bus.en_data_mask = (1 << data);
1290 i2c_bus.y_clk_mask = (1 << clk);
1291 i2c_bus.y_data_mask = (1 << data);
1292 i2c_bus.mask_clk_reg = RADEON_GPIOPAD_MASK;
1293 i2c_bus.mask_data_reg = RADEON_GPIOPAD_MASK;
1294 i2c_bus.a_clk_reg = RADEON_GPIOPAD_A;
1295 i2c_bus.a_data_reg = RADEON_GPIOPAD_A;
1296 i2c_bus.en_clk_reg = RADEON_GPIOPAD_EN;
1297 i2c_bus.en_data_reg = RADEON_GPIOPAD_EN;
1298 i2c_bus.y_clk_reg = RADEON_GPIOPAD_Y;
1299 i2c_bus.y_data_reg = RADEON_GPIOPAD_Y;
1300 tmds->i2c_bus = radeon_i2c_create(dev, &i2c_bus, "DVO");
1301 tmds->dvo_chip = DVO_SIL164;
1302 tmds->slave_addr = 0x70 >> 1; /* 7 bit addressing */
1303 break;
1304 }
1305 }
1306 }
1307 } else {
1308 offset = combios_get_table_offset(dev, COMBIOS_EXT_TMDS_INFO_TABLE);
1309 if (offset) {
1310 ver = RBIOS8(offset);
1311 DRM_INFO("External TMDS Table revision: %d\n", ver);
1312 tmds->slave_addr = RBIOS8(offset + 4 + 2);
1313 tmds->slave_addr >>= 1; /* 7 bit addressing */
1314 gpio = RBIOS8(offset + 4 + 3);
1315 switch (gpio) {
1316 case DDC_MONID:
1317 i2c_bus = combios_setup_i2c_bus(rdev, RADEON_GPIO_MONID);
1318 tmds->i2c_bus = radeon_i2c_create(dev, &i2c_bus, "DVO");
1319 break;
1320 case DDC_DVI:
1321 i2c_bus = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1322 tmds->i2c_bus = radeon_i2c_create(dev, &i2c_bus, "DVO");
1323 break;
1324 case DDC_VGA:
1325 i2c_bus = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1326 tmds->i2c_bus = radeon_i2c_create(dev, &i2c_bus, "DVO");
1327 break;
1328 case DDC_CRT2:
1329 /* R3xx+ chips don't have GPIO_CRT2_DDC gpio pad */
1330 if (rdev->family >= CHIP_R300)
1331 i2c_bus = combios_setup_i2c_bus(rdev, RADEON_GPIO_MONID);
1332 else
1333 i2c_bus = combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC);
1334 tmds->i2c_bus = radeon_i2c_create(dev, &i2c_bus, "DVO");
1335 break;
1336 case DDC_LCD: /* MM i2c */
1337 i2c_bus.valid = true;
1338 i2c_bus.hw_capable = true;
1339 i2c_bus.mm_i2c = true;
1340 tmds->i2c_bus = radeon_i2c_create(dev, &i2c_bus, "DVO");
1341 break;
1342 default:
1343 DRM_ERROR("Unsupported gpio %d\n", gpio);
1344 break;
1345 }
1346 }
1347 }
1097 1348
1098 ext_tmds_info = 1349 if (!tmds->i2c_bus) {
1099 combios_get_table_offset(dev, COMBIOS_EXT_TMDS_INFO_TABLE); 1350 DRM_INFO("No valid Ext TMDS info found in BIOS\n");
1100 if (ext_tmds_info) { 1351 return false;
1101 ver = RBIOS8(ext_tmds_info);
1102 DRM_INFO("External TMDS Table revision: %d\n", ver);
1103 // TODO
1104 } 1352 }
1353
1354 return true;
1105} 1355}
1106 1356
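
The IGP path above builds a bit-banged DVO bus on the shared GPIOPAD registers: the i2c-info table only supplies the clock and data pad numbers, each mask becomes (1 << pad), and the stored 8-bit write address is shifted down to 7-bit form. A stand-alone sketch of those two conversions (the struct is a placeholder, not radeon_i2c_bus_rec):

#include <stdint.h>
#include <stdio.h>

/* Placeholder for the subset of the bus record built in the IGP branch. */
struct gpiopad_bus {
	uint32_t clk_mask;
	uint32_t data_mask;
};

/* The COMBIOS record stores raw pad numbers; the driver turns each into a
 * single-bit mask used against the GPIOPAD_{MASK,A,EN,Y} registers. */
static struct gpiopad_bus gpiopad_bus_from_pads(uint8_t clk_pad, uint8_t data_pad)
{
	struct gpiopad_bus bus = {
		.clk_mask  = 1u << clk_pad,
		.data_mask = 1u << data_pad,
	};
	return bus;
}

int main(void)
{
	struct gpiopad_bus bus = gpiopad_bus_from_pads(3, 4);
	uint8_t slave_write_addr = 0x70; /* as stored in the table */

	printf("clk mask 0x%08x, data mask 0x%08x\n", bus.clk_mask, bus.data_mask);
	/* 7-bit i2c addressing: drop the low R/W bit, as in the code above. */
	printf("7-bit slave address: 0x%02x\n", slave_write_addr >> 1);
	return 0;
}
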
1107bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev) 1357bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1108{ 1358{
1109 struct radeon_device *rdev = dev->dev_private; 1359 struct radeon_device *rdev = dev->dev_private;
1110 struct radeon_i2c_bus_rec ddc_i2c; 1360 struct radeon_i2c_bus_rec ddc_i2c;
1361 struct radeon_hpd hpd;
1111 1362
1112 rdev->mode_info.connector_table = radeon_connector_table; 1363 rdev->mode_info.connector_table = radeon_connector_table;
1113 if (rdev->mode_info.connector_table == CT_NONE) { 1364 if (rdev->mode_info.connector_table == CT_NONE) {
1114#ifdef CONFIG_PPC_PMAC 1365#ifdef CONFIG_PPC_PMAC
1115 if (machine_is_compatible("PowerBook3,3")) { 1366 if (of_machine_is_compatible("PowerBook3,3")) {
1116 /* powerbook with VGA */ 1367 /* powerbook with VGA */
1117 rdev->mode_info.connector_table = CT_POWERBOOK_VGA; 1368 rdev->mode_info.connector_table = CT_POWERBOOK_VGA;
1118 } else if (machine_is_compatible("PowerBook3,4") || 1369 } else if (of_machine_is_compatible("PowerBook3,4") ||
1119 machine_is_compatible("PowerBook3,5")) { 1370 of_machine_is_compatible("PowerBook3,5")) {
1120 /* powerbook with internal tmds */ 1371 /* powerbook with internal tmds */
1121 rdev->mode_info.connector_table = CT_POWERBOOK_INTERNAL; 1372 rdev->mode_info.connector_table = CT_POWERBOOK_INTERNAL;
1122 } else if (machine_is_compatible("PowerBook5,1") || 1373 } else if (of_machine_is_compatible("PowerBook5,1") ||
1123 machine_is_compatible("PowerBook5,2") || 1374 of_machine_is_compatible("PowerBook5,2") ||
1124 machine_is_compatible("PowerBook5,3") || 1375 of_machine_is_compatible("PowerBook5,3") ||
1125 machine_is_compatible("PowerBook5,4") || 1376 of_machine_is_compatible("PowerBook5,4") ||
1126 machine_is_compatible("PowerBook5,5")) { 1377 of_machine_is_compatible("PowerBook5,5")) {
1127 /* powerbook with external single link tmds (sil164) */ 1378 /* powerbook with external single link tmds (sil164) */
1128 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL; 1379 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL;
1129 } else if (machine_is_compatible("PowerBook5,6")) { 1380 } else if (of_machine_is_compatible("PowerBook5,6")) {
1130 /* powerbook with external dual or single link tmds */ 1381 /* powerbook with external dual or single link tmds */
1131 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL; 1382 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL;
1132 } else if (machine_is_compatible("PowerBook5,7") || 1383 } else if (of_machine_is_compatible("PowerBook5,7") ||
1133 machine_is_compatible("PowerBook5,8") || 1384 of_machine_is_compatible("PowerBook5,8") ||
1134 machine_is_compatible("PowerBook5,9")) { 1385 of_machine_is_compatible("PowerBook5,9")) {
1135 /* PowerBook6,2 ? */ 1386 /* PowerBook6,2 ? */
1136 /* powerbook with external dual link tmds (sil1178?) */ 1387 /* powerbook with external dual link tmds (sil1178?) */
1137 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL; 1388 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL;
1138 } else if (machine_is_compatible("PowerBook4,1") || 1389 } else if (of_machine_is_compatible("PowerBook4,1") ||
1139 machine_is_compatible("PowerBook4,2") || 1390 of_machine_is_compatible("PowerBook4,2") ||
1140 machine_is_compatible("PowerBook4,3") || 1391 of_machine_is_compatible("PowerBook4,3") ||
1141 machine_is_compatible("PowerBook6,3") || 1392 of_machine_is_compatible("PowerBook6,3") ||
1142 machine_is_compatible("PowerBook6,5") || 1393 of_machine_is_compatible("PowerBook6,5") ||
1143 machine_is_compatible("PowerBook6,7")) { 1394 of_machine_is_compatible("PowerBook6,7")) {
1144 /* ibook */ 1395 /* ibook */
1145 rdev->mode_info.connector_table = CT_IBOOK; 1396 rdev->mode_info.connector_table = CT_IBOOK;
1146 } else if (machine_is_compatible("PowerMac4,4")) { 1397 } else if (of_machine_is_compatible("PowerMac4,4")) {
1147 /* emac */ 1398 /* emac */
1148 rdev->mode_info.connector_table = CT_EMAC; 1399 rdev->mode_info.connector_table = CT_EMAC;
1149 } else if (machine_is_compatible("PowerMac10,1")) { 1400 } else if (of_machine_is_compatible("PowerMac10,1")) {
1150 /* mini with internal tmds */ 1401 /* mini with internal tmds */
1151 rdev->mode_info.connector_table = CT_MINI_INTERNAL; 1402 rdev->mode_info.connector_table = CT_MINI_INTERNAL;
1152 } else if (machine_is_compatible("PowerMac10,2")) { 1403 } else if (of_machine_is_compatible("PowerMac10,2")) {
1153 /* mini with external tmds */ 1404 /* mini with external tmds */
1154 rdev->mode_info.connector_table = CT_MINI_EXTERNAL; 1405 rdev->mode_info.connector_table = CT_MINI_EXTERNAL;
1155 } else if (machine_is_compatible("PowerMac12,1")) { 1406 } else if (of_machine_is_compatible("PowerMac12,1")) {
1156 /* PowerMac8,1 ? */ 1407 /* PowerMac8,1 ? */
1157 /* imac g5 isight */ 1408 /* imac g5 isight */
1158 rdev->mode_info.connector_table = CT_IMAC_G5_ISIGHT; 1409 rdev->mode_info.connector_table = CT_IMAC_G5_ISIGHT;
@@ -1168,7 +1419,8 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1168 /* these are the most common settings */ 1419 /* these are the most common settings */
1169 if (rdev->flags & RADEON_SINGLE_CRTC) { 1420 if (rdev->flags & RADEON_SINGLE_CRTC) {
1170 /* VGA - primary dac */ 1421 /* VGA - primary dac */
1171 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1422 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1423 hpd.hpd = RADEON_HPD_NONE;
1172 radeon_add_legacy_encoder(dev, 1424 radeon_add_legacy_encoder(dev,
1173 radeon_get_encoder_id(dev, 1425 radeon_get_encoder_id(dev,
1174 ATOM_DEVICE_CRT1_SUPPORT, 1426 ATOM_DEVICE_CRT1_SUPPORT,
@@ -1178,10 +1430,12 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1178 ATOM_DEVICE_CRT1_SUPPORT, 1430 ATOM_DEVICE_CRT1_SUPPORT,
1179 DRM_MODE_CONNECTOR_VGA, 1431 DRM_MODE_CONNECTOR_VGA,
1180 &ddc_i2c, 1432 &ddc_i2c,
1181 CONNECTOR_OBJECT_ID_VGA); 1433 CONNECTOR_OBJECT_ID_VGA,
1434 &hpd);
1182 } else if (rdev->flags & RADEON_IS_MOBILITY) { 1435 } else if (rdev->flags & RADEON_IS_MOBILITY) {
1183 /* LVDS */ 1436 /* LVDS */
1184 ddc_i2c = combios_setup_i2c_bus(RADEON_LCD_GPIO_MASK); 1437 ddc_i2c = combios_setup_i2c_bus(rdev, 0);
1438 hpd.hpd = RADEON_HPD_NONE;
1185 radeon_add_legacy_encoder(dev, 1439 radeon_add_legacy_encoder(dev,
1186 radeon_get_encoder_id(dev, 1440 radeon_get_encoder_id(dev,
1187 ATOM_DEVICE_LCD1_SUPPORT, 1441 ATOM_DEVICE_LCD1_SUPPORT,
@@ -1191,10 +1445,12 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1191 ATOM_DEVICE_LCD1_SUPPORT, 1445 ATOM_DEVICE_LCD1_SUPPORT,
1192 DRM_MODE_CONNECTOR_LVDS, 1446 DRM_MODE_CONNECTOR_LVDS,
1193 &ddc_i2c, 1447 &ddc_i2c,
1194 CONNECTOR_OBJECT_ID_LVDS); 1448 CONNECTOR_OBJECT_ID_LVDS,
1449 &hpd);
1195 1450
1196 /* VGA - primary dac */ 1451 /* VGA - primary dac */
1197 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1452 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1453 hpd.hpd = RADEON_HPD_NONE;
1198 radeon_add_legacy_encoder(dev, 1454 radeon_add_legacy_encoder(dev,
1199 radeon_get_encoder_id(dev, 1455 radeon_get_encoder_id(dev,
1200 ATOM_DEVICE_CRT1_SUPPORT, 1456 ATOM_DEVICE_CRT1_SUPPORT,
@@ -1204,10 +1460,12 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1204 ATOM_DEVICE_CRT1_SUPPORT, 1460 ATOM_DEVICE_CRT1_SUPPORT,
1205 DRM_MODE_CONNECTOR_VGA, 1461 DRM_MODE_CONNECTOR_VGA,
1206 &ddc_i2c, 1462 &ddc_i2c,
1207 CONNECTOR_OBJECT_ID_VGA); 1463 CONNECTOR_OBJECT_ID_VGA,
1464 &hpd);
1208 } else { 1465 } else {
1209 /* DVI-I - tv dac, int tmds */ 1466 /* DVI-I - tv dac, int tmds */
1210 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 1467 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1468 hpd.hpd = RADEON_HPD_1;
1211 radeon_add_legacy_encoder(dev, 1469 radeon_add_legacy_encoder(dev,
1212 radeon_get_encoder_id(dev, 1470 radeon_get_encoder_id(dev,
1213 ATOM_DEVICE_DFP1_SUPPORT, 1471 ATOM_DEVICE_DFP1_SUPPORT,
@@ -1223,10 +1481,12 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1223 ATOM_DEVICE_CRT2_SUPPORT, 1481 ATOM_DEVICE_CRT2_SUPPORT,
1224 DRM_MODE_CONNECTOR_DVII, 1482 DRM_MODE_CONNECTOR_DVII,
1225 &ddc_i2c, 1483 &ddc_i2c,
1226 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I); 1484 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I,
1485 &hpd);
1227 1486
1228 /* VGA - primary dac */ 1487 /* VGA - primary dac */
1229 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1488 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1489 hpd.hpd = RADEON_HPD_NONE;
1230 radeon_add_legacy_encoder(dev, 1490 radeon_add_legacy_encoder(dev,
1231 radeon_get_encoder_id(dev, 1491 radeon_get_encoder_id(dev,
1232 ATOM_DEVICE_CRT1_SUPPORT, 1492 ATOM_DEVICE_CRT1_SUPPORT,
@@ -1236,11 +1496,14 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1236 ATOM_DEVICE_CRT1_SUPPORT, 1496 ATOM_DEVICE_CRT1_SUPPORT,
1237 DRM_MODE_CONNECTOR_VGA, 1497 DRM_MODE_CONNECTOR_VGA,
1238 &ddc_i2c, 1498 &ddc_i2c,
1239 CONNECTOR_OBJECT_ID_VGA); 1499 CONNECTOR_OBJECT_ID_VGA,
1500 &hpd);
1240 } 1501 }
1241 1502
1242 if (rdev->family != CHIP_R100 && rdev->family != CHIP_R200) { 1503 if (rdev->family != CHIP_R100 && rdev->family != CHIP_R200) {
1243 /* TV - tv dac */ 1504 /* TV - tv dac */
1505 ddc_i2c.valid = false;
1506 hpd.hpd = RADEON_HPD_NONE;
1244 radeon_add_legacy_encoder(dev, 1507 radeon_add_legacy_encoder(dev,
1245 radeon_get_encoder_id(dev, 1508 radeon_get_encoder_id(dev,
1246 ATOM_DEVICE_TV1_SUPPORT, 1509 ATOM_DEVICE_TV1_SUPPORT,
@@ -1250,14 +1513,16 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1250 ATOM_DEVICE_TV1_SUPPORT, 1513 ATOM_DEVICE_TV1_SUPPORT,
1251 DRM_MODE_CONNECTOR_SVIDEO, 1514 DRM_MODE_CONNECTOR_SVIDEO,
1252 &ddc_i2c, 1515 &ddc_i2c,
1253 CONNECTOR_OBJECT_ID_SVIDEO); 1516 CONNECTOR_OBJECT_ID_SVIDEO,
1517 &hpd);
1254 } 1518 }
1255 break; 1519 break;
1256 case CT_IBOOK: 1520 case CT_IBOOK:
1257 DRM_INFO("Connector Table: %d (ibook)\n", 1521 DRM_INFO("Connector Table: %d (ibook)\n",
1258 rdev->mode_info.connector_table); 1522 rdev->mode_info.connector_table);
1259 /* LVDS */ 1523 /* LVDS */
1260 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 1524 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1525 hpd.hpd = RADEON_HPD_NONE;
1261 radeon_add_legacy_encoder(dev, 1526 radeon_add_legacy_encoder(dev,
1262 radeon_get_encoder_id(dev, 1527 radeon_get_encoder_id(dev,
1263 ATOM_DEVICE_LCD1_SUPPORT, 1528 ATOM_DEVICE_LCD1_SUPPORT,
@@ -1265,9 +1530,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1265 ATOM_DEVICE_LCD1_SUPPORT); 1530 ATOM_DEVICE_LCD1_SUPPORT);
1266 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT, 1531 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT,
1267 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c, 1532 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c,
1268 CONNECTOR_OBJECT_ID_LVDS); 1533 CONNECTOR_OBJECT_ID_LVDS,
1534 &hpd);
1269 /* VGA - TV DAC */ 1535 /* VGA - TV DAC */
1270 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1536 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1537 hpd.hpd = RADEON_HPD_NONE;
1271 radeon_add_legacy_encoder(dev, 1538 radeon_add_legacy_encoder(dev,
1272 radeon_get_encoder_id(dev, 1539 radeon_get_encoder_id(dev,
1273 ATOM_DEVICE_CRT2_SUPPORT, 1540 ATOM_DEVICE_CRT2_SUPPORT,
@@ -1275,8 +1542,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1275 ATOM_DEVICE_CRT2_SUPPORT); 1542 ATOM_DEVICE_CRT2_SUPPORT);
1276 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT2_SUPPORT, 1543 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT2_SUPPORT,
1277 DRM_MODE_CONNECTOR_VGA, &ddc_i2c, 1544 DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
1278 CONNECTOR_OBJECT_ID_VGA); 1545 CONNECTOR_OBJECT_ID_VGA,
1546 &hpd);
1279 /* TV - TV DAC */ 1547 /* TV - TV DAC */
1548 ddc_i2c.valid = false;
1549 hpd.hpd = RADEON_HPD_NONE;
1280 radeon_add_legacy_encoder(dev, 1550 radeon_add_legacy_encoder(dev,
1281 radeon_get_encoder_id(dev, 1551 radeon_get_encoder_id(dev,
1282 ATOM_DEVICE_TV1_SUPPORT, 1552 ATOM_DEVICE_TV1_SUPPORT,
@@ -1285,13 +1555,15 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1285 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT, 1555 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT,
1286 DRM_MODE_CONNECTOR_SVIDEO, 1556 DRM_MODE_CONNECTOR_SVIDEO,
1287 &ddc_i2c, 1557 &ddc_i2c,
1288 CONNECTOR_OBJECT_ID_SVIDEO); 1558 CONNECTOR_OBJECT_ID_SVIDEO,
1559 &hpd);
1289 break; 1560 break;
1290 case CT_POWERBOOK_EXTERNAL: 1561 case CT_POWERBOOK_EXTERNAL:
1291 DRM_INFO("Connector Table: %d (powerbook external tmds)\n", 1562 DRM_INFO("Connector Table: %d (powerbook external tmds)\n",
1292 rdev->mode_info.connector_table); 1563 rdev->mode_info.connector_table);
1293 /* LVDS */ 1564 /* LVDS */
1294 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 1565 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1566 hpd.hpd = RADEON_HPD_NONE;
1295 radeon_add_legacy_encoder(dev, 1567 radeon_add_legacy_encoder(dev,
1296 radeon_get_encoder_id(dev, 1568 radeon_get_encoder_id(dev,
1297 ATOM_DEVICE_LCD1_SUPPORT, 1569 ATOM_DEVICE_LCD1_SUPPORT,
@@ -1299,9 +1571,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1299 ATOM_DEVICE_LCD1_SUPPORT); 1571 ATOM_DEVICE_LCD1_SUPPORT);
1300 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT, 1572 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT,
1301 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c, 1573 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c,
1302 CONNECTOR_OBJECT_ID_LVDS); 1574 CONNECTOR_OBJECT_ID_LVDS,
1575 &hpd);
1303 /* DVI-I - primary dac, ext tmds */ 1576 /* DVI-I - primary dac, ext tmds */
1304 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1577 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1578 hpd.hpd = RADEON_HPD_2; /* ??? */
1305 radeon_add_legacy_encoder(dev, 1579 radeon_add_legacy_encoder(dev,
1306 radeon_get_encoder_id(dev, 1580 radeon_get_encoder_id(dev,
1307 ATOM_DEVICE_DFP2_SUPPORT, 1581 ATOM_DEVICE_DFP2_SUPPORT,
@@ -1317,8 +1591,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1317 ATOM_DEVICE_DFP2_SUPPORT | 1591 ATOM_DEVICE_DFP2_SUPPORT |
1318 ATOM_DEVICE_CRT1_SUPPORT, 1592 ATOM_DEVICE_CRT1_SUPPORT,
1319 DRM_MODE_CONNECTOR_DVII, &ddc_i2c, 1593 DRM_MODE_CONNECTOR_DVII, &ddc_i2c,
1320 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I); 1594 CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I,
1595 &hpd);
1321 /* TV - TV DAC */ 1596 /* TV - TV DAC */
1597 ddc_i2c.valid = false;
1598 hpd.hpd = RADEON_HPD_NONE;
1322 radeon_add_legacy_encoder(dev, 1599 radeon_add_legacy_encoder(dev,
1323 radeon_get_encoder_id(dev, 1600 radeon_get_encoder_id(dev,
1324 ATOM_DEVICE_TV1_SUPPORT, 1601 ATOM_DEVICE_TV1_SUPPORT,
@@ -1327,13 +1604,15 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1327 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT, 1604 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT,
1328 DRM_MODE_CONNECTOR_SVIDEO, 1605 DRM_MODE_CONNECTOR_SVIDEO,
1329 &ddc_i2c, 1606 &ddc_i2c,
1330 CONNECTOR_OBJECT_ID_SVIDEO); 1607 CONNECTOR_OBJECT_ID_SVIDEO,
1608 &hpd);
1331 break; 1609 break;
1332 case CT_POWERBOOK_INTERNAL: 1610 case CT_POWERBOOK_INTERNAL:
1333 DRM_INFO("Connector Table: %d (powerbook internal tmds)\n", 1611 DRM_INFO("Connector Table: %d (powerbook internal tmds)\n",
1334 rdev->mode_info.connector_table); 1612 rdev->mode_info.connector_table);
1335 /* LVDS */ 1613 /* LVDS */
1336 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 1614 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1615 hpd.hpd = RADEON_HPD_NONE;
1337 radeon_add_legacy_encoder(dev, 1616 radeon_add_legacy_encoder(dev,
1338 radeon_get_encoder_id(dev, 1617 radeon_get_encoder_id(dev,
1339 ATOM_DEVICE_LCD1_SUPPORT, 1618 ATOM_DEVICE_LCD1_SUPPORT,
@@ -1341,9 +1620,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1341 ATOM_DEVICE_LCD1_SUPPORT); 1620 ATOM_DEVICE_LCD1_SUPPORT);
1342 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT, 1621 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT,
1343 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c, 1622 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c,
1344 CONNECTOR_OBJECT_ID_LVDS); 1623 CONNECTOR_OBJECT_ID_LVDS,
1624 &hpd);
1345 /* DVI-I - primary dac, int tmds */ 1625 /* DVI-I - primary dac, int tmds */
1346 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1626 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1627 hpd.hpd = RADEON_HPD_1; /* ??? */
1347 radeon_add_legacy_encoder(dev, 1628 radeon_add_legacy_encoder(dev,
1348 radeon_get_encoder_id(dev, 1629 radeon_get_encoder_id(dev,
1349 ATOM_DEVICE_DFP1_SUPPORT, 1630 ATOM_DEVICE_DFP1_SUPPORT,
@@ -1358,8 +1639,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1358 ATOM_DEVICE_DFP1_SUPPORT | 1639 ATOM_DEVICE_DFP1_SUPPORT |
1359 ATOM_DEVICE_CRT1_SUPPORT, 1640 ATOM_DEVICE_CRT1_SUPPORT,
1360 DRM_MODE_CONNECTOR_DVII, &ddc_i2c, 1641 DRM_MODE_CONNECTOR_DVII, &ddc_i2c,
1361 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I); 1642 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I,
1643 &hpd);
1362 /* TV - TV DAC */ 1644 /* TV - TV DAC */
1645 ddc_i2c.valid = false;
1646 hpd.hpd = RADEON_HPD_NONE;
1363 radeon_add_legacy_encoder(dev, 1647 radeon_add_legacy_encoder(dev,
1364 radeon_get_encoder_id(dev, 1648 radeon_get_encoder_id(dev,
1365 ATOM_DEVICE_TV1_SUPPORT, 1649 ATOM_DEVICE_TV1_SUPPORT,
@@ -1368,13 +1652,15 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1368 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT, 1652 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT,
1369 DRM_MODE_CONNECTOR_SVIDEO, 1653 DRM_MODE_CONNECTOR_SVIDEO,
1370 &ddc_i2c, 1654 &ddc_i2c,
1371 CONNECTOR_OBJECT_ID_SVIDEO); 1655 CONNECTOR_OBJECT_ID_SVIDEO,
1656 &hpd);
1372 break; 1657 break;
1373 case CT_POWERBOOK_VGA: 1658 case CT_POWERBOOK_VGA:
1374 DRM_INFO("Connector Table: %d (powerbook vga)\n", 1659 DRM_INFO("Connector Table: %d (powerbook vga)\n",
1375 rdev->mode_info.connector_table); 1660 rdev->mode_info.connector_table);
1376 /* LVDS */ 1661 /* LVDS */
1377 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 1662 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1663 hpd.hpd = RADEON_HPD_NONE;
1378 radeon_add_legacy_encoder(dev, 1664 radeon_add_legacy_encoder(dev,
1379 radeon_get_encoder_id(dev, 1665 radeon_get_encoder_id(dev,
1380 ATOM_DEVICE_LCD1_SUPPORT, 1666 ATOM_DEVICE_LCD1_SUPPORT,
@@ -1382,9 +1668,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1382 ATOM_DEVICE_LCD1_SUPPORT); 1668 ATOM_DEVICE_LCD1_SUPPORT);
1383 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT, 1669 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT,
1384 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c, 1670 DRM_MODE_CONNECTOR_LVDS, &ddc_i2c,
1385 CONNECTOR_OBJECT_ID_LVDS); 1671 CONNECTOR_OBJECT_ID_LVDS,
1672 &hpd);
1386 /* VGA - primary dac */ 1673 /* VGA - primary dac */
1387 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1674 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1675 hpd.hpd = RADEON_HPD_NONE;
1388 radeon_add_legacy_encoder(dev, 1676 radeon_add_legacy_encoder(dev,
1389 radeon_get_encoder_id(dev, 1677 radeon_get_encoder_id(dev,
1390 ATOM_DEVICE_CRT1_SUPPORT, 1678 ATOM_DEVICE_CRT1_SUPPORT,
@@ -1392,8 +1680,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1392 ATOM_DEVICE_CRT1_SUPPORT); 1680 ATOM_DEVICE_CRT1_SUPPORT);
1393 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT1_SUPPORT, 1681 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT1_SUPPORT,
1394 DRM_MODE_CONNECTOR_VGA, &ddc_i2c, 1682 DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
1395 CONNECTOR_OBJECT_ID_VGA); 1683 CONNECTOR_OBJECT_ID_VGA,
1684 &hpd);
1396 /* TV - TV DAC */ 1685 /* TV - TV DAC */
1686 ddc_i2c.valid = false;
1687 hpd.hpd = RADEON_HPD_NONE;
1397 radeon_add_legacy_encoder(dev, 1688 radeon_add_legacy_encoder(dev,
1398 radeon_get_encoder_id(dev, 1689 radeon_get_encoder_id(dev,
1399 ATOM_DEVICE_TV1_SUPPORT, 1690 ATOM_DEVICE_TV1_SUPPORT,
@@ -1402,13 +1693,15 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1402 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT, 1693 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT,
1403 DRM_MODE_CONNECTOR_SVIDEO, 1694 DRM_MODE_CONNECTOR_SVIDEO,
1404 &ddc_i2c, 1695 &ddc_i2c,
1405 CONNECTOR_OBJECT_ID_SVIDEO); 1696 CONNECTOR_OBJECT_ID_SVIDEO,
1697 &hpd);
1406 break; 1698 break;
1407 case CT_MINI_EXTERNAL: 1699 case CT_MINI_EXTERNAL:
1408 DRM_INFO("Connector Table: %d (mini external tmds)\n", 1700 DRM_INFO("Connector Table: %d (mini external tmds)\n",
1409 rdev->mode_info.connector_table); 1701 rdev->mode_info.connector_table);
1410 /* DVI-I - tv dac, ext tmds */ 1702 /* DVI-I - tv dac, ext tmds */
1411 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_CRT2_DDC); 1703 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC);
1704 hpd.hpd = RADEON_HPD_2; /* ??? */
1412 radeon_add_legacy_encoder(dev, 1705 radeon_add_legacy_encoder(dev,
1413 radeon_get_encoder_id(dev, 1706 radeon_get_encoder_id(dev,
1414 ATOM_DEVICE_DFP2_SUPPORT, 1707 ATOM_DEVICE_DFP2_SUPPORT,
@@ -1424,8 +1717,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1424 ATOM_DEVICE_DFP2_SUPPORT | 1717 ATOM_DEVICE_DFP2_SUPPORT |
1425 ATOM_DEVICE_CRT2_SUPPORT, 1718 ATOM_DEVICE_CRT2_SUPPORT,
1426 DRM_MODE_CONNECTOR_DVII, &ddc_i2c, 1719 DRM_MODE_CONNECTOR_DVII, &ddc_i2c,
1427 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I); 1720 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I,
1721 &hpd);
1428 /* TV - TV DAC */ 1722 /* TV - TV DAC */
1723 ddc_i2c.valid = false;
1724 hpd.hpd = RADEON_HPD_NONE;
1429 radeon_add_legacy_encoder(dev, 1725 radeon_add_legacy_encoder(dev,
1430 radeon_get_encoder_id(dev, 1726 radeon_get_encoder_id(dev,
1431 ATOM_DEVICE_TV1_SUPPORT, 1727 ATOM_DEVICE_TV1_SUPPORT,
@@ -1434,13 +1730,15 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1434 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_TV1_SUPPORT, 1730 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_TV1_SUPPORT,
1435 DRM_MODE_CONNECTOR_SVIDEO, 1731 DRM_MODE_CONNECTOR_SVIDEO,
1436 &ddc_i2c, 1732 &ddc_i2c,
1437 CONNECTOR_OBJECT_ID_SVIDEO); 1733 CONNECTOR_OBJECT_ID_SVIDEO,
1734 &hpd);
1438 break; 1735 break;
1439 case CT_MINI_INTERNAL: 1736 case CT_MINI_INTERNAL:
1440 DRM_INFO("Connector Table: %d (mini internal tmds)\n", 1737 DRM_INFO("Connector Table: %d (mini internal tmds)\n",
1441 rdev->mode_info.connector_table); 1738 rdev->mode_info.connector_table);
1442 /* DVI-I - tv dac, int tmds */ 1739 /* DVI-I - tv dac, int tmds */
1443 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_CRT2_DDC); 1740 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC);
1741 hpd.hpd = RADEON_HPD_1; /* ??? */
1444 radeon_add_legacy_encoder(dev, 1742 radeon_add_legacy_encoder(dev,
1445 radeon_get_encoder_id(dev, 1743 radeon_get_encoder_id(dev,
1446 ATOM_DEVICE_DFP1_SUPPORT, 1744 ATOM_DEVICE_DFP1_SUPPORT,
@@ -1455,8 +1753,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1455 ATOM_DEVICE_DFP1_SUPPORT | 1753 ATOM_DEVICE_DFP1_SUPPORT |
1456 ATOM_DEVICE_CRT2_SUPPORT, 1754 ATOM_DEVICE_CRT2_SUPPORT,
1457 DRM_MODE_CONNECTOR_DVII, &ddc_i2c, 1755 DRM_MODE_CONNECTOR_DVII, &ddc_i2c,
1458 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I); 1756 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I,
1757 &hpd);
1459 /* TV - TV DAC */ 1758 /* TV - TV DAC */
1759 ddc_i2c.valid = false;
1760 hpd.hpd = RADEON_HPD_NONE;
1460 radeon_add_legacy_encoder(dev, 1761 radeon_add_legacy_encoder(dev,
1461 radeon_get_encoder_id(dev, 1762 radeon_get_encoder_id(dev,
1462 ATOM_DEVICE_TV1_SUPPORT, 1763 ATOM_DEVICE_TV1_SUPPORT,
@@ -1465,13 +1766,15 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1465 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_TV1_SUPPORT, 1766 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_TV1_SUPPORT,
1466 DRM_MODE_CONNECTOR_SVIDEO, 1767 DRM_MODE_CONNECTOR_SVIDEO,
1467 &ddc_i2c, 1768 &ddc_i2c,
1468 CONNECTOR_OBJECT_ID_SVIDEO); 1769 CONNECTOR_OBJECT_ID_SVIDEO,
1770 &hpd);
1469 break; 1771 break;
1470 case CT_IMAC_G5_ISIGHT: 1772 case CT_IMAC_G5_ISIGHT:
1471 DRM_INFO("Connector Table: %d (imac g5 isight)\n", 1773 DRM_INFO("Connector Table: %d (imac g5 isight)\n",
1472 rdev->mode_info.connector_table); 1774 rdev->mode_info.connector_table);
1473 /* DVI-D - int tmds */ 1775 /* DVI-D - int tmds */
1474 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_MONID); 1776 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_MONID);
1777 hpd.hpd = RADEON_HPD_1; /* ??? */
1475 radeon_add_legacy_encoder(dev, 1778 radeon_add_legacy_encoder(dev,
1476 radeon_get_encoder_id(dev, 1779 radeon_get_encoder_id(dev,
1477 ATOM_DEVICE_DFP1_SUPPORT, 1780 ATOM_DEVICE_DFP1_SUPPORT,
@@ -1479,9 +1782,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1479 ATOM_DEVICE_DFP1_SUPPORT); 1782 ATOM_DEVICE_DFP1_SUPPORT);
1480 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_DFP1_SUPPORT, 1783 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_DFP1_SUPPORT,
1481 DRM_MODE_CONNECTOR_DVID, &ddc_i2c, 1784 DRM_MODE_CONNECTOR_DVID, &ddc_i2c,
1482 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D); 1785 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D,
1786 &hpd);
1483 /* VGA - tv dac */ 1787 /* VGA - tv dac */
1484 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 1788 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1789 hpd.hpd = RADEON_HPD_NONE;
1485 radeon_add_legacy_encoder(dev, 1790 radeon_add_legacy_encoder(dev,
1486 radeon_get_encoder_id(dev, 1791 radeon_get_encoder_id(dev,
1487 ATOM_DEVICE_CRT2_SUPPORT, 1792 ATOM_DEVICE_CRT2_SUPPORT,
@@ -1489,8 +1794,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1489 ATOM_DEVICE_CRT2_SUPPORT); 1794 ATOM_DEVICE_CRT2_SUPPORT);
1490 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT2_SUPPORT, 1795 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT2_SUPPORT,
1491 DRM_MODE_CONNECTOR_VGA, &ddc_i2c, 1796 DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
1492 CONNECTOR_OBJECT_ID_VGA); 1797 CONNECTOR_OBJECT_ID_VGA,
1798 &hpd);
1493 /* TV - TV DAC */ 1799 /* TV - TV DAC */
1800 ddc_i2c.valid = false;
1801 hpd.hpd = RADEON_HPD_NONE;
1494 radeon_add_legacy_encoder(dev, 1802 radeon_add_legacy_encoder(dev,
1495 radeon_get_encoder_id(dev, 1803 radeon_get_encoder_id(dev,
1496 ATOM_DEVICE_TV1_SUPPORT, 1804 ATOM_DEVICE_TV1_SUPPORT,
@@ -1499,13 +1807,15 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1499 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT, 1807 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT,
1500 DRM_MODE_CONNECTOR_SVIDEO, 1808 DRM_MODE_CONNECTOR_SVIDEO,
1501 &ddc_i2c, 1809 &ddc_i2c,
1502 CONNECTOR_OBJECT_ID_SVIDEO); 1810 CONNECTOR_OBJECT_ID_SVIDEO,
1811 &hpd);
1503 break; 1812 break;
1504 case CT_EMAC: 1813 case CT_EMAC:
1505 DRM_INFO("Connector Table: %d (emac)\n", 1814 DRM_INFO("Connector Table: %d (emac)\n",
1506 rdev->mode_info.connector_table); 1815 rdev->mode_info.connector_table);
1507 /* VGA - primary dac */ 1816 /* VGA - primary dac */
1508 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 1817 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1818 hpd.hpd = RADEON_HPD_NONE;
1509 radeon_add_legacy_encoder(dev, 1819 radeon_add_legacy_encoder(dev,
1510 radeon_get_encoder_id(dev, 1820 radeon_get_encoder_id(dev,
1511 ATOM_DEVICE_CRT1_SUPPORT, 1821 ATOM_DEVICE_CRT1_SUPPORT,
@@ -1513,9 +1823,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1513 ATOM_DEVICE_CRT1_SUPPORT); 1823 ATOM_DEVICE_CRT1_SUPPORT);
1514 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_CRT1_SUPPORT, 1824 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_CRT1_SUPPORT,
1515 DRM_MODE_CONNECTOR_VGA, &ddc_i2c, 1825 DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
1516 CONNECTOR_OBJECT_ID_VGA); 1826 CONNECTOR_OBJECT_ID_VGA,
1827 &hpd);
1517 /* VGA - tv dac */ 1828 /* VGA - tv dac */
1518 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_CRT2_DDC); 1829 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC);
1830 hpd.hpd = RADEON_HPD_NONE;
1519 radeon_add_legacy_encoder(dev, 1831 radeon_add_legacy_encoder(dev,
1520 radeon_get_encoder_id(dev, 1832 radeon_get_encoder_id(dev,
1521 ATOM_DEVICE_CRT2_SUPPORT, 1833 ATOM_DEVICE_CRT2_SUPPORT,
@@ -1523,8 +1835,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1523 ATOM_DEVICE_CRT2_SUPPORT); 1835 ATOM_DEVICE_CRT2_SUPPORT);
1524 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT2_SUPPORT, 1836 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT2_SUPPORT,
1525 DRM_MODE_CONNECTOR_VGA, &ddc_i2c, 1837 DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
1526 CONNECTOR_OBJECT_ID_VGA); 1838 CONNECTOR_OBJECT_ID_VGA,
1839 &hpd);
1527 /* TV - TV DAC */ 1840 /* TV - TV DAC */
1841 ddc_i2c.valid = false;
1842 hpd.hpd = RADEON_HPD_NONE;
1528 radeon_add_legacy_encoder(dev, 1843 radeon_add_legacy_encoder(dev,
1529 radeon_get_encoder_id(dev, 1844 radeon_get_encoder_id(dev,
1530 ATOM_DEVICE_TV1_SUPPORT, 1845 ATOM_DEVICE_TV1_SUPPORT,
@@ -1533,7 +1848,8 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1533 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT, 1848 radeon_add_legacy_connector(dev, 2, ATOM_DEVICE_TV1_SUPPORT,
1534 DRM_MODE_CONNECTOR_SVIDEO, 1849 DRM_MODE_CONNECTOR_SVIDEO,
1535 &ddc_i2c, 1850 &ddc_i2c,
1536 CONNECTOR_OBJECT_ID_SVIDEO); 1851 CONNECTOR_OBJECT_ID_SVIDEO,
1852 &hpd);
1537 break; 1853 break;
1538 default: 1854 default:
1539 DRM_INFO("Connector table: %d (invalid)\n", 1855 DRM_INFO("Connector table: %d (invalid)\n",
@@ -1550,7 +1866,8 @@ static bool radeon_apply_legacy_quirks(struct drm_device *dev,
1550 int bios_index, 1866 int bios_index,
1551 enum radeon_combios_connector 1867 enum radeon_combios_connector
1552 *legacy_connector, 1868 *legacy_connector,
1553 struct radeon_i2c_bus_rec *ddc_i2c) 1869 struct radeon_i2c_bus_rec *ddc_i2c,
1870 struct radeon_hpd *hpd)
1554{ 1871{
1555 struct radeon_device *rdev = dev->dev_private; 1872 struct radeon_device *rdev = dev->dev_private;
1556 1873
@@ -1558,29 +1875,26 @@ static bool radeon_apply_legacy_quirks(struct drm_device *dev,
1558 if ((rdev->family == CHIP_RS400 || 1875 if ((rdev->family == CHIP_RS400 ||
1559 rdev->family == CHIP_RS480) && 1876 rdev->family == CHIP_RS480) &&
1560 ddc_i2c->mask_clk_reg == RADEON_GPIO_CRT2_DDC) 1877 ddc_i2c->mask_clk_reg == RADEON_GPIO_CRT2_DDC)
1561 *ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_MONID); 1878 *ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_MONID);
1562 else if ((rdev->family == CHIP_RS400 || 1879 else if ((rdev->family == CHIP_RS400 ||
1563 rdev->family == CHIP_RS480) && 1880 rdev->family == CHIP_RS480) &&
1564 ddc_i2c->mask_clk_reg == RADEON_GPIO_MONID) { 1881 ddc_i2c->mask_clk_reg == RADEON_GPIO_MONID) {
1565 ddc_i2c->valid = true; 1882 *ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIOPAD_MASK);
1566 ddc_i2c->mask_clk_mask = (0x20 << 8); 1883 ddc_i2c->mask_clk_mask = (0x20 << 8);
1567 ddc_i2c->mask_data_mask = 0x80; 1884 ddc_i2c->mask_data_mask = 0x80;
1568 ddc_i2c->a_clk_mask = (0x20 << 8); 1885 ddc_i2c->a_clk_mask = (0x20 << 8);
1569 ddc_i2c->a_data_mask = 0x80; 1886 ddc_i2c->a_data_mask = 0x80;
1570 ddc_i2c->put_clk_mask = (0x20 << 8); 1887 ddc_i2c->en_clk_mask = (0x20 << 8);
1571 ddc_i2c->put_data_mask = 0x80; 1888 ddc_i2c->en_data_mask = 0x80;
1572 ddc_i2c->get_clk_mask = (0x20 << 8); 1889 ddc_i2c->y_clk_mask = (0x20 << 8);
1573 ddc_i2c->get_data_mask = 0x80; 1890 ddc_i2c->y_data_mask = 0x80;
1574 ddc_i2c->mask_clk_reg = RADEON_GPIOPAD_MASK;
1575 ddc_i2c->mask_data_reg = RADEON_GPIOPAD_MASK;
1576 ddc_i2c->a_clk_reg = RADEON_GPIOPAD_A;
1577 ddc_i2c->a_data_reg = RADEON_GPIOPAD_A;
1578 ddc_i2c->put_clk_reg = RADEON_GPIOPAD_EN;
1579 ddc_i2c->put_data_reg = RADEON_GPIOPAD_EN;
1580 ddc_i2c->get_clk_reg = RADEON_LCD_GPIO_Y_REG;
1581 ddc_i2c->get_data_reg = RADEON_LCD_GPIO_Y_REG;
1582 } 1891 }
1583 1892
1893 /* R3xx+ chips don't have GPIO_CRT2_DDC gpio pad */
1894 if ((rdev->family >= CHIP_R300) &&
1895 ddc_i2c->mask_clk_reg == RADEON_GPIO_CRT2_DDC)
1896 *ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1897
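
The quirk added just above reroutes any connector that claims GPIO_CRT2_DDC on an R3xx-or-newer part, since those chips no longer have that pad; RS400/RS480 get their own remaps immediately before it. A compact sketch of the R3xx decision, assuming the family enum is monotonically ordered as in the kernel's CHIP_* list:

#include <stdio.h>

enum family { FAM_RV280 = 8, FAM_R300 = 9, FAM_RS480 = 17 }; /* illustrative ordering */
enum ddc_line { DDC_VGA_L, DDC_DVI_L, DDC_CRT2_L, DDC_MONID_L };

/* R3xx+ parts have no CRT2_DDC pad, so such a bus record is rebuilt
 * on the DVI DDC line instead. */
static enum ddc_line apply_crt2_quirk(enum family fam, enum ddc_line line)
{
	if (fam >= FAM_R300 && line == DDC_CRT2_L)
		return DDC_DVI_L;
	return line;
}

int main(void)
{
	printf("%d\n", apply_crt2_quirk(FAM_R300, DDC_CRT2_L));  /* remapped to DVI DDC */
	printf("%d\n", apply_crt2_quirk(FAM_RV280, DDC_CRT2_L)); /* unchanged */
	return 0;
}
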
1584 /* Certain IBM chipset RN50s have a BIOS reporting two VGAs, 1898 /* Certain IBM chipset RN50s have a BIOS reporting two VGAs,
1585 one with VGA DDC and one with CRT2 DDC. - kill the CRT2 DDC one */ 1899 one with VGA DDC and one with CRT2 DDC. - kill the CRT2 DDC one */
1586 if (dev->pdev->device == 0x515e && 1900 if (dev->pdev->device == 0x515e &&
@@ -1624,6 +1938,12 @@ static bool radeon_apply_legacy_tv_quirks(struct drm_device *dev)
1624 dev->pdev->subsystem_device == 0x280a) 1938 dev->pdev->subsystem_device == 0x280a)
1625 return false; 1939 return false;
1626 1940
1941 /* MSI S270 has non-existent TV port */
1942 if (dev->pdev->device == 0x5955 &&
1943 dev->pdev->subsystem_vendor == 0x1462 &&
1944 dev->pdev->subsystem_device == 0x0131)
1945 return false;
1946
1627 return true; 1947 return true;
1628} 1948}
1629 1949
@@ -1671,9 +1991,7 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1671 enum radeon_combios_connector connector; 1991 enum radeon_combios_connector connector;
1672 int i = 0; 1992 int i = 0;
1673 struct radeon_i2c_bus_rec ddc_i2c; 1993 struct radeon_i2c_bus_rec ddc_i2c;
1674 1994 struct radeon_hpd hpd;
1675 if (rdev->bios == NULL)
1676 return false;
1677 1995
1678 conn_info = combios_get_table_offset(dev, COMBIOS_CONNECTOR_INFO_TABLE); 1996 conn_info = combios_get_table_offset(dev, COMBIOS_CONNECTOR_INFO_TABLE);
1679 if (conn_info) { 1997 if (conn_info) {
@@ -1691,26 +2009,40 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1691 switch (ddc_type) { 2009 switch (ddc_type) {
1692 case DDC_MONID: 2010 case DDC_MONID:
1693 ddc_i2c = 2011 ddc_i2c =
1694 combios_setup_i2c_bus(RADEON_GPIO_MONID); 2012 combios_setup_i2c_bus(rdev, RADEON_GPIO_MONID);
1695 break; 2013 break;
1696 case DDC_DVI: 2014 case DDC_DVI:
1697 ddc_i2c = 2015 ddc_i2c =
1698 combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 2016 combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
1699 break; 2017 break;
1700 case DDC_VGA: 2018 case DDC_VGA:
1701 ddc_i2c = 2019 ddc_i2c =
1702 combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 2020 combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1703 break; 2021 break;
1704 case DDC_CRT2: 2022 case DDC_CRT2:
1705 ddc_i2c = 2023 ddc_i2c =
1706 combios_setup_i2c_bus(RADEON_GPIO_CRT2_DDC); 2024 combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC);
1707 break; 2025 break;
1708 default: 2026 default:
1709 break; 2027 break;
1710 } 2028 }
1711 2029
2030 switch (connector) {
2031 case CONNECTOR_PROPRIETARY_LEGACY:
2032 case CONNECTOR_DVI_I_LEGACY:
2033 case CONNECTOR_DVI_D_LEGACY:
2034 if ((tmp >> 4) & 0x1)
2035 hpd.hpd = RADEON_HPD_2;
2036 else
2037 hpd.hpd = RADEON_HPD_1;
2038 break;
2039 default:
2040 hpd.hpd = RADEON_HPD_NONE;
2041 break;
2042 }
2043
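
For BIOS-described connectors, the new code derives the hot-plug pin from bit 4 of the per-connector word: DVI-style and proprietary connectors pick HPD 2 when that bit is set and HPD 1 otherwise, everything else gets no HPD line. A sketch of that decode (the enum values are stand-ins for the RADEON_HPD_* and CONNECTOR_*_LEGACY constants):

#include <stdint.h>
#include <stdio.h>

enum hpd_pin { HPD_NONE, HPD_1, HPD_2 };
enum legacy_connector { CONN_PROPRIETARY, CONN_CRT, CONN_DVI_I, CONN_DVI_D, CONN_STV };

/* Mirrors the switch added above: only DVI-style connectors carry an HPD
 * line, selected by bit 4 of the connector-info word. */
static enum hpd_pin hpd_from_conn_word(enum legacy_connector conn, uint16_t tmp)
{
	switch (conn) {
	case CONN_PROPRIETARY:
	case CONN_DVI_I:
	case CONN_DVI_D:
		return ((tmp >> 4) & 0x1) ? HPD_2 : HPD_1;
	default:
		return HPD_NONE;
	}
}

int main(void)
{
	printf("%d\n", hpd_from_conn_word(CONN_DVI_I, 0x0010)); /* HPD_2 */
	printf("%d\n", hpd_from_conn_word(CONN_DVI_I, 0x0000)); /* HPD_1 */
	printf("%d\n", hpd_from_conn_word(CONN_CRT,   0x0010)); /* HPD_NONE */
	return 0;
}
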
1712 if (!radeon_apply_legacy_quirks(dev, i, &connector, 2044 if (!radeon_apply_legacy_quirks(dev, i, &connector,
1713 &ddc_i2c)) 2045 &ddc_i2c, &hpd))
1714 continue; 2046 continue;
1715 2047
1716 switch (connector) { 2048 switch (connector) {
@@ -1727,7 +2059,8 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1727 legacy_connector_convert 2059 legacy_connector_convert
1728 [connector], 2060 [connector],
1729 &ddc_i2c, 2061 &ddc_i2c,
1730 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D); 2062 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_D,
2063 &hpd);
1731 break; 2064 break;
1732 case CONNECTOR_CRT_LEGACY: 2065 case CONNECTOR_CRT_LEGACY:
1733 if (tmp & 0x1) { 2066 if (tmp & 0x1) {
@@ -1753,7 +2086,8 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1753 legacy_connector_convert 2086 legacy_connector_convert
1754 [connector], 2087 [connector],
1755 &ddc_i2c, 2088 &ddc_i2c,
1756 CONNECTOR_OBJECT_ID_VGA); 2089 CONNECTOR_OBJECT_ID_VGA,
2090 &hpd);
1757 break; 2091 break;
1758 case CONNECTOR_DVI_I_LEGACY: 2092 case CONNECTOR_DVI_I_LEGACY:
1759 devices = 0; 2093 devices = 0;
@@ -1799,7 +2133,8 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1799 legacy_connector_convert 2133 legacy_connector_convert
1800 [connector], 2134 [connector],
1801 &ddc_i2c, 2135 &ddc_i2c,
1802 connector_object_id); 2136 connector_object_id,
2137 &hpd);
1803 break; 2138 break;
1804 case CONNECTOR_DVI_D_LEGACY: 2139 case CONNECTOR_DVI_D_LEGACY:
1805 if ((tmp >> 4) & 0x1) { 2140 if ((tmp >> 4) & 0x1) {
@@ -1817,7 +2152,8 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1817 legacy_connector_convert 2152 legacy_connector_convert
1818 [connector], 2153 [connector],
1819 &ddc_i2c, 2154 &ddc_i2c,
1820 connector_object_id); 2155 connector_object_id,
2156 &hpd);
1821 break; 2157 break;
1822 case CONNECTOR_CTV_LEGACY: 2158 case CONNECTOR_CTV_LEGACY:
1823 case CONNECTOR_STV_LEGACY: 2159 case CONNECTOR_STV_LEGACY:
@@ -1832,7 +2168,8 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1832 legacy_connector_convert 2168 legacy_connector_convert
1833 [connector], 2169 [connector],
1834 &ddc_i2c, 2170 &ddc_i2c,
1835 CONNECTOR_OBJECT_ID_SVIDEO); 2171 CONNECTOR_OBJECT_ID_SVIDEO,
2172 &hpd);
1836 break; 2173 break;
1837 default: 2174 default:
1838 DRM_ERROR("Unknown connector type: %d\n", 2175 DRM_ERROR("Unknown connector type: %d\n",
@@ -1858,14 +2195,16 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1858 0), 2195 0),
1859 ATOM_DEVICE_DFP1_SUPPORT); 2196 ATOM_DEVICE_DFP1_SUPPORT);
1860 2197
1861 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_DVI_DDC); 2198 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_DVI_DDC);
2199 hpd.hpd = RADEON_HPD_NONE;
1862 radeon_add_legacy_connector(dev, 2200 radeon_add_legacy_connector(dev,
1863 0, 2201 0,
1864 ATOM_DEVICE_CRT1_SUPPORT | 2202 ATOM_DEVICE_CRT1_SUPPORT |
1865 ATOM_DEVICE_DFP1_SUPPORT, 2203 ATOM_DEVICE_DFP1_SUPPORT,
1866 DRM_MODE_CONNECTOR_DVII, 2204 DRM_MODE_CONNECTOR_DVII,
1867 &ddc_i2c, 2205 &ddc_i2c,
1868 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I); 2206 CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I,
2207 &hpd);
1869 } else { 2208 } else {
1870 uint16_t crt_info = 2209 uint16_t crt_info =
1871 combios_get_table_offset(dev, COMBIOS_CRT_INFO_TABLE); 2210 combios_get_table_offset(dev, COMBIOS_CRT_INFO_TABLE);
@@ -1876,13 +2215,15 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1876 ATOM_DEVICE_CRT1_SUPPORT, 2215 ATOM_DEVICE_CRT1_SUPPORT,
1877 1), 2216 1),
1878 ATOM_DEVICE_CRT1_SUPPORT); 2217 ATOM_DEVICE_CRT1_SUPPORT);
1879 ddc_i2c = combios_setup_i2c_bus(RADEON_GPIO_VGA_DDC); 2218 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
2219 hpd.hpd = RADEON_HPD_NONE;
1880 radeon_add_legacy_connector(dev, 2220 radeon_add_legacy_connector(dev,
1881 0, 2221 0,
1882 ATOM_DEVICE_CRT1_SUPPORT, 2222 ATOM_DEVICE_CRT1_SUPPORT,
1883 DRM_MODE_CONNECTOR_VGA, 2223 DRM_MODE_CONNECTOR_VGA,
1884 &ddc_i2c, 2224 &ddc_i2c,
1885 CONNECTOR_OBJECT_ID_VGA); 2225 CONNECTOR_OBJECT_ID_VGA,
2226 &hpd);
1886 } else { 2227 } else {
1887 DRM_DEBUG("No connector info found\n"); 2228 DRM_DEBUG("No connector info found\n");
1888 return false; 2229 return false;
@@ -1910,27 +2251,27 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1910 case DDC_MONID: 2251 case DDC_MONID:
1911 ddc_i2c = 2252 ddc_i2c =
1912 combios_setup_i2c_bus 2253 combios_setup_i2c_bus
1913 (RADEON_GPIO_MONID); 2254 (rdev, RADEON_GPIO_MONID);
1914 break; 2255 break;
1915 case DDC_DVI: 2256 case DDC_DVI:
1916 ddc_i2c = 2257 ddc_i2c =
1917 combios_setup_i2c_bus 2258 combios_setup_i2c_bus
1918 (RADEON_GPIO_DVI_DDC); 2259 (rdev, RADEON_GPIO_DVI_DDC);
1919 break; 2260 break;
1920 case DDC_VGA: 2261 case DDC_VGA:
1921 ddc_i2c = 2262 ddc_i2c =
1922 combios_setup_i2c_bus 2263 combios_setup_i2c_bus
1923 (RADEON_GPIO_VGA_DDC); 2264 (rdev, RADEON_GPIO_VGA_DDC);
1924 break; 2265 break;
1925 case DDC_CRT2: 2266 case DDC_CRT2:
1926 ddc_i2c = 2267 ddc_i2c =
1927 combios_setup_i2c_bus 2268 combios_setup_i2c_bus
1928 (RADEON_GPIO_CRT2_DDC); 2269 (rdev, RADEON_GPIO_CRT2_DDC);
1929 break; 2270 break;
1930 case DDC_LCD: 2271 case DDC_LCD:
1931 ddc_i2c = 2272 ddc_i2c =
1932 combios_setup_i2c_bus 2273 combios_setup_i2c_bus
1933 (RADEON_LCD_GPIO_MASK); 2274 (rdev, RADEON_GPIOPAD_MASK);
1934 ddc_i2c.mask_clk_mask = 2275 ddc_i2c.mask_clk_mask =
1935 RBIOS32(lcd_ddc_info + 3); 2276 RBIOS32(lcd_ddc_info + 3);
1936 ddc_i2c.mask_data_mask = 2277 ddc_i2c.mask_data_mask =
@@ -1939,19 +2280,19 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1939 RBIOS32(lcd_ddc_info + 3); 2280 RBIOS32(lcd_ddc_info + 3);
1940 ddc_i2c.a_data_mask = 2281 ddc_i2c.a_data_mask =
1941 RBIOS32(lcd_ddc_info + 7); 2282 RBIOS32(lcd_ddc_info + 7);
1942 ddc_i2c.put_clk_mask = 2283 ddc_i2c.en_clk_mask =
1943 RBIOS32(lcd_ddc_info + 3); 2284 RBIOS32(lcd_ddc_info + 3);
1944 ddc_i2c.put_data_mask = 2285 ddc_i2c.en_data_mask =
1945 RBIOS32(lcd_ddc_info + 7); 2286 RBIOS32(lcd_ddc_info + 7);
1946 ddc_i2c.get_clk_mask = 2287 ddc_i2c.y_clk_mask =
1947 RBIOS32(lcd_ddc_info + 3); 2288 RBIOS32(lcd_ddc_info + 3);
1948 ddc_i2c.get_data_mask = 2289 ddc_i2c.y_data_mask =
1949 RBIOS32(lcd_ddc_info + 7); 2290 RBIOS32(lcd_ddc_info + 7);
1950 break; 2291 break;
1951 case DDC_GPIO: 2292 case DDC_GPIO:
1952 ddc_i2c = 2293 ddc_i2c =
1953 combios_setup_i2c_bus 2294 combios_setup_i2c_bus
1954 (RADEON_MDGPIO_EN_REG); 2295 (rdev, RADEON_MDGPIO_MASK);
1955 ddc_i2c.mask_clk_mask = 2296 ddc_i2c.mask_clk_mask =
1956 RBIOS32(lcd_ddc_info + 3); 2297 RBIOS32(lcd_ddc_info + 3);
1957 ddc_i2c.mask_data_mask = 2298 ddc_i2c.mask_data_mask =
@@ -1960,13 +2301,13 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1960 RBIOS32(lcd_ddc_info + 3); 2301 RBIOS32(lcd_ddc_info + 3);
1961 ddc_i2c.a_data_mask = 2302 ddc_i2c.a_data_mask =
1962 RBIOS32(lcd_ddc_info + 7); 2303 RBIOS32(lcd_ddc_info + 7);
1963 ddc_i2c.put_clk_mask = 2304 ddc_i2c.en_clk_mask =
1964 RBIOS32(lcd_ddc_info + 3); 2305 RBIOS32(lcd_ddc_info + 3);
1965 ddc_i2c.put_data_mask = 2306 ddc_i2c.en_data_mask =
1966 RBIOS32(lcd_ddc_info + 7); 2307 RBIOS32(lcd_ddc_info + 7);
1967 ddc_i2c.get_clk_mask = 2308 ddc_i2c.y_clk_mask =
1968 RBIOS32(lcd_ddc_info + 3); 2309 RBIOS32(lcd_ddc_info + 3);
1969 ddc_i2c.get_data_mask = 2310 ddc_i2c.y_data_mask =
1970 RBIOS32(lcd_ddc_info + 7); 2311 RBIOS32(lcd_ddc_info + 7);
1971 break; 2312 break;
1972 default: 2313 default:
@@ -1977,12 +2318,14 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1977 } else 2318 } else
1978 ddc_i2c.valid = false; 2319 ddc_i2c.valid = false;
1979 2320
2321 hpd.hpd = RADEON_HPD_NONE;
1980 radeon_add_legacy_connector(dev, 2322 radeon_add_legacy_connector(dev,
1981 5, 2323 5,
1982 ATOM_DEVICE_LCD1_SUPPORT, 2324 ATOM_DEVICE_LCD1_SUPPORT,
1983 DRM_MODE_CONNECTOR_LVDS, 2325 DRM_MODE_CONNECTOR_LVDS,
1984 &ddc_i2c, 2326 &ddc_i2c,
1985 CONNECTOR_OBJECT_ID_LVDS); 2327 CONNECTOR_OBJECT_ID_LVDS,
2328 &hpd);
1986 } 2329 }
1987 } 2330 }
1988 2331
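The legacy LCD/GPIO DDC setup above fills every role of the bit-banged bus (mask, a, en, y) from just two 32-bit words in the BIOS: the clock mask at lcd_ddc_info + 3 and the data mask at lcd_ddc_info + 7. Below is a minimal standalone sketch of that parsing; the struct and helper names are hypothetical stand-ins modeled on the driver's radeon_i2c_bus_rec and RBIOS32(), not the driver code itself.

#include <stdint.h>

/* hypothetical, simplified view of the driver's bit-banged i2c record;
 * field names mirror the renamed en_/y_ masks in the diff above */
struct i2c_gpio_rec {
	uint32_t mask_clk_mask, mask_data_mask;
	uint32_t a_clk_mask, a_data_mask;
	uint32_t en_clk_mask, en_data_mask;
	uint32_t y_clk_mask, y_data_mask;
};

/* little-endian 32-bit read from a BIOS image, standing in for RBIOS32() */
static uint32_t rbios32(const uint8_t *bios, uint16_t off)
{
	return bios[off] | ((uint32_t)bios[off + 1] << 8) |
	       ((uint32_t)bios[off + 2] << 16) | ((uint32_t)bios[off + 3] << 24);
}

/* the LCD DDC block stores only two masks: clock at +3, data at +7 */
void fill_lcd_ddc(struct i2c_gpio_rec *rec,
		  const uint8_t *bios, uint16_t lcd_ddc_info)
{
	uint32_t clk = rbios32(bios, lcd_ddc_info + 3);
	uint32_t dat = rbios32(bios, lcd_ddc_info + 7);

	rec->mask_clk_mask = rec->a_clk_mask = clk;
	rec->en_clk_mask = rec->y_clk_mask = clk;
	rec->mask_data_mask = rec->a_data_mask = dat;
	rec->en_data_mask = rec->y_data_mask = dat;
}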
@@ -1993,6 +2336,7 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
1993 if (tv_info) { 2336 if (tv_info) {
1994 if (RBIOS8(tv_info + 6) == 'T') { 2337 if (RBIOS8(tv_info + 6) == 'T') {
1995 if (radeon_apply_legacy_tv_quirks(dev)) { 2338 if (radeon_apply_legacy_tv_quirks(dev)) {
2339 hpd.hpd = RADEON_HPD_NONE;
1996 radeon_add_legacy_encoder(dev, 2340 radeon_add_legacy_encoder(dev,
1997 radeon_get_encoder_id 2341 radeon_get_encoder_id
1998 (dev, 2342 (dev,
@@ -2003,7 +2347,8 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
2003 ATOM_DEVICE_TV1_SUPPORT, 2347 ATOM_DEVICE_TV1_SUPPORT,
2004 DRM_MODE_CONNECTOR_SVIDEO, 2348 DRM_MODE_CONNECTOR_SVIDEO,
2005 &ddc_i2c, 2349 &ddc_i2c,
2006 CONNECTOR_OBJECT_ID_SVIDEO); 2350 CONNECTOR_OBJECT_ID_SVIDEO,
2351 &hpd);
2007 } 2352 }
2008 } 2353 }
2009 } 2354 }
@@ -2014,6 +2359,293 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
2014 return true; 2359 return true;
2015} 2360}
2016 2361
2362void radeon_combios_get_power_modes(struct radeon_device *rdev)
2363{
2364 struct drm_device *dev = rdev->ddev;
2365 u16 offset, misc, misc2 = 0;
2366 u8 rev, blocks, tmp;
2367 int state_index = 0;
2368
2369 rdev->pm.default_power_state = NULL;
2370
2371 if (rdev->flags & RADEON_IS_MOBILITY) {
2372 offset = combios_get_table_offset(dev, COMBIOS_POWERPLAY_INFO_TABLE);
2373 if (offset) {
2374 rev = RBIOS8(offset);
2375 blocks = RBIOS8(offset + 0x2);
2376 /* power mode 0 tends to be the only valid one */
2377 rdev->pm.power_state[state_index].num_clock_modes = 1;
2378 rdev->pm.power_state[state_index].clock_info[0].mclk = RBIOS32(offset + 0x5 + 0x2);
2379 rdev->pm.power_state[state_index].clock_info[0].sclk = RBIOS32(offset + 0x5 + 0x6);
2380 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) ||
2381 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0))
2382 goto default_mode;
2383 /* skip overclock modes for now */
2384 if ((rdev->pm.power_state[state_index].clock_info[0].mclk >
2385 rdev->clock.default_mclk + RADEON_MODE_OVERCLOCK_MARGIN) ||
2386 (rdev->pm.power_state[state_index].clock_info[0].sclk >
2387 rdev->clock.default_sclk + RADEON_MODE_OVERCLOCK_MARGIN))
2388 goto default_mode;
2389 rdev->pm.power_state[state_index].type =
2390 POWER_STATE_TYPE_BATTERY;
2391 misc = RBIOS16(offset + 0x5 + 0x0);
2392 if (rev > 4)
2393 misc2 = RBIOS16(offset + 0x5 + 0xe);
2394 if (misc & 0x4) {
2395 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_GPIO;
2396 if (misc & 0x8)
2397 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
2398 true;
2399 else
2400 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high =
2401 false;
2402 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.valid = true;
2403 if (rev < 6) {
2404 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.reg =
2405 RBIOS16(offset + 0x5 + 0xb) * 4;
2406 tmp = RBIOS8(offset + 0x5 + 0xd);
2407 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.mask = (1 << tmp);
2408 } else {
2409 u8 entries = RBIOS8(offset + 0x5 + 0xb);
2410 u16 voltage_table_offset = RBIOS16(offset + 0x5 + 0xc);
2411 if (entries && voltage_table_offset) {
2412 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.reg =
2413 RBIOS16(voltage_table_offset) * 4;
2414 tmp = RBIOS8(voltage_table_offset + 0x2);
2415 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.mask = (1 << tmp);
2416 } else
2417 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.valid = false;
2418 }
2419 switch ((misc2 & 0x700) >> 8) {
2420 case 0:
2421 default:
2422 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 0;
2423 break;
2424 case 1:
2425 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 33;
2426 break;
2427 case 2:
2428 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 66;
2429 break;
2430 case 3:
2431 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 99;
2432 break;
2433 case 4:
2434 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 132;
2435 break;
2436 }
2437 } else
2438 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE;
2439 if (rev > 6)
2440 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes =
2441 RBIOS8(offset + 0x5 + 0x10);
2442 state_index++;
2443 } else {
2444 /* XXX figure out some good default low power mode for mobility cards w/out power tables */
2445 }
2446 } else {
2447 /* XXX figure out some good default low power mode for desktop cards */
2448 }
2449
2450default_mode:
2451 /* add the default mode */
2452 rdev->pm.power_state[state_index].type =
2453 POWER_STATE_TYPE_DEFAULT;
2454 rdev->pm.power_state[state_index].num_clock_modes = 1;
2455 rdev->pm.power_state[state_index].clock_info[0].mclk = rdev->clock.default_mclk;
2456 rdev->pm.power_state[state_index].clock_info[0].sclk = rdev->clock.default_sclk;
2457 rdev->pm.power_state[state_index].default_clock_mode = &rdev->pm.power_state[state_index].clock_info[0];
2458 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE;
2459 if (rdev->asic->get_pcie_lanes)
2460 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes = radeon_get_pcie_lanes(rdev);
2461 else
2462 rdev->pm.power_state[state_index].non_clock_info.pcie_lanes = 16;
2463 rdev->pm.default_power_state = &rdev->pm.power_state[state_index];
2464 rdev->pm.num_power_states = state_index + 1;
2465
2466 rdev->pm.current_power_state = rdev->pm.default_power_state;
2467 rdev->pm.current_clock_mode =
2468 rdev->pm.default_power_state->default_clock_mode;
2469}
2470
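radeon_combios_get_power_modes() above decodes the voltage switch delay from bits 10:8 of the misc2 word; the switch maps selectors 1..4 to 33/66/99/132 and everything else to 0, i.e. multiples of 33. A standalone sketch of just that decode (the helper name is hypothetical, the values are exactly those of the switch above):

#include <stdint.h>
#include <stdio.h>

/* voltage switch delay from bits 10:8 of the COMBIOS powerplay misc2 word;
 * selectors 1..4 give 33/66/99/132, everything else (including 0) means none */
static unsigned int powerplay_voltage_delay(uint16_t misc2)
{
	unsigned int sel = (misc2 & 0x700) >> 8;

	return (sel >= 1 && sel <= 4) ? sel * 33 : 0;
}

int main(void)
{
	unsigned int sel;

	for (sel = 0; sel < 8; sel++)
		printf("selector %u -> delay %u\n", sel,
		       powerplay_voltage_delay((uint16_t)(sel << 8)));
	return 0;
}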
2471void radeon_external_tmds_setup(struct drm_encoder *encoder)
2472{
2473 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
2474 struct radeon_encoder_ext_tmds *tmds = radeon_encoder->enc_priv;
2475
2476 if (!tmds)
2477 return;
2478
2479 switch (tmds->dvo_chip) {
2480 case DVO_SIL164:
2481 /* sil 164 */
2482 radeon_i2c_put_byte(tmds->i2c_bus,
2483 tmds->slave_addr,
2484 0x08, 0x30);
2485 radeon_i2c_put_byte(tmds->i2c_bus,
2486 tmds->slave_addr,
2487 0x09, 0x00);
2488 radeon_i2c_put_byte(tmds->i2c_bus,
2489 tmds->slave_addr,
2490 0x0a, 0x90);
2491 radeon_i2c_put_byte(tmds->i2c_bus,
2492 tmds->slave_addr,
2493 0x0c, 0x89);
2494 radeon_i2c_put_byte(tmds->i2c_bus,
2495 tmds->slave_addr,
2496 0x08, 0x3b);
2497 break;
2498 case DVO_SIL1178:
2499 /* sil 1178 - untested */
2500 /*
2501 * 0x0f, 0x44
2502 * 0x0f, 0x4c
2503 * 0x0e, 0x01
2504 * 0x0a, 0x80
2505 * 0x09, 0x30
2506 * 0x0c, 0xc9
2507 * 0x0d, 0x70
2508 * 0x08, 0x32
2509 * 0x08, 0x33
2510 */
2511 break;
2512 default:
2513 break;
2514 }
2515
2516}
2517
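The SiL164 branch of radeon_external_tmds_setup() above is five (register, value) writes over the DVO I2C bus. The same sequence expressed as a table-driven sketch; the transfer routine is a printf stub standing in for radeon_i2c_put_byte(), and the slave address is made up (the driver reads the real one from the BIOS ext-TMDS table).

#include <stdint.h>
#include <stdio.h>

struct reg_val { uint8_t reg, val; };

/* the same five writes as the DVO_SIL164 case above */
static const struct reg_val sil164_init[] = {
	{ 0x08, 0x30 }, { 0x09, 0x00 }, { 0x0a, 0x90 },
	{ 0x0c, 0x89 }, { 0x08, 0x3b },
};

/* stand-in for radeon_i2c_put_byte(): a real version would run an I2C write */
static void i2c_put_byte(uint8_t slave, uint8_t reg, uint8_t val)
{
	printf("i2c 0x%02x: reg 0x%02x <- 0x%02x\n", slave, reg, val);
}

int main(void)
{
	const uint8_t slave_addr = 0x38;	/* made-up address for the sketch */
	unsigned int i;

	for (i = 0; i < sizeof(sil164_init) / sizeof(sil164_init[0]); i++)
		i2c_put_byte(slave_addr, sil164_init[i].reg, sil164_init[i].val);
	return 0;
}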
2518bool radeon_combios_external_tmds_setup(struct drm_encoder *encoder)
2519{
2520 struct drm_device *dev = encoder->dev;
2521 struct radeon_device *rdev = dev->dev_private;
2522 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
2523 uint16_t offset;
2524 uint8_t blocks, slave_addr, rev;
2525 uint32_t index, id;
2526 uint32_t reg, val, and_mask, or_mask;
2527 struct radeon_encoder_ext_tmds *tmds = radeon_encoder->enc_priv;
2528
2529 if (!tmds)
2530 return false;
2531
2532 if (rdev->flags & RADEON_IS_IGP) {
2533 offset = combios_get_table_offset(dev, COMBIOS_TMDS_POWER_ON_TABLE);
2534 rev = RBIOS8(offset);
2535 if (offset) {
2536 rev = RBIOS8(offset);
2537 if (rev > 1) {
2538 blocks = RBIOS8(offset + 3);
2539 index = offset + 4;
2540 while (blocks > 0) {
2541 id = RBIOS16(index);
2542 index += 2;
2543 switch (id >> 13) {
2544 case 0:
2545 reg = (id & 0x1fff) * 4;
2546 val = RBIOS32(index);
2547 index += 4;
2548 WREG32(reg, val);
2549 break;
2550 case 2:
2551 reg = (id & 0x1fff) * 4;
2552 and_mask = RBIOS32(index);
2553 index += 4;
2554 or_mask = RBIOS32(index);
2555 index += 4;
2556 val = RREG32(reg);
2557 val = (val & and_mask) | or_mask;
2558 WREG32(reg, val);
2559 break;
2560 case 3:
2561 val = RBIOS16(index);
2562 index += 2;
2563 udelay(val);
2564 break;
2565 case 4:
2566 val = RBIOS16(index);
2567 index += 2;
2568 udelay(val * 1000);
2569 break;
2570 case 6:
2571 slave_addr = id & 0xff;
2572 slave_addr >>= 1; /* 7 bit addressing */
2573 index++;
2574 reg = RBIOS8(index);
2575 index++;
2576 val = RBIOS8(index);
2577 index++;
2578 radeon_i2c_put_byte(tmds->i2c_bus,
2579 slave_addr,
2580 reg, val);
2581 break;
2582 default:
2583 DRM_ERROR("Unknown id %d\n", id >> 13);
2584 break;
2585 }
2586 blocks--;
2587 }
2588 return true;
2589 }
2590 }
2591 } else {
2592 offset = combios_get_table_offset(dev, COMBIOS_EXT_TMDS_INFO_TABLE);
2593 if (offset) {
2594 index = offset + 10;
2595 id = RBIOS16(index);
2596 while (id != 0xffff) {
2597 index += 2;
2598 switch (id >> 13) {
2599 case 0:
2600 reg = (id & 0x1fff) * 4;
2601 val = RBIOS32(index);
2602 WREG32(reg, val);
2603 break;
2604 case 2:
2605 reg = (id & 0x1fff) * 4;
2606 and_mask = RBIOS32(index);
2607 index += 4;
2608 or_mask = RBIOS32(index);
2609 index += 4;
2610 val = RREG32(reg);
2611 val = (val & and_mask) | or_mask;
2612 WREG32(reg, val);
2613 break;
2614 case 4:
2615 val = RBIOS16(index);
2616 index += 2;
2617 udelay(val);
2618 break;
2619 case 5:
2620 reg = id & 0x1fff;
2621 and_mask = RBIOS32(index);
2622 index += 4;
2623 or_mask = RBIOS32(index);
2624 index += 4;
2625 val = RREG32_PLL(reg);
2626 val = (val & and_mask) | or_mask;
2627 WREG32_PLL(reg, val);
2628 break;
2629 case 6:
2630 reg = id & 0x1fff;
2631 val = RBIOS8(index);
2632 index += 1;
2633 radeon_i2c_put_byte(tmds->i2c_bus,
2634 tmds->slave_addr,
2635 reg, val);
2636 break;
2637 default:
2638 DRM_ERROR("Unknown id %d\n", id >> 13);
2639 break;
2640 }
2641 id = RBIOS16(index);
2642 }
2643 return true;
2644 }
2645 }
2646 return false;
2647}
2648
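Both ext-TMDS tables parsed above are tiny scripts: each 16-bit id carries an opcode in bits 15:13 and a register index in bits 12:0, followed by opcode-specific operands. Below is a self-contained sketch of an interpreter for the IGP-style table (register write, read-modify-write, microsecond delay, millisecond delay, I2C write); MMIO, delays and I2C are stubbed with printf and all helper names are placeholders, not the driver's API.

#include <stdint.h>
#include <stdio.h>

static uint8_t rbios8(const uint8_t *b, uint32_t o) { return b[o]; }
static uint16_t rbios16(const uint8_t *b, uint32_t o) { return b[o] | (b[o + 1] << 8); }
static uint32_t rbios32(const uint8_t *b, uint32_t o)
{
	return rbios16(b, o) | ((uint32_t)rbios16(b, o + 2) << 16);
}

/* stubs standing in for MMIO, delays and I2C */
static uint32_t rreg32(uint32_t reg) { printf("read  0x%04x\n", (unsigned int)reg); return 0; }
static void wreg32(uint32_t reg, uint32_t val)
{
	printf("write 0x%04x = 0x%08x\n", (unsigned int)reg, (unsigned int)val);
}
static void delay_us(uint32_t us) { printf("delay %u us\n", (unsigned int)us); }
static void i2c_put(uint8_t slave, uint8_t reg, uint8_t val)
{
	printf("i2c 0x%02x reg 0x%02x = 0x%02x\n", slave, reg, val);
}

void run_tmds_script(const uint8_t *bios, uint32_t index, uint8_t blocks)
{
	while (blocks--) {
		uint16_t id = rbios16(bios, index);
		uint32_t reg = (id & 0x1fff) * 4;

		index += 2;
		switch (id >> 13) {
		case 0:	/* straight register write */
			wreg32(reg, rbios32(bios, index));
			index += 4;
			break;
		case 2: {	/* read-modify-write */
			uint32_t and_mask = rbios32(bios, index);
			uint32_t or_mask = rbios32(bios, index + 4);

			index += 8;
			wreg32(reg, (rreg32(reg) & and_mask) | or_mask);
			break;
		}
		case 3:	/* delay in microseconds */
			delay_us(rbios16(bios, index));
			index += 2;
			break;
		case 4:	/* delay in milliseconds */
			delay_us(rbios16(bios, index) * 1000u);
			index += 2;
			break;
		case 6: {	/* I2C register write, 7-bit slave address packed in id */
			uint8_t slave = (id & 0xff) >> 1;
			uint8_t r, v;

			index++;	/* the driver skips one byte here as well */
			r = rbios8(bios, index++);
			v = rbios8(bios, index++);
			i2c_put(slave, r, v);
			break;
		}
		default:
			printf("unknown opcode %u\n", (unsigned int)(id >> 13));
			break;
		}
	}
}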
2017static void combios_parse_mmio_table(struct drm_device *dev, uint16_t offset) 2649static void combios_parse_mmio_table(struct drm_device *dev, uint16_t offset)
2018{ 2650{
2019 struct radeon_device *rdev = dev->dev_private; 2651 struct radeon_device *rdev = dev->dev_private;
diff --git a/drivers/gpu/drm/radeon/radeon_connectors.c b/drivers/gpu/drm/radeon/radeon_connectors.c
index 29763ceae3af..4559a53d5e57 100644
--- a/drivers/gpu/drm/radeon/radeon_connectors.c
+++ b/drivers/gpu/drm/radeon/radeon_connectors.c
@@ -40,6 +40,28 @@ radeon_atombios_connected_scratch_regs(struct drm_connector *connector,
40 struct drm_encoder *encoder, 40 struct drm_encoder *encoder,
41 bool connected); 41 bool connected);
42 42
43void radeon_connector_hotplug(struct drm_connector *connector)
44{
45 struct drm_device *dev = connector->dev;
46 struct radeon_device *rdev = dev->dev_private;
47 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
48
49 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
50 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
51
52 if ((connector->connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
53 (connector->connector_type == DRM_MODE_CONNECTOR_eDP)) {
54 if ((radeon_dp_getsinktype(radeon_connector) == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
55 (radeon_dp_getsinktype(radeon_connector) == CONNECTOR_OBJECT_ID_eDP)) {
56 if (radeon_dp_needs_link_train(radeon_connector)) {
57 if (connector->encoder)
58 dp_link_train(connector->encoder, connector);
59 }
60 }
61 }
62
63}
64
43static void radeon_property_change_mode(struct drm_encoder *encoder) 65static void radeon_property_change_mode(struct drm_encoder *encoder)
44{ 66{
45 struct drm_crtc *crtc = encoder->crtc; 67 struct drm_crtc *crtc = encoder->crtc;
@@ -140,12 +162,14 @@ radeon_connector_analog_encoder_conflict_solve(struct drm_connector *connector,
140{ 162{
141 struct drm_device *dev = connector->dev; 163 struct drm_device *dev = connector->dev;
142 struct drm_connector *conflict; 164 struct drm_connector *conflict;
165 struct radeon_connector *radeon_conflict;
143 int i; 166 int i;
144 167
145 list_for_each_entry(conflict, &dev->mode_config.connector_list, head) { 168 list_for_each_entry(conflict, &dev->mode_config.connector_list, head) {
146 if (conflict == connector) 169 if (conflict == connector)
147 continue; 170 continue;
148 171
172 radeon_conflict = to_radeon_connector(conflict);
149 for (i = 0; i < DRM_CONNECTOR_MAX_ENCODER; i++) { 173 for (i = 0; i < DRM_CONNECTOR_MAX_ENCODER; i++) {
150 if (conflict->encoder_ids[i] == 0) 174 if (conflict->encoder_ids[i] == 0)
151 break; 175 break;
@@ -155,6 +179,9 @@ radeon_connector_analog_encoder_conflict_solve(struct drm_connector *connector,
155 if (conflict->status != connector_status_connected) 179 if (conflict->status != connector_status_connected)
156 continue; 180 continue;
157 181
182 if (radeon_conflict->use_digital)
183 continue;
184
158 if (priority == true) { 185 if (priority == true) {
159 DRM_INFO("1: conflicting encoders switching off %s\n", drm_get_connector_name(conflict)); 186 DRM_INFO("1: conflicting encoders switching off %s\n", drm_get_connector_name(conflict));
160 DRM_INFO("in favor of %s\n", drm_get_connector_name(connector)); 187 DRM_INFO("in favor of %s\n", drm_get_connector_name(connector));
@@ -188,6 +215,18 @@ static struct drm_display_mode *radeon_fp_native_mode(struct drm_encoder *encode
188 drm_mode_set_name(mode); 215 drm_mode_set_name(mode);
189 216
190 DRM_DEBUG("Adding native panel mode %s\n", mode->name); 217 DRM_DEBUG("Adding native panel mode %s\n", mode->name);
218 } else if (native_mode->hdisplay != 0 &&
219 native_mode->vdisplay != 0) {
220 /* mac laptops without an edid */
221 /* Note that this is not necessarily the exact panel mode,
222 * but an approximation based on the cvt formula. For these
223 * systems we should ideally read the mode info out of the
224 * registers or add a mode table, but this works and is much
225 * simpler.
226 */
227 mode = drm_cvt_mode(dev, native_mode->hdisplay, native_mode->vdisplay, 60, true, false, false);
228 mode->type = DRM_MODE_TYPE_PREFERRED | DRM_MODE_TYPE_DRIVER;
229 DRM_DEBUG("Adding cvt approximation of native panel mode %s\n", mode->name);
191 } 230 }
192 return mode; 231 return mode;
193} 232}
@@ -253,6 +292,7 @@ int radeon_connector_set_property(struct drm_connector *connector, struct drm_pr
253 292
254 if (property == rdev->mode_info.coherent_mode_property) { 293 if (property == rdev->mode_info.coherent_mode_property) {
255 struct radeon_encoder_atom_dig *dig; 294 struct radeon_encoder_atom_dig *dig;
295 bool new_coherent_mode;
256 296
257 /* need to find digital encoder on connector */ 297 /* need to find digital encoder on connector */
258 encoder = radeon_find_encoder(connector, DRM_MODE_ENCODER_TMDS); 298 encoder = radeon_find_encoder(connector, DRM_MODE_ENCODER_TMDS);
@@ -265,8 +305,11 @@ int radeon_connector_set_property(struct drm_connector *connector, struct drm_pr
265 return 0; 305 return 0;
266 306
267 dig = radeon_encoder->enc_priv; 307 dig = radeon_encoder->enc_priv;
268 dig->coherent_mode = val ? true : false; 308 new_coherent_mode = val ? true : false;
269 radeon_property_change_mode(&radeon_encoder->base); 309 if (dig->coherent_mode != new_coherent_mode) {
310 dig->coherent_mode = new_coherent_mode;
311 radeon_property_change_mode(&radeon_encoder->base);
312 }
270 } 313 }
271 314
272 if (property == rdev->mode_info.tv_std_property) { 315 if (property == rdev->mode_info.tv_std_property) {
@@ -281,7 +324,7 @@ int radeon_connector_set_property(struct drm_connector *connector, struct drm_pr
281 radeon_encoder = to_radeon_encoder(encoder); 324 radeon_encoder = to_radeon_encoder(encoder);
282 if (!radeon_encoder->enc_priv) 325 if (!radeon_encoder->enc_priv)
283 return 0; 326 return 0;
284 if (rdev->is_atom_bios) { 327 if (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom) {
285 struct radeon_encoder_atom_dac *dac_int; 328 struct radeon_encoder_atom_dac *dac_int;
286 dac_int = radeon_encoder->enc_priv; 329 dac_int = radeon_encoder->enc_priv;
287 dac_int->tv_std = val; 330 dac_int->tv_std = val;
@@ -445,10 +488,8 @@ static enum drm_connector_status radeon_lvds_detect(struct drm_connector *connec
445 ret = connector_status_connected; 488 ret = connector_status_connected;
446 else { 489 else {
447 if (radeon_connector->ddc_bus) { 490 if (radeon_connector->ddc_bus) {
448 radeon_i2c_do_lock(radeon_connector, 1);
449 radeon_connector->edid = drm_get_edid(&radeon_connector->base, 491 radeon_connector->edid = drm_get_edid(&radeon_connector->base,
450 &radeon_connector->ddc_bus->adapter); 492 &radeon_connector->ddc_bus->adapter);
451 radeon_i2c_do_lock(radeon_connector, 0);
452 if (radeon_connector->edid) 493 if (radeon_connector->edid)
453 ret = connector_status_connected; 494 ret = connector_status_connected;
454 } 495 }
@@ -546,24 +587,21 @@ static enum drm_connector_status radeon_vga_detect(struct drm_connector *connect
546 struct radeon_connector *radeon_connector = to_radeon_connector(connector); 587 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
547 struct drm_encoder *encoder; 588 struct drm_encoder *encoder;
548 struct drm_encoder_helper_funcs *encoder_funcs; 589 struct drm_encoder_helper_funcs *encoder_funcs;
549 bool dret; 590 bool dret = false;
550 enum drm_connector_status ret = connector_status_disconnected; 591 enum drm_connector_status ret = connector_status_disconnected;
551 592
552 encoder = radeon_best_single_encoder(connector); 593 encoder = radeon_best_single_encoder(connector);
553 if (!encoder) 594 if (!encoder)
554 ret = connector_status_disconnected; 595 ret = connector_status_disconnected;
555 596
556 radeon_i2c_do_lock(radeon_connector, 1); 597 if (radeon_connector->ddc_bus)
557 dret = radeon_ddc_probe(radeon_connector); 598 dret = radeon_ddc_probe(radeon_connector);
558 radeon_i2c_do_lock(radeon_connector, 0);
559 if (dret) { 599 if (dret) {
560 if (radeon_connector->edid) { 600 if (radeon_connector->edid) {
561 kfree(radeon_connector->edid); 601 kfree(radeon_connector->edid);
562 radeon_connector->edid = NULL; 602 radeon_connector->edid = NULL;
563 } 603 }
564 radeon_i2c_do_lock(radeon_connector, 1);
565 radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter); 604 radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
566 radeon_i2c_do_lock(radeon_connector, 0);
567 605
568 if (!radeon_connector->edid) { 606 if (!radeon_connector->edid) {
569 DRM_ERROR("%s: probed a monitor but no|invalid EDID\n", 607 DRM_ERROR("%s: probed a monitor but no|invalid EDID\n",
@@ -583,7 +621,7 @@ static enum drm_connector_status radeon_vga_detect(struct drm_connector *connect
583 ret = connector_status_connected; 621 ret = connector_status_connected;
584 } 622 }
585 } else { 623 } else {
586 if (radeon_connector->dac_load_detect) { 624 if (radeon_connector->dac_load_detect && encoder) {
587 encoder_funcs = encoder->helper_private; 625 encoder_funcs = encoder->helper_private;
588 ret = encoder_funcs->detect(encoder, connector); 626 ret = encoder_funcs->detect(encoder, connector);
589 } 627 }
@@ -706,19 +744,16 @@ static enum drm_connector_status radeon_dvi_detect(struct drm_connector *connect
706 struct drm_mode_object *obj; 744 struct drm_mode_object *obj;
707 int i; 745 int i;
708 enum drm_connector_status ret = connector_status_disconnected; 746 enum drm_connector_status ret = connector_status_disconnected;
709 bool dret; 747 bool dret = false;
710 748
711 radeon_i2c_do_lock(radeon_connector, 1); 749 if (radeon_connector->ddc_bus)
712 dret = radeon_ddc_probe(radeon_connector); 750 dret = radeon_ddc_probe(radeon_connector);
713 radeon_i2c_do_lock(radeon_connector, 0);
714 if (dret) { 751 if (dret) {
715 if (radeon_connector->edid) { 752 if (radeon_connector->edid) {
716 kfree(radeon_connector->edid); 753 kfree(radeon_connector->edid);
717 radeon_connector->edid = NULL; 754 radeon_connector->edid = NULL;
718 } 755 }
719 radeon_i2c_do_lock(radeon_connector, 1);
720 radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter); 756 radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
721 radeon_i2c_do_lock(radeon_connector, 0);
722 757
723 if (!radeon_connector->edid) { 758 if (!radeon_connector->edid) {
724 DRM_ERROR("%s: probed a monitor but no|invalid EDID\n", 759 DRM_ERROR("%s: probed a monitor but no|invalid EDID\n",
@@ -735,6 +770,39 @@ static enum drm_connector_status radeon_dvi_detect(struct drm_connector *connect
735 ret = connector_status_disconnected; 770 ret = connector_status_disconnected;
736 } else 771 } else
737 ret = connector_status_connected; 772 ret = connector_status_connected;
773
774 /* multiple connectors on the same encoder with the same ddc line
775 * This tends to be HDMI and DVI on the same encoder with the
776 * same ddc line. If the edid says HDMI, consider the HDMI port
777 * connected and the DVI port disconnected. If the edid doesn't
778 * say HDMI, vice versa.
779 */
780 if (radeon_connector->shared_ddc && (ret == connector_status_connected)) {
781 struct drm_device *dev = connector->dev;
782 struct drm_connector *list_connector;
783 struct radeon_connector *list_radeon_connector;
784 list_for_each_entry(list_connector, &dev->mode_config.connector_list, head) {
785 if (connector == list_connector)
786 continue;
787 list_radeon_connector = to_radeon_connector(list_connector);
788 if (radeon_connector->devices == list_radeon_connector->devices) {
789 if (drm_detect_hdmi_monitor(radeon_connector->edid)) {
790 if (connector->connector_type == DRM_MODE_CONNECTOR_DVID) {
791 kfree(radeon_connector->edid);
792 radeon_connector->edid = NULL;
793 ret = connector_status_disconnected;
794 }
795 } else {
796 if ((connector->connector_type == DRM_MODE_CONNECTOR_HDMIA) ||
797 (connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)) {
798 kfree(radeon_connector->edid);
799 radeon_connector->edid = NULL;
800 ret = connector_status_disconnected;
801 }
802 }
803 }
804 }
805 }
738 } 806 }
739 } 807 }
740 808
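The shared-DDC handling added above reduces to one rule: when two connectors share a DDC line and the EDID reports an HDMI sink, keep the HDMI connector and mark the DVI one disconnected; otherwise keep the DVI one. A sketch of that predicate, where edid_is_hdmi stands in for drm_detect_hdmi_monitor():

#include <stdbool.h>

enum ctype { CTYPE_DVID, CTYPE_HDMIA, CTYPE_HDMIB };

bool keep_shared_ddc_connector(enum ctype type, bool edid_is_hdmi)
{
	if (edid_is_hdmi)
		return type != CTYPE_DVID;			/* HDMI sink: drop the DVI-D port */
	return type != CTYPE_HDMIA && type != CTYPE_HDMIB;	/* DVI sink: drop the HDMI ports */
}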
@@ -833,10 +901,18 @@ static void radeon_dvi_force(struct drm_connector *connector)
833static int radeon_dvi_mode_valid(struct drm_connector *connector, 901static int radeon_dvi_mode_valid(struct drm_connector *connector,
834 struct drm_display_mode *mode) 902 struct drm_display_mode *mode)
835{ 903{
904 struct drm_device *dev = connector->dev;
905 struct radeon_device *rdev = dev->dev_private;
836 struct radeon_connector *radeon_connector = to_radeon_connector(connector); 906 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
837 907
838 /* XXX check mode bandwidth */ 908 /* XXX check mode bandwidth */
839 909
910 /* clocks over 135 MHz have heat issues with DVI on RV100 */
911 if (radeon_connector->use_digital &&
912 (rdev->family == CHIP_RV100) &&
913 (mode->clock > 135000))
914 return MODE_CLOCK_HIGH;
915
840 if (radeon_connector->use_digital && (mode->clock > 165000)) { 916 if (radeon_connector->use_digital && (mode->clock > 165000)) {
841 if ((radeon_connector->connector_object_id == CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I) || 917 if ((radeon_connector->connector_object_id == CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_I) ||
842 (radeon_connector->connector_object_id == CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D) || 918 (radeon_connector->connector_object_id == CONNECTOR_OBJECT_ID_DUAL_LINK_DVI_D) ||
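The mode_valid change above layers two pixel-clock limits: RV100 refuses digital modes above 135 MHz, and single-link TMDS is capped at 165 MHz in any case. A condensed sketch of the check (clocks in kHz as in drm_display_mode; the dual_link flag is a simplification of the connector-object-id checks in the hunk above):

#include <stdbool.h>

bool dvi_clock_ok(bool use_digital, bool is_rv100, bool dual_link, int clock_khz)
{
	if (!use_digital)
		return true;			/* analog path: no TMDS limit here */
	if (is_rv100 && clock_khz > 135000)
		return false;			/* RV100 heat issue */
	if (clock_khz > 165000 && !dual_link)
		return false;			/* single-link TMDS limit */
	return true;
}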
@@ -863,6 +939,91 @@ struct drm_connector_funcs radeon_dvi_connector_funcs = {
863 .force = radeon_dvi_force, 939 .force = radeon_dvi_force,
864}; 940};
865 941
942static void radeon_dp_connector_destroy(struct drm_connector *connector)
943{
944 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
945 struct radeon_connector_atom_dig *radeon_dig_connector = radeon_connector->con_priv;
946
947 if (radeon_connector->ddc_bus)
948 radeon_i2c_destroy(radeon_connector->ddc_bus);
949 if (radeon_connector->edid)
950 kfree(radeon_connector->edid);
951 if (radeon_dig_connector->dp_i2c_bus)
952 radeon_i2c_destroy(radeon_dig_connector->dp_i2c_bus);
953 kfree(radeon_connector->con_priv);
954 drm_sysfs_connector_remove(connector);
955 drm_connector_cleanup(connector);
956 kfree(connector);
957}
958
959static int radeon_dp_get_modes(struct drm_connector *connector)
960{
961 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
962 int ret;
963
964 ret = radeon_ddc_get_modes(radeon_connector);
965 return ret;
966}
967
968static enum drm_connector_status radeon_dp_detect(struct drm_connector *connector)
969{
970 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
971 enum drm_connector_status ret = connector_status_disconnected;
972 struct radeon_connector_atom_dig *radeon_dig_connector = radeon_connector->con_priv;
973 u8 sink_type;
974
975 if (radeon_connector->edid) {
976 kfree(radeon_connector->edid);
977 radeon_connector->edid = NULL;
978 }
979
980 sink_type = radeon_dp_getsinktype(radeon_connector);
981 if ((sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
982 (sink_type == CONNECTOR_OBJECT_ID_eDP)) {
983 if (radeon_dp_getdpcd(radeon_connector)) {
984 radeon_dig_connector->dp_sink_type = sink_type;
985 ret = connector_status_connected;
986 }
987 } else {
988 if (radeon_ddc_probe(radeon_connector)) {
989 radeon_dig_connector->dp_sink_type = sink_type;
990 ret = connector_status_connected;
991 }
992 }
993
994 return ret;
995}
996
997static int radeon_dp_mode_valid(struct drm_connector *connector,
998 struct drm_display_mode *mode)
999{
1000 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1001 struct radeon_connector_atom_dig *radeon_dig_connector = radeon_connector->con_priv;
1002
1003 /* XXX check mode bandwidth */
1004
1005 if ((radeon_dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
1006 (radeon_dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP))
1007 return radeon_dp_mode_valid_helper(radeon_connector, mode);
1008 else
1009 return MODE_OK;
1010}
1011
1012struct drm_connector_helper_funcs radeon_dp_connector_helper_funcs = {
1013 .get_modes = radeon_dp_get_modes,
1014 .mode_valid = radeon_dp_mode_valid,
1015 .best_encoder = radeon_dvi_encoder,
1016};
1017
1018struct drm_connector_funcs radeon_dp_connector_funcs = {
1019 .dpms = drm_helper_connector_dpms,
1020 .detect = radeon_dp_detect,
1021 .fill_modes = drm_helper_probe_single_connector_modes,
1022 .set_property = radeon_connector_set_property,
1023 .destroy = radeon_dp_connector_destroy,
1024 .force = radeon_dvi_force,
1025};
1026
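radeon_dp_detect() above picks the probe method from the sink type reported by the BIOS object table: DP and eDP sinks are detected by reading their DPCD, anything else falls back to a plain DDC probe. A minimal sketch with both probes stubbed out (the helper names are placeholders for radeon_dp_getdpcd() and radeon_ddc_probe()):

#include <stdbool.h>
#include <stdio.h>

enum dp_sink { SINK_DISPLAYPORT, SINK_EDP, SINK_OTHER };

static bool read_dpcd(void) { puts("read DPCD over the aux channel"); return true; }
static bool ddc_probe(void) { puts("probe the DDC line"); return true; }

bool dp_connector_connected(enum dp_sink sink_type)
{
	if (sink_type == SINK_DISPLAYPORT || sink_type == SINK_EDP)
		return read_dpcd();	/* native DP/eDP: the DPCD read doubles as detect */
	return ddc_probe();		/* anything else: plain DDC probe */
}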
866void 1027void
867radeon_add_atom_connector(struct drm_device *dev, 1028radeon_add_atom_connector(struct drm_device *dev,
868 uint32_t connector_id, 1029 uint32_t connector_id,
@@ -871,7 +1032,8 @@ radeon_add_atom_connector(struct drm_device *dev,
871 struct radeon_i2c_bus_rec *i2c_bus, 1032 struct radeon_i2c_bus_rec *i2c_bus,
872 bool linkb, 1033 bool linkb,
873 uint32_t igp_lane_info, 1034 uint32_t igp_lane_info,
874 uint16_t connector_object_id) 1035 uint16_t connector_object_id,
1036 struct radeon_hpd *hpd)
875{ 1037{
876 struct radeon_device *rdev = dev->dev_private; 1038 struct radeon_device *rdev = dev->dev_private;
877 struct drm_connector *connector; 1039 struct drm_connector *connector;
@@ -893,8 +1055,7 @@ radeon_add_atom_connector(struct drm_device *dev,
893 return; 1055 return;
894 } 1056 }
895 if (radeon_connector->ddc_bus && i2c_bus->valid) { 1057 if (radeon_connector->ddc_bus && i2c_bus->valid) {
896 if (memcmp(&radeon_connector->ddc_bus->rec, i2c_bus, 1058 if (radeon_connector->ddc_bus->rec.i2c_id == i2c_bus->i2c_id) {
897 sizeof(struct radeon_i2c_bus_rec)) == 0) {
898 radeon_connector->shared_ddc = true; 1059 radeon_connector->shared_ddc = true;
899 shared_ddc = true; 1060 shared_ddc = true;
900 } 1061 }
@@ -911,6 +1072,7 @@ radeon_add_atom_connector(struct drm_device *dev,
911 radeon_connector->devices = supported_device; 1072 radeon_connector->devices = supported_device;
912 radeon_connector->shared_ddc = shared_ddc; 1073 radeon_connector->shared_ddc = shared_ddc;
913 radeon_connector->connector_object_id = connector_object_id; 1074 radeon_connector->connector_object_id = connector_object_id;
1075 radeon_connector->hpd = *hpd;
914 switch (connector_type) { 1076 switch (connector_type) {
915 case DRM_MODE_CONNECTOR_VGA: 1077 case DRM_MODE_CONNECTOR_VGA:
916 drm_connector_init(dev, &radeon_connector->base, &radeon_vga_connector_funcs, connector_type); 1078 drm_connector_init(dev, &radeon_connector->base, &radeon_vga_connector_funcs, connector_type);
@@ -963,10 +1125,12 @@ radeon_add_atom_connector(struct drm_device *dev,
963 drm_connector_attach_property(&radeon_connector->base, 1125 drm_connector_attach_property(&radeon_connector->base,
964 rdev->mode_info.coherent_mode_property, 1126 rdev->mode_info.coherent_mode_property,
965 1); 1127 1);
966 radeon_connector->dac_load_detect = true; 1128 if (connector_type == DRM_MODE_CONNECTOR_DVII) {
967 drm_connector_attach_property(&radeon_connector->base, 1129 radeon_connector->dac_load_detect = true;
968 rdev->mode_info.load_detect_property, 1130 drm_connector_attach_property(&radeon_connector->base,
969 1); 1131 rdev->mode_info.load_detect_property,
1132 1);
1133 }
970 break; 1134 break;
971 case DRM_MODE_CONNECTOR_HDMIA: 1135 case DRM_MODE_CONNECTOR_HDMIA:
972 case DRM_MODE_CONNECTOR_HDMIB: 1136 case DRM_MODE_CONNECTOR_HDMIB:
@@ -991,22 +1155,36 @@ radeon_add_atom_connector(struct drm_device *dev,
991 subpixel_order = SubPixelHorizontalRGB; 1155 subpixel_order = SubPixelHorizontalRGB;
992 break; 1156 break;
993 case DRM_MODE_CONNECTOR_DisplayPort: 1157 case DRM_MODE_CONNECTOR_DisplayPort:
1158 case DRM_MODE_CONNECTOR_eDP:
994 radeon_dig_connector = kzalloc(sizeof(struct radeon_connector_atom_dig), GFP_KERNEL); 1159 radeon_dig_connector = kzalloc(sizeof(struct radeon_connector_atom_dig), GFP_KERNEL);
995 if (!radeon_dig_connector) 1160 if (!radeon_dig_connector)
996 goto failed; 1161 goto failed;
997 radeon_dig_connector->linkb = linkb; 1162 radeon_dig_connector->linkb = linkb;
998 radeon_dig_connector->igp_lane_info = igp_lane_info; 1163 radeon_dig_connector->igp_lane_info = igp_lane_info;
999 radeon_connector->con_priv = radeon_dig_connector; 1164 radeon_connector->con_priv = radeon_dig_connector;
1000 drm_connector_init(dev, &radeon_connector->base, &radeon_dvi_connector_funcs, connector_type); 1165 drm_connector_init(dev, &radeon_connector->base, &radeon_dp_connector_funcs, connector_type);
1001 ret = drm_connector_helper_add(&radeon_connector->base, &radeon_dvi_connector_helper_funcs); 1166 ret = drm_connector_helper_add(&radeon_connector->base, &radeon_dp_connector_helper_funcs);
1002 if (ret) 1167 if (ret)
1003 goto failed; 1168 goto failed;
1004 if (i2c_bus->valid) { 1169 if (i2c_bus->valid) {
1005 radeon_connector->ddc_bus = radeon_i2c_create(dev, i2c_bus, "DP"); 1170 /* add DP i2c bus */
1171 if (connector_type == DRM_MODE_CONNECTOR_eDP)
1172 radeon_dig_connector->dp_i2c_bus = radeon_i2c_create_dp(dev, i2c_bus, "eDP-auxch");
1173 else
1174 radeon_dig_connector->dp_i2c_bus = radeon_i2c_create_dp(dev, i2c_bus, "DP-auxch");
1175 if (!radeon_dig_connector->dp_i2c_bus)
1176 goto failed;
1177 if (connector_type == DRM_MODE_CONNECTOR_eDP)
1178 radeon_connector->ddc_bus = radeon_i2c_create(dev, i2c_bus, "eDP");
1179 else
1180 radeon_connector->ddc_bus = radeon_i2c_create(dev, i2c_bus, "DP");
1006 if (!radeon_connector->ddc_bus) 1181 if (!radeon_connector->ddc_bus)
1007 goto failed; 1182 goto failed;
1008 } 1183 }
1009 subpixel_order = SubPixelHorizontalRGB; 1184 subpixel_order = SubPixelHorizontalRGB;
1185 drm_connector_attach_property(&radeon_connector->base,
1186 rdev->mode_info.coherent_mode_property,
1187 1);
1010 break; 1188 break;
1011 case DRM_MODE_CONNECTOR_SVIDEO: 1189 case DRM_MODE_CONNECTOR_SVIDEO:
1012 case DRM_MODE_CONNECTOR_Composite: 1190 case DRM_MODE_CONNECTOR_Composite:
@@ -1020,6 +1198,9 @@ radeon_add_atom_connector(struct drm_device *dev,
1020 drm_connector_attach_property(&radeon_connector->base, 1198 drm_connector_attach_property(&radeon_connector->base,
1021 rdev->mode_info.load_detect_property, 1199 rdev->mode_info.load_detect_property,
1022 1); 1200 1);
1201 drm_connector_attach_property(&radeon_connector->base,
1202 rdev->mode_info.tv_std_property,
1203 radeon_atombios_get_tv_info(rdev));
1023 } 1204 }
1024 break; 1205 break;
1025 case DRM_MODE_CONNECTOR_LVDS: 1206 case DRM_MODE_CONNECTOR_LVDS:
@@ -1038,7 +1219,6 @@ radeon_add_atom_connector(struct drm_device *dev,
1038 if (!radeon_connector->ddc_bus) 1219 if (!radeon_connector->ddc_bus)
1039 goto failed; 1220 goto failed;
1040 } 1221 }
1041 drm_mode_create_scaling_mode_property(dev);
1042 drm_connector_attach_property(&radeon_connector->base, 1222 drm_connector_attach_property(&radeon_connector->base,
1043 dev->mode_config.scaling_mode_property, 1223 dev->mode_config.scaling_mode_property,
1044 DRM_MODE_SCALE_FULLSCREEN); 1224 DRM_MODE_SCALE_FULLSCREEN);
@@ -1063,7 +1243,8 @@ radeon_add_legacy_connector(struct drm_device *dev,
1063 uint32_t supported_device, 1243 uint32_t supported_device,
1064 int connector_type, 1244 int connector_type,
1065 struct radeon_i2c_bus_rec *i2c_bus, 1245 struct radeon_i2c_bus_rec *i2c_bus,
1066 uint16_t connector_object_id) 1246 uint16_t connector_object_id,
1247 struct radeon_hpd *hpd)
1067{ 1248{
1068 struct radeon_device *rdev = dev->dev_private; 1249 struct radeon_device *rdev = dev->dev_private;
1069 struct drm_connector *connector; 1250 struct drm_connector *connector;
@@ -1093,6 +1274,7 @@ radeon_add_legacy_connector(struct drm_device *dev,
1093 radeon_connector->connector_id = connector_id; 1274 radeon_connector->connector_id = connector_id;
1094 radeon_connector->devices = supported_device; 1275 radeon_connector->devices = supported_device;
1095 radeon_connector->connector_object_id = connector_object_id; 1276 radeon_connector->connector_object_id = connector_object_id;
1277 radeon_connector->hpd = *hpd;
1096 switch (connector_type) { 1278 switch (connector_type) {
1097 case DRM_MODE_CONNECTOR_VGA: 1279 case DRM_MODE_CONNECTOR_VGA:
1098 drm_connector_init(dev, &radeon_connector->base, &radeon_vga_connector_funcs, connector_type); 1280 drm_connector_init(dev, &radeon_connector->base, &radeon_vga_connector_funcs, connector_type);
@@ -1134,6 +1316,8 @@ radeon_add_legacy_connector(struct drm_device *dev,
1134 radeon_connector->ddc_bus = radeon_i2c_create(dev, i2c_bus, "DVI"); 1316 radeon_connector->ddc_bus = radeon_i2c_create(dev, i2c_bus, "DVI");
1135 if (!radeon_connector->ddc_bus) 1317 if (!radeon_connector->ddc_bus)
1136 goto failed; 1318 goto failed;
1319 }
1320 if (connector_type == DRM_MODE_CONNECTOR_DVII) {
1137 radeon_connector->dac_load_detect = true; 1321 radeon_connector->dac_load_detect = true;
1138 drm_connector_attach_property(&radeon_connector->base, 1322 drm_connector_attach_property(&radeon_connector->base,
1139 rdev->mode_info.load_detect_property, 1323 rdev->mode_info.load_detect_property,
@@ -1159,7 +1343,10 @@ radeon_add_legacy_connector(struct drm_device *dev,
1159 radeon_connector->dac_load_detect = false; 1343 radeon_connector->dac_load_detect = false;
1160 drm_connector_attach_property(&radeon_connector->base, 1344 drm_connector_attach_property(&radeon_connector->base,
1161 rdev->mode_info.load_detect_property, 1345 rdev->mode_info.load_detect_property,
1162 1); 1346 radeon_connector->dac_load_detect);
1347 drm_connector_attach_property(&radeon_connector->base,
1348 rdev->mode_info.tv_std_property,
1349 radeon_combios_get_tv_info(rdev));
1163 } 1350 }
1164 break; 1351 break;
1165 case DRM_MODE_CONNECTOR_LVDS: 1352 case DRM_MODE_CONNECTOR_LVDS:
diff --git a/drivers/gpu/drm/radeon/radeon_cp.c b/drivers/gpu/drm/radeon/radeon_cp.c
index 4f7afc79dd82..2f042a3c0e62 100644
--- a/drivers/gpu/drm/radeon/radeon_cp.c
+++ b/drivers/gpu/drm/radeon/radeon_cp.c
@@ -417,8 +417,9 @@ static int radeon_do_wait_for_idle(drm_radeon_private_t * dev_priv)
417 return -EBUSY; 417 return -EBUSY;
418} 418}
419 419
420static void radeon_init_pipes(drm_radeon_private_t *dev_priv) 420static void radeon_init_pipes(struct drm_device *dev)
421{ 421{
422 drm_radeon_private_t *dev_priv = dev->dev_private;
422 uint32_t gb_tile_config, gb_pipe_sel = 0; 423 uint32_t gb_tile_config, gb_pipe_sel = 0;
423 424
424 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV530) { 425 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV530) {
@@ -434,13 +435,19 @@ static void radeon_init_pipes(drm_radeon_private_t *dev_priv)
434 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R420) { 435 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R420) {
435 gb_pipe_sel = RADEON_READ(R400_GB_PIPE_SELECT); 436 gb_pipe_sel = RADEON_READ(R400_GB_PIPE_SELECT);
436 dev_priv->num_gb_pipes = ((gb_pipe_sel >> 12) & 0x3) + 1; 437 dev_priv->num_gb_pipes = ((gb_pipe_sel >> 12) & 0x3) + 1;
438 /* SE cards have 1 pipe */
439 if ((dev->pdev->device == 0x5e4c) ||
440 (dev->pdev->device == 0x5e4f))
441 dev_priv->num_gb_pipes = 1;
437 } else { 442 } else {
438 /* R3xx */ 443 /* R3xx */
439 if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R300) || 444 if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R300 &&
440 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R350)) { 445 dev->pdev->device != 0x4144) ||
446 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R350 &&
447 dev->pdev->device != 0x4148)) {
441 dev_priv->num_gb_pipes = 2; 448 dev_priv->num_gb_pipes = 2;
442 } else { 449 } else {
443 /* R3Vxx */ 450 /* RV3xx/R300 AD/R350 AH */
444 dev_priv->num_gb_pipes = 1; 451 dev_priv->num_gb_pipes = 1;
445 } 452 }
446 } 453 }
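The pipe-count fix above boils down to: R420-class and newer parts read the count from GB_PIPE_SELECT bits 13:12 (+1) but the two RV410 "SE" device IDs are forced to one pipe, while plain R300/R350 get two pipes except for the single-pipe 0x4144/0x4148 variants. A standalone sketch of that decision with the register value and family checks passed in as parameters:

#include <stdbool.h>
#include <stdint.h>

int num_gb_pipes(bool r420_or_newer, bool is_r300, bool is_r350,
		 uint16_t pci_device, uint32_t gb_pipe_select)
{
	if (r420_or_newer) {
		int pipes = ((gb_pipe_select >> 12) & 0x3) + 1;

		if (pci_device == 0x5e4c || pci_device == 0x5e4f)	/* SE cards: 1 pipe */
			pipes = 1;
		return pipes;
	}
	if ((is_r300 && pci_device != 0x4144) ||	/* R300, but not the AD variant */
	    (is_r350 && pci_device != 0x4148))		/* R350, but not the AH variant */
		return 2;
	return 1;					/* RV3xx / R300 AD / R350 AH */
}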
@@ -736,7 +743,7 @@ static int radeon_do_engine_reset(struct drm_device * dev)
736 743
737 /* setup the raster pipes */ 744 /* setup the raster pipes */
738 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R300) 745 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R300)
739 radeon_init_pipes(dev_priv); 746 radeon_init_pipes(dev);
740 747
741 /* Reset the CP ring */ 748 /* Reset the CP ring */
742 radeon_do_cp_reset(dev_priv); 749 radeon_do_cp_reset(dev_priv);
@@ -1644,6 +1651,7 @@ static int radeon_do_resume_cp(struct drm_device *dev, struct drm_file *file_pri
1644 radeon_cp_load_microcode(dev_priv); 1651 radeon_cp_load_microcode(dev_priv);
1645 radeon_cp_init_ring_buffer(dev, dev_priv, file_priv); 1652 radeon_cp_init_ring_buffer(dev, dev_priv, file_priv);
1646 1653
1654 dev_priv->have_z_offset = 0;
1647 radeon_do_engine_reset(dev); 1655 radeon_do_engine_reset(dev);
1648 radeon_irq_set_state(dev, RADEON_SW_INT_ENABLE, 1); 1656 radeon_irq_set_state(dev, RADEON_SW_INT_ENABLE, 1);
1649 1657
@@ -1941,8 +1949,8 @@ struct drm_buf *radeon_freelist_get(struct drm_device * dev)
1941 for (t = 0; t < dev_priv->usec_timeout; t++) { 1949 for (t = 0; t < dev_priv->usec_timeout; t++) {
1942 u32 done_age = GET_SCRATCH(dev_priv, 1); 1950 u32 done_age = GET_SCRATCH(dev_priv, 1);
1943 DRM_DEBUG("done_age = %d\n", done_age); 1951 DRM_DEBUG("done_age = %d\n", done_age);
1944 for (i = start; i < dma->buf_count; i++) { 1952 for (i = 0; i < dma->buf_count; i++) {
1945 buf = dma->buflist[i]; 1953 buf = dma->buflist[start];
1946 buf_priv = buf->dev_private; 1954 buf_priv = buf->dev_private;
1947 if (buf->file_priv == NULL || (buf->pending && 1955 if (buf->file_priv == NULL || (buf->pending &&
1948 buf_priv->age <= 1956 buf_priv->age <=
@@ -1951,7 +1959,8 @@ struct drm_buf *radeon_freelist_get(struct drm_device * dev)
1951 buf->pending = 0; 1959 buf->pending = 0;
1952 return buf; 1960 return buf;
1953 } 1961 }
1954 start = 0; 1962 if (++start >= dma->buf_count)
1963 start = 0;
1955 } 1964 }
1956 1965
1957 if (t) { 1966 if (t) {
@@ -1960,47 +1969,9 @@ struct drm_buf *radeon_freelist_get(struct drm_device * dev)
1960 } 1969 }
1961 } 1970 }
1962 1971
1963 DRM_DEBUG("returning NULL!\n");
1964 return NULL; 1972 return NULL;
1965} 1973}
1966 1974
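The freelist change above turns the scan into a true round-robin walk: exactly buf_count entries are examined starting at 'start', wrapping at the end of the array, instead of only walking from 'start' to the end. A sketch of the corrected loop, with the age/pending test reduced to a simple busy flag:

/* scan exactly buf_count entries starting at 'start', wrapping at the end */
int find_idle_buffer(const int *busy, int buf_count, int start)
{
	int i;

	for (i = 0; i < buf_count; i++) {
		if (!busy[start])
			return start;		/* index of a reusable buffer */
		if (++start >= buf_count)
			start = 0;
	}
	return -1;				/* nothing free on this pass */
}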
1967#if 0
1968struct drm_buf *radeon_freelist_get(struct drm_device * dev)
1969{
1970 struct drm_device_dma *dma = dev->dma;
1971 drm_radeon_private_t *dev_priv = dev->dev_private;
1972 drm_radeon_buf_priv_t *buf_priv;
1973 struct drm_buf *buf;
1974 int i, t;
1975 int start;
1976 u32 done_age;
1977
1978 done_age = radeon_read_ring_rptr(dev_priv, RADEON_SCRATCHOFF(1));
1979 if (++dev_priv->last_buf >= dma->buf_count)
1980 dev_priv->last_buf = 0;
1981
1982 start = dev_priv->last_buf;
1983 dev_priv->stats.freelist_loops++;
1984
1985 for (t = 0; t < 2; t++) {
1986 for (i = start; i < dma->buf_count; i++) {
1987 buf = dma->buflist[i];
1988 buf_priv = buf->dev_private;
1989 if (buf->file_priv == 0 || (buf->pending &&
1990 buf_priv->age <=
1991 done_age)) {
1992 dev_priv->stats.requested_bufs++;
1993 buf->pending = 0;
1994 return buf;
1995 }
1996 }
1997 start = 0;
1998 }
1999
2000 return NULL;
2001}
2002#endif
2003
2004void radeon_freelist_reset(struct drm_device * dev) 1975void radeon_freelist_reset(struct drm_device * dev)
2005{ 1976{
2006 struct drm_device_dma *dma = dev->dma; 1977 struct drm_device_dma *dma = dev->dma;
@@ -2182,6 +2153,7 @@ int radeon_master_create(struct drm_device *dev, struct drm_master *master)
2182 &master_priv->sarea); 2153 &master_priv->sarea);
2183 if (ret) { 2154 if (ret) {
2184 DRM_ERROR("SAREA setup failed\n"); 2155 DRM_ERROR("SAREA setup failed\n");
2156 kfree(master_priv);
2185 return ret; 2157 return ret;
2186 } 2158 }
2187 master_priv->sarea_priv = master_priv->sarea->handle + sizeof(struct drm_sarea); 2159 master_priv->sarea_priv = master_priv->sarea->handle + sizeof(struct drm_sarea);
diff --git a/drivers/gpu/drm/radeon/radeon_cs.c b/drivers/gpu/drm/radeon/radeon_cs.c
index 5ab2cf96a264..f9b0fe002c0a 100644
--- a/drivers/gpu/drm/radeon/radeon_cs.c
+++ b/drivers/gpu/drm/radeon/radeon_cs.c
@@ -76,17 +76,17 @@ int radeon_cs_parser_relocs(struct radeon_cs_parser *p)
76 } 76 }
77 p->relocs_ptr[i] = &p->relocs[i]; 77 p->relocs_ptr[i] = &p->relocs[i];
78 p->relocs[i].robj = p->relocs[i].gobj->driver_private; 78 p->relocs[i].robj = p->relocs[i].gobj->driver_private;
79 p->relocs[i].lobj.robj = p->relocs[i].robj; 79 p->relocs[i].lobj.bo = p->relocs[i].robj;
80 p->relocs[i].lobj.rdomain = r->read_domains; 80 p->relocs[i].lobj.rdomain = r->read_domains;
81 p->relocs[i].lobj.wdomain = r->write_domain; 81 p->relocs[i].lobj.wdomain = r->write_domain;
82 p->relocs[i].handle = r->handle; 82 p->relocs[i].handle = r->handle;
83 p->relocs[i].flags = r->flags; 83 p->relocs[i].flags = r->flags;
84 INIT_LIST_HEAD(&p->relocs[i].lobj.list); 84 INIT_LIST_HEAD(&p->relocs[i].lobj.list);
85 radeon_object_list_add_object(&p->relocs[i].lobj, 85 radeon_bo_list_add_object(&p->relocs[i].lobj,
86 &p->validated); 86 &p->validated);
87 } 87 }
88 } 88 }
89 return radeon_object_list_validate(&p->validated, p->ib->fence); 89 return radeon_bo_list_validate(&p->validated);
90} 90}
91 91
92int radeon_cs_parser_init(struct radeon_cs_parser *p, void *data) 92int radeon_cs_parser_init(struct radeon_cs_parser *p, void *data)
@@ -189,16 +189,14 @@ static void radeon_cs_parser_fini(struct radeon_cs_parser *parser, int error)
189{ 189{
190 unsigned i; 190 unsigned i;
191 191
192 if (error) { 192 if (!error && parser->ib) {
193 radeon_object_list_unvalidate(&parser->validated); 193 radeon_bo_list_fence(&parser->validated, parser->ib->fence);
194 } else {
195 radeon_object_list_clean(&parser->validated);
196 } 194 }
197 for (i = 0; i < parser->nrelocs; i++) { 195 radeon_bo_list_unreserve(&parser->validated);
198 if (parser->relocs[i].gobj) { 196 if (parser->relocs != NULL) {
199 mutex_lock(&parser->rdev->ddev->struct_mutex); 197 for (i = 0; i < parser->nrelocs; i++) {
200 drm_gem_object_unreference(parser->relocs[i].gobj); 198 if (parser->relocs[i].gobj)
201 mutex_unlock(&parser->rdev->ddev->struct_mutex); 199 drm_gem_object_unreference_unlocked(parser->relocs[i].gobj);
202 } 200 }
203 } 201 }
204 kfree(parser->track); 202 kfree(parser->track);
@@ -230,6 +228,7 @@ int radeon_cs_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
230 memset(&parser, 0, sizeof(struct radeon_cs_parser)); 228 memset(&parser, 0, sizeof(struct radeon_cs_parser));
231 parser.filp = filp; 229 parser.filp = filp;
232 parser.rdev = rdev; 230 parser.rdev = rdev;
231 parser.dev = rdev->dev;
233 r = radeon_cs_parser_init(&parser, data); 232 r = radeon_cs_parser_init(&parser, data);
234 if (r) { 233 if (r) {
235 DRM_ERROR("Failed to initialize parser !\n"); 234 DRM_ERROR("Failed to initialize parser !\n");
@@ -246,7 +245,8 @@ int radeon_cs_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
246 } 245 }
247 r = radeon_cs_parser_relocs(&parser); 246 r = radeon_cs_parser_relocs(&parser);
248 if (r) { 247 if (r) {
249 DRM_ERROR("Failed to parse relocation !\n"); 248 if (r != -ERESTARTSYS)
249 DRM_ERROR("Failed to parse relocation %d!\n", r);
250 radeon_cs_parser_fini(&parser, r); 250 radeon_cs_parser_fini(&parser, r);
251 mutex_unlock(&rdev->cs_mutex); 251 mutex_unlock(&rdev->cs_mutex);
252 return r; 252 return r;
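The command-stream cleanup reshuffled above settles on a fixed ordering: fence the validated buffer list only when the submission succeeded, always drop the reservations, then release the relocation references (now without taking struct_mutex). A sketch of that ordering with the driver/TTM calls replaced by printf stubs:

#include <stdio.h>

static void fence_validated_list(void)     { puts("attach the IB fence to each validated BO"); }
static void unreserve_validated_list(void) { puts("unreserve the validated BOs"); }
static void release_reloc_references(void) { puts("drop the relocation GEM references"); }

void cs_parser_fini_sketch(int error, int have_ib)
{
	if (!error && have_ib)
		fence_validated_list();		/* only successful submissions get fenced */
	unreserve_validated_list();		/* reservations are always released */
	release_reloc_references();		/* done without holding struct_mutex */
}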
diff --git a/drivers/gpu/drm/radeon/radeon_cursor.c b/drivers/gpu/drm/radeon/radeon_cursor.c
index 28772a37009c..b7023fff89eb 100644
--- a/drivers/gpu/drm/radeon/radeon_cursor.c
+++ b/drivers/gpu/drm/radeon/radeon_cursor.c
@@ -36,7 +36,14 @@ static void radeon_lock_cursor(struct drm_crtc *crtc, bool lock)
36 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 36 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
37 uint32_t cur_lock; 37 uint32_t cur_lock;
38 38
39 if (ASIC_IS_AVIVO(rdev)) { 39 if (ASIC_IS_DCE4(rdev)) {
40 cur_lock = RREG32(EVERGREEN_CUR_UPDATE + radeon_crtc->crtc_offset);
41 if (lock)
42 cur_lock |= EVERGREEN_CURSOR_UPDATE_LOCK;
43 else
44 cur_lock &= ~EVERGREEN_CURSOR_UPDATE_LOCK;
45 WREG32(EVERGREEN_CUR_UPDATE + radeon_crtc->crtc_offset, cur_lock);
46 } else if (ASIC_IS_AVIVO(rdev)) {
40 cur_lock = RREG32(AVIVO_D1CUR_UPDATE + radeon_crtc->crtc_offset); 47 cur_lock = RREG32(AVIVO_D1CUR_UPDATE + radeon_crtc->crtc_offset);
41 if (lock) 48 if (lock)
42 cur_lock |= AVIVO_D1CURSOR_UPDATE_LOCK; 49 cur_lock |= AVIVO_D1CURSOR_UPDATE_LOCK;
@@ -58,7 +65,10 @@ static void radeon_hide_cursor(struct drm_crtc *crtc)
58 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 65 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
59 struct radeon_device *rdev = crtc->dev->dev_private; 66 struct radeon_device *rdev = crtc->dev->dev_private;
60 67
61 if (ASIC_IS_AVIVO(rdev)) { 68 if (ASIC_IS_DCE4(rdev)) {
69 WREG32(RADEON_MM_INDEX, EVERGREEN_CUR_CONTROL + radeon_crtc->crtc_offset);
70 WREG32(RADEON_MM_DATA, EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT));
71 } else if (ASIC_IS_AVIVO(rdev)) {
62 WREG32(RADEON_MM_INDEX, AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset); 72 WREG32(RADEON_MM_INDEX, AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset);
63 WREG32(RADEON_MM_DATA, (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT)); 73 WREG32(RADEON_MM_DATA, (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT));
64 } else { 74 } else {
@@ -81,10 +91,14 @@ static void radeon_show_cursor(struct drm_crtc *crtc)
81 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 91 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
82 struct radeon_device *rdev = crtc->dev->dev_private; 92 struct radeon_device *rdev = crtc->dev->dev_private;
83 93
84 if (ASIC_IS_AVIVO(rdev)) { 94 if (ASIC_IS_DCE4(rdev)) {
95 WREG32(RADEON_MM_INDEX, EVERGREEN_CUR_CONTROL + radeon_crtc->crtc_offset);
96 WREG32(RADEON_MM_DATA, EVERGREEN_CURSOR_EN |
97 EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT));
98 } else if (ASIC_IS_AVIVO(rdev)) {
85 WREG32(RADEON_MM_INDEX, AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset); 99 WREG32(RADEON_MM_INDEX, AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset);
86 WREG32(RADEON_MM_DATA, AVIVO_D1CURSOR_EN | 100 WREG32(RADEON_MM_DATA, AVIVO_D1CURSOR_EN |
87 (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT)); 101 (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT));
88 } else { 102 } else {
89 switch (radeon_crtc->crtc_id) { 103 switch (radeon_crtc->crtc_id) {
90 case 0: 104 case 0:
@@ -109,7 +123,10 @@ static void radeon_set_cursor(struct drm_crtc *crtc, struct drm_gem_object *obj,
109 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 123 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
110 struct radeon_device *rdev = crtc->dev->dev_private; 124 struct radeon_device *rdev = crtc->dev->dev_private;
111 125
112 if (ASIC_IS_AVIVO(rdev)) { 126 if (ASIC_IS_DCE4(rdev)) {
127 WREG32(EVERGREEN_CUR_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset, 0);
128 WREG32(EVERGREEN_CUR_SURFACE_ADDRESS + radeon_crtc->crtc_offset, gpu_addr);
129 } else if (ASIC_IS_AVIVO(rdev)) {
113 if (rdev->family >= CHIP_RV770) { 130 if (rdev->family >= CHIP_RV770) {
114 if (radeon_crtc->crtc_id) 131 if (radeon_crtc->crtc_id)
115 WREG32(R700_D2CUR_SURFACE_ADDRESS_HIGH, 0); 132 WREG32(R700_D2CUR_SURFACE_ADDRESS_HIGH, 0);
@@ -169,17 +186,13 @@ int radeon_crtc_cursor_set(struct drm_crtc *crtc,
169unpin: 186unpin:
170 if (radeon_crtc->cursor_bo) { 187 if (radeon_crtc->cursor_bo) {
171 radeon_gem_object_unpin(radeon_crtc->cursor_bo); 188 radeon_gem_object_unpin(radeon_crtc->cursor_bo);
172 mutex_lock(&crtc->dev->struct_mutex); 189 drm_gem_object_unreference_unlocked(radeon_crtc->cursor_bo);
173 drm_gem_object_unreference(radeon_crtc->cursor_bo);
174 mutex_unlock(&crtc->dev->struct_mutex);
175 } 190 }
176 191
177 radeon_crtc->cursor_bo = obj; 192 radeon_crtc->cursor_bo = obj;
178 return 0; 193 return 0;
179fail: 194fail:
180 mutex_lock(&crtc->dev->struct_mutex); 195 drm_gem_object_unreference_unlocked(obj);
181 drm_gem_object_unreference(obj);
182 mutex_unlock(&crtc->dev->struct_mutex);
183 196
184 return 0; 197 return 0;
185} 198}
@@ -201,7 +214,20 @@ int radeon_crtc_cursor_move(struct drm_crtc *crtc,
201 yorigin = CURSOR_HEIGHT - 1; 214 yorigin = CURSOR_HEIGHT - 1;
202 215
203 radeon_lock_cursor(crtc, true); 216 radeon_lock_cursor(crtc, true);
204 if (ASIC_IS_AVIVO(rdev)) { 217 if (ASIC_IS_DCE4(rdev)) {
218 /* cursors are offset into the total surface */
219 x += crtc->x;
220 y += crtc->y;
221 DRM_DEBUG("x %d y %d c->x %d c->y %d\n", x, y, crtc->x, crtc->y);
222
223 /* XXX: check if evergreen has the same issues as avivo chips */
224 WREG32(EVERGREEN_CUR_POSITION + radeon_crtc->crtc_offset,
225 ((xorigin ? 0 : x) << 16) |
226 (yorigin ? 0 : y));
227 WREG32(EVERGREEN_CUR_HOT_SPOT + radeon_crtc->crtc_offset, (xorigin << 16) | yorigin);
228 WREG32(EVERGREEN_CUR_SIZE + radeon_crtc->crtc_offset,
229 ((radeon_crtc->cursor_width - 1) << 16) | (radeon_crtc->cursor_height - 1));
230 } else if (ASIC_IS_AVIVO(rdev)) {
205 int w = radeon_crtc->cursor_width; 231 int w = radeon_crtc->cursor_width;
206 int i = 0; 232 int i = 0;
207 struct drm_crtc *crtc_p; 233 struct drm_crtc *crtc_p;
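The DCE4/Evergreen cursor-move path added above offsets the position by the CRTC origin, writes the hot spot separately, and zeroes the position field when the cursor is clipped at the top/left edge (the clip amount lives in the hot spot). A sketch with placeholder register names and printf standing in for WREG32; x/y are assumed already clipped to non-negative values, as the earlier (elided) part of the function arranges:

#include <stdint.h>
#include <stdio.h>

/* placeholder register names; printf stands in for WREG32 */
static void wreg32(const char *name, uint32_t val)
{
	printf("%s = 0x%08x\n", name, (unsigned int)val);
}

void dce4_cursor_move(uint32_t x, uint32_t y, uint32_t crtc_x, uint32_t crtc_y,
		      uint32_t xorigin, uint32_t yorigin,
		      uint32_t width, uint32_t height)
{
	x += crtc_x;	/* DCE4 cursor coordinates are relative to the whole surface */
	y += crtc_y;

	wreg32("CUR_POSITION", ((xorigin ? 0 : x) << 16) | (yorigin ? 0 : y));
	wreg32("CUR_HOT_SPOT", (xorigin << 16) | yorigin);
	wreg32("CUR_SIZE", ((width - 1) << 16) | (height - 1));
}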
diff --git a/drivers/gpu/drm/radeon/radeon_device.c b/drivers/gpu/drm/radeon/radeon_device.c
index 41bb76fbe734..7b629e305560 100644
--- a/drivers/gpu/drm/radeon/radeon_device.c
+++ b/drivers/gpu/drm/radeon/radeon_device.c
@@ -26,15 +26,64 @@
26 * Jerome Glisse 26 * Jerome Glisse
27 */ 27 */
28#include <linux/console.h> 28#include <linux/console.h>
29#include <linux/slab.h>
29#include <drm/drmP.h> 30#include <drm/drmP.h>
30#include <drm/drm_crtc_helper.h> 31#include <drm/drm_crtc_helper.h>
31#include <drm/radeon_drm.h> 32#include <drm/radeon_drm.h>
32#include <linux/vgaarb.h> 33#include <linux/vgaarb.h>
34#include <linux/vga_switcheroo.h>
33#include "radeon_reg.h" 35#include "radeon_reg.h"
34#include "radeon.h" 36#include "radeon.h"
35#include "radeon_asic.h"
36#include "atom.h" 37#include "atom.h"
37 38
39static const char radeon_family_name[][16] = {
40 "R100",
41 "RV100",
42 "RS100",
43 "RV200",
44 "RS200",
45 "R200",
46 "RV250",
47 "RS300",
48 "RV280",
49 "R300",
50 "R350",
51 "RV350",
52 "RV380",
53 "R420",
54 "R423",
55 "RV410",
56 "RS400",
57 "RS480",
58 "RS600",
59 "RS690",
60 "RS740",
61 "RV515",
62 "R520",
63 "RV530",
64 "RV560",
65 "RV570",
66 "R580",
67 "R600",
68 "RV610",
69 "RV630",
70 "RV670",
71 "RV620",
72 "RV635",
73 "RS780",
74 "RS880",
75 "RV770",
76 "RV730",
77 "RV710",
78 "RV740",
79 "CEDAR",
80 "REDWOOD",
81 "JUNIPER",
82 "CYPRESS",
83 "HEMLOCK",
84 "LAST",
85};
86
38/* 87/*
39 * Clear GPU surface registers. 88 * Clear GPU surface registers.
40 */ 89 */
@@ -44,10 +93,11 @@ void radeon_surface_init(struct radeon_device *rdev)
44 if (rdev->family < CHIP_R600) { 93 if (rdev->family < CHIP_R600) {
45 int i; 94 int i;
46 95
47 for (i = 0; i < 8; i++) { 96 for (i = 0; i < RADEON_GEM_MAX_SURFACES; i++) {
48 WREG32(RADEON_SURFACE0_INFO + 97 if (rdev->surface_regs[i].bo)
49 i * (RADEON_SURFACE1_INFO - RADEON_SURFACE0_INFO), 98 radeon_bo_get_surface_reg(rdev->surface_regs[i].bo);
50 0); 99 else
100 radeon_clear_surface_reg(rdev, i);
51 } 101 }
52 /* enable surfaces */ 102 /* enable surfaces */
53 WREG32(RADEON_SURFACE_CNTL, 0); 103 WREG32(RADEON_SURFACE_CNTL, 0);
@@ -99,80 +149,103 @@ void radeon_scratch_free(struct radeon_device *rdev, uint32_t reg)
99 } 149 }
100} 150}
101 151
102/* 152/**
103 * MC common functions 153 * radeon_vram_location - try to find VRAM location
 154 * @rdev: radeon device structure holding all necessary information
 155 * @mc: memory controller structure holding memory information
 156 * @base: base address at which to put VRAM
 157 *
 158 * This function will try to place VRAM at the base address provided
 159 * as a parameter (which is so far either the PCI aperture address or,
 160 * for IGP, the TOM base address).
 161 *
 162 * If there is not enough space to fit the invisible VRAM in the 32-bit
 163 * address space then we limit the VRAM size to the aperture.
 164 *
 165 * If we are using AGP and the AGP aperture doesn't allow us to have
 166 * room for all the VRAM then we restrict the VRAM to the PCI aperture
 167 * size and print a warning.
 168 *
 169 * This function never fails; the worst case is limiting VRAM.
 170 *
 171 * Note: GTT start, end and size should be initialized before calling this
 172 * function on AGP platforms.
 173 *
 174 * Note: we don't explicitly enforce VRAM start to be aligned on VRAM size;
 175 * this shouldn't be a problem as we are using the PCI aperture as a reference.
 176 * Otherwise this would be needed for rv280, all r3xx, and all r4xx, but
 177 * not IGP.
 178 *
 179 * Note: we use mc_vram_size as on some boards we need to program the mc to
 180 * cover the whole aperture even if VRAM size is smaller than the aperture size
 181 * (Novell bug 204882 along with lots of Ubuntu ones).
 182 *
 183 * Note: when limiting vram it's safe to overwrite real_vram_size because
 184 * we are not in the case where real_vram_size is smaller than mc_vram_size (i.e.
 185 * not affected by the bogus hw of Novell bug 204882 and the Ubuntu ones).
 186 *
 187 * Note: the IGP TOM addr should be the same as the aperture addr; we don't
 188 * explicitly check for that though.
190 *
191 * FIXME: when reducing VRAM size align new size on power of 2.
104 */ 192 */
105int radeon_mc_setup(struct radeon_device *rdev) 193void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base)
194{
195 mc->vram_start = base;
196 if (mc->mc_vram_size > (0xFFFFFFFF - base + 1)) {
197 dev_warn(rdev->dev, "limiting VRAM to PCI aperture size\n");
198 mc->real_vram_size = mc->aper_size;
199 mc->mc_vram_size = mc->aper_size;
200 }
201 mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
202 if (rdev->flags & RADEON_IS_AGP && mc->vram_end > mc->gtt_start && mc->vram_end <= mc->gtt_end) {
203 dev_warn(rdev->dev, "limiting VRAM to PCI aperture size\n");
204 mc->real_vram_size = mc->aper_size;
205 mc->mc_vram_size = mc->aper_size;
206 }
207 mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
208 dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
209 mc->mc_vram_size >> 20, mc->vram_start,
210 mc->vram_end, mc->real_vram_size >> 20);
211}
212
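The clamping rule above can be restated as a tiny stand-alone sketch; the helper and the example numbers are illustrative only (not part of the patch), with field names mirroring struct radeon_mc:

    #include <stdint.h>

    /* If the MC VRAM range starting at 'base' would spill past 4G, both the
     * MC size and the reported size fall back to the PCI aperture size. */
    static void clamp_vram(uint64_t base, uint64_t aper_size,
                           uint64_t *mc_vram_size, uint64_t *real_vram_size)
    {
            if (*mc_vram_size > 0xFFFFFFFFull - base + 1) {
                    *mc_vram_size = aper_size;
                    *real_vram_size = aper_size;
            }
    }

    /* e.g. base = 0xD0000000 with 1G of VRAM leaves only 768M below 4G,
     * so a 256M aperture would be used for both sizes. */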
213/**
214 * radeon_gtt_location - try to find GTT location
 215 * @rdev: radeon device structure holding all necessary information
 216 * @mc: memory controller structure holding memory information
 217 *
 218 * Function will try to place GTT before or after VRAM.
 219 *
 220 * If the GTT size is bigger than the space left, then we adjust the GTT size.
 221 * Thus the function never fails.
 222 *
 223 * FIXME: when reducing GTT size, align the new size on a power of 2.
224 */
225void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
106{ 226{
107 uint32_t tmp; 227 u64 size_af, size_bf;
108 228
109 /* Some chips have an "issue" with the memory controller, the 229 size_af = 0xFFFFFFFF - mc->vram_end;
110 * location must be aligned to the size. We just align it down, 230 size_bf = mc->vram_start;
111 * too bad if we walk over the top of system memory, we don't 231 if (size_bf > size_af) {
112 * use DMA without a remapped anyway. 232 if (mc->gtt_size > size_bf) {
113 * Affected chips are rv280, all r3xx, and all r4xx, but not IGP 233 dev_warn(rdev->dev, "limiting GTT\n");
114 */ 234 mc->gtt_size = size_bf;
115 /* FGLRX seems to setup like this, VRAM a 0, then GART.
116 */
117 /*
118 * Note: from R6xx the address space is 40bits but here we only
119 * use 32bits (still have to see a card which would exhaust 4G
120 * address space).
121 */
122 if (rdev->mc.vram_location != 0xFFFFFFFFUL) {
123 /* vram location was already setup try to put gtt after
124 * if it fits */
125 tmp = rdev->mc.vram_location + rdev->mc.mc_vram_size;
126 tmp = (tmp + rdev->mc.gtt_size - 1) & ~(rdev->mc.gtt_size - 1);
127 if ((0xFFFFFFFFUL - tmp) >= rdev->mc.gtt_size) {
128 rdev->mc.gtt_location = tmp;
129 } else {
130 if (rdev->mc.gtt_size >= rdev->mc.vram_location) {
131 printk(KERN_ERR "[drm] GTT too big to fit "
132 "before or after vram location.\n");
133 return -EINVAL;
134 }
135 rdev->mc.gtt_location = 0;
136 }
137 } else if (rdev->mc.gtt_location != 0xFFFFFFFFUL) {
138 /* gtt location was already setup try to put vram before
139 * if it fits */
140 if (rdev->mc.mc_vram_size < rdev->mc.gtt_location) {
141 rdev->mc.vram_location = 0;
142 } else {
143 tmp = rdev->mc.gtt_location + rdev->mc.gtt_size;
144 tmp += (rdev->mc.mc_vram_size - 1);
145 tmp &= ~(rdev->mc.mc_vram_size - 1);
146 if ((0xFFFFFFFFUL - tmp) >= rdev->mc.mc_vram_size) {
147 rdev->mc.vram_location = tmp;
148 } else {
149 printk(KERN_ERR "[drm] vram too big to fit "
150 "before or after GTT location.\n");
151 return -EINVAL;
152 }
153 } 235 }
236 mc->gtt_start = mc->vram_start - mc->gtt_size;
154 } else { 237 } else {
155 rdev->mc.vram_location = 0; 238 if (mc->gtt_size > size_af) {
156 tmp = rdev->mc.mc_vram_size; 239 dev_warn(rdev->dev, "limiting GTT\n");
157 tmp = (tmp + rdev->mc.gtt_size - 1) & ~(rdev->mc.gtt_size - 1); 240 mc->gtt_size = size_af;
158 rdev->mc.gtt_location = tmp; 241 }
159 } 242 mc->gtt_start = mc->vram_end + 1;
160 rdev->mc.vram_start = rdev->mc.vram_location; 243 }
161 rdev->mc.vram_end = rdev->mc.vram_location + rdev->mc.mc_vram_size - 1; 244 mc->gtt_end = mc->gtt_start + mc->gtt_size - 1;
162 rdev->mc.gtt_start = rdev->mc.gtt_location; 245 dev_info(rdev->dev, "GTT: %lluM 0x%08llX - 0x%08llX\n",
163 rdev->mc.gtt_end = rdev->mc.gtt_location + rdev->mc.gtt_size - 1; 246 mc->gtt_size >> 20, mc->gtt_start, mc->gtt_end);
164 DRM_INFO("radeon: VRAM %uM\n", (unsigned)(rdev->mc.mc_vram_size >> 20));
165 DRM_INFO("radeon: VRAM from 0x%08X to 0x%08X\n",
166 (unsigned)rdev->mc.vram_location,
167 (unsigned)(rdev->mc.vram_location + rdev->mc.mc_vram_size - 1));
168 DRM_INFO("radeon: GTT %uM\n", (unsigned)(rdev->mc.gtt_size >> 20));
169 DRM_INFO("radeon: GTT from 0x%08X to 0x%08X\n",
170 (unsigned)rdev->mc.gtt_location,
171 (unsigned)(rdev->mc.gtt_location + rdev->mc.gtt_size - 1));
172 return 0;
173} 247}
174 248
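A stand-alone sketch of the placement rule above: pick whichever gap (below vram_start or above vram_end, within 32 bits) is larger and shrink the GTT if it does not fit. The helper and numbers are illustrative; names mirror struct radeon_mc:

    #include <stdint.h>

    static uint64_t place_gtt(uint64_t vram_start, uint64_t vram_end,
                              uint64_t *gtt_size)
    {
            uint64_t size_af = 0xFFFFFFFFull - vram_end;   /* gap after VRAM */
            uint64_t size_bf = vram_start;                 /* gap before VRAM */

            if (size_bf > size_af) {
                    if (*gtt_size > size_bf)
                            *gtt_size = size_bf;
                    return vram_start - *gtt_size;  /* GTT ends where VRAM starts */
            }
            if (*gtt_size > size_af)
                    *gtt_size = size_af;
            return vram_end + 1;                    /* GTT starts right after VRAM */
    }

    /* e.g. vram_start = 0, vram_end = 0x0FFFFFFF (256M), gtt_size = 512M:
     * size_bf = 0 and size_af is about 3.75G, so the GTT lands at 0x10000000. */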
175
176/* 249/*
177 * GPU helpers function. 250 * GPU helpers function.
178 */ 251 */
@@ -181,7 +254,16 @@ bool radeon_card_posted(struct radeon_device *rdev)
181 uint32_t reg; 254 uint32_t reg;
182 255
183 /* first check CRTCs */ 256 /* first check CRTCs */
184 if (ASIC_IS_AVIVO(rdev)) { 257 if (ASIC_IS_DCE4(rdev)) {
258 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
259 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
260 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
261 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
262 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
263 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
264 if (reg & EVERGREEN_CRTC_MASTER_EN)
265 return true;
266 } else if (ASIC_IS_AVIVO(rdev)) {
185 reg = RREG32(AVIVO_D1CRTC_CONTROL) | 267 reg = RREG32(AVIVO_D1CRTC_CONTROL) |
186 RREG32(AVIVO_D2CRTC_CONTROL); 268 RREG32(AVIVO_D2CRTC_CONTROL);
187 if (reg & AVIVO_CRTC_EN) { 269 if (reg & AVIVO_CRTC_EN) {
@@ -208,8 +290,58 @@ bool radeon_card_posted(struct radeon_device *rdev)
208 290
209} 291}
210 292
293void radeon_update_bandwidth_info(struct radeon_device *rdev)
294{
295 fixed20_12 a;
296 u32 sclk, mclk;
297
298 if (rdev->flags & RADEON_IS_IGP) {
299 sclk = radeon_get_engine_clock(rdev);
300 mclk = rdev->clock.default_mclk;
301
302 a.full = rfixed_const(100);
303 rdev->pm.sclk.full = rfixed_const(sclk);
304 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
305 rdev->pm.mclk.full = rfixed_const(mclk);
306 rdev->pm.mclk.full = rfixed_div(rdev->pm.mclk, a);
307
308 a.full = rfixed_const(16);
 309 /* core_bandwidth = sclk(MHz) * 16 */
310 rdev->pm.core_bandwidth.full = rfixed_div(rdev->pm.sclk, a);
311 } else {
312 sclk = radeon_get_engine_clock(rdev);
313 mclk = radeon_get_memory_clock(rdev);
314
315 a.full = rfixed_const(100);
316 rdev->pm.sclk.full = rfixed_const(sclk);
317 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
318 rdev->pm.mclk.full = rfixed_const(mclk);
319 rdev->pm.mclk.full = rfixed_div(rdev->pm.mclk, a);
320 }
321}
322
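rfixed_const()/rfixed_div() come from radeon_fixed.h and, as the type name fixed20_12 suggests, appear to use a 20.12 fixed-point layout. A stand-alone sketch of the conversion done above, assuming the clock getters return values in units of 10 kHz (so dividing by 100 yields MHz, matching the sclk(MHz) comment):

    #include <stdint.h>

    /* Minimal 20.12 helpers mirroring the assumed semantics of rfixed_const()
     * and rfixed_div(); illustrative only. */
    typedef struct { uint32_t full; } fixed20_12;

    static fixed20_12 fx_const(uint32_t v)
    {
            return (fixed20_12){ .full = v << 12 };
    }

    static fixed20_12 fx_div(fixed20_12 a, fixed20_12 b)
    {
            return (fixed20_12){ .full = (uint32_t)(((uint64_t)a.full << 12) / b.full) };
    }

    /* e.g. sclk = 68000 (10 kHz units): fx_div(fx_const(68000), fx_const(100))
     * holds 680.0 in 20.12 form, i.e. 680 MHz. */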
323bool radeon_boot_test_post_card(struct radeon_device *rdev)
324{
325 if (radeon_card_posted(rdev))
326 return true;
327
328 if (rdev->bios) {
329 DRM_INFO("GPU not posted. posting now...\n");
330 if (rdev->is_atom_bios)
331 atom_asic_init(rdev->mode_info.atom_context);
332 else
333 radeon_combios_asic_init(rdev->ddev);
334 return true;
335 } else {
336 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
337 return false;
338 }
339}
340
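A typical call site in a per-ASIC init path would simply refuse to continue when neither condition holds; this caller is illustrative only, the real r600/rv770 init paths differ in detail:

    if (!radeon_boot_test_post_card(rdev))
            return -EINVAL; /* never posted and no BIOS to post it with */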
211int radeon_dummy_page_init(struct radeon_device *rdev) 341int radeon_dummy_page_init(struct radeon_device *rdev)
212{ 342{
343 if (rdev->dummy_page.page)
344 return 0;
213 rdev->dummy_page.page = alloc_page(GFP_DMA32 | GFP_KERNEL | __GFP_ZERO); 345 rdev->dummy_page.page = alloc_page(GFP_DMA32 | GFP_KERNEL | __GFP_ZERO);
214 if (rdev->dummy_page.page == NULL) 346 if (rdev->dummy_page.page == NULL)
215 return -ENOMEM; 347 return -ENOMEM;
@@ -234,167 +366,6 @@ void radeon_dummy_page_fini(struct radeon_device *rdev)
234} 366}
235 367
236 368
237/*
238 * Registers accessors functions.
239 */
240uint32_t radeon_invalid_rreg(struct radeon_device *rdev, uint32_t reg)
241{
242 DRM_ERROR("Invalid callback to read register 0x%04X\n", reg);
243 BUG_ON(1);
244 return 0;
245}
246
247void radeon_invalid_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
248{
249 DRM_ERROR("Invalid callback to write register 0x%04X with 0x%08X\n",
250 reg, v);
251 BUG_ON(1);
252}
253
254void radeon_register_accessor_init(struct radeon_device *rdev)
255{
256 rdev->mc_rreg = &radeon_invalid_rreg;
257 rdev->mc_wreg = &radeon_invalid_wreg;
258 rdev->pll_rreg = &radeon_invalid_rreg;
259 rdev->pll_wreg = &radeon_invalid_wreg;
260 rdev->pciep_rreg = &radeon_invalid_rreg;
261 rdev->pciep_wreg = &radeon_invalid_wreg;
262
263 /* Don't change order as we are overridding accessor. */
264 if (rdev->family < CHIP_RV515) {
265 rdev->pcie_reg_mask = 0xff;
266 } else {
267 rdev->pcie_reg_mask = 0x7ff;
268 }
269 /* FIXME: not sure here */
270 if (rdev->family <= CHIP_R580) {
271 rdev->pll_rreg = &r100_pll_rreg;
272 rdev->pll_wreg = &r100_pll_wreg;
273 }
274 if (rdev->family >= CHIP_R420) {
275 rdev->mc_rreg = &r420_mc_rreg;
276 rdev->mc_wreg = &r420_mc_wreg;
277 }
278 if (rdev->family >= CHIP_RV515) {
279 rdev->mc_rreg = &rv515_mc_rreg;
280 rdev->mc_wreg = &rv515_mc_wreg;
281 }
282 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480) {
283 rdev->mc_rreg = &rs400_mc_rreg;
284 rdev->mc_wreg = &rs400_mc_wreg;
285 }
286 if (rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) {
287 rdev->mc_rreg = &rs690_mc_rreg;
288 rdev->mc_wreg = &rs690_mc_wreg;
289 }
290 if (rdev->family == CHIP_RS600) {
291 rdev->mc_rreg = &rs600_mc_rreg;
292 rdev->mc_wreg = &rs600_mc_wreg;
293 }
294 if (rdev->family >= CHIP_R600) {
295 rdev->pciep_rreg = &r600_pciep_rreg;
296 rdev->pciep_wreg = &r600_pciep_wreg;
297 }
298}
299
300
301/*
302 * ASIC
303 */
304int radeon_asic_init(struct radeon_device *rdev)
305{
306 radeon_register_accessor_init(rdev);
307 switch (rdev->family) {
308 case CHIP_R100:
309 case CHIP_RV100:
310 case CHIP_RS100:
311 case CHIP_RV200:
312 case CHIP_RS200:
313 case CHIP_R200:
314 case CHIP_RV250:
315 case CHIP_RS300:
316 case CHIP_RV280:
317 rdev->asic = &r100_asic;
318 break;
319 case CHIP_R300:
320 case CHIP_R350:
321 case CHIP_RV350:
322 case CHIP_RV380:
323 rdev->asic = &r300_asic;
324 if (rdev->flags & RADEON_IS_PCIE) {
325 rdev->asic->gart_tlb_flush = &rv370_pcie_gart_tlb_flush;
326 rdev->asic->gart_set_page = &rv370_pcie_gart_set_page;
327 }
328 break;
329 case CHIP_R420:
330 case CHIP_R423:
331 case CHIP_RV410:
332 rdev->asic = &r420_asic;
333 break;
334 case CHIP_RS400:
335 case CHIP_RS480:
336 rdev->asic = &rs400_asic;
337 break;
338 case CHIP_RS600:
339 rdev->asic = &rs600_asic;
340 break;
341 case CHIP_RS690:
342 case CHIP_RS740:
343 rdev->asic = &rs690_asic;
344 break;
345 case CHIP_RV515:
346 rdev->asic = &rv515_asic;
347 break;
348 case CHIP_R520:
349 case CHIP_RV530:
350 case CHIP_RV560:
351 case CHIP_RV570:
352 case CHIP_R580:
353 rdev->asic = &r520_asic;
354 break;
355 case CHIP_R600:
356 case CHIP_RV610:
357 case CHIP_RV630:
358 case CHIP_RV620:
359 case CHIP_RV635:
360 case CHIP_RV670:
361 case CHIP_RS780:
362 case CHIP_RS880:
363 rdev->asic = &r600_asic;
364 break;
365 case CHIP_RV770:
366 case CHIP_RV730:
367 case CHIP_RV710:
368 case CHIP_RV740:
369 rdev->asic = &rv770_asic;
370 break;
371 default:
372 /* FIXME: not supported yet */
373 return -EINVAL;
374 }
375 return 0;
376}
377
378
379/*
380 * Wrapper around modesetting bits.
381 */
382int radeon_clocks_init(struct radeon_device *rdev)
383{
384 int r;
385
386 r = radeon_static_clocks_init(rdev->ddev);
387 if (r) {
388 return r;
389 }
390 DRM_INFO("Clocks initialized !\n");
391 return 0;
392}
393
394void radeon_clocks_fini(struct radeon_device *rdev)
395{
396}
397
398/* ATOM accessor methods */ 369/* ATOM accessor methods */
399static uint32_t cail_pll_read(struct card_info *info, uint32_t reg) 370static uint32_t cail_pll_read(struct card_info *info, uint32_t reg)
400{ 371{
@@ -462,13 +433,18 @@ int radeon_atombios_init(struct radeon_device *rdev)
462 atom_card_info->pll_write = cail_pll_write; 433 atom_card_info->pll_write = cail_pll_write;
463 434
464 rdev->mode_info.atom_context = atom_parse(atom_card_info, rdev->bios); 435 rdev->mode_info.atom_context = atom_parse(atom_card_info, rdev->bios);
436 mutex_init(&rdev->mode_info.atom_context->mutex);
465 radeon_atom_initialize_bios_scratch_regs(rdev->ddev); 437 radeon_atom_initialize_bios_scratch_regs(rdev->ddev);
438 atom_allocate_fb_scratch(rdev->mode_info.atom_context);
466 return 0; 439 return 0;
467} 440}
468 441
469void radeon_atombios_fini(struct radeon_device *rdev) 442void radeon_atombios_fini(struct radeon_device *rdev)
470{ 443{
471 kfree(rdev->mode_info.atom_context); 444 if (rdev->mode_info.atom_context) {
445 kfree(rdev->mode_info.atom_context->scratch);
446 kfree(rdev->mode_info.atom_context);
447 }
472 kfree(rdev->mode_info.atom_card_info); 448 kfree(rdev->mode_info.atom_card_info);
473} 449}
474 450
@@ -494,31 +470,102 @@ static unsigned int radeon_vga_set_decode(void *cookie, bool state)
494 return VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM; 470 return VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
495} 471}
496 472
497void radeon_agp_disable(struct radeon_device *rdev) 473void radeon_check_arguments(struct radeon_device *rdev)
498{ 474{
499 rdev->flags &= ~RADEON_IS_AGP; 475 /* vramlimit must be a power of two */
500 if (rdev->family >= CHIP_R600) { 476 switch (radeon_vram_limit) {
501 DRM_INFO("Forcing AGP to PCIE mode\n"); 477 case 0:
502 rdev->flags |= RADEON_IS_PCIE; 478 case 4:
503 } else if (rdev->family >= CHIP_RV515 || 479 case 8:
504 rdev->family == CHIP_RV380 || 480 case 16:
505 rdev->family == CHIP_RV410 || 481 case 32:
506 rdev->family == CHIP_R423) { 482 case 64:
507 DRM_INFO("Forcing AGP to PCIE mode\n"); 483 case 128:
508 rdev->flags |= RADEON_IS_PCIE; 484 case 256:
509 rdev->asic->gart_tlb_flush = &rv370_pcie_gart_tlb_flush; 485 case 512:
510 rdev->asic->gart_set_page = &rv370_pcie_gart_set_page; 486 case 1024:
487 case 2048:
488 case 4096:
489 break;
490 default:
491 dev_warn(rdev->dev, "vram limit (%d) must be a power of 2\n",
492 radeon_vram_limit);
493 radeon_vram_limit = 0;
494 break;
495 }
496 radeon_vram_limit = radeon_vram_limit << 20;
 497 /* gtt size must be a power of two and greater than or equal to 32M */
498 switch (radeon_gart_size) {
499 case 4:
500 case 8:
501 case 16:
502 dev_warn(rdev->dev, "gart size (%d) too small forcing to 512M\n",
503 radeon_gart_size);
504 radeon_gart_size = 512;
505 break;
506 case 32:
507 case 64:
508 case 128:
509 case 256:
510 case 512:
511 case 1024:
512 case 2048:
513 case 4096:
514 break;
515 default:
516 dev_warn(rdev->dev, "gart size (%d) must be a power of 2\n",
517 radeon_gart_size);
518 radeon_gart_size = 512;
519 break;
520 }
521 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
 522 /* AGP mode can only be -1, 0, 1, 2, 4, 8 */
523 switch (radeon_agpmode) {
524 case -1:
525 case 0:
526 case 1:
527 case 2:
528 case 4:
529 case 8:
530 break;
531 default:
532 dev_warn(rdev->dev, "invalid AGP mode %d (valid mode: "
533 "-1, 0, 1, 2, 4, 8)\n", radeon_agpmode);
534 radeon_agpmode = 0;
535 break;
536 }
537}
538
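Both switches above enumerate the valid sizes explicitly rather than testing for a power of two; an equivalent generic check (illustrative only, not what the patch does) would be:

    #include <stdbool.h>

    static bool is_power_of_two(unsigned int v)
    {
            return v && !(v & (v - 1));
    }

    /* e.g. radeon_gart_size=24 fails the check and is reset to 512;
     * radeon_gart_size=16 is a power of two but still too small, so the
     * explicit switch also enforces the 32M minimum. */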
539static void radeon_switcheroo_set_state(struct pci_dev *pdev, enum vga_switcheroo_state state)
540{
541 struct drm_device *dev = pci_get_drvdata(pdev);
542 struct radeon_device *rdev = dev->dev_private;
543 pm_message_t pmm = { .event = PM_EVENT_SUSPEND };
544 if (state == VGA_SWITCHEROO_ON) {
545 printk(KERN_INFO "radeon: switched on\n");
546 /* don't suspend or resume card normally */
547 rdev->powered_down = false;
548 radeon_resume_kms(dev);
511 } else { 549 } else {
512 DRM_INFO("Forcing AGP to PCI mode\n"); 550 printk(KERN_INFO "radeon: switched off\n");
513 rdev->flags |= RADEON_IS_PCI; 551 radeon_suspend_kms(dev, pmm);
514 rdev->asic->gart_tlb_flush = &r100_pci_gart_tlb_flush; 552 /* don't suspend or resume card normally */
515 rdev->asic->gart_set_page = &r100_pci_gart_set_page; 553 rdev->powered_down = true;
516 } 554 }
517} 555}
518 556
519/* 557static bool radeon_switcheroo_can_switch(struct pci_dev *pdev)
520 * Radeon device. 558{
521 */ 559 struct drm_device *dev = pci_get_drvdata(pdev);
560 bool can_switch;
561
562 spin_lock(&dev->count_lock);
563 can_switch = (dev->open_count == 0);
564 spin_unlock(&dev->count_lock);
565 return can_switch;
566}
567
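These two switcheroo callbacks are hooked up further down in this same patch, inside radeon_device_init():

    vga_switcheroo_register_client(rdev->pdev,
                                   radeon_switcheroo_set_state,
                                   radeon_switcheroo_can_switch);

can_switch simply refuses a GPU switch while any DRM file handle is still open (dev->open_count != 0).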
568
522int radeon_device_init(struct radeon_device *rdev, 569int radeon_device_init(struct radeon_device *rdev,
523 struct drm_device *ddev, 570 struct drm_device *ddev,
524 struct pci_dev *pdev, 571 struct pci_dev *pdev,
@@ -527,7 +574,6 @@ int radeon_device_init(struct radeon_device *rdev,
527 int r; 574 int r;
528 int dma_bits; 575 int dma_bits;
529 576
530 DRM_INFO("radeon: Initializing kernel modesetting.\n");
531 rdev->shutdown = false; 577 rdev->shutdown = false;
532 rdev->dev = &pdev->dev; 578 rdev->dev = &pdev->dev;
533 rdev->ddev = ddev; 579 rdev->ddev = ddev;
@@ -539,21 +585,44 @@ int radeon_device_init(struct radeon_device *rdev,
539 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024; 585 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
540 rdev->gpu_lockup = false; 586 rdev->gpu_lockup = false;
541 rdev->accel_working = false; 587 rdev->accel_working = false;
588
589 DRM_INFO("initializing kernel modesetting (%s 0x%04X:0x%04X).\n",
590 radeon_family_name[rdev->family], pdev->vendor, pdev->device);
591
542 /* mutex initialization are all done here so we 592 /* mutex initialization are all done here so we
543 * can recall function without having locking issues */ 593 * can recall function without having locking issues */
544 mutex_init(&rdev->cs_mutex); 594 mutex_init(&rdev->cs_mutex);
545 mutex_init(&rdev->ib_pool.mutex); 595 mutex_init(&rdev->ib_pool.mutex);
546 mutex_init(&rdev->cp.mutex); 596 mutex_init(&rdev->cp.mutex);
597 mutex_init(&rdev->dc_hw_i2c_mutex);
598 if (rdev->family >= CHIP_R600)
599 spin_lock_init(&rdev->ih.lock);
600 mutex_init(&rdev->gem.mutex);
601 mutex_init(&rdev->pm.mutex);
547 rwlock_init(&rdev->fence_drv.lock); 602 rwlock_init(&rdev->fence_drv.lock);
548 INIT_LIST_HEAD(&rdev->gem.objects); 603 INIT_LIST_HEAD(&rdev->gem.objects);
604 init_waitqueue_head(&rdev->irq.vblank_queue);
605
606 /* setup workqueue */
607 rdev->wq = create_workqueue("radeon");
608 if (rdev->wq == NULL)
609 return -ENOMEM;
549 610
550 /* Set asic functions */ 611 /* Set asic functions */
551 r = radeon_asic_init(rdev); 612 r = radeon_asic_init(rdev);
552 if (r) { 613 if (r)
553 return r; 614 return r;
615 radeon_check_arguments(rdev);
616
 617 /* All of the newer IGP chips have an internal GART.
 618 * However, some rs4xx boards report as AGP, so clear that flag here.
 619 */
620 if ((rdev->family >= CHIP_RS400) &&
621 (rdev->flags & RADEON_IS_IGP)) {
622 rdev->flags &= ~RADEON_IS_AGP;
554 } 623 }
555 624
556 if (radeon_agpmode == -1) { 625 if (rdev->flags & RADEON_IS_AGP && radeon_agpmode == -1) {
557 radeon_agp_disable(rdev); 626 radeon_agp_disable(rdev);
558 } 627 }
559 628
@@ -590,6 +659,9 @@ int radeon_device_init(struct radeon_device *rdev,
590 /* this will fail for cards that aren't VGA class devices, just 659 /* this will fail for cards that aren't VGA class devices, just
591 * ignore it */ 660 * ignore it */
592 vga_client_register(rdev->pdev, rdev, NULL, radeon_vga_set_decode); 661 vga_client_register(rdev->pdev, rdev, NULL, radeon_vga_set_decode);
662 vga_switcheroo_register_client(rdev->pdev,
663 radeon_switcheroo_set_state,
664 radeon_switcheroo_can_switch);
593 665
594 r = radeon_init(rdev); 666 r = radeon_init(rdev);
595 if (r) 667 if (r)
@@ -620,6 +692,8 @@ void radeon_device_fini(struct radeon_device *rdev)
620 DRM_INFO("radeon: finishing device.\n"); 692 DRM_INFO("radeon: finishing device.\n");
621 rdev->shutdown = true; 693 rdev->shutdown = true;
622 radeon_fini(rdev); 694 radeon_fini(rdev);
695 destroy_workqueue(rdev->wq);
696 vga_switcheroo_unregister_client(rdev->pdev);
623 vga_client_register(rdev->pdev, NULL, NULL, NULL); 697 vga_client_register(rdev->pdev, NULL, NULL, NULL);
624 iounmap(rdev->rmmio); 698 iounmap(rdev->rmmio);
625 rdev->rmmio = NULL; 699 rdev->rmmio = NULL;
@@ -631,38 +705,48 @@ void radeon_device_fini(struct radeon_device *rdev)
631 */ 705 */
632int radeon_suspend_kms(struct drm_device *dev, pm_message_t state) 706int radeon_suspend_kms(struct drm_device *dev, pm_message_t state)
633{ 707{
634 struct radeon_device *rdev = dev->dev_private; 708 struct radeon_device *rdev;
635 struct drm_crtc *crtc; 709 struct drm_crtc *crtc;
710 int r;
636 711
637 if (dev == NULL || rdev == NULL) { 712 if (dev == NULL || dev->dev_private == NULL) {
638 return -ENODEV; 713 return -ENODEV;
639 } 714 }
640 if (state.event == PM_EVENT_PRETHAW) { 715 if (state.event == PM_EVENT_PRETHAW) {
641 return 0; 716 return 0;
642 } 717 }
718 rdev = dev->dev_private;
719
720 if (rdev->powered_down)
721 return 0;
643 /* unpin the front buffers */ 722 /* unpin the front buffers */
644 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 723 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
645 struct radeon_framebuffer *rfb = to_radeon_framebuffer(crtc->fb); 724 struct radeon_framebuffer *rfb = to_radeon_framebuffer(crtc->fb);
646 struct radeon_object *robj; 725 struct radeon_bo *robj;
647 726
648 if (rfb == NULL || rfb->obj == NULL) { 727 if (rfb == NULL || rfb->obj == NULL) {
649 continue; 728 continue;
650 } 729 }
651 robj = rfb->obj->driver_private; 730 robj = rfb->obj->driver_private;
652 if (robj != rdev->fbdev_robj) { 731 if (robj != rdev->fbdev_rbo) {
653 radeon_object_unpin(robj); 732 r = radeon_bo_reserve(robj, false);
733 if (unlikely(r == 0)) {
734 radeon_bo_unpin(robj);
735 radeon_bo_unreserve(robj);
736 }
654 } 737 }
655 } 738 }
656 /* evict vram memory */ 739 /* evict vram memory */
657 radeon_object_evict_vram(rdev); 740 radeon_bo_evict_vram(rdev);
658 /* wait for gpu to finish processing current batch */ 741 /* wait for gpu to finish processing current batch */
659 radeon_fence_wait_last(rdev); 742 radeon_fence_wait_last(rdev);
660 743
661 radeon_save_bios_scratch_regs(rdev); 744 radeon_save_bios_scratch_regs(rdev);
662 745
663 radeon_suspend(rdev); 746 radeon_suspend(rdev);
747 radeon_hpd_fini(rdev);
664 /* evict remaining vram memory */ 748 /* evict remaining vram memory */
665 radeon_object_evict_vram(rdev); 749 radeon_bo_evict_vram(rdev);
666 750
667 pci_save_state(dev->pdev); 751 pci_save_state(dev->pdev);
668 if (state.event == PM_EVENT_SUSPEND) { 752 if (state.event == PM_EVENT_SUSPEND) {
@@ -680,6 +764,9 @@ int radeon_resume_kms(struct drm_device *dev)
680{ 764{
681 struct radeon_device *rdev = dev->dev_private; 765 struct radeon_device *rdev = dev->dev_private;
682 766
767 if (rdev->powered_down)
768 return 0;
769
683 acquire_console_sem(); 770 acquire_console_sem();
684 pci_set_power_state(dev->pdev, PCI_D0); 771 pci_set_power_state(dev->pdev, PCI_D0);
685 pci_restore_state(dev->pdev); 772 pci_restore_state(dev->pdev);
@@ -695,6 +782,8 @@ int radeon_resume_kms(struct drm_device *dev)
695 fb_set_suspend(rdev->fbdev_info, 0); 782 fb_set_suspend(rdev->fbdev_info, 0);
696 release_console_sem(); 783 release_console_sem();
697 784
785 /* reset hpd state */
786 radeon_hpd_init(rdev);
698 /* blat the mode back in */ 787 /* blat the mode back in */
699 drm_helper_resume_force_mode(dev); 788 drm_helper_resume_force_mode(dev);
700 return 0; 789 return 0;
diff --git a/drivers/gpu/drm/radeon/radeon_display.c b/drivers/gpu/drm/radeon/radeon_display.c
index c85df4afcb7a..bb1c122cad21 100644
--- a/drivers/gpu/drm/radeon/radeon_display.c
+++ b/drivers/gpu/drm/radeon/radeon_display.c
@@ -68,6 +68,36 @@ static void avivo_crtc_load_lut(struct drm_crtc *crtc)
68 WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id); 68 WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id);
69} 69}
70 70
71static void evergreen_crtc_load_lut(struct drm_crtc *crtc)
72{
73 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
74 struct drm_device *dev = crtc->dev;
75 struct radeon_device *rdev = dev->dev_private;
76 int i;
77
78 DRM_DEBUG("%d\n", radeon_crtc->crtc_id);
79 WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);
80
81 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
82 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
83 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);
84
85 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
86 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
87 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);
88
89 WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
90 WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);
91
92 WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
93 for (i = 0; i < 256; i++) {
94 WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
95 (radeon_crtc->lut_r[i] << 20) |
96 (radeon_crtc->lut_g[i] << 10) |
97 (radeon_crtc->lut_b[i] << 0));
98 }
99}
100
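Each write to EVERGREEN_DC_LUT_30_COLOR above packs one palette entry as 10 bits per channel: red in bits 29..20, green in 19..10, blue in 9..0. A stand-alone packing helper, assuming lut_r/g/b already hold 10-bit values, would look like this (illustrative only):

    #include <stdint.h>

    static uint32_t pack_lut_30(uint16_t r, uint16_t g, uint16_t b)
    {
            return ((uint32_t)(r & 0x3ff) << 20) |
                   ((uint32_t)(g & 0x3ff) << 10) |
                    (uint32_t)(b & 0x3ff);
    }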
71static void legacy_crtc_load_lut(struct drm_crtc *crtc) 101static void legacy_crtc_load_lut(struct drm_crtc *crtc)
72{ 102{
73 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 103 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
@@ -100,7 +130,9 @@ void radeon_crtc_load_lut(struct drm_crtc *crtc)
100 if (!crtc->enabled) 130 if (!crtc->enabled)
101 return; 131 return;
102 132
103 if (ASIC_IS_AVIVO(rdev)) 133 if (ASIC_IS_DCE4(rdev))
134 evergreen_crtc_load_lut(crtc);
135 else if (ASIC_IS_AVIVO(rdev))
104 avivo_crtc_load_lut(crtc); 136 avivo_crtc_load_lut(crtc);
105 else 137 else
106 legacy_crtc_load_lut(crtc); 138 legacy_crtc_load_lut(crtc);
@@ -234,7 +266,7 @@ static const char *encoder_names[34] = {
234 "INTERNAL_UNIPHY2", 266 "INTERNAL_UNIPHY2",
235}; 267};
236 268
237static const char *connector_names[13] = { 269static const char *connector_names[15] = {
238 "Unknown", 270 "Unknown",
239 "VGA", 271 "VGA",
240 "DVI-I", 272 "DVI-I",
@@ -248,6 +280,18 @@ static const char *connector_names[13] = {
248 "DisplayPort", 280 "DisplayPort",
249 "HDMI-A", 281 "HDMI-A",
250 "HDMI-B", 282 "HDMI-B",
283 "TV",
284 "eDP",
285};
286
287static const char *hpd_names[7] = {
288 "NONE",
289 "HPD1",
290 "HPD2",
291 "HPD3",
292 "HPD4",
293 "HPD5",
294 "HPD6",
251}; 295};
252 296
253static void radeon_print_display_setup(struct drm_device *dev) 297static void radeon_print_display_setup(struct drm_device *dev)
@@ -264,16 +308,27 @@ static void radeon_print_display_setup(struct drm_device *dev)
264 radeon_connector = to_radeon_connector(connector); 308 radeon_connector = to_radeon_connector(connector);
265 DRM_INFO("Connector %d:\n", i); 309 DRM_INFO("Connector %d:\n", i);
266 DRM_INFO(" %s\n", connector_names[connector->connector_type]); 310 DRM_INFO(" %s\n", connector_names[connector->connector_type]);
267 if (radeon_connector->ddc_bus) 311 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
312 DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]);
313 if (radeon_connector->ddc_bus) {
268 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 314 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
269 radeon_connector->ddc_bus->rec.mask_clk_reg, 315 radeon_connector->ddc_bus->rec.mask_clk_reg,
270 radeon_connector->ddc_bus->rec.mask_data_reg, 316 radeon_connector->ddc_bus->rec.mask_data_reg,
271 radeon_connector->ddc_bus->rec.a_clk_reg, 317 radeon_connector->ddc_bus->rec.a_clk_reg,
272 radeon_connector->ddc_bus->rec.a_data_reg, 318 radeon_connector->ddc_bus->rec.a_data_reg,
273 radeon_connector->ddc_bus->rec.put_clk_reg, 319 radeon_connector->ddc_bus->rec.en_clk_reg,
274 radeon_connector->ddc_bus->rec.put_data_reg, 320 radeon_connector->ddc_bus->rec.en_data_reg,
275 radeon_connector->ddc_bus->rec.get_clk_reg, 321 radeon_connector->ddc_bus->rec.y_clk_reg,
276 radeon_connector->ddc_bus->rec.get_data_reg); 322 radeon_connector->ddc_bus->rec.y_data_reg);
323 } else {
324 if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
325 connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
326 connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
327 connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
328 connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
329 connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
330 DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
331 }
277 DRM_INFO(" Encoders:\n"); 332 DRM_INFO(" Encoders:\n");
278 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 333 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
279 radeon_encoder = to_radeon_encoder(encoder); 334 radeon_encoder = to_radeon_encoder(encoder);
@@ -313,17 +368,20 @@ static bool radeon_setup_enc_conn(struct drm_device *dev)
313 368
314 if (rdev->bios) { 369 if (rdev->bios) {
315 if (rdev->is_atom_bios) { 370 if (rdev->is_atom_bios) {
316 if (rdev->family >= CHIP_R600) 371 ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
372 if (ret == false)
317 ret = radeon_get_atom_connector_info_from_object_table(dev); 373 ret = radeon_get_atom_connector_info_from_object_table(dev);
318 else 374 } else {
319 ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
320 } else
321 ret = radeon_get_legacy_connector_info_from_bios(dev); 375 ret = radeon_get_legacy_connector_info_from_bios(dev);
376 if (ret == false)
377 ret = radeon_get_legacy_connector_info_from_table(dev);
378 }
322 } else { 379 } else {
323 if (!ASIC_IS_AVIVO(rdev)) 380 if (!ASIC_IS_AVIVO(rdev))
324 ret = radeon_get_legacy_connector_info_from_table(dev); 381 ret = radeon_get_legacy_connector_info_from_table(dev);
325 } 382 }
326 if (ret) { 383 if (ret) {
384 radeon_setup_encoder_clones(dev);
327 radeon_print_display_setup(dev); 385 radeon_print_display_setup(dev);
328 list_for_each_entry(drm_connector, &dev->mode_config.connector_list, head) 386 list_for_each_entry(drm_connector, &dev->mode_config.connector_list, head)
329 radeon_ddc_dump(drm_connector); 387 radeon_ddc_dump(drm_connector);
@@ -334,16 +392,25 @@ static bool radeon_setup_enc_conn(struct drm_device *dev)
334 392
335int radeon_ddc_get_modes(struct radeon_connector *radeon_connector) 393int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
336{ 394{
395 struct drm_device *dev = radeon_connector->base.dev;
396 struct radeon_device *rdev = dev->dev_private;
337 int ret = 0; 397 int ret = 0;
338 398
399 if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
400 (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)) {
401 struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
402 if ((dig->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT ||
403 dig->dp_sink_type == CONNECTOR_OBJECT_ID_eDP) && dig->dp_i2c_bus)
404 radeon_connector->edid = drm_get_edid(&radeon_connector->base, &dig->dp_i2c_bus->adapter);
405 }
339 if (!radeon_connector->ddc_bus) 406 if (!radeon_connector->ddc_bus)
340 return -1; 407 return -1;
341 if (!radeon_connector->edid) { 408 if (!radeon_connector->edid) {
342 radeon_i2c_do_lock(radeon_connector, 1);
343 radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter); 409 radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
344 radeon_i2c_do_lock(radeon_connector, 0);
345 } 410 }
346 411 /* some servers provide a hardcoded edid in rom for KVMs */
412 if (!radeon_connector->edid)
413 radeon_connector->edid = radeon_combios_get_hardcoded_edid(rdev);
347 if (radeon_connector->edid) { 414 if (radeon_connector->edid) {
348 drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid); 415 drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid);
349 ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid); 416 ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid);
@@ -361,9 +428,7 @@ static int radeon_ddc_dump(struct drm_connector *connector)
361 428
362 if (!radeon_connector->ddc_bus) 429 if (!radeon_connector->ddc_bus)
363 return -1; 430 return -1;
364 radeon_i2c_do_lock(radeon_connector, 1);
365 edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter); 431 edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter);
366 radeon_i2c_do_lock(radeon_connector, 0);
367 if (edid) { 432 if (edid) {
368 kfree(edid); 433 kfree(edid);
369 } 434 }
@@ -380,17 +445,18 @@ static inline uint32_t radeon_div(uint64_t n, uint32_t d)
380 return n; 445 return n;
381} 446}
382 447
383void radeon_compute_pll(struct radeon_pll *pll, 448static void radeon_compute_pll_legacy(struct radeon_pll *pll,
384 uint64_t freq, 449 uint64_t freq,
385 uint32_t *dot_clock_p, 450 uint32_t *dot_clock_p,
386 uint32_t *fb_div_p, 451 uint32_t *fb_div_p,
387 uint32_t *frac_fb_div_p, 452 uint32_t *frac_fb_div_p,
388 uint32_t *ref_div_p, 453 uint32_t *ref_div_p,
389 uint32_t *post_div_p, 454 uint32_t *post_div_p)
390 int flags)
391{ 455{
392 uint32_t min_ref_div = pll->min_ref_div; 456 uint32_t min_ref_div = pll->min_ref_div;
393 uint32_t max_ref_div = pll->max_ref_div; 457 uint32_t max_ref_div = pll->max_ref_div;
458 uint32_t min_post_div = pll->min_post_div;
459 uint32_t max_post_div = pll->max_post_div;
394 uint32_t min_fractional_feed_div = 0; 460 uint32_t min_fractional_feed_div = 0;
395 uint32_t max_fractional_feed_div = 0; 461 uint32_t max_fractional_feed_div = 0;
396 uint32_t best_vco = pll->best_vco; 462 uint32_t best_vco = pll->best_vco;
@@ -402,11 +468,20 @@ void radeon_compute_pll(struct radeon_pll *pll,
402 uint32_t best_error = 0xffffffff; 468 uint32_t best_error = 0xffffffff;
403 uint32_t best_vco_diff = 1; 469 uint32_t best_vco_diff = 1;
404 uint32_t post_div; 470 uint32_t post_div;
471 u32 pll_out_min, pll_out_max;
405 472
406 DRM_DEBUG("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div); 473 DRM_DEBUG("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
407 freq = freq * 1000; 474 freq = freq * 1000;
408 475
409 if (flags & RADEON_PLL_USE_REF_DIV) 476 if (pll->flags & RADEON_PLL_IS_LCD) {
477 pll_out_min = pll->lcd_pll_out_min;
478 pll_out_max = pll->lcd_pll_out_max;
479 } else {
480 pll_out_min = pll->pll_out_min;
481 pll_out_max = pll->pll_out_max;
482 }
483
484 if (pll->flags & RADEON_PLL_USE_REF_DIV)
410 min_ref_div = max_ref_div = pll->reference_div; 485 min_ref_div = max_ref_div = pll->reference_div;
411 else { 486 else {
412 while (min_ref_div < max_ref_div-1) { 487 while (min_ref_div < max_ref_div-1) {
@@ -421,19 +496,22 @@ void radeon_compute_pll(struct radeon_pll *pll,
421 } 496 }
422 } 497 }
423 498
424 if (flags & RADEON_PLL_USE_FRAC_FB_DIV) { 499 if (pll->flags & RADEON_PLL_USE_POST_DIV)
500 min_post_div = max_post_div = pll->post_div;
501
502 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
425 min_fractional_feed_div = pll->min_frac_feedback_div; 503 min_fractional_feed_div = pll->min_frac_feedback_div;
426 max_fractional_feed_div = pll->max_frac_feedback_div; 504 max_fractional_feed_div = pll->max_frac_feedback_div;
427 } 505 }
428 506
429 for (post_div = pll->min_post_div; post_div <= pll->max_post_div; ++post_div) { 507 for (post_div = min_post_div; post_div <= max_post_div; ++post_div) {
430 uint32_t ref_div; 508 uint32_t ref_div;
431 509
432 if ((flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1)) 510 if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
433 continue; 511 continue;
434 512
435 /* legacy radeons only have a few post_divs */ 513 /* legacy radeons only have a few post_divs */
436 if (flags & RADEON_PLL_LEGACY) { 514 if (pll->flags & RADEON_PLL_LEGACY) {
437 if ((post_div == 5) || 515 if ((post_div == 5) ||
438 (post_div == 7) || 516 (post_div == 7) ||
439 (post_div == 9) || 517 (post_div == 9) ||
@@ -466,10 +544,10 @@ void radeon_compute_pll(struct radeon_pll *pll,
466 tmp = (uint64_t)pll->reference_freq * feedback_div; 544 tmp = (uint64_t)pll->reference_freq * feedback_div;
467 vco = radeon_div(tmp, ref_div); 545 vco = radeon_div(tmp, ref_div);
468 546
469 if (vco < pll->pll_out_min) { 547 if (vco < pll_out_min) {
470 min_feed_div = feedback_div + 1; 548 min_feed_div = feedback_div + 1;
471 continue; 549 continue;
472 } else if (vco > pll->pll_out_max) { 550 } else if (vco > pll_out_max) {
473 max_feed_div = feedback_div; 551 max_feed_div = feedback_div;
474 continue; 552 continue;
475 } 553 }
@@ -480,7 +558,7 @@ void radeon_compute_pll(struct radeon_pll *pll,
480 tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div; 558 tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
481 current_freq = radeon_div(tmp, ref_div * post_div); 559 current_freq = radeon_div(tmp, ref_div * post_div);
482 560
483 if (flags & RADEON_PLL_PREFER_CLOSEST_LOWER) { 561 if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
484 error = freq - current_freq; 562 error = freq - current_freq;
485 error = error < 0 ? 0xffffffff : error; 563 error = error < 0 ? 0xffffffff : error;
486 } else 564 } else
@@ -507,12 +585,12 @@ void radeon_compute_pll(struct radeon_pll *pll,
507 best_freq = current_freq; 585 best_freq = current_freq;
508 best_error = error; 586 best_error = error;
509 best_vco_diff = vco_diff; 587 best_vco_diff = vco_diff;
510 } else if (((flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) || 588 } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
511 ((flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) || 589 ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
512 ((flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) || 590 ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
513 ((flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) || 591 ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
514 ((flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) || 592 ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
515 ((flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) { 593 ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
516 best_post_div = post_div; 594 best_post_div = post_div;
517 best_ref_div = ref_div; 595 best_ref_div = ref_div;
518 best_feedback_div = feedback_div; 596 best_feedback_div = feedback_div;
@@ -542,6 +620,214 @@ void radeon_compute_pll(struct radeon_pll *pll,
542 *post_div_p = best_post_div; 620 *post_div_p = best_post_div;
543} 621}
544 622
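The search in the legacy path is driven by the usual PLL relation dot_clock = reference_freq * fb_div / (ref_div * post_div), with the VCO (reference_freq * fb_div / ref_div) kept inside [pll_out_min, pll_out_max]. A quick validity check for one candidate divider set, ignoring the fractional feedback divider for brevity (illustrative only, units follow whatever pll->reference_freq uses):

    #include <stdint.h>

    static int pll_candidate_ok(uint32_t ref_freq, uint32_t fb_div,
                                uint32_t ref_div, uint32_t post_div,
                                uint32_t out_min, uint32_t out_max,
                                uint32_t *dot_clock)
    {
            uint64_t vco = (uint64_t)ref_freq * fb_div / ref_div;

            if (vco < out_min || vco > out_max)
                    return 0;       /* VCO out of range, reject */
            *dot_clock = (uint32_t)(vco / post_div);
            return 1;
    }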
623static bool
624calc_fb_div(struct radeon_pll *pll,
625 uint32_t freq,
626 uint32_t post_div,
627 uint32_t ref_div,
628 uint32_t *fb_div,
629 uint32_t *fb_div_frac)
630{
631 fixed20_12 feedback_divider, a, b;
632 u32 vco_freq;
633
634 vco_freq = freq * post_div;
635 /* feedback_divider = vco_freq * ref_div / pll->reference_freq; */
636 a.full = rfixed_const(pll->reference_freq);
637 feedback_divider.full = rfixed_const(vco_freq);
638 feedback_divider.full = rfixed_div(feedback_divider, a);
639 a.full = rfixed_const(ref_div);
640 feedback_divider.full = rfixed_mul(feedback_divider, a);
641
642 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
643 /* feedback_divider = floor((feedback_divider * 10.0) + 0.5) * 0.1; */
644 a.full = rfixed_const(10);
645 feedback_divider.full = rfixed_mul(feedback_divider, a);
646 feedback_divider.full += rfixed_const_half(0);
647 feedback_divider.full = rfixed_floor(feedback_divider);
648 feedback_divider.full = rfixed_div(feedback_divider, a);
649
650 /* *fb_div = floor(feedback_divider); */
651 a.full = rfixed_floor(feedback_divider);
652 *fb_div = rfixed_trunc(a);
653 /* *fb_div_frac = fmod(feedback_divider, 1.0) * 10.0; */
654 a.full = rfixed_const(10);
655 b.full = rfixed_mul(feedback_divider, a);
656
657 feedback_divider.full = rfixed_floor(feedback_divider);
658 feedback_divider.full = rfixed_mul(feedback_divider, a);
659 feedback_divider.full = b.full - feedback_divider.full;
660 *fb_div_frac = rfixed_trunc(feedback_divider);
661 } else {
662 /* *fb_div = floor(feedback_divider + 0.5); */
663 feedback_divider.full += rfixed_const_half(0);
664 feedback_divider.full = rfixed_floor(feedback_divider);
665
666 *fb_div = rfixed_trunc(feedback_divider);
667 *fb_div_frac = 0;
668 }
669
670 if (((*fb_div) < pll->min_feedback_div) || ((*fb_div) > pll->max_feedback_div))
671 return false;
672 else
673 return true;
674}
675
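A worked example of the computation above: with reference_freq = 2700, ref_div = 2 and a target vco_freq of 109000 (all in the same units the caller passes in), feedback_divider = 109000 * 2 / 2700, roughly 80.74. With RADEON_PLL_USE_FRAC_FB_DIV set this is rounded to one decimal place, giving fb_div = 80 and fb_div_frac = 7; without it, it is rounded to the nearest integer, giving fb_div = 81 and fb_div_frac = 0.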
676static bool
677calc_fb_ref_div(struct radeon_pll *pll,
678 uint32_t freq,
679 uint32_t post_div,
680 uint32_t *fb_div,
681 uint32_t *fb_div_frac,
682 uint32_t *ref_div)
683{
684 fixed20_12 ffreq, max_error, error, pll_out, a;
685 u32 vco;
686 u32 pll_out_min, pll_out_max;
687
688 if (pll->flags & RADEON_PLL_IS_LCD) {
689 pll_out_min = pll->lcd_pll_out_min;
690 pll_out_max = pll->lcd_pll_out_max;
691 } else {
692 pll_out_min = pll->pll_out_min;
693 pll_out_max = pll->pll_out_max;
694 }
695
696 ffreq.full = rfixed_const(freq);
697 /* max_error = ffreq * 0.0025; */
698 a.full = rfixed_const(400);
699 max_error.full = rfixed_div(ffreq, a);
700
701 for ((*ref_div) = pll->min_ref_div; (*ref_div) < pll->max_ref_div; ++(*ref_div)) {
702 if (calc_fb_div(pll, freq, post_div, (*ref_div), fb_div, fb_div_frac)) {
703 vco = pll->reference_freq * (((*fb_div) * 10) + (*fb_div_frac));
704 vco = vco / ((*ref_div) * 10);
705
706 if ((vco < pll_out_min) || (vco > pll_out_max))
707 continue;
708
709 /* pll_out = vco / post_div; */
710 a.full = rfixed_const(post_div);
711 pll_out.full = rfixed_const(vco);
712 pll_out.full = rfixed_div(pll_out, a);
713
714 if (pll_out.full >= ffreq.full) {
715 error.full = pll_out.full - ffreq.full;
716 if (error.full <= max_error.full)
717 return true;
718 }
719 }
720 }
721 return false;
722}
723
724static void radeon_compute_pll_new(struct radeon_pll *pll,
725 uint64_t freq,
726 uint32_t *dot_clock_p,
727 uint32_t *fb_div_p,
728 uint32_t *frac_fb_div_p,
729 uint32_t *ref_div_p,
730 uint32_t *post_div_p)
731{
732 u32 fb_div = 0, fb_div_frac = 0, post_div = 0, ref_div = 0;
733 u32 best_freq = 0, vco_frequency;
734 u32 pll_out_min, pll_out_max;
735
736 if (pll->flags & RADEON_PLL_IS_LCD) {
737 pll_out_min = pll->lcd_pll_out_min;
738 pll_out_max = pll->lcd_pll_out_max;
739 } else {
740 pll_out_min = pll->pll_out_min;
741 pll_out_max = pll->pll_out_max;
742 }
743
744 /* freq = freq / 10; */
745 do_div(freq, 10);
746
747 if (pll->flags & RADEON_PLL_USE_POST_DIV) {
748 post_div = pll->post_div;
749 if ((post_div < pll->min_post_div) || (post_div > pll->max_post_div))
750 goto done;
751
752 vco_frequency = freq * post_div;
753 if ((vco_frequency < pll_out_min) || (vco_frequency > pll_out_max))
754 goto done;
755
756 if (pll->flags & RADEON_PLL_USE_REF_DIV) {
757 ref_div = pll->reference_div;
758 if ((ref_div < pll->min_ref_div) || (ref_div > pll->max_ref_div))
759 goto done;
760 if (!calc_fb_div(pll, freq, post_div, ref_div, &fb_div, &fb_div_frac))
761 goto done;
762 }
763 } else {
764 for (post_div = pll->max_post_div; post_div >= pll->min_post_div; --post_div) {
765 if (pll->flags & RADEON_PLL_LEGACY) {
766 if ((post_div == 5) ||
767 (post_div == 7) ||
768 (post_div == 9) ||
769 (post_div == 10) ||
770 (post_div == 11))
771 continue;
772 }
773
774 if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
775 continue;
776
777 vco_frequency = freq * post_div;
778 if ((vco_frequency < pll_out_min) || (vco_frequency > pll_out_max))
779 continue;
780 if (pll->flags & RADEON_PLL_USE_REF_DIV) {
781 ref_div = pll->reference_div;
782 if ((ref_div < pll->min_ref_div) || (ref_div > pll->max_ref_div))
783 goto done;
784 if (calc_fb_div(pll, freq, post_div, ref_div, &fb_div, &fb_div_frac))
785 break;
786 } else {
787 if (calc_fb_ref_div(pll, freq, post_div, &fb_div, &fb_div_frac, &ref_div))
788 break;
789 }
790 }
791 }
792
793 best_freq = pll->reference_freq * 10 * fb_div;
794 best_freq += pll->reference_freq * fb_div_frac;
795 best_freq = best_freq / (ref_div * post_div);
796
797done:
798 if (best_freq == 0)
799 DRM_ERROR("Couldn't find valid PLL dividers\n");
800
801 *dot_clock_p = best_freq / 10;
802 *fb_div_p = fb_div;
803 *frac_fb_div_p = fb_div_frac;
804 *ref_div_p = ref_div;
805 *post_div_p = post_div;
806
807 DRM_DEBUG("%u %d.%d, %d, %d\n", *dot_clock_p, *fb_div_p, *frac_fb_div_p, *ref_div_p, *post_div_p);
808}
809
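Working backwards, the code above reconstructs the achieved frequency as best_freq = reference_freq * (10 * fb_div + fb_div_frac) / (ref_div * post_div), and the reported dot clock is best_freq / 10; the factor of ten carries the single fractional digit of the feedback divider. Continuing the example after calc_fb_div(): with reference_freq = 2700, fb_div = 80, fb_div_frac = 7, ref_div = 2 and post_div = 4, best_freq = 2700 * 807 / 8 = 272362 and the dot clock is 27236, slightly below the requested 27250 and well within the 0.25% error bound used by calc_fb_ref_div().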
810void radeon_compute_pll(struct radeon_pll *pll,
811 uint64_t freq,
812 uint32_t *dot_clock_p,
813 uint32_t *fb_div_p,
814 uint32_t *frac_fb_div_p,
815 uint32_t *ref_div_p,
816 uint32_t *post_div_p)
817{
818 switch (pll->algo) {
819 case PLL_ALGO_NEW:
820 radeon_compute_pll_new(pll, freq, dot_clock_p, fb_div_p,
821 frac_fb_div_p, ref_div_p, post_div_p);
822 break;
823 case PLL_ALGO_LEGACY:
824 default:
825 radeon_compute_pll_legacy(pll, freq, dot_clock_p, fb_div_p,
826 frac_fb_div_p, ref_div_p, post_div_p);
827 break;
828 }
829}
830
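A caller is expected to pick the algorithm through pll->algo before asking for dividers; a hypothetical call site (the surrounding setup is illustrative only) would look like:

    uint32_t dot_clock, fb_div, frac_fb_div, ref_div, post_div;

    pll->algo = PLL_ALGO_NEW;  /* or PLL_ALGO_LEGACY, presumably selected
                                * via the new radeon_new_pll module parameter */
    radeon_compute_pll(pll, target_clock, &dot_clock,
                       &fb_div, &frac_fb_div, &ref_div, &post_div);

where target_clock is the requested pixel clock in whatever units the chosen path expects (the legacy path multiplies by 1000, the new path divides by 10).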
545static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb) 831static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
546{ 832{
547 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb); 833 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
@@ -550,12 +836,8 @@ static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
550 if (fb->fbdev) 836 if (fb->fbdev)
551 radeonfb_remove(dev, fb); 837 radeonfb_remove(dev, fb);
552 838
553 if (radeon_fb->obj) { 839 if (radeon_fb->obj)
554 radeon_gem_object_unpin(radeon_fb->obj); 840 drm_gem_object_unreference_unlocked(radeon_fb->obj);
555 mutex_lock(&dev->struct_mutex);
556 drm_gem_object_unreference(radeon_fb->obj);
557 mutex_unlock(&dev->struct_mutex);
558 }
559 drm_framebuffer_cleanup(fb); 841 drm_framebuffer_cleanup(fb);
560 kfree(radeon_fb); 842 kfree(radeon_fb);
561} 843}
@@ -599,7 +881,11 @@ radeon_user_framebuffer_create(struct drm_device *dev,
599 struct drm_gem_object *obj; 881 struct drm_gem_object *obj;
600 882
601 obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handle); 883 obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handle);
602 884 if (obj == NULL) {
885 dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
886 "can't create framebuffer\n", mode_cmd->handle);
887 return NULL;
888 }
603 return radeon_framebuffer_create(dev, mode_cmd, obj); 889 return radeon_framebuffer_create(dev, mode_cmd, obj);
604} 890}
605 891
@@ -629,7 +915,7 @@ static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
629 { TV_STD_SECAM, "secam" }, 915 { TV_STD_SECAM, "secam" },
630}; 916};
631 917
632int radeon_modeset_create_props(struct radeon_device *rdev) 918static int radeon_modeset_create_props(struct radeon_device *rdev)
633{ 919{
634 int i, sz; 920 int i, sz;
635 921
@@ -642,7 +928,7 @@ int radeon_modeset_create_props(struct radeon_device *rdev)
642 return -ENOMEM; 928 return -ENOMEM;
643 929
644 rdev->mode_info.coherent_mode_property->values[0] = 0; 930 rdev->mode_info.coherent_mode_property->values[0] = 0;
645 rdev->mode_info.coherent_mode_property->values[0] = 1; 931 rdev->mode_info.coherent_mode_property->values[1] = 1;
646 } 932 }
647 933
648 if (!ASIC_IS_AVIVO(rdev)) { 934 if (!ASIC_IS_AVIVO(rdev)) {
@@ -666,7 +952,7 @@ int radeon_modeset_create_props(struct radeon_device *rdev)
666 if (!rdev->mode_info.load_detect_property) 952 if (!rdev->mode_info.load_detect_property)
667 return -ENOMEM; 953 return -ENOMEM;
668 rdev->mode_info.load_detect_property->values[0] = 0; 954 rdev->mode_info.load_detect_property->values[0] = 0;
669 rdev->mode_info.load_detect_property->values[0] = 1; 955 rdev->mode_info.load_detect_property->values[1] = 1;
670 956
671 drm_mode_create_scaling_mode_property(rdev->ddev); 957 drm_mode_create_scaling_mode_property(rdev->ddev);
672 958
@@ -685,9 +971,26 @@ int radeon_modeset_create_props(struct radeon_device *rdev)
685 return 0; 971 return 0;
686} 972}
687 973
974void radeon_update_display_priority(struct radeon_device *rdev)
975{
976 /* adjustment options for the display watermarks */
977 if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
 978 /* set display priority to high for r3xx and rv515 chips;
 979 * this avoids flickering due to underflow in the
 980 * display controllers during heavy acceleration.
981 */
982 if (ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515))
983 rdev->disp_priority = 2;
984 else
985 rdev->disp_priority = 0;
986 } else
987 rdev->disp_priority = radeon_disp_priority;
988
989}
990
688int radeon_modeset_init(struct radeon_device *rdev) 991int radeon_modeset_init(struct radeon_device *rdev)
689{ 992{
690 int num_crtc = 2, i; 993 int i;
691 int ret; 994 int ret;
692 995
693 drm_mode_config_init(rdev->ddev); 996 drm_mode_config_init(rdev->ddev);
@@ -710,11 +1013,14 @@ int radeon_modeset_init(struct radeon_device *rdev)
710 return ret; 1013 return ret;
711 } 1014 }
712 1015
713 if (rdev->flags & RADEON_SINGLE_CRTC) 1016 /* check combios for a valid hardcoded EDID - Sun servers */
714 num_crtc = 1; 1017 if (!rdev->is_atom_bios) {
1018 /* check for hardcoded EDID in BIOS */
1019 radeon_combios_check_hardcoded_edid(rdev);
1020 }
715 1021
716 /* allocate crtcs */ 1022 /* allocate crtcs */
717 for (i = 0; i < num_crtc; i++) { 1023 for (i = 0; i < rdev->num_crtc; i++) {
718 radeon_crtc_init(rdev->ddev, i); 1024 radeon_crtc_init(rdev->ddev, i);
719 } 1025 }
720 1026
@@ -723,13 +1029,18 @@ int radeon_modeset_init(struct radeon_device *rdev)
723 if (!ret) { 1029 if (!ret) {
724 return ret; 1030 return ret;
725 } 1031 }
1032 /* initialize hpd */
1033 radeon_hpd_init(rdev);
726 drm_helper_initial_config(rdev->ddev); 1034 drm_helper_initial_config(rdev->ddev);
727 return 0; 1035 return 0;
728} 1036}
729 1037
730void radeon_modeset_fini(struct radeon_device *rdev) 1038void radeon_modeset_fini(struct radeon_device *rdev)
731{ 1039{
1040 kfree(rdev->mode_info.bios_hardcoded_edid);
1041
732 if (rdev->mode_info.mode_config_initialized) { 1042 if (rdev->mode_info.mode_config_initialized) {
1043 radeon_hpd_fini(rdev);
733 drm_mode_config_cleanup(rdev->ddev); 1044 drm_mode_config_cleanup(rdev->ddev);
734 rdev->mode_info.mode_config_initialized = false; 1045 rdev->mode_info.mode_config_initialized = false;
735 } 1046 }
@@ -750,9 +1061,17 @@ bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
750 if (encoder->crtc != crtc) 1061 if (encoder->crtc != crtc)
751 continue; 1062 continue;
752 if (first) { 1063 if (first) {
753 radeon_crtc->rmx_type = radeon_encoder->rmx_type; 1064 /* set scaling */
1065 if (radeon_encoder->rmx_type == RMX_OFF)
1066 radeon_crtc->rmx_type = RMX_OFF;
1067 else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
1068 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
1069 radeon_crtc->rmx_type = radeon_encoder->rmx_type;
1070 else
1071 radeon_crtc->rmx_type = RMX_OFF;
1072 /* copy native mode */
754 memcpy(&radeon_crtc->native_mode, 1073 memcpy(&radeon_crtc->native_mode,
755 &radeon_encoder->native_mode, 1074 &radeon_encoder->native_mode,
756 sizeof(struct drm_display_mode)); 1075 sizeof(struct drm_display_mode));
757 first = false; 1076 first = false;
758 } else { 1077 } else {
diff --git a/drivers/gpu/drm/radeon/radeon_drv.c b/drivers/gpu/drm/radeon/radeon_drv.c
index 7f50fb864af8..b3749d47be7b 100644
--- a/drivers/gpu/drm/radeon/radeon_drv.c
+++ b/drivers/gpu/drm/radeon/radeon_drv.c
@@ -40,9 +40,13 @@
40 40
41/* 41/*
42 * KMS wrapper. 42 * KMS wrapper.
43 * - 2.0.0 - initial interface
44 * - 2.1.0 - add square tiling interface
45 * - 2.2.0 - add r6xx/r7xx const buffer support
46 * - 2.3.0 - add MSPOS + 3D texture + r500 VAP regs
43 */ 47 */
44#define KMS_DRIVER_MAJOR 2 48#define KMS_DRIVER_MAJOR 2
45#define KMS_DRIVER_MINOR 0 49#define KMS_DRIVER_MINOR 3
46#define KMS_DRIVER_PATCHLEVEL 0 50#define KMS_DRIVER_PATCHLEVEL 0
47int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags); 51int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags);
48int radeon_driver_unload_kms(struct drm_device *dev); 52int radeon_driver_unload_kms(struct drm_device *dev);
@@ -86,6 +90,11 @@ int radeon_benchmarking = 0;
86int radeon_testing = 0; 90int radeon_testing = 0;
87int radeon_connector_table = 0; 91int radeon_connector_table = 0;
88int radeon_tv = 1; 92int radeon_tv = 1;
93int radeon_new_pll = -1;
94int radeon_dynpm = -1;
95int radeon_audio = 1;
96int radeon_disp_priority = 0;
97int radeon_hw_i2c = 0;
89 98
90MODULE_PARM_DESC(no_wb, "Disable AGP writeback for scratch registers"); 99MODULE_PARM_DESC(no_wb, "Disable AGP writeback for scratch registers");
91module_param_named(no_wb, radeon_no_wb, int, 0444); 100module_param_named(no_wb, radeon_no_wb, int, 0444);
@@ -120,6 +129,21 @@ module_param_named(connector_table, radeon_connector_table, int, 0444);
120MODULE_PARM_DESC(tv, "TV enable (0 = disable)"); 129MODULE_PARM_DESC(tv, "TV enable (0 = disable)");
121module_param_named(tv, radeon_tv, int, 0444); 130module_param_named(tv, radeon_tv, int, 0444);
122 131
132MODULE_PARM_DESC(new_pll, "Select new PLL code");
133module_param_named(new_pll, radeon_new_pll, int, 0444);
134
135MODULE_PARM_DESC(dynpm, "Disable/Enable dynamic power management (1 = enable)");
136module_param_named(dynpm, radeon_dynpm, int, 0444);
137
138MODULE_PARM_DESC(audio, "Audio enable (0 = disable)");
139module_param_named(audio, radeon_audio, int, 0444);
140
141MODULE_PARM_DESC(disp_priority, "Display Priority (0 = auto, 1 = normal, 2 = high)");
142module_param_named(disp_priority, radeon_disp_priority, int, 0444);
143
144MODULE_PARM_DESC(hw_i2c, "hw i2c engine enable (0 = disable)");
145module_param_named(hw_i2c, radeon_hw_i2c, int, 0444);
146
123static int radeon_suspend(struct drm_device *dev, pm_message_t state) 147static int radeon_suspend(struct drm_device *dev, pm_message_t state)
124{ 148{
125 drm_radeon_private_t *dev_priv = dev->dev_private; 149 drm_radeon_private_t *dev_priv = dev->dev_private;
@@ -188,10 +212,11 @@ static struct drm_driver driver_old = {
188 .owner = THIS_MODULE, 212 .owner = THIS_MODULE,
189 .open = drm_open, 213 .open = drm_open,
190 .release = drm_release, 214 .release = drm_release,
191 .ioctl = drm_ioctl, 215 .unlocked_ioctl = drm_ioctl,
192 .mmap = drm_mmap, 216 .mmap = drm_mmap,
193 .poll = drm_poll, 217 .poll = drm_poll,
194 .fasync = drm_fasync, 218 .fasync = drm_fasync,
219 .read = drm_read,
195#ifdef CONFIG_COMPAT 220#ifdef CONFIG_COMPAT
196 .compat_ioctl = radeon_compat_ioctl, 221 .compat_ioctl = radeon_compat_ioctl,
197#endif 222#endif
@@ -276,10 +301,11 @@ static struct drm_driver kms_driver = {
276 .owner = THIS_MODULE, 301 .owner = THIS_MODULE,
277 .open = drm_open, 302 .open = drm_open,
278 .release = drm_release, 303 .release = drm_release,
279 .ioctl = drm_ioctl, 304 .unlocked_ioctl = drm_ioctl,
280 .mmap = radeon_mmap, 305 .mmap = radeon_mmap,
281 .poll = drm_poll, 306 .poll = drm_poll,
282 .fasync = drm_fasync, 307 .fasync = drm_fasync,
308 .read = drm_read,
283#ifdef CONFIG_COMPAT 309#ifdef CONFIG_COMPAT
284 .compat_ioctl = radeon_kms_compat_ioctl, 310 .compat_ioctl = radeon_kms_compat_ioctl,
285#endif 311#endif
@@ -331,6 +357,7 @@ static int __init radeon_init(void)
331 driver = &kms_driver; 357 driver = &kms_driver;
332 driver->driver_features |= DRIVER_MODESET; 358 driver->driver_features |= DRIVER_MODESET;
333 driver->num_ioctls = radeon_max_kms_ioctl; 359 driver->num_ioctls = radeon_max_kms_ioctl;
360 radeon_register_atpx_handler();
334 } 361 }
335 /* if the vga console setting is enabled still 362 /* if the vga console setting is enabled still
336 * let modprobe override it */ 363 * let modprobe override it */
@@ -340,6 +367,7 @@ static int __init radeon_init(void)
340static void __exit radeon_exit(void) 367static void __exit radeon_exit(void)
341{ 368{
342 drm_exit(driver); 369 drm_exit(driver);
370 radeon_unregister_atpx_handler();
343} 371}
344 372
345module_init(radeon_init); 373module_init(radeon_init);
diff --git a/drivers/gpu/drm/radeon/radeon_drv.h b/drivers/gpu/drm/radeon/radeon_drv.h
index 350962e0f346..448eba89d1e6 100644
--- a/drivers/gpu/drm/radeon/radeon_drv.h
+++ b/drivers/gpu/drm/radeon/radeon_drv.h
@@ -106,9 +106,11 @@
106 * 1.29- R500 3D cmd buffer support 106 * 1.29- R500 3D cmd buffer support
107 * 1.30- Add support for occlusion queries 107 * 1.30- Add support for occlusion queries
108 * 1.31- Add support for num Z pipes from GET_PARAM 108 * 1.31- Add support for num Z pipes from GET_PARAM
109 * 1.32- fixes for rv740 setup
110 * 1.33- Add r6xx/r7xx const buffer support
109 */ 111 */
110#define DRIVER_MAJOR 1 112#define DRIVER_MAJOR 1
111#define DRIVER_MINOR 31 113#define DRIVER_MINOR 33
112#define DRIVER_PATCHLEVEL 0 114#define DRIVER_PATCHLEVEL 0
113 115
114enum radeon_cp_microcode_version { 116enum radeon_cp_microcode_version {
@@ -267,6 +269,8 @@ typedef struct drm_radeon_private {
267 269
268 u32 scratch_ages[5]; 270 u32 scratch_ages[5];
269 271
272 int have_z_offset;
273
270 /* starting from here on, data is preserved accross an open */ 274 /* starting from here on, data is preserved accross an open */
271 uint32_t flags; /* see radeon_chip_flags */ 275 uint32_t flags; /* see radeon_chip_flags */
272 resource_size_t fb_aper_offset; 276 resource_size_t fb_aper_offset;
@@ -294,6 +298,9 @@ typedef struct drm_radeon_private {
294 int r700_sc_prim_fifo_size; 298 int r700_sc_prim_fifo_size;
295 int r700_sc_hiz_tile_fifo_size; 299 int r700_sc_hiz_tile_fifo_size;
296 int r700_sc_earlyz_tile_fifo_fize; 300 int r700_sc_earlyz_tile_fifo_fize;
301 int r600_group_size;
302 int r600_npipes;
303 int r600_nbanks;
297 304
298 struct mutex cs_mutex; 305 struct mutex cs_mutex;
299 u32 cs_id_scnt; 306 u32 cs_id_scnt;
@@ -309,9 +316,11 @@ typedef struct drm_radeon_buf_priv {
309 u32 age; 316 u32 age;
310} drm_radeon_buf_priv_t; 317} drm_radeon_buf_priv_t;
311 318
319struct drm_buffer;
320
312typedef struct drm_radeon_kcmd_buffer { 321typedef struct drm_radeon_kcmd_buffer {
313 int bufsz; 322 int bufsz;
314 char *buf; 323 struct drm_buffer *buffer;
315 int nbox; 324 int nbox;
316 struct drm_clip_rect __user *boxes; 325 struct drm_clip_rect __user *boxes;
317} drm_radeon_kcmd_buffer_t; 326} drm_radeon_kcmd_buffer_t;
@@ -454,6 +463,15 @@ extern void r600_blit_swap(struct drm_device *dev,
454 int sx, int sy, int dx, int dy, 463 int sx, int sy, int dx, int dy,
455 int w, int h, int src_pitch, int dst_pitch, int cpp); 464 int w, int h, int src_pitch, int dst_pitch, int cpp);
456 465
466/* atpx handler */
467#if defined(CONFIG_VGA_SWITCHEROO)
468void radeon_register_atpx_handler(void);
469void radeon_unregister_atpx_handler(void);
470#else
471static inline void radeon_register_atpx_handler(void) {}
472static inline void radeon_unregister_atpx_handler(void) {}
473#endif
474
457/* Flags for stats.boxes 475/* Flags for stats.boxes
458 */ 476 */
459#define RADEON_BOX_DMA_IDLE 0x1 477#define RADEON_BOX_DMA_IDLE 0x1
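
Editor's note on the hunk above: the ATPX declarations pair real prototypes under CONFIG_VGA_SWITCHEROO with empty static inline stubs, so the callers added in radeon_drv.c (radeon_init/radeon_exit) need no #ifdefs of their own. A minimal userspace sketch of the same idiom follows; the HAVE_FEATURE_X macro and feature_x_* names are invented stand-ins, not part of the patch.

#include <stdio.h>

/* Toggle at build time, e.g. cc -DHAVE_FEATURE_X=1 demo.c */
#ifndef HAVE_FEATURE_X
#define HAVE_FEATURE_X 0
#endif

#if HAVE_FEATURE_X
static void feature_x_register(void)   { printf("feature X registered\n"); }
static void feature_x_unregister(void) { printf("feature X unregistered\n"); }
#else
/* Empty inline stubs: callers stay free of #ifdef clutter. */
static inline void feature_x_register(void)   {}
static inline void feature_x_unregister(void) {}
#endif

int main(void)
{
	feature_x_register();     /* becomes a no-op when the feature is configured out */
	feature_x_unregister();
	return 0;
}
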
@@ -1104,7 +1122,6 @@ extern u32 radeon_get_scratch(drm_radeon_private_t *dev_priv, int index);
1104# define R600_IT_WAIT_REG_MEM 0x00003C00 1122# define R600_IT_WAIT_REG_MEM 0x00003C00
1105# define R600_IT_MEM_WRITE 0x00003D00 1123# define R600_IT_MEM_WRITE 0x00003D00
1106# define R600_IT_INDIRECT_BUFFER 0x00003200 1124# define R600_IT_INDIRECT_BUFFER 0x00003200
1107# define R600_IT_CP_INTERRUPT 0x00004000
1108# define R600_IT_SURFACE_SYNC 0x00004300 1125# define R600_IT_SURFACE_SYNC 0x00004300
1109# define R600_CB0_DEST_BASE_ENA (1 << 6) 1126# define R600_CB0_DEST_BASE_ENA (1 << 6)
1110# define R600_TC_ACTION_ENA (1 << 23) 1127# define R600_TC_ACTION_ENA (1 << 23)
@@ -2122,4 +2139,32 @@ extern void radeon_commit_ring(drm_radeon_private_t *dev_priv);
2122 write &= mask; \ 2139 write &= mask; \
2123} while (0) 2140} while (0)
2124 2141
2142/**
2143 * Copy given number of dwords from drm buffer to the ring buffer.
2144 */
2145#define OUT_RING_DRM_BUFFER(buf, sz) do { \
2146 int _size = (sz) * 4; \
2147 struct drm_buffer *_buf = (buf); \
2148 int _part_size; \
2149 while (_size > 0) { \
2150 _part_size = _size; \
2151 \
2152 if (write + _part_size/4 > mask) \
2153 _part_size = ((mask + 1) - write)*4; \
2154 \
2155 if (drm_buffer_index(_buf) + _part_size > PAGE_SIZE) \
2156 _part_size = PAGE_SIZE - drm_buffer_index(_buf);\
2157 \
2158 \
2159 \
2160 memcpy(ring + write, &_buf->data[drm_buffer_page(_buf)] \
2161 [drm_buffer_index(_buf)], _part_size); \
2162 \
2163 _size -= _part_size; \
2164 write = (write + _part_size/4) & mask; \
2165 drm_buffer_advance(_buf, _part_size); \
2166 } \
2167} while (0)
2168
2169
2125#endif /* __RADEON_DRV_H__ */ 2170#endif /* __RADEON_DRV_H__ */
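
Editor's note on OUT_RING_DRM_BUFFER above: the macro copies dwords from a paged drm_buffer into the ring, splitting each memcpy at whichever limit comes first, the ring wrap point (mask + 1) or the end of the current drm_buffer page. A standalone sketch of that splitting logic follows, assuming a plain byte array in place of struct drm_buffer; ring_copy, RING_DWORDS and PAGE_BYTES are invented names, and sizes are assumed dword-aligned as in the macro.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define RING_DWORDS 8          /* ring size; mask = RING_DWORDS - 1 */
#define PAGE_BYTES  16         /* tiny "pages" to force boundary splits */

/* Copy 'size' bytes into a wrapping dword ring, never crossing the ring
 * wrap point or a source page boundary in a single memcpy. */
static void ring_copy(uint32_t *ring, unsigned *write, unsigned mask,
		      const uint8_t *src, size_t offset, size_t size)
{
	while (size > 0) {
		size_t part = size;
		size_t ring_room = ((size_t)mask + 1 - *write) * 4;
		size_t page_room = PAGE_BYTES - (offset % PAGE_BYTES);

		if (part > ring_room)
			part = ring_room;    /* stop at the wrap point */
		if (part > page_room)
			part = page_room;    /* stop at the page boundary */

		memcpy((uint8_t *)(ring + *write), src + offset, part);

		size   -= part;
		offset += part;
		*write  = (*write + part / 4) & mask;
	}
}

int main(void)
{
	uint32_t ring[RING_DWORDS] = {0};
	uint8_t src[40];
	unsigned write = 6;          /* start near the end to force a wrap */

	for (size_t i = 0; i < sizeof(src); i++)
		src[i] = (uint8_t)i;

	ring_copy(ring, &write, RING_DWORDS - 1, src, 0, sizeof(src));
	printf("final write index: %u\n", write);
	return 0;
}
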
diff --git a/drivers/gpu/drm/radeon/radeon_encoders.c b/drivers/gpu/drm/radeon/radeon_encoders.c
index d42bc512d75a..c5ddaf58563a 100644
--- a/drivers/gpu/drm/radeon/radeon_encoders.c
+++ b/drivers/gpu/drm/radeon/radeon_encoders.c
@@ -35,6 +35,51 @@ extern int atom_debug;
35bool radeon_atom_get_tv_timings(struct radeon_device *rdev, int index, 35bool radeon_atom_get_tv_timings(struct radeon_device *rdev, int index,
36 struct drm_display_mode *mode); 36 struct drm_display_mode *mode);
37 37
38static uint32_t radeon_encoder_clones(struct drm_encoder *encoder)
39{
40 struct drm_device *dev = encoder->dev;
41 struct radeon_device *rdev = dev->dev_private;
42 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
43 struct drm_encoder *clone_encoder;
44 uint32_t index_mask = 0;
45 int count;
46
47 /* DIG routing gets problematic */
48 if (rdev->family >= CHIP_R600)
49 return index_mask;
50 /* LVDS/TV are too wacky */
51 if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
52 return index_mask;
53 /* DVO requires 2x ppll clocks depending on tmds chip */
54 if (radeon_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT)
55 return index_mask;
56
57 count = -1;
58 list_for_each_entry(clone_encoder, &dev->mode_config.encoder_list, head) {
59 struct radeon_encoder *radeon_clone = to_radeon_encoder(clone_encoder);
60 count++;
61
62 if (clone_encoder == encoder)
63 continue;
64 if (radeon_clone->devices & (ATOM_DEVICE_LCD_SUPPORT))
65 continue;
66 if (radeon_clone->devices & ATOM_DEVICE_DFP2_SUPPORT)
67 continue;
68 else
69 index_mask |= (1 << count);
70 }
71 return index_mask;
72}
73
74void radeon_setup_encoder_clones(struct drm_device *dev)
75{
76 struct drm_encoder *encoder;
77
78 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
79 encoder->possible_clones = radeon_encoder_clones(encoder);
80 }
81}
82
38uint32_t 83uint32_t
39radeon_get_encoder_id(struct drm_device *dev, uint32_t supported_device, uint8_t dac) 84radeon_get_encoder_id(struct drm_device *dev, uint32_t supported_device, uint8_t dac)
40{ 85{
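
Editor's note on radeon_encoder_clones above: it walks the encoder list and records, as a bitmask indexed by list position, which other encoders may be active at the same time (possible_clones), excluding LCD and DVO-style outputs and bailing out entirely on R600+ parts. A toy model of that bookkeeping follows; the FLAG_* bits and struct enc are invented stand-ins for the ATOM_DEVICE_* flags and the drm encoder list.

#include <stdint.h>
#include <stdio.h>

#define FLAG_LCD  0x1   /* invented stand-ins for ATOM_DEVICE_* bits */
#define FLAG_DFP2 0x2

struct enc { const char *name; unsigned flags; };

/* Bit i of the result is set when encoders[i] may be cloned with 'self'. */
static uint32_t clone_mask(const struct enc *encoders, int n, int self)
{
	uint32_t mask = 0;

	/* LCD and DVO-style outputs are excluded, as in the driver. */
	if (encoders[self].flags & (FLAG_LCD | FLAG_DFP2))
		return 0;

	for (int i = 0; i < n; i++) {
		if (i == self)
			continue;
		if (encoders[i].flags & (FLAG_LCD | FLAG_DFP2))
			continue;
		mask |= 1u << i;
	}
	return mask;
}

int main(void)
{
	struct enc encs[] = {
		{ "DAC1", 0 }, { "LVDS", FLAG_LCD }, { "TMDS", 0 },
	};

	for (int i = 0; i < 3; i++)
		printf("%s: possible_clones = 0x%x\n",
		       encs[i].name, clone_mask(encs, 3, i));
	return 0;
}
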
@@ -111,6 +156,26 @@ radeon_get_encoder_id(struct drm_device *dev, uint32_t supported_device, uint8_t
111 return ret; 156 return ret;
112} 157}
113 158
159static inline bool radeon_encoder_is_digital(struct drm_encoder *encoder)
160{
161 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
162 switch (radeon_encoder->encoder_id) {
163 case ENCODER_OBJECT_ID_INTERNAL_LVDS:
164 case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
165 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
166 case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
167 case ENCODER_OBJECT_ID_INTERNAL_DVO1:
168 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
169 case ENCODER_OBJECT_ID_INTERNAL_DDI:
170 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
171 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
172 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
173 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
174 return true;
175 default:
176 return false;
177 }
178}
114void 179void
115radeon_link_encoder_connector(struct drm_device *dev) 180radeon_link_encoder_connector(struct drm_device *dev)
116{ 181{
@@ -157,34 +222,84 @@ radeon_get_connector_for_encoder(struct drm_encoder *encoder)
157 222
158 list_for_each_entry(connector, &dev->mode_config.connector_list, head) { 223 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
159 radeon_connector = to_radeon_connector(connector); 224 radeon_connector = to_radeon_connector(connector);
160 if (radeon_encoder->devices & radeon_connector->devices) 225 if (radeon_encoder->active_device & radeon_connector->devices)
161 return connector; 226 return connector;
162 } 227 }
163 return NULL; 228 return NULL;
164} 229}
165 230
166/* used for both atom and legacy */ 231static struct radeon_connector_atom_dig *
167void radeon_rmx_mode_fixup(struct drm_encoder *encoder, 232radeon_get_atom_connector_priv_from_encoder(struct drm_encoder *encoder)
168 struct drm_display_mode *mode, 233{
169 struct drm_display_mode *adjusted_mode) 234 struct drm_device *dev = encoder->dev;
235 struct radeon_device *rdev = dev->dev_private;
236 struct drm_connector *connector;
237 struct radeon_connector *radeon_connector;
238 struct radeon_connector_atom_dig *dig_connector;
239
240 if (!rdev->is_atom_bios)
241 return NULL;
242
243 connector = radeon_get_connector_for_encoder(encoder);
244 if (!connector)
245 return NULL;
246
247 radeon_connector = to_radeon_connector(connector);
248
249 if (!radeon_connector->con_priv)
250 return NULL;
251
252 dig_connector = radeon_connector->con_priv;
253
254 return dig_connector;
255}
256
257void radeon_panel_mode_fixup(struct drm_encoder *encoder,
258 struct drm_display_mode *adjusted_mode)
170{ 259{
171 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 260 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
172 struct drm_device *dev = encoder->dev; 261 struct drm_device *dev = encoder->dev;
173 struct radeon_device *rdev = dev->dev_private; 262 struct radeon_device *rdev = dev->dev_private;
174 struct drm_display_mode *native_mode = &radeon_encoder->native_mode; 263 struct drm_display_mode *native_mode = &radeon_encoder->native_mode;
264 unsigned hblank = native_mode->htotal - native_mode->hdisplay;
265 unsigned vblank = native_mode->vtotal - native_mode->vdisplay;
266 unsigned hover = native_mode->hsync_start - native_mode->hdisplay;
267 unsigned vover = native_mode->vsync_start - native_mode->vdisplay;
268 unsigned hsync_width = native_mode->hsync_end - native_mode->hsync_start;
269 unsigned vsync_width = native_mode->vsync_end - native_mode->vsync_start;
175 270
176 if (mode->hdisplay < native_mode->hdisplay || 271 adjusted_mode->clock = native_mode->clock;
177 mode->vdisplay < native_mode->vdisplay) { 272 adjusted_mode->flags = native_mode->flags;
178 int mode_id = adjusted_mode->base.id; 273
179 *adjusted_mode = *native_mode; 274 if (ASIC_IS_AVIVO(rdev)) {
180 if (!ASIC_IS_AVIVO(rdev)) { 275 adjusted_mode->hdisplay = native_mode->hdisplay;
181 adjusted_mode->hdisplay = mode->hdisplay; 276 adjusted_mode->vdisplay = native_mode->vdisplay;
182 adjusted_mode->vdisplay = mode->vdisplay;
183 }
184 adjusted_mode->base.id = mode_id;
185 } 277 }
186}
187 278
279 adjusted_mode->htotal = native_mode->hdisplay + hblank;
280 adjusted_mode->hsync_start = native_mode->hdisplay + hover;
281 adjusted_mode->hsync_end = adjusted_mode->hsync_start + hsync_width;
282
283 adjusted_mode->vtotal = native_mode->vdisplay + vblank;
284 adjusted_mode->vsync_start = native_mode->vdisplay + vover;
285 adjusted_mode->vsync_end = adjusted_mode->vsync_start + vsync_width;
286
287 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
288
289 if (ASIC_IS_AVIVO(rdev)) {
290 adjusted_mode->crtc_hdisplay = native_mode->hdisplay;
291 adjusted_mode->crtc_vdisplay = native_mode->vdisplay;
292 }
293
294 adjusted_mode->crtc_htotal = adjusted_mode->crtc_hdisplay + hblank;
295 adjusted_mode->crtc_hsync_start = adjusted_mode->crtc_hdisplay + hover;
296 adjusted_mode->crtc_hsync_end = adjusted_mode->crtc_hsync_start + hsync_width;
297
298 adjusted_mode->crtc_vtotal = adjusted_mode->crtc_vdisplay + vblank;
299 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + vover;
300 adjusted_mode->crtc_vsync_end = adjusted_mode->crtc_vsync_start + vsync_width;
301
302}
188 303
189static bool radeon_atom_mode_fixup(struct drm_encoder *encoder, 304static bool radeon_atom_mode_fixup(struct drm_encoder *encoder,
190 struct drm_display_mode *mode, 305 struct drm_display_mode *mode,
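
Editor's note on radeon_panel_mode_fixup above: it measures the panel's native blanking, sync offset and sync width, then re-applies those deltas on top of the display size to rebuild the adjusted timings. A worked example follows with made-up 1280x800 panel numbers, assuming the AVIVO path where hdisplay/vdisplay are forced to the native size; struct mode is a stand-in for the handful of drm_display_mode fields used.

#include <stdio.h>

struct mode {
	int hdisplay, hsync_start, hsync_end, htotal;
	int vdisplay, vsync_start, vsync_end, vtotal;
};

int main(void)
{
	/* Hypothetical 1280x800 panel native timings. */
	struct mode native = {
		.hdisplay = 1280, .hsync_start = 1328, .hsync_end = 1360, .htotal = 1440,
		.vdisplay =  800, .vsync_start =  803, .vsync_end  =  809, .vtotal =  823,
	};
	struct mode adj = { .hdisplay = native.hdisplay, .vdisplay = native.vdisplay };

	/* Deltas measured from the native mode, as in the driver. */
	int hblank      = native.htotal - native.hdisplay;        /* 160 */
	int hover       = native.hsync_start - native.hdisplay;   /*  48 */
	int hsync_width = native.hsync_end - native.hsync_start;  /*  32 */
	int vblank      = native.vtotal - native.vdisplay;        /*  23 */
	int vover       = native.vsync_start - native.vdisplay;   /*   3 */
	int vsync_width = native.vsync_end - native.vsync_start;  /*   6 */

	/* Re-apply the deltas to build the adjusted timings. */
	adj.htotal      = adj.hdisplay + hblank;
	adj.hsync_start = adj.hdisplay + hover;
	adj.hsync_end   = adj.hsync_start + hsync_width;
	adj.vtotal      = adj.vdisplay + vblank;
	adj.vsync_start = adj.vdisplay + vover;
	adj.vsync_end   = adj.vsync_start + vsync_width;

	printf("adjusted: %dx%d, htotal %d, hsync %d-%d, vtotal %d, vsync %d-%d\n",
	       adj.hdisplay, adj.vdisplay, adj.htotal,
	       adj.hsync_start, adj.hsync_end, adj.vtotal,
	       adj.vsync_start, adj.vsync_end);
	return 0;
}
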
@@ -194,18 +309,23 @@ static bool radeon_atom_mode_fixup(struct drm_encoder *encoder,
194 struct drm_device *dev = encoder->dev; 309 struct drm_device *dev = encoder->dev;
195 struct radeon_device *rdev = dev->dev_private; 310 struct radeon_device *rdev = dev->dev_private;
196 311
312 /* adjust pm to upcoming mode change */
313 radeon_pm_compute_clocks(rdev);
314
197 /* set the active encoder to connector routing */ 315 /* set the active encoder to connector routing */
198 radeon_encoder_set_active_device(encoder); 316 radeon_encoder_set_active_device(encoder);
199 drm_mode_set_crtcinfo(adjusted_mode, 0); 317 drm_mode_set_crtcinfo(adjusted_mode, 0);
200 318
201 if (radeon_encoder->rmx_type != RMX_OFF)
202 radeon_rmx_mode_fixup(encoder, mode, adjusted_mode);
203
204 /* hw bug */ 319 /* hw bug */
205 if ((mode->flags & DRM_MODE_FLAG_INTERLACE) 320 if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
206 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2))) 321 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
207 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2; 322 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
208 323
324 /* get the native mode for LVDS */
325 if (radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
326 radeon_panel_mode_fixup(encoder, adjusted_mode);
327
328 /* get the native mode for TV */
209 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) { 329 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) {
210 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv; 330 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
211 if (tv_dac) { 331 if (tv_dac) {
@@ -218,6 +338,12 @@ static bool radeon_atom_mode_fixup(struct drm_encoder *encoder,
218 } 338 }
219 } 339 }
220 340
341 if (ASIC_IS_DCE3(rdev) &&
342 (radeon_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT))) {
343 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
344 radeon_dp_set_link_config(connector, mode);
345 }
346
221 return true; 347 return true;
222} 348}
223 349
@@ -228,12 +354,8 @@ atombios_dac_setup(struct drm_encoder *encoder, int action)
228 struct radeon_device *rdev = dev->dev_private; 354 struct radeon_device *rdev = dev->dev_private;
229 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 355 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
230 DAC_ENCODER_CONTROL_PS_ALLOCATION args; 356 DAC_ENCODER_CONTROL_PS_ALLOCATION args;
231 int index = 0, num = 0; 357 int index = 0;
232 struct radeon_encoder_atom_dac *dac_info = radeon_encoder->enc_priv; 358 struct radeon_encoder_atom_dac *dac_info = radeon_encoder->enc_priv;
233 enum radeon_tv_std tv_std = TV_STD_NTSC;
234
235 if (dac_info->tv_std)
236 tv_std = dac_info->tv_std;
237 359
238 memset(&args, 0, sizeof(args)); 360 memset(&args, 0, sizeof(args));
239 361
@@ -241,12 +363,10 @@ atombios_dac_setup(struct drm_encoder *encoder, int action)
241 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 363 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
242 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 364 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
243 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl); 365 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
244 num = 1;
245 break; 366 break;
246 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 367 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
247 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 368 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
248 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl); 369 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
249 num = 2;
250 break; 370 break;
251 } 371 }
252 372
@@ -257,7 +377,7 @@ atombios_dac_setup(struct drm_encoder *encoder, int action)
257 else if (radeon_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 377 else if (radeon_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
258 args.ucDacStandard = ATOM_DAC1_CV; 378 args.ucDacStandard = ATOM_DAC1_CV;
259 else { 379 else {
260 switch (tv_std) { 380 switch (dac_info->tv_std) {
261 case TV_STD_PAL: 381 case TV_STD_PAL:
262 case TV_STD_PAL_M: 382 case TV_STD_PAL_M:
263 case TV_STD_SCART_PAL: 383 case TV_STD_SCART_PAL:
@@ -288,10 +408,6 @@ atombios_tv_setup(struct drm_encoder *encoder, int action)
288 TV_ENCODER_CONTROL_PS_ALLOCATION args; 408 TV_ENCODER_CONTROL_PS_ALLOCATION args;
289 int index = 0; 409 int index = 0;
290 struct radeon_encoder_atom_dac *dac_info = radeon_encoder->enc_priv; 410 struct radeon_encoder_atom_dac *dac_info = radeon_encoder->enc_priv;
291 enum radeon_tv_std tv_std = TV_STD_NTSC;
292
293 if (dac_info->tv_std)
294 tv_std = dac_info->tv_std;
295 411
296 memset(&args, 0, sizeof(args)); 412 memset(&args, 0, sizeof(args));
297 413
@@ -302,7 +418,7 @@ atombios_tv_setup(struct drm_encoder *encoder, int action)
302 if (radeon_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 418 if (radeon_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
303 args.sTVEncoder.ucTvStandard = ATOM_TV_CV; 419 args.sTVEncoder.ucTvStandard = ATOM_TV_CV;
304 else { 420 else {
305 switch (tv_std) { 421 switch (dac_info->tv_std) {
306 case TV_STD_NTSC: 422 case TV_STD_NTSC:
307 args.sTVEncoder.ucTvStandard = ATOM_TV_NTSC; 423 args.sTVEncoder.ucTvStandard = ATOM_TV_NTSC;
308 break; 424 break;
@@ -392,35 +508,25 @@ union lvds_encoder_control {
392 LVDS_ENCODER_CONTROL_PS_ALLOCATION_V2 v2; 508 LVDS_ENCODER_CONTROL_PS_ALLOCATION_V2 v2;
393}; 509};
394 510
395static void 511void
396atombios_digital_setup(struct drm_encoder *encoder, int action) 512atombios_digital_setup(struct drm_encoder *encoder, int action)
397{ 513{
398 struct drm_device *dev = encoder->dev; 514 struct drm_device *dev = encoder->dev;
399 struct radeon_device *rdev = dev->dev_private; 515 struct radeon_device *rdev = dev->dev_private;
400 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 516 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
517 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
518 struct radeon_connector_atom_dig *dig_connector =
519 radeon_get_atom_connector_priv_from_encoder(encoder);
401 union lvds_encoder_control args; 520 union lvds_encoder_control args;
402 int index = 0; 521 int index = 0;
522 int hdmi_detected = 0;
403 uint8_t frev, crev; 523 uint8_t frev, crev;
404 struct radeon_encoder_atom_dig *dig;
405 struct drm_connector *connector;
406 struct radeon_connector *radeon_connector;
407 struct radeon_connector_atom_dig *dig_connector;
408
409 connector = radeon_get_connector_for_encoder(encoder);
410 if (!connector)
411 return;
412
413 radeon_connector = to_radeon_connector(connector);
414
415 if (!radeon_encoder->enc_priv)
416 return;
417
418 dig = radeon_encoder->enc_priv;
419 524
420 if (!radeon_connector->con_priv) 525 if (!dig || !dig_connector)
421 return; 526 return;
422 527
423 dig_connector = radeon_connector->con_priv; 528 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_HDMI)
529 hdmi_detected = 1;
424 530
425 memset(&args, 0, sizeof(args)); 531 memset(&args, 0, sizeof(args));
426 532
@@ -440,7 +546,8 @@ atombios_digital_setup(struct drm_encoder *encoder, int action)
440 break; 546 break;
441 } 547 }
442 548
443 atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev); 549 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev))
550 return;
444 551
445 switch (frev) { 552 switch (frev) {
446 case 1: 553 case 1:
@@ -449,13 +556,13 @@ atombios_digital_setup(struct drm_encoder *encoder, int action)
449 case 1: 556 case 1:
450 args.v1.ucMisc = 0; 557 args.v1.ucMisc = 0;
451 args.v1.ucAction = action; 558 args.v1.ucAction = action;
452 if (drm_detect_hdmi_monitor(radeon_connector->edid)) 559 if (hdmi_detected)
453 args.v1.ucMisc |= PANEL_ENCODER_MISC_HDMI_TYPE; 560 args.v1.ucMisc |= PANEL_ENCODER_MISC_HDMI_TYPE;
454 args.v1.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10); 561 args.v1.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10);
455 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 562 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
456 if (dig->lvds_misc & (1 << 0)) 563 if (dig->lvds_misc & ATOM_PANEL_MISC_DUAL)
457 args.v1.ucMisc |= PANEL_ENCODER_MISC_DUAL; 564 args.v1.ucMisc |= PANEL_ENCODER_MISC_DUAL;
458 if (dig->lvds_misc & (1 << 1)) 565 if (dig->lvds_misc & ATOM_PANEL_MISC_888RGB)
459 args.v1.ucMisc |= (1 << 1); 566 args.v1.ucMisc |= (1 << 1);
460 } else { 567 } else {
461 if (dig_connector->linkb) 568 if (dig_connector->linkb)
@@ -474,7 +581,7 @@ atombios_digital_setup(struct drm_encoder *encoder, int action)
474 if (dig->coherent_mode) 581 if (dig->coherent_mode)
475 args.v2.ucMisc |= PANEL_ENCODER_MISC_COHERENT; 582 args.v2.ucMisc |= PANEL_ENCODER_MISC_COHERENT;
476 } 583 }
477 if (drm_detect_hdmi_monitor(radeon_connector->edid)) 584 if (hdmi_detected)
478 args.v2.ucMisc |= PANEL_ENCODER_MISC_HDMI_TYPE; 585 args.v2.ucMisc |= PANEL_ENCODER_MISC_HDMI_TYPE;
479 args.v2.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10); 586 args.v2.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10);
480 args.v2.ucTruncate = 0; 587 args.v2.ucTruncate = 0;
@@ -482,18 +589,18 @@ atombios_digital_setup(struct drm_encoder *encoder, int action)
482 args.v2.ucTemporal = 0; 589 args.v2.ucTemporal = 0;
483 args.v2.ucFRC = 0; 590 args.v2.ucFRC = 0;
484 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 591 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
485 if (dig->lvds_misc & (1 << 0)) 592 if (dig->lvds_misc & ATOM_PANEL_MISC_DUAL)
486 args.v2.ucMisc |= PANEL_ENCODER_MISC_DUAL; 593 args.v2.ucMisc |= PANEL_ENCODER_MISC_DUAL;
487 if (dig->lvds_misc & (1 << 5)) { 594 if (dig->lvds_misc & ATOM_PANEL_MISC_SPATIAL) {
488 args.v2.ucSpatial = PANEL_ENCODER_SPATIAL_DITHER_EN; 595 args.v2.ucSpatial = PANEL_ENCODER_SPATIAL_DITHER_EN;
489 if (dig->lvds_misc & (1 << 1)) 596 if (dig->lvds_misc & ATOM_PANEL_MISC_888RGB)
490 args.v2.ucSpatial |= PANEL_ENCODER_SPATIAL_DITHER_DEPTH; 597 args.v2.ucSpatial |= PANEL_ENCODER_SPATIAL_DITHER_DEPTH;
491 } 598 }
492 if (dig->lvds_misc & (1 << 6)) { 599 if (dig->lvds_misc & ATOM_PANEL_MISC_TEMPORAL) {
493 args.v2.ucTemporal = PANEL_ENCODER_TEMPORAL_DITHER_EN; 600 args.v2.ucTemporal = PANEL_ENCODER_TEMPORAL_DITHER_EN;
494 if (dig->lvds_misc & (1 << 1)) 601 if (dig->lvds_misc & ATOM_PANEL_MISC_888RGB)
495 args.v2.ucTemporal |= PANEL_ENCODER_TEMPORAL_DITHER_DEPTH; 602 args.v2.ucTemporal |= PANEL_ENCODER_TEMPORAL_DITHER_DEPTH;
496 if (((dig->lvds_misc >> 2) & 0x3) == 2) 603 if (((dig->lvds_misc >> ATOM_PANEL_MISC_GREY_LEVEL_SHIFT) & 0x3) == 2)
497 args.v2.ucTemporal |= PANEL_ENCODER_TEMPORAL_LEVEL_4; 604 args.v2.ucTemporal |= PANEL_ENCODER_TEMPORAL_LEVEL_4;
498 } 605 }
499 } else { 606 } else {
@@ -514,7 +621,6 @@ atombios_digital_setup(struct drm_encoder *encoder, int action)
514 } 621 }
515 622
516 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 623 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
517
518} 624}
519 625
520int 626int
@@ -522,6 +628,7 @@ atombios_get_encoder_mode(struct drm_encoder *encoder)
522{ 628{
523 struct drm_connector *connector; 629 struct drm_connector *connector;
524 struct radeon_connector *radeon_connector; 630 struct radeon_connector *radeon_connector;
631 struct radeon_connector_atom_dig *dig_connector;
525 632
526 connector = radeon_get_connector_for_encoder(encoder); 633 connector = radeon_get_connector_for_encoder(encoder);
527 if (!connector) 634 if (!connector)
@@ -551,21 +658,23 @@ atombios_get_encoder_mode(struct drm_encoder *encoder)
551 return ATOM_ENCODER_MODE_LVDS; 658 return ATOM_ENCODER_MODE_LVDS;
552 break; 659 break;
553 case DRM_MODE_CONNECTOR_DisplayPort: 660 case DRM_MODE_CONNECTOR_DisplayPort:
554 /*if (radeon_output->MonType == MT_DP) 661 case DRM_MODE_CONNECTOR_eDP:
555 return ATOM_ENCODER_MODE_DP; 662 dig_connector = radeon_connector->con_priv;
556 else*/ 663 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
557 if (drm_detect_hdmi_monitor(radeon_connector->edid)) 664 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP))
665 return ATOM_ENCODER_MODE_DP;
666 else if (drm_detect_hdmi_monitor(radeon_connector->edid))
558 return ATOM_ENCODER_MODE_HDMI; 667 return ATOM_ENCODER_MODE_HDMI;
559 else 668 else
560 return ATOM_ENCODER_MODE_DVI; 669 return ATOM_ENCODER_MODE_DVI;
561 break; 670 break;
562 case CONNECTOR_DVI_A: 671 case DRM_MODE_CONNECTOR_DVIA:
563 case CONNECTOR_VGA: 672 case DRM_MODE_CONNECTOR_VGA:
564 return ATOM_ENCODER_MODE_CRT; 673 return ATOM_ENCODER_MODE_CRT;
565 break; 674 break;
566 case CONNECTOR_STV: 675 case DRM_MODE_CONNECTOR_Composite:
567 case CONNECTOR_CTV: 676 case DRM_MODE_CONNECTOR_SVIDEO:
568 case CONNECTOR_DIN: 677 case DRM_MODE_CONNECTOR_9PinDIN:
569 /* fix me */ 678 /* fix me */
570 return ATOM_ENCODER_MODE_TV; 679 return ATOM_ENCODER_MODE_TV;
571 /*return ATOM_ENCODER_MODE_CV;*/ 680 /*return ATOM_ENCODER_MODE_CV;*/
@@ -573,98 +682,114 @@ atombios_get_encoder_mode(struct drm_encoder *encoder)
573 } 682 }
574} 683}
575 684
576static void 685/*
686 * DIG Encoder/Transmitter Setup
687 *
688 * DCE 3.0/3.1
689 * - 2 DIG transmitter blocks. UNIPHY (links A and B) and LVTMA.
690 * Supports up to 3 digital outputs
691 * - 2 DIG encoder blocks.
692 * DIG1 can drive UNIPHY link A or link B
693 * DIG2 can drive UNIPHY link B or LVTMA
694 *
695 * DCE 3.2
696 * - 3 DIG transmitter blocks. UNIPHY0/1/2 (links A and B).
697 * Supports up to 5 digital outputs
698 * - 2 DIG encoder blocks.
699 * DIG1/2 can drive UNIPHY0/1/2 link A or link B
700 *
701 * DCE 4.0
702 * - 3 DIG transmitter blocks UNPHY0/1/2 (links A and B).
703 * Supports up to 6 digital outputs
704 * - 6 DIG encoder blocks.
705 * - DIG to PHY mapping is hardcoded
706 * DIG1 drives UNIPHY0 link A, A+B
707 * DIG2 drives UNIPHY0 link B
708 * DIG3 drives UNIPHY1 link A, A+B
709 * DIG4 drives UNIPHY1 link B
710 * DIG5 drives UNIPHY2 link A, A+B
711 * DIG6 drives UNIPHY2 link B
712 *
713 * Routing
714 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
715 * Examples:
716 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
717 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
718 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
719 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
720 */
721
722union dig_encoder_control {
723 DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
724 DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
725 DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
726};
727
728void
577atombios_dig_encoder_setup(struct drm_encoder *encoder, int action) 729atombios_dig_encoder_setup(struct drm_encoder *encoder, int action)
578{ 730{
579 struct drm_device *dev = encoder->dev; 731 struct drm_device *dev = encoder->dev;
580 struct radeon_device *rdev = dev->dev_private; 732 struct radeon_device *rdev = dev->dev_private;
581 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 733 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
582 DIG_ENCODER_CONTROL_PS_ALLOCATION args; 734 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
583 int index = 0, num = 0; 735 struct radeon_connector_atom_dig *dig_connector =
736 radeon_get_atom_connector_priv_from_encoder(encoder);
737 union dig_encoder_control args;
738 int index = 0;
584 uint8_t frev, crev; 739 uint8_t frev, crev;
585 struct radeon_encoder_atom_dig *dig;
586 struct drm_connector *connector;
587 struct radeon_connector *radeon_connector;
588 struct radeon_connector_atom_dig *dig_connector;
589
590 connector = radeon_get_connector_for_encoder(encoder);
591 if (!connector)
592 return;
593 740
594 radeon_connector = to_radeon_connector(connector); 741 if (!dig || !dig_connector)
595
596 if (!radeon_connector->con_priv)
597 return; 742 return;
598 743
599 dig_connector = radeon_connector->con_priv;
600
601 if (!radeon_encoder->enc_priv)
602 return;
603
604 dig = radeon_encoder->enc_priv;
605
606 memset(&args, 0, sizeof(args)); 744 memset(&args, 0, sizeof(args));
607 745
608 if (ASIC_IS_DCE32(rdev)) { 746 if (ASIC_IS_DCE4(rdev))
609 if (dig->dig_block) 747 index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
748 else {
749 if (dig->dig_encoder)
610 index = GetIndexIntoMasterTable(COMMAND, DIG2EncoderControl); 750 index = GetIndexIntoMasterTable(COMMAND, DIG2EncoderControl);
611 else 751 else
612 index = GetIndexIntoMasterTable(COMMAND, DIG1EncoderControl); 752 index = GetIndexIntoMasterTable(COMMAND, DIG1EncoderControl);
613 num = dig->dig_block + 1;
614 } else {
615 switch (radeon_encoder->encoder_id) {
616 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
617 index = GetIndexIntoMasterTable(COMMAND, DIG1EncoderControl);
618 num = 1;
619 break;
620 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
621 index = GetIndexIntoMasterTable(COMMAND, DIG2EncoderControl);
622 num = 2;
623 break;
624 }
625 } 753 }
626 754
627 atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev); 755 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev))
756 return;
628 757
629 args.ucAction = action; 758 args.v1.ucAction = action;
630 args.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10); 759 args.v1.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10);
760 args.v1.ucEncoderMode = atombios_get_encoder_mode(encoder);
761
762 if (args.v1.ucEncoderMode == ATOM_ENCODER_MODE_DP) {
763 if (dig_connector->dp_clock == 270000)
764 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
765 args.v1.ucLaneNum = dig_connector->dp_lane_count;
766 } else if (radeon_encoder->pixel_clock > 165000)
767 args.v1.ucLaneNum = 8;
768 else
769 args.v1.ucLaneNum = 4;
631 770
632 if (ASIC_IS_DCE32(rdev)) { 771 if (ASIC_IS_DCE4(rdev)) {
772 args.v3.acConfig.ucDigSel = dig->dig_encoder;
773 args.v3.ucBitPerColor = PANEL_8BIT_PER_COLOR;
774 } else {
633 switch (radeon_encoder->encoder_id) { 775 switch (radeon_encoder->encoder_id) {
634 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 776 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
635 args.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1; 777 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
636 break; 778 break;
637 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 779 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
638 args.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2; 780 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
781 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
639 break; 782 break;
640 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 783 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
641 args.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3; 784 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
642 break; 785 break;
643 } 786 }
644 } else {
645 switch (radeon_encoder->encoder_id) {
646 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
647 args.ucConfig = ATOM_ENCODER_CONFIG_TRANSMITTER1;
648 break;
649 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
650 args.ucConfig = ATOM_ENCODER_CONFIG_TRANSMITTER2;
651 break;
652 }
653 }
654
655 if (radeon_encoder->pixel_clock > 165000) {
656 args.ucConfig |= ATOM_ENCODER_CONFIG_LINKA_B;
657 args.ucLaneNum = 8;
658 } else {
659 if (dig_connector->linkb) 787 if (dig_connector->linkb)
660 args.ucConfig |= ATOM_ENCODER_CONFIG_LINKB; 788 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
661 else 789 else
662 args.ucConfig |= ATOM_ENCODER_CONFIG_LINKA; 790 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
663 args.ucLaneNum = 4;
664 } 791 }
665 792
666 args.ucEncoderMode = atombios_get_encoder_mode(encoder);
667
668 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 793 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
669 794
670} 795}
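
Editor's note on the rewritten atombios_dig_encoder_setup above: it derives the encoder arguments from the encoder mode rather than per-transmitter bookkeeping, so DP links take their lane count (and the 2.70 GHz config bit) from the connector, dual-link TMDS above 165 MHz uses 8 lanes, and everything else uses 4, with link A/B selected from the connector routing. A small sketch of that selection follows for the DCE3-style path; the CFG_* values and struct dig_args are invented stand-ins for the atom bitfields.

#include <stdio.h>
#include <stdbool.h>

#define CFG_DPLINKRATE_2_70GHZ 0x01   /* invented stand-ins for the */
#define CFG_LINKB              0x04   /* ATOM_ENCODER_CONFIG_* bits */
#define CFG_LINKA              0x08

struct dig_args { int lane_num; unsigned config; };

/* DP gets its lane count from the connector, dual-link TMDS (>165 MHz)
 * gets 8 lanes, the rest get 4; link A/B comes from connector routing. */
static struct dig_args encoder_args(bool is_dp, int dp_clock_khz,
				    int dp_lane_count, int pixel_clock_khz,
				    bool linkb)
{
	struct dig_args a = { 0, 0 };

	if (is_dp) {
		if (dp_clock_khz == 270000)
			a.config |= CFG_DPLINKRATE_2_70GHZ;
		a.lane_num = dp_lane_count;
	} else if (pixel_clock_khz > 165000) {
		a.lane_num = 8;
	} else {
		a.lane_num = 4;
	}

	a.config |= linkb ? CFG_LINKB : CFG_LINKA;
	return a;
}

int main(void)
{
	struct dig_args dp   = encoder_args(true, 270000, 4, 148500, false);
	struct dig_args tmds = encoder_args(false, 0, 0, 268500, true); /* dual-link DVI */

	printf("DP:   %d lanes, config 0x%x\n", dp.lane_num, dp.config);
	printf("TMDS: %d lanes, config 0x%x\n", tmds.lane_num, tmds.config);
	return 0;
}
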
@@ -672,41 +797,38 @@ atombios_dig_encoder_setup(struct drm_encoder *encoder, int action)
672union dig_transmitter_control { 797union dig_transmitter_control {
673 DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1; 798 DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
674 DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2; 799 DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
800 DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
675}; 801};
676 802
677static void 803void
678atombios_dig_transmitter_setup(struct drm_encoder *encoder, int action) 804atombios_dig_transmitter_setup(struct drm_encoder *encoder, int action, uint8_t lane_num, uint8_t lane_set)
679{ 805{
680 struct drm_device *dev = encoder->dev; 806 struct drm_device *dev = encoder->dev;
681 struct radeon_device *rdev = dev->dev_private; 807 struct radeon_device *rdev = dev->dev_private;
682 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 808 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
683 union dig_transmitter_control args; 809 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
684 int index = 0, num = 0; 810 struct radeon_connector_atom_dig *dig_connector =
685 uint8_t frev, crev; 811 radeon_get_atom_connector_priv_from_encoder(encoder);
686 struct radeon_encoder_atom_dig *dig;
687 struct drm_connector *connector; 812 struct drm_connector *connector;
688 struct radeon_connector *radeon_connector; 813 struct radeon_connector *radeon_connector;
689 struct radeon_connector_atom_dig *dig_connector; 814 union dig_transmitter_control args;
815 int index = 0;
816 uint8_t frev, crev;
817 bool is_dp = false;
818 int pll_id = 0;
690 819
691 connector = radeon_get_connector_for_encoder(encoder); 820 if (!dig || !dig_connector)
692 if (!connector)
693 return; 821 return;
694 822
823 connector = radeon_get_connector_for_encoder(encoder);
695 radeon_connector = to_radeon_connector(connector); 824 radeon_connector = to_radeon_connector(connector);
696 825
697 if (!radeon_encoder->enc_priv) 826 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_DP)
698 return; 827 is_dp = true;
699
700 dig = radeon_encoder->enc_priv;
701
702 if (!radeon_connector->con_priv)
703 return;
704
705 dig_connector = radeon_connector->con_priv;
706 828
707 memset(&args, 0, sizeof(args)); 829 memset(&args, 0, sizeof(args));
708 830
709 if (ASIC_IS_DCE32(rdev)) 831 if (ASIC_IS_DCE32(rdev) || ASIC_IS_DCE4(rdev))
710 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 832 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
711 else { 833 else {
712 switch (radeon_encoder->encoder_id) { 834 switch (radeon_encoder->encoder_id) {
@@ -719,103 +841,138 @@ atombios_dig_transmitter_setup(struct drm_encoder *encoder, int action)
719 } 841 }
720 } 842 }
721 843
722 atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev); 844 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev))
845 return;
723 846
724 args.v1.ucAction = action; 847 args.v1.ucAction = action;
725 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 848 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
726 args.v1.usInitInfo = radeon_connector->connector_object_id; 849 args.v1.usInitInfo = radeon_connector->connector_object_id;
850 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
851 args.v1.asMode.ucLaneSel = lane_num;
852 args.v1.asMode.ucLaneSet = lane_set;
727 } else { 853 } else {
728 if (radeon_encoder->pixel_clock > 165000) 854 if (is_dp)
855 args.v1.usPixelClock =
856 cpu_to_le16(dig_connector->dp_clock / 10);
857 else if (radeon_encoder->pixel_clock > 165000)
729 args.v1.usPixelClock = cpu_to_le16((radeon_encoder->pixel_clock / 2) / 10); 858 args.v1.usPixelClock = cpu_to_le16((radeon_encoder->pixel_clock / 2) / 10);
730 else 859 else
731 args.v1.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10); 860 args.v1.usPixelClock = cpu_to_le16(radeon_encoder->pixel_clock / 10);
732 } 861 }
733 if (ASIC_IS_DCE32(rdev)) { 862 if (ASIC_IS_DCE4(rdev)) {
734 if (radeon_encoder->pixel_clock > 165000) 863 if (is_dp)
735 args.v2.usPixelClock = cpu_to_le16((radeon_encoder->pixel_clock / 2) / 10); 864 args.v3.ucLaneNum = dig_connector->dp_lane_count;
736 if (dig->dig_block) 865 else if (radeon_encoder->pixel_clock > 165000)
737 args.v2.acConfig.ucEncoderSel = 1; 866 args.v3.ucLaneNum = 8;
867 else
868 args.v3.ucLaneNum = 4;
869
870 if (dig_connector->linkb) {
871 args.v3.acConfig.ucLinkSel = 1;
872 args.v3.acConfig.ucEncoderSel = 1;
873 }
874
875 /* Select the PLL for the PHY
876 * DP PHY should be clocked from external src if there is
877 * one.
878 */
879 if (encoder->crtc) {
880 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
881 pll_id = radeon_crtc->pll_id;
882 }
883 if (is_dp && rdev->clock.dp_extclk)
884 args.v3.acConfig.ucRefClkSource = 2; /* external src */
885 else
886 args.v3.acConfig.ucRefClkSource = pll_id;
887
888 switch (radeon_encoder->encoder_id) {
889 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
890 args.v3.acConfig.ucTransmitterSel = 0;
891 break;
892 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
893 args.v3.acConfig.ucTransmitterSel = 1;
894 break;
895 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
896 args.v3.acConfig.ucTransmitterSel = 2;
897 break;
898 }
899
900 if (is_dp)
901 args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
902 else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
903 if (dig->coherent_mode)
904 args.v3.acConfig.fCoherentMode = 1;
905 if (radeon_encoder->pixel_clock > 165000)
906 args.v3.acConfig.fDualLinkConnector = 1;
907 }
908 } else if (ASIC_IS_DCE32(rdev)) {
909 args.v2.acConfig.ucEncoderSel = dig->dig_encoder;
910 if (dig_connector->linkb)
911 args.v2.acConfig.ucLinkSel = 1;
738 912
739 switch (radeon_encoder->encoder_id) { 913 switch (radeon_encoder->encoder_id) {
740 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 914 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
741 args.v2.acConfig.ucTransmitterSel = 0; 915 args.v2.acConfig.ucTransmitterSel = 0;
742 num = 0;
743 break; 916 break;
744 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 917 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
745 args.v2.acConfig.ucTransmitterSel = 1; 918 args.v2.acConfig.ucTransmitterSel = 1;
746 num = 1;
747 break; 919 break;
748 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 920 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
749 args.v2.acConfig.ucTransmitterSel = 2; 921 args.v2.acConfig.ucTransmitterSel = 2;
750 num = 2;
751 break; 922 break;
752 } 923 }
753 924
754 if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 925 if (is_dp)
926 args.v2.acConfig.fCoherentMode = 1;
927 else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
755 if (dig->coherent_mode) 928 if (dig->coherent_mode)
756 args.v2.acConfig.fCoherentMode = 1; 929 args.v2.acConfig.fCoherentMode = 1;
930 if (radeon_encoder->pixel_clock > 165000)
931 args.v2.acConfig.fDualLinkConnector = 1;
757 } 932 }
758 } else { 933 } else {
759 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL; 934 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
760 935
761 switch (radeon_encoder->encoder_id) { 936 if (dig->dig_encoder)
762 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 937 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
938 else
763 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER; 939 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
764 if (rdev->flags & RADEON_IS_IGP) { 940
765 if (radeon_encoder->pixel_clock > 165000) { 941 if ((rdev->flags & RADEON_IS_IGP) &&
766 args.v1.ucConfig |= (ATOM_TRANSMITTER_CONFIG_8LANE_LINK | 942 (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
767 ATOM_TRANSMITTER_CONFIG_LINKA_B); 943 if (is_dp || (radeon_encoder->pixel_clock <= 165000)) {
768 if (dig_connector->igp_lane_info & 0x3) 944 if (dig_connector->igp_lane_info & 0x1)
769 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7; 945 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
770 else if (dig_connector->igp_lane_info & 0xc) 946 else if (dig_connector->igp_lane_info & 0x2)
771 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15; 947 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
772 } else { 948 else if (dig_connector->igp_lane_info & 0x4)
773 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA; 949 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
774 if (dig_connector->igp_lane_info & 0x1) 950 else if (dig_connector->igp_lane_info & 0x8)
775 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3; 951 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
776 else if (dig_connector->igp_lane_info & 0x2)
777 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
778 else if (dig_connector->igp_lane_info & 0x4)
779 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
780 else if (dig_connector->igp_lane_info & 0x8)
781 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
782 }
783 } else { 952 } else {
784 if (radeon_encoder->pixel_clock > 165000) 953 if (dig_connector->igp_lane_info & 0x3)
785 args.v1.ucConfig |= (ATOM_TRANSMITTER_CONFIG_8LANE_LINK | 954 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
786 ATOM_TRANSMITTER_CONFIG_LINKA_B | 955 else if (dig_connector->igp_lane_info & 0xc)
787 ATOM_TRANSMITTER_CONFIG_LANE_0_7); 956 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
788 else {
789 if (dig_connector->linkb)
790 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB | ATOM_TRANSMITTER_CONFIG_LANE_0_3;
791 else
792 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA | ATOM_TRANSMITTER_CONFIG_LANE_0_3;
793 }
794 }
795 break;
796 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
797 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
798 if (radeon_encoder->pixel_clock > 165000)
799 args.v1.ucConfig |= (ATOM_TRANSMITTER_CONFIG_8LANE_LINK |
800 ATOM_TRANSMITTER_CONFIG_LINKA_B |
801 ATOM_TRANSMITTER_CONFIG_LANE_0_7);
802 else {
803 if (dig_connector->linkb)
804 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB | ATOM_TRANSMITTER_CONFIG_LANE_0_3;
805 else
806 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA | ATOM_TRANSMITTER_CONFIG_LANE_0_3;
807 } 957 }
808 break;
809 } 958 }
810 959
811 if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 960 if (dig_connector->linkb)
961 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
962 else
963 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
964
965 if (is_dp)
966 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
967 else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
812 if (dig->coherent_mode) 968 if (dig->coherent_mode)
813 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 969 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
970 if (radeon_encoder->pixel_clock > 165000)
971 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
814 } 972 }
815 } 973 }
816 974
817 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 975 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
818
819} 976}
820 977
821static void 978static void
@@ -918,12 +1075,25 @@ radeon_atom_encoder_dpms(struct drm_encoder *encoder, int mode)
918 if (is_dig) { 1075 if (is_dig) {
919 switch (mode) { 1076 switch (mode) {
920 case DRM_MODE_DPMS_ON: 1077 case DRM_MODE_DPMS_ON:
921 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_ENABLE); 1078 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_DP) {
1079 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1080
1081 dp_link_train(encoder, connector);
1082 if (ASIC_IS_DCE4(rdev))
1083 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON);
1084 }
1085 if (!ASIC_IS_DCE4(rdev))
1086 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_ENABLE_OUTPUT, 0, 0);
922 break; 1087 break;
923 case DRM_MODE_DPMS_STANDBY: 1088 case DRM_MODE_DPMS_STANDBY:
924 case DRM_MODE_DPMS_SUSPEND: 1089 case DRM_MODE_DPMS_SUSPEND:
925 case DRM_MODE_DPMS_OFF: 1090 case DRM_MODE_DPMS_OFF:
926 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_DISABLE); 1091 if (!ASIC_IS_DCE4(rdev))
1092 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_DISABLE_OUTPUT, 0, 0);
1093 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_DP) {
1094 if (ASIC_IS_DCE4(rdev))
1095 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_VIDEO_OFF);
1096 }
927 break; 1097 break;
928 } 1098 }
929 } else { 1099 } else {
@@ -940,9 +1110,12 @@ radeon_atom_encoder_dpms(struct drm_encoder *encoder, int mode)
940 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 1110 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
941 } 1111 }
942 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 1112 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
1113
1114 /* adjust pm to dpms change */
1115 radeon_pm_compute_clocks(rdev);
943} 1116}
944 1117
945union crtc_sourc_param { 1118union crtc_source_param {
946 SELECT_CRTC_SOURCE_PS_ALLOCATION v1; 1119 SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
947 SELECT_CRTC_SOURCE_PARAMETERS_V2 v2; 1120 SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
948}; 1121};
@@ -954,13 +1127,15 @@ atombios_set_encoder_crtc_source(struct drm_encoder *encoder)
954 struct radeon_device *rdev = dev->dev_private; 1127 struct radeon_device *rdev = dev->dev_private;
955 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 1128 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
956 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc); 1129 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
957 union crtc_sourc_param args; 1130 union crtc_source_param args;
958 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1131 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
959 uint8_t frev, crev; 1132 uint8_t frev, crev;
1133 struct radeon_encoder_atom_dig *dig;
960 1134
961 memset(&args, 0, sizeof(args)); 1135 memset(&args, 0, sizeof(args));
962 1136
963 atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev); 1137 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev))
1138 return;
964 1139
965 switch (frev) { 1140 switch (frev) {
966 case 1: 1141 case 1:
@@ -1020,20 +1195,32 @@ atombios_set_encoder_crtc_source(struct drm_encoder *encoder)
1020 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1195 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1021 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1196 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1022 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1197 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1023 if (ASIC_IS_DCE32(rdev)) { 1198 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1024 if (radeon_crtc->crtc_id) 1199 dig = radeon_encoder->enc_priv;
1025 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1200 switch (dig->dig_encoder) {
1026 else 1201 case 0:
1027 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1028 } else
1029 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1202 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1203 break;
1204 case 1:
1205 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1206 break;
1207 case 2:
1208 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1209 break;
1210 case 3:
1211 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1212 break;
1213 case 4:
1214 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1215 break;
1216 case 5:
1217 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1218 break;
1219 }
1030 break; 1220 break;
1031 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1221 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1032 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1222 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1033 break; 1223 break;
1034 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1035 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1036 break;
1037 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1224 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1038 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1225 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1039 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1226 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
@@ -1061,6 +1248,8 @@ atombios_set_encoder_crtc_source(struct drm_encoder *encoder)
1061 1248
1062 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 1249 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
1063 1250
1251 /* update scratch regs with new routing */
1252 radeon_atombios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id);
1064} 1253}
1065 1254
1066static void 1255static void
@@ -1087,6 +1276,7 @@ atombios_apply_encoder_quirks(struct drm_encoder *encoder,
1087 } 1276 }
1088 1277
1089 /* set scaler clears this on some chips */ 1278 /* set scaler clears this on some chips */
1279 /* XXX check DCE4 */
1090 if (!(radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))) { 1280 if (!(radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))) {
1091 if (ASIC_IS_AVIVO(rdev) && (mode->flags & DRM_MODE_FLAG_INTERLACE)) 1281 if (ASIC_IS_AVIVO(rdev) && (mode->flags & DRM_MODE_FLAG_INTERLACE))
1092 WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset, 1282 WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset,
@@ -1094,6 +1284,74 @@ atombios_apply_encoder_quirks(struct drm_encoder *encoder,
1094 } 1284 }
1095} 1285}
1096 1286
1287static int radeon_atom_pick_dig_encoder(struct drm_encoder *encoder)
1288{
1289 struct drm_device *dev = encoder->dev;
1290 struct radeon_device *rdev = dev->dev_private;
1291 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1292 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1293 struct drm_encoder *test_encoder;
1294 struct radeon_encoder_atom_dig *dig;
1295 uint32_t dig_enc_in_use = 0;
1296
1297 if (ASIC_IS_DCE4(rdev)) {
1298 struct radeon_connector_atom_dig *dig_connector =
1299 radeon_get_atom_connector_priv_from_encoder(encoder);
1300
1301 switch (radeon_encoder->encoder_id) {
1302 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1303 if (dig_connector->linkb)
1304 return 1;
1305 else
1306 return 0;
1307 break;
1308 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1309 if (dig_connector->linkb)
1310 return 3;
1311 else
1312 return 2;
1313 break;
1314 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1315 if (dig_connector->linkb)
1316 return 5;
1317 else
1318 return 4;
1319 break;
1320 }
1321 }
1322
 1323	/* on DCE32 an encoder can drive any block, so just use the crtc id */
1324 if (ASIC_IS_DCE32(rdev)) {
1325 return radeon_crtc->crtc_id;
1326 }
1327
1328 /* on DCE3 - LVTMA can only be driven by DIGB */
1329 list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
1330 struct radeon_encoder *radeon_test_encoder;
1331
1332 if (encoder == test_encoder)
1333 continue;
1334
1335 if (!radeon_encoder_is_digital(test_encoder))
1336 continue;
1337
1338 radeon_test_encoder = to_radeon_encoder(test_encoder);
1339 dig = radeon_test_encoder->enc_priv;
1340
1341 if (dig->dig_encoder >= 0)
1342 dig_enc_in_use |= (1 << dig->dig_encoder);
1343 }
1344
1345 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA) {
1346 if (dig_enc_in_use & 0x2)
1347 DRM_ERROR("LVDS required digital encoder 2 but it was in use - stealing\n");
1348 return 1;
1349 }
1350 if (!(dig_enc_in_use & 1))
1351 return 0;
1352 return 1;
1353}
1354
1097static void 1355static void
1098radeon_atom_encoder_mode_set(struct drm_encoder *encoder, 1356radeon_atom_encoder_mode_set(struct drm_encoder *encoder,
1099 struct drm_display_mode *mode, 1357 struct drm_display_mode *mode,
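
Editor's note on radeon_atom_pick_dig_encoder above: on DCE4 it encodes the hardwired DIG-to-PHY mapping described in the routing comment earlier in this diff (UNIPHY0/1/2 link A map to DIG1/3/5, link B to DIG2/4/6, returned 0-based here), while pre-DCE4 parts fall back to per-CRTC or first-free selection. A compact model of the DCE4 mapping follows; the phy index is a hypothetical stand-in for the UNIPHY encoder object IDs.

#include <stdio.h>
#include <stdbool.h>

/* DCE4: DIG encoders are hardwired to PHYs.  phy 0/1/2 stands in for
 * UNIPHY0/1/2; link B selects the odd-numbered encoder of the pair. */
static int dce4_pick_dig_encoder(int phy, bool linkb)
{
	return phy * 2 + (linkb ? 1 : 0);
}

int main(void)
{
	for (int phy = 0; phy < 3; phy++)
		printf("UNIPHY%d: link A -> DIG%d, link B -> DIG%d\n",
		       phy,
		       dce4_pick_dig_encoder(phy, false) + 1,
		       dce4_pick_dig_encoder(phy, true) + 1);
	return 0;
}
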
@@ -1102,20 +1360,10 @@ radeon_atom_encoder_mode_set(struct drm_encoder *encoder,
1102 struct drm_device *dev = encoder->dev; 1360 struct drm_device *dev = encoder->dev;
1103 struct radeon_device *rdev = dev->dev_private; 1361 struct radeon_device *rdev = dev->dev_private;
1104 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 1362 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1105 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1106
1107 if (radeon_encoder->enc_priv) {
1108 struct radeon_encoder_atom_dig *dig;
1109 1363
1110 dig = radeon_encoder->enc_priv;
1111 dig->dig_block = radeon_crtc->crtc_id;
1112 }
1113 radeon_encoder->pixel_clock = adjusted_mode->clock; 1364 radeon_encoder->pixel_clock = adjusted_mode->clock;
1114 1365
1115 radeon_atombios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id); 1366 if (ASIC_IS_AVIVO(rdev) && !ASIC_IS_DCE4(rdev)) {
1116 atombios_set_encoder_crtc_source(encoder);
1117
1118 if (ASIC_IS_AVIVO(rdev)) {
1119 if (radeon_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT | ATOM_DEVICE_TV_SUPPORT)) 1367 if (radeon_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT | ATOM_DEVICE_TV_SUPPORT))
1120 atombios_yuv_setup(encoder, true); 1368 atombios_yuv_setup(encoder, true);
1121 else 1369 else
@@ -1133,15 +1381,26 @@ radeon_atom_encoder_mode_set(struct drm_encoder *encoder,
1133 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1381 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1134 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1382 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1135 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1383 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1136 /* disable the encoder and transmitter */ 1384 if (ASIC_IS_DCE4(rdev)) {
1137 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_DISABLE); 1385 /* disable the transmitter */
1138 atombios_dig_encoder_setup(encoder, ATOM_DISABLE); 1386 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1139 1387 /* setup and enable the encoder */
1140 /* setup and enable the encoder and transmitter */ 1388 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_SETUP);
1141 atombios_dig_encoder_setup(encoder, ATOM_ENABLE); 1389
1142 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_INIT); 1390 /* init and enable the transmitter */
1143 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_SETUP); 1391 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_INIT, 0, 0);
1144 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_ENABLE); 1392 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_ENABLE, 0, 0);
1393 } else {
1394 /* disable the encoder and transmitter */
1395 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1396 atombios_dig_encoder_setup(encoder, ATOM_DISABLE);
1397
1398 /* setup and enable the encoder and transmitter */
1399 atombios_dig_encoder_setup(encoder, ATOM_ENABLE);
1400 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_INIT, 0, 0);
1401 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_SETUP, 0, 0);
1402 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_ENABLE, 0, 0);
1403 }
1145 break; 1404 break;
1146 case ENCODER_OBJECT_ID_INTERNAL_DDI: 1405 case ENCODER_OBJECT_ID_INTERNAL_DDI:
1147 atombios_ddia_setup(encoder, ATOM_ENABLE); 1406 atombios_ddia_setup(encoder, ATOM_ENABLE);
@@ -1155,11 +1414,20 @@ radeon_atom_encoder_mode_set(struct drm_encoder *encoder,
1155 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1414 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1156 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1415 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1157 atombios_dac_setup(encoder, ATOM_ENABLE); 1416 atombios_dac_setup(encoder, ATOM_ENABLE);
1158 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT)) 1417 if (radeon_encoder->devices & (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT)) {
1159 atombios_tv_setup(encoder, ATOM_ENABLE); 1418 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
1419 atombios_tv_setup(encoder, ATOM_ENABLE);
1420 else
1421 atombios_tv_setup(encoder, ATOM_DISABLE);
1422 }
1160 break; 1423 break;
1161 } 1424 }
1162 atombios_apply_encoder_quirks(encoder, adjusted_mode); 1425 atombios_apply_encoder_quirks(encoder, adjusted_mode);
1426
1427 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_HDMI) {
1428 r600_hdmi_enable(encoder);
1429 r600_hdmi_setmode(encoder, adjusted_mode);
1430 }
1163} 1431}
1164 1432
1165static bool 1433static bool
@@ -1179,7 +1447,8 @@ atombios_dac_load_detect(struct drm_encoder *encoder, struct drm_connector *conn
1179 1447
1180 memset(&args, 0, sizeof(args)); 1448 memset(&args, 0, sizeof(args));
1181 1449
1182 atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev); 1450 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev))
1451 return false;
1183 1452
1184 args.sDacload.ucMisc = 0; 1453 args.sDacload.ucMisc = 0;
1185 1454
@@ -1253,8 +1522,20 @@ radeon_atom_dac_detect(struct drm_encoder *encoder, struct drm_connector *connec
1253 1522
1254static void radeon_atom_encoder_prepare(struct drm_encoder *encoder) 1523static void radeon_atom_encoder_prepare(struct drm_encoder *encoder)
1255{ 1524{
1525 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1526
1527 if (radeon_encoder->active_device &
1528 (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) {
1529 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
1530 if (dig)
1531 dig->dig_encoder = radeon_atom_pick_dig_encoder(encoder);
1532 }
1533
1256 radeon_atom_output_lock(encoder, true); 1534 radeon_atom_output_lock(encoder, true);
1257 radeon_atom_encoder_dpms(encoder, DRM_MODE_DPMS_OFF); 1535 radeon_atom_encoder_dpms(encoder, DRM_MODE_DPMS_OFF);
1536
1537 /* this is needed for the pll/ss setup to work correctly in some cases */
1538 atombios_set_encoder_crtc_source(encoder);
1258} 1539}
1259 1540
1260static void radeon_atom_encoder_commit(struct drm_encoder *encoder) 1541static void radeon_atom_encoder_commit(struct drm_encoder *encoder)
@@ -1266,7 +1547,15 @@ static void radeon_atom_encoder_commit(struct drm_encoder *encoder)
1266static void radeon_atom_encoder_disable(struct drm_encoder *encoder) 1547static void radeon_atom_encoder_disable(struct drm_encoder *encoder)
1267{ 1548{
1268 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 1549 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1550 struct radeon_encoder_atom_dig *dig;
1269 radeon_atom_encoder_dpms(encoder, DRM_MODE_DPMS_OFF); 1551 radeon_atom_encoder_dpms(encoder, DRM_MODE_DPMS_OFF);
1552
1553 if (radeon_encoder_is_digital(encoder)) {
1554 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_HDMI)
1555 r600_hdmi_disable(encoder);
1556 dig = radeon_encoder->enc_priv;
1557 dig->dig_encoder = -1;
1558 }
1270 radeon_encoder->active_device = 0; 1559 radeon_encoder->active_device = 0;
1271} 1560}
1272 1561
@@ -1304,12 +1593,14 @@ static const struct drm_encoder_funcs radeon_atom_enc_funcs = {
1304struct radeon_encoder_atom_dac * 1593struct radeon_encoder_atom_dac *
1305radeon_atombios_set_dac_info(struct radeon_encoder *radeon_encoder) 1594radeon_atombios_set_dac_info(struct radeon_encoder *radeon_encoder)
1306{ 1595{
1596 struct drm_device *dev = radeon_encoder->base.dev;
1597 struct radeon_device *rdev = dev->dev_private;
1307 struct radeon_encoder_atom_dac *dac = kzalloc(sizeof(struct radeon_encoder_atom_dac), GFP_KERNEL); 1598 struct radeon_encoder_atom_dac *dac = kzalloc(sizeof(struct radeon_encoder_atom_dac), GFP_KERNEL);
1308 1599
1309 if (!dac) 1600 if (!dac)
1310 return NULL; 1601 return NULL;
1311 1602
1312 dac->tv_std = TV_STD_NTSC; 1603 dac->tv_std = radeon_atombios_get_tv_info(rdev);
1313 return dac; 1604 return dac;
1314} 1605}
1315 1606
@@ -1323,6 +1614,7 @@ radeon_atombios_set_dig_info(struct radeon_encoder *radeon_encoder)
1323 1614
1324 /* coherent mode by default */ 1615 /* coherent mode by default */
1325 dig->coherent_mode = true; 1616 dig->coherent_mode = true;
1617 dig->dig_encoder = -1;
1326 1618
1327 return dig; 1619 return dig;
1328} 1620}
@@ -1350,11 +1642,18 @@ radeon_add_atom_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t su
1350 return; 1642 return;
1351 1643
1352 encoder = &radeon_encoder->base; 1644 encoder = &radeon_encoder->base;
1353 if (rdev->flags & RADEON_SINGLE_CRTC) 1645 switch (rdev->num_crtc) {
1646 case 1:
1354 encoder->possible_crtcs = 0x1; 1647 encoder->possible_crtcs = 0x1;
1355 else 1648 break;
1649 case 2:
1650 default:
1356 encoder->possible_crtcs = 0x3; 1651 encoder->possible_crtcs = 0x3;
1357 encoder->possible_clones = 0; 1652 break;
1653 case 6:
1654 encoder->possible_crtcs = 0x3f;
1655 break;
1656 }
1358 1657
1359 radeon_encoder->enc_priv = NULL; 1658 radeon_encoder->enc_priv = NULL;
1360 1659
@@ -1379,6 +1678,7 @@ radeon_add_atom_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t su
1379 break; 1678 break;
1380 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1679 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1381 drm_encoder_init(dev, encoder, &radeon_atom_enc_funcs, DRM_MODE_ENCODER_DAC); 1680 drm_encoder_init(dev, encoder, &radeon_atom_enc_funcs, DRM_MODE_ENCODER_DAC);
1681 radeon_encoder->enc_priv = radeon_atombios_set_dac_info(radeon_encoder);
1382 drm_encoder_helper_add(encoder, &radeon_atom_dac_helper_funcs); 1682 drm_encoder_helper_add(encoder, &radeon_atom_dac_helper_funcs);
1383 break; 1683 break;
1384 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1684 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
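Editor's note on the possible_crtcs hunk above: the switch on rdev->num_crtc simply sets one bit per CRTC the encoder may be routed to (0x1 for single-CRTC ASICs, 0x3 for the usual two, 0x3f for six-CRTC Evergreen parts). A minimal sketch of that computation, using a hypothetical helper name that does not exist in the driver:

/* Illustrative only -- the driver uses the explicit switch shown above. */
static inline u32 example_possible_crtcs(int num_crtc)
{
	/* one bit per CRTC: 1 -> 0x1, 2 -> 0x3, 6 -> 0x3f */
	return (1u << num_crtc) - 1;
}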
diff --git a/drivers/gpu/drm/radeon/radeon_family.h b/drivers/gpu/drm/radeon/radeon_family.h
index 797972e344a6..e329066dcabd 100644
--- a/drivers/gpu/drm/radeon/radeon_family.h
+++ b/drivers/gpu/drm/radeon/radeon_family.h
@@ -36,7 +36,7 @@
36 * Radeon chip families 36 * Radeon chip families
37 */ 37 */
38enum radeon_family { 38enum radeon_family {
39 CHIP_R100, 39 CHIP_R100 = 0,
40 CHIP_RV100, 40 CHIP_RV100,
41 CHIP_RS100, 41 CHIP_RS100,
42 CHIP_RV200, 42 CHIP_RV200,
@@ -75,6 +75,11 @@ enum radeon_family {
75 CHIP_RV730, 75 CHIP_RV730,
76 CHIP_RV710, 76 CHIP_RV710,
77 CHIP_RV740, 77 CHIP_RV740,
78 CHIP_CEDAR,
79 CHIP_REDWOOD,
80 CHIP_JUNIPER,
81 CHIP_CYPRESS,
82 CHIP_HEMLOCK,
78 CHIP_LAST, 83 CHIP_LAST,
79}; 84};
80 85
@@ -94,4 +99,5 @@ enum radeon_chip_flags {
94 RADEON_IS_PCI = 0x00800000UL, 99 RADEON_IS_PCI = 0x00800000UL,
95 RADEON_IS_IGPGART = 0x01000000UL, 100 RADEON_IS_IGPGART = 0x01000000UL,
96}; 101};
102
97#endif 103#endif
diff --git a/drivers/gpu/drm/radeon/radeon_fb.c b/drivers/gpu/drm/radeon/radeon_fb.c
index b38c4c8e2c61..9ac57a09784b 100644
--- a/drivers/gpu/drm/radeon/radeon_fb.c
+++ b/drivers/gpu/drm/radeon/radeon_fb.c
@@ -28,6 +28,7 @@
28 */ 28 */
29 29
30#include <linux/module.h> 30#include <linux/module.h>
31#include <linux/slab.h>
31#include <linux/fb.h> 32#include <linux/fb.h>
32 33
33#include "drmP.h" 34#include "drmP.h"
@@ -39,6 +40,8 @@
39 40
40#include "drm_fb_helper.h" 41#include "drm_fb_helper.h"
41 42
43#include <linux/vga_switcheroo.h>
44
42struct radeon_fb_device { 45struct radeon_fb_device {
43 struct drm_fb_helper helper; 46 struct drm_fb_helper helper;
44 struct radeon_framebuffer *rfb; 47 struct radeon_framebuffer *rfb;
@@ -59,7 +62,7 @@ static struct fb_ops radeonfb_ops = {
59}; 62};
60 63
61/** 64/**
62 * Curretly it is assumed that the old framebuffer is reused. 65 * Currently it is assumed that the old framebuffer is reused.
63 * 66 *
64 * LOCKING 67 * LOCKING
65 * caller should hold the mode config lock. 68 * caller should hold the mode config lock.
@@ -140,7 +143,7 @@ int radeonfb_create(struct drm_device *dev,
140 struct radeon_framebuffer *rfb; 143 struct radeon_framebuffer *rfb;
141 struct drm_mode_fb_cmd mode_cmd; 144 struct drm_mode_fb_cmd mode_cmd;
142 struct drm_gem_object *gobj = NULL; 145 struct drm_gem_object *gobj = NULL;
143 struct radeon_object *robj = NULL; 146 struct radeon_bo *rbo = NULL;
144 struct device *device = &rdev->pdev->dev; 147 struct device *device = &rdev->pdev->dev;
145 int size, aligned_size, ret; 148 int size, aligned_size, ret;
146 u64 fb_gpuaddr; 149 u64 fb_gpuaddr;
@@ -148,7 +151,6 @@ int radeonfb_create(struct drm_device *dev,
148 unsigned long tmp; 151 unsigned long tmp;
149 bool fb_tiled = false; /* useful for testing */ 152 bool fb_tiled = false; /* useful for testing */
150 u32 tiling_flags = 0; 153 u32 tiling_flags = 0;
151 int crtc_count;
152 154
153 mode_cmd.width = surface_width; 155 mode_cmd.width = surface_width;
154 mode_cmd.height = surface_height; 156 mode_cmd.height = surface_height;
@@ -168,14 +170,14 @@ int radeonfb_create(struct drm_device *dev,
168 ret = radeon_gem_object_create(rdev, aligned_size, 0, 170 ret = radeon_gem_object_create(rdev, aligned_size, 0,
169 RADEON_GEM_DOMAIN_VRAM, 171 RADEON_GEM_DOMAIN_VRAM,
170 false, ttm_bo_type_kernel, 172 false, ttm_bo_type_kernel,
171 false, &gobj); 173 &gobj);
172 if (ret) { 174 if (ret) {
173 printk(KERN_ERR "failed to allocate framebuffer (%d %d)\n", 175 printk(KERN_ERR "failed to allocate framebuffer (%d %d)\n",
174 surface_width, surface_height); 176 surface_width, surface_height);
175 ret = -ENOMEM; 177 ret = -ENOMEM;
176 goto out; 178 goto out;
177 } 179 }
178 robj = gobj->driver_private; 180 rbo = gobj->driver_private;
179 181
180 if (fb_tiled) 182 if (fb_tiled)
181 tiling_flags = RADEON_TILING_MACRO; 183 tiling_flags = RADEON_TILING_MACRO;
@@ -192,8 +194,13 @@ int radeonfb_create(struct drm_device *dev,
192 } 194 }
193#endif 195#endif
194 196
195 if (tiling_flags) 197 if (tiling_flags) {
196 radeon_object_set_tiling_flags(robj, tiling_flags | RADEON_TILING_SURFACE, mode_cmd.pitch); 198 ret = radeon_bo_set_tiling_flags(rbo,
199 tiling_flags | RADEON_TILING_SURFACE,
200 mode_cmd.pitch);
201 if (ret)
202 dev_err(rdev->dev, "FB failed to set tiling flags\n");
203 }
197 mutex_lock(&rdev->ddev->struct_mutex); 204 mutex_lock(&rdev->ddev->struct_mutex);
198 fb = radeon_framebuffer_create(rdev->ddev, &mode_cmd, gobj); 205 fb = radeon_framebuffer_create(rdev->ddev, &mode_cmd, gobj);
199 if (fb == NULL) { 206 if (fb == NULL) {
@@ -201,10 +208,19 @@ int radeonfb_create(struct drm_device *dev,
201 ret = -ENOMEM; 208 ret = -ENOMEM;
202 goto out_unref; 209 goto out_unref;
203 } 210 }
204 ret = radeon_object_pin(robj, RADEON_GEM_DOMAIN_VRAM, &fb_gpuaddr); 211 ret = radeon_bo_reserve(rbo, false);
212 if (unlikely(ret != 0))
213 goto out_unref;
214 ret = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_gpuaddr);
215 if (ret) {
216 radeon_bo_unreserve(rbo);
217 goto out_unref;
218 }
219 if (fb_tiled)
220 radeon_bo_check_tiling(rbo, 0, 0);
221 ret = radeon_bo_kmap(rbo, &fbptr);
222 radeon_bo_unreserve(rbo);
205 if (ret) { 223 if (ret) {
206 printk(KERN_ERR "failed to pin framebuffer\n");
207 ret = -ENOMEM;
208 goto out_unref; 224 goto out_unref;
209 } 225 }
210 226
@@ -213,7 +229,7 @@ int radeonfb_create(struct drm_device *dev,
213 *fb_p = fb; 229 *fb_p = fb;
214 rfb = to_radeon_framebuffer(fb); 230 rfb = to_radeon_framebuffer(fb);
215 rdev->fbdev_rfb = rfb; 231 rdev->fbdev_rfb = rfb;
216 rdev->fbdev_robj = robj; 232 rdev->fbdev_rbo = rbo;
217 233
218 info = framebuffer_alloc(sizeof(struct radeon_fb_device), device); 234 info = framebuffer_alloc(sizeof(struct radeon_fb_device), device);
219 if (info == NULL) { 235 if (info == NULL) {
@@ -225,24 +241,12 @@ int radeonfb_create(struct drm_device *dev,
225 rfbdev = info->par; 241 rfbdev = info->par;
226 rfbdev->helper.funcs = &radeon_fb_helper_funcs; 242 rfbdev->helper.funcs = &radeon_fb_helper_funcs;
227 rfbdev->helper.dev = dev; 243 rfbdev->helper.dev = dev;
228 if (rdev->flags & RADEON_SINGLE_CRTC) 244 ret = drm_fb_helper_init_crtc_count(&rfbdev->helper, rdev->num_crtc,
229 crtc_count = 1;
230 else
231 crtc_count = 2;
232 ret = drm_fb_helper_init_crtc_count(&rfbdev->helper, crtc_count,
233 RADEONFB_CONN_LIMIT); 245 RADEONFB_CONN_LIMIT);
234 if (ret) 246 if (ret)
235 goto out_unref; 247 goto out_unref;
236 248
237 if (fb_tiled) 249 memset_io(fbptr, 0x0, aligned_size);
238 radeon_object_check_tiling(robj, 0, 0);
239
240 ret = radeon_object_kmap(robj, &fbptr);
241 if (ret) {
242 goto out_unref;
243 }
244
245 memset_io(fbptr, 0, aligned_size);
246 250
247 strcpy(info->fix.id, "radeondrmfb"); 251 strcpy(info->fix.id, "radeondrmfb");
248 252
@@ -251,7 +255,7 @@ int radeonfb_create(struct drm_device *dev,
251 info->flags = FBINFO_DEFAULT; 255 info->flags = FBINFO_DEFAULT;
252 info->fbops = &radeonfb_ops; 256 info->fbops = &radeonfb_ops;
253 257
254 tmp = fb_gpuaddr - rdev->mc.vram_location; 258 tmp = fb_gpuaddr - rdev->mc.vram_start;
255 info->fix.smem_start = rdev->mc.aper_base + tmp; 259 info->fix.smem_start = rdev->mc.aper_base + tmp;
256 info->fix.smem_len = size; 260 info->fix.smem_len = size;
257 info->screen_base = fbptr; 261 info->screen_base = fbptr;
@@ -285,11 +289,16 @@ int radeonfb_create(struct drm_device *dev,
285 rfbdev->rdev = rdev; 289 rfbdev->rdev = rdev;
286 290
287 mutex_unlock(&rdev->ddev->struct_mutex); 291 mutex_unlock(&rdev->ddev->struct_mutex);
292 vga_switcheroo_client_fb_set(rdev->ddev->pdev, info);
288 return 0; 293 return 0;
289 294
290out_unref: 295out_unref:
291 if (robj) { 296 if (rbo) {
292 radeon_object_kunmap(robj); 297 ret = radeon_bo_reserve(rbo, false);
298 if (likely(ret == 0)) {
299 radeon_bo_kunmap(rbo);
300 radeon_bo_unreserve(rbo);
301 }
293 } 302 }
294 if (fb && ret) { 303 if (fb && ret) {
295 list_del(&fb->filp_head); 304 list_del(&fb->filp_head);
@@ -321,14 +330,22 @@ int radeon_parse_options(char *options)
321 330
322int radeonfb_probe(struct drm_device *dev) 331int radeonfb_probe(struct drm_device *dev)
323{ 332{
324 return drm_fb_helper_single_fb_probe(dev, 32, &radeonfb_create); 333 struct radeon_device *rdev = dev->dev_private;
334 int bpp_sel = 32;
335
336 /* select 8 bpp console on RN50 or 16MB cards */
337 if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024))
338 bpp_sel = 8;
339
340 return drm_fb_helper_single_fb_probe(dev, bpp_sel, &radeonfb_create);
325} 341}
326 342
327int radeonfb_remove(struct drm_device *dev, struct drm_framebuffer *fb) 343int radeonfb_remove(struct drm_device *dev, struct drm_framebuffer *fb)
328{ 344{
329 struct fb_info *info; 345 struct fb_info *info;
330 struct radeon_framebuffer *rfb = to_radeon_framebuffer(fb); 346 struct radeon_framebuffer *rfb = to_radeon_framebuffer(fb);
331 struct radeon_object *robj; 347 struct radeon_bo *rbo;
348 int r;
332 349
333 if (!fb) { 350 if (!fb) {
334 return -EINVAL; 351 return -EINVAL;
@@ -336,10 +353,14 @@ int radeonfb_remove(struct drm_device *dev, struct drm_framebuffer *fb)
336 info = fb->fbdev; 353 info = fb->fbdev;
337 if (info) { 354 if (info) {
338 struct radeon_fb_device *rfbdev = info->par; 355 struct radeon_fb_device *rfbdev = info->par;
339 robj = rfb->obj->driver_private; 356 rbo = rfb->obj->driver_private;
340 unregister_framebuffer(info); 357 unregister_framebuffer(info);
341 radeon_object_kunmap(robj); 358 r = radeon_bo_reserve(rbo, false);
342 radeon_object_unpin(robj); 359 if (likely(r == 0)) {
360 radeon_bo_kunmap(rbo);
361 radeon_bo_unpin(rbo);
362 radeon_bo_unreserve(rbo);
363 }
343 drm_fb_helper_free(&rfbdev->helper); 364 drm_fb_helper_free(&rfbdev->helper);
344 framebuffer_release(info); 365 framebuffer_release(info);
345 } 366 }
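Editor's note: the fbdev path above now follows the reserve/unreserve discipline of the renamed radeon_bo API instead of the old radeon_object calls. A hedged sketch of the pattern radeonfb_create and radeonfb_remove use (the helper name is invented for illustration; only the radeon_bo_* calls already shown in the hunks are assumed):

/* Pin a BO into VRAM and map it for CPU access; mirrors the flow above. */
static int example_pin_and_kmap(struct radeon_bo *rbo, u64 *gpu_addr, void **cpu_ptr)
{
	int r;

	r = radeon_bo_reserve(rbo, false);	/* reservation must be held */
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, gpu_addr);
	if (r == 0) {
		r = radeon_bo_kmap(rbo, cpu_ptr);
		if (r)
			radeon_bo_unpin(rbo);	/* undo the pin on kmap failure */
	}
	radeon_bo_unreserve(rbo);		/* drop the reservation before returning */
	return r;
}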
diff --git a/drivers/gpu/drm/radeon/radeon_fence.c b/drivers/gpu/drm/radeon/radeon_fence.c
index 3beb26d74719..d90f95b405c5 100644
--- a/drivers/gpu/drm/radeon/radeon_fence.c
+++ b/drivers/gpu/drm/radeon/radeon_fence.c
@@ -33,6 +33,7 @@
33#include <linux/wait.h> 33#include <linux/wait.h>
34#include <linux/list.h> 34#include <linux/list.h>
35#include <linux/kref.h> 35#include <linux/kref.h>
36#include <linux/slab.h>
36#include "drmP.h" 37#include "drmP.h"
37#include "drm.h" 38#include "drm.h"
38#include "radeon_reg.h" 39#include "radeon_reg.h"
@@ -140,16 +141,15 @@ int radeon_fence_create(struct radeon_device *rdev, struct radeon_fence **fence)
140 141
141bool radeon_fence_signaled(struct radeon_fence *fence) 142bool radeon_fence_signaled(struct radeon_fence *fence)
142{ 143{
143 struct radeon_device *rdev = fence->rdev;
144 unsigned long irq_flags; 144 unsigned long irq_flags;
145 bool signaled = false; 145 bool signaled = false;
146 146
147 if (rdev->gpu_lockup) { 147 if (!fence)
148 return true; 148 return true;
149 } 149
150 if (fence == NULL) { 150 if (fence->rdev->gpu_lockup)
151 return true; 151 return true;
152 } 152
153 write_lock_irqsave(&fence->rdev->fence_drv.lock, irq_flags); 153 write_lock_irqsave(&fence->rdev->fence_drv.lock, irq_flags);
154 signaled = fence->signaled; 154 signaled = fence->signaled;
 155 	/* if we are shutting down report all fences as signaled */ 155
@@ -168,37 +168,6 @@ bool radeon_fence_signaled(struct radeon_fence *fence)
168 return signaled; 168 return signaled;
169} 169}
170 170
171int r600_fence_wait(struct radeon_fence *fence, bool intr, bool lazy)
172{
173 struct radeon_device *rdev;
174 int ret = 0;
175
176 rdev = fence->rdev;
177
178 __set_current_state(intr ? TASK_INTERRUPTIBLE : TASK_UNINTERRUPTIBLE);
179
180 while (1) {
181 if (radeon_fence_signaled(fence))
182 break;
183
184 if (time_after_eq(jiffies, fence->timeout)) {
185 ret = -EBUSY;
186 break;
187 }
188
189 if (lazy)
190 schedule_timeout(1);
191
192 if (intr && signal_pending(current)) {
193 ret = -ERESTARTSYS;
194 break;
195 }
196 }
197 __set_current_state(TASK_RUNNING);
198 return ret;
199}
200
201
202int radeon_fence_wait(struct radeon_fence *fence, bool intr) 171int radeon_fence_wait(struct radeon_fence *fence, bool intr)
203{ 172{
204 struct radeon_device *rdev; 173 struct radeon_device *rdev;
@@ -216,13 +185,6 @@ int radeon_fence_wait(struct radeon_fence *fence, bool intr)
216 return 0; 185 return 0;
217 } 186 }
218 187
219 if (rdev->family >= CHIP_R600) {
220 r = r600_fence_wait(fence, intr, 0);
221 if (r == -ERESTARTSYS)
222 return -EBUSY;
223 return r;
224 }
225
226retry: 188retry:
227 cur_jiffies = jiffies; 189 cur_jiffies = jiffies;
228 timeout = HZ / 100; 190 timeout = HZ / 100;
@@ -231,14 +193,17 @@ retry:
231 } 193 }
232 194
233 if (intr) { 195 if (intr) {
196 radeon_irq_kms_sw_irq_get(rdev);
234 r = wait_event_interruptible_timeout(rdev->fence_drv.queue, 197 r = wait_event_interruptible_timeout(rdev->fence_drv.queue,
235 radeon_fence_signaled(fence), timeout); 198 radeon_fence_signaled(fence), timeout);
236 if (unlikely(r == -ERESTARTSYS)) { 199 radeon_irq_kms_sw_irq_put(rdev);
237 return -EBUSY; 200 if (unlikely(r < 0))
238 } 201 return r;
239 } else { 202 } else {
203 radeon_irq_kms_sw_irq_get(rdev);
240 r = wait_event_timeout(rdev->fence_drv.queue, 204 r = wait_event_timeout(rdev->fence_drv.queue,
241 radeon_fence_signaled(fence), timeout); 205 radeon_fence_signaled(fence), timeout);
206 radeon_irq_kms_sw_irq_put(rdev);
242 } 207 }
243 if (unlikely(!radeon_fence_signaled(fence))) { 208 if (unlikely(!radeon_fence_signaled(fence))) {
244 if (unlikely(r == 0)) { 209 if (unlikely(r == 0)) {
@@ -359,7 +324,7 @@ int radeon_fence_driver_init(struct radeon_device *rdev)
359 write_lock_irqsave(&rdev->fence_drv.lock, irq_flags); 324 write_lock_irqsave(&rdev->fence_drv.lock, irq_flags);
360 r = radeon_scratch_get(rdev, &rdev->fence_drv.scratch_reg); 325 r = radeon_scratch_get(rdev, &rdev->fence_drv.scratch_reg);
361 if (r) { 326 if (r) {
362 DRM_ERROR("Fence failed to get a scratch register."); 327 dev_err(rdev->dev, "fence failed to get scratch register\n");
363 write_unlock_irqrestore(&rdev->fence_drv.lock, irq_flags); 328 write_unlock_irqrestore(&rdev->fence_drv.lock, irq_flags);
364 return r; 329 return r;
365 } 330 }
@@ -370,9 +335,10 @@ int radeon_fence_driver_init(struct radeon_device *rdev)
370 INIT_LIST_HEAD(&rdev->fence_drv.signaled); 335 INIT_LIST_HEAD(&rdev->fence_drv.signaled);
371 rdev->fence_drv.count_timeout = 0; 336 rdev->fence_drv.count_timeout = 0;
372 init_waitqueue_head(&rdev->fence_drv.queue); 337 init_waitqueue_head(&rdev->fence_drv.queue);
338 rdev->fence_drv.initialized = true;
373 write_unlock_irqrestore(&rdev->fence_drv.lock, irq_flags); 339 write_unlock_irqrestore(&rdev->fence_drv.lock, irq_flags);
374 if (radeon_debugfs_fence_init(rdev)) { 340 if (radeon_debugfs_fence_init(rdev)) {
375 DRM_ERROR("Failed to register debugfs file for fence !\n"); 341 dev_err(rdev->dev, "fence debugfs file creation failed\n");
376 } 342 }
377 return 0; 343 return 0;
378} 344}
@@ -381,11 +347,13 @@ void radeon_fence_driver_fini(struct radeon_device *rdev)
381{ 347{
382 unsigned long irq_flags; 348 unsigned long irq_flags;
383 349
350 if (!rdev->fence_drv.initialized)
351 return;
384 wake_up_all(&rdev->fence_drv.queue); 352 wake_up_all(&rdev->fence_drv.queue);
385 write_lock_irqsave(&rdev->fence_drv.lock, irq_flags); 353 write_lock_irqsave(&rdev->fence_drv.lock, irq_flags);
386 radeon_scratch_free(rdev, rdev->fence_drv.scratch_reg); 354 radeon_scratch_free(rdev, rdev->fence_drv.scratch_reg);
387 write_unlock_irqrestore(&rdev->fence_drv.lock, irq_flags); 355 write_unlock_irqrestore(&rdev->fence_drv.lock, irq_flags);
388 DRM_INFO("radeon: fence finalized\n"); 356 rdev->fence_drv.initialized = false;
389} 357}
390 358
391 359
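Editor's note: with r600_fence_wait() removed, every ASIC now takes the same wait path, and that path holds a software-interrupt reference only while a waiter is actually asleep. A minimal sketch of the bracketing (helper name invented for illustration):

/* Enable the fence IRQ only for the duration of the sleep. */
static long example_fence_sleep(struct radeon_device *rdev,
				struct radeon_fence *fence, long timeout)
{
	long r;

	radeon_irq_kms_sw_irq_get(rdev);	/* reference the sw interrupt */
	r = wait_event_interruptible_timeout(rdev->fence_drv.queue,
					     radeon_fence_signaled(fence),
					     timeout);
	radeon_irq_kms_sw_irq_put(rdev);	/* release it again */
	return r;
}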
diff --git a/drivers/gpu/drm/radeon/radeon_fixed.h b/drivers/gpu/drm/radeon/radeon_fixed.h
index 90187d173847..3d4d84e078ac 100644
--- a/drivers/gpu/drm/radeon/radeon_fixed.h
+++ b/drivers/gpu/drm/radeon/radeon_fixed.h
@@ -38,6 +38,23 @@ typedef union rfixed {
38#define fixed_init_half(A) { .full = rfixed_const_half((A)) } 38#define fixed_init_half(A) { .full = rfixed_const_half((A)) }
39#define rfixed_trunc(A) ((A).full >> 12) 39#define rfixed_trunc(A) ((A).full >> 12)
40 40
41static inline u32 rfixed_floor(fixed20_12 A)
42{
43 u32 non_frac = rfixed_trunc(A);
44
45 return rfixed_const(non_frac);
46}
47
48static inline u32 rfixed_ceil(fixed20_12 A)
49{
50 u32 non_frac = rfixed_trunc(A);
51
52 if (A.full > rfixed_const(non_frac))
53 return rfixed_const(non_frac + 1);
54 else
55 return rfixed_const(non_frac);
56}
57
41static inline u32 rfixed_div(fixed20_12 A, fixed20_12 B) 58static inline u32 rfixed_div(fixed20_12 A, fixed20_12 B)
42{ 59{
43 u64 tmp = ((u64)A.full << 13); 60 u64 tmp = ((u64)A.full << 13);
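Editor's worked example for the new rfixed_floor()/rfixed_ceil() helpers (not part of the patch): values carry 12 fractional bits, so, assuming rfixed_const(x) is x << 12 (consistent with rfixed_trunc() shown above), a value of 2.25 behaves as follows:

static inline void rfixed_example(void)
{
	fixed20_12 a;

	a.full = rfixed_const(2) + (rfixed_const(1) >> 2);	/* 2.25 == 0x2400 */
	WARN_ON(rfixed_trunc(a) != 2);				/* integer part */
	WARN_ON(rfixed_floor(a) != rfixed_const(2));		/* 0x2000 */
	WARN_ON(rfixed_ceil(a)  != rfixed_const(3));		/* 0x3000 */
}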
diff --git a/drivers/gpu/drm/radeon/radeon_gart.c b/drivers/gpu/drm/radeon/radeon_gart.c
index a68d7566178c..1770d3c07fd0 100644
--- a/drivers/gpu/drm/radeon/radeon_gart.c
+++ b/drivers/gpu/drm/radeon/radeon_gart.c
@@ -78,11 +78,9 @@ int radeon_gart_table_vram_alloc(struct radeon_device *rdev)
78 int r; 78 int r;
79 79
80 if (rdev->gart.table.vram.robj == NULL) { 80 if (rdev->gart.table.vram.robj == NULL) {
81 r = radeon_object_create(rdev, NULL, 81 r = radeon_bo_create(rdev, NULL, rdev->gart.table_size,
82 rdev->gart.table_size, 82 true, RADEON_GEM_DOMAIN_VRAM,
83 true, 83 &rdev->gart.table.vram.robj);
84 RADEON_GEM_DOMAIN_VRAM,
85 false, &rdev->gart.table.vram.robj);
86 if (r) { 84 if (r) {
87 return r; 85 return r;
88 } 86 }
@@ -95,32 +93,38 @@ int radeon_gart_table_vram_pin(struct radeon_device *rdev)
95 uint64_t gpu_addr; 93 uint64_t gpu_addr;
96 int r; 94 int r;
97 95
98 r = radeon_object_pin(rdev->gart.table.vram.robj, 96 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
99 RADEON_GEM_DOMAIN_VRAM, &gpu_addr); 97 if (unlikely(r != 0))
100 if (r) {
101 radeon_object_unref(&rdev->gart.table.vram.robj);
102 return r; 98 return r;
103 } 99 r = radeon_bo_pin(rdev->gart.table.vram.robj,
104 r = radeon_object_kmap(rdev->gart.table.vram.robj, 100 RADEON_GEM_DOMAIN_VRAM, &gpu_addr);
105 (void **)&rdev->gart.table.vram.ptr);
106 if (r) { 101 if (r) {
107 radeon_object_unpin(rdev->gart.table.vram.robj); 102 radeon_bo_unreserve(rdev->gart.table.vram.robj);
108 radeon_object_unref(&rdev->gart.table.vram.robj);
109 DRM_ERROR("radeon: failed to map gart vram table.\n");
110 return r; 103 return r;
111 } 104 }
105 r = radeon_bo_kmap(rdev->gart.table.vram.robj,
106 (void **)&rdev->gart.table.vram.ptr);
107 if (r)
108 radeon_bo_unpin(rdev->gart.table.vram.robj);
109 radeon_bo_unreserve(rdev->gart.table.vram.robj);
112 rdev->gart.table_addr = gpu_addr; 110 rdev->gart.table_addr = gpu_addr;
113 return 0; 111 return r;
114} 112}
115 113
116void radeon_gart_table_vram_free(struct radeon_device *rdev) 114void radeon_gart_table_vram_free(struct radeon_device *rdev)
117{ 115{
116 int r;
117
118 if (rdev->gart.table.vram.robj == NULL) { 118 if (rdev->gart.table.vram.robj == NULL) {
119 return; 119 return;
120 } 120 }
121 radeon_object_kunmap(rdev->gart.table.vram.robj); 121 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
122 radeon_object_unpin(rdev->gart.table.vram.robj); 122 if (likely(r == 0)) {
123 radeon_object_unref(&rdev->gart.table.vram.robj); 123 radeon_bo_kunmap(rdev->gart.table.vram.robj);
124 radeon_bo_unpin(rdev->gart.table.vram.robj);
125 radeon_bo_unreserve(rdev->gart.table.vram.robj);
126 }
127 radeon_bo_unref(&rdev->gart.table.vram.robj);
124} 128}
125 129
126 130
@@ -135,6 +139,7 @@ void radeon_gart_unbind(struct radeon_device *rdev, unsigned offset,
135 unsigned t; 139 unsigned t;
136 unsigned p; 140 unsigned p;
137 int i, j; 141 int i, j;
142 u64 page_base;
138 143
139 if (!rdev->gart.ready) { 144 if (!rdev->gart.ready) {
140 WARN(1, "trying to unbind memory to unitialized GART !\n"); 145 WARN(1, "trying to unbind memory to unitialized GART !\n");
@@ -147,9 +152,11 @@ void radeon_gart_unbind(struct radeon_device *rdev, unsigned offset,
147 pci_unmap_page(rdev->pdev, rdev->gart.pages_addr[p], 152 pci_unmap_page(rdev->pdev, rdev->gart.pages_addr[p],
148 PAGE_SIZE, PCI_DMA_BIDIRECTIONAL); 153 PAGE_SIZE, PCI_DMA_BIDIRECTIONAL);
149 rdev->gart.pages[p] = NULL; 154 rdev->gart.pages[p] = NULL;
150 rdev->gart.pages_addr[p] = 0; 155 rdev->gart.pages_addr[p] = rdev->dummy_page.addr;
156 page_base = rdev->gart.pages_addr[p];
151 for (j = 0; j < (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); j++, t++) { 157 for (j = 0; j < (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); j++, t++) {
152 radeon_gart_set_page(rdev, t, 0); 158 radeon_gart_set_page(rdev, t, page_base);
159 page_base += RADEON_GPU_PAGE_SIZE;
153 } 160 }
154 } 161 }
155 } 162 }
@@ -195,8 +202,26 @@ int radeon_gart_bind(struct radeon_device *rdev, unsigned offset,
195 return 0; 202 return 0;
196} 203}
197 204
205void radeon_gart_restore(struct radeon_device *rdev)
206{
207 int i, j, t;
208 u64 page_base;
209
210 for (i = 0, t = 0; i < rdev->gart.num_cpu_pages; i++) {
211 page_base = rdev->gart.pages_addr[i];
212 for (j = 0; j < (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); j++, t++) {
213 radeon_gart_set_page(rdev, t, page_base);
214 page_base += RADEON_GPU_PAGE_SIZE;
215 }
216 }
217 mb();
218 radeon_gart_tlb_flush(rdev);
219}
220
198int radeon_gart_init(struct radeon_device *rdev) 221int radeon_gart_init(struct radeon_device *rdev)
199{ 222{
223 int r, i;
224
200 if (rdev->gart.pages) { 225 if (rdev->gart.pages) {
201 return 0; 226 return 0;
202 } 227 }
@@ -205,6 +230,9 @@ int radeon_gart_init(struct radeon_device *rdev)
205 DRM_ERROR("Page size is smaller than GPU page size!\n"); 230 DRM_ERROR("Page size is smaller than GPU page size!\n");
206 return -EINVAL; 231 return -EINVAL;
207 } 232 }
233 r = radeon_dummy_page_init(rdev);
234 if (r)
235 return r;
208 /* Compute table size */ 236 /* Compute table size */
209 rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE; 237 rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE;
210 rdev->gart.num_gpu_pages = rdev->mc.gtt_size / RADEON_GPU_PAGE_SIZE; 238 rdev->gart.num_gpu_pages = rdev->mc.gtt_size / RADEON_GPU_PAGE_SIZE;
@@ -223,6 +251,10 @@ int radeon_gart_init(struct radeon_device *rdev)
223 radeon_gart_fini(rdev); 251 radeon_gart_fini(rdev);
224 return -ENOMEM; 252 return -ENOMEM;
225 } 253 }
254 /* set GART entry to point to the dummy page by default */
255 for (i = 0; i < rdev->gart.num_cpu_pages; i++) {
256 rdev->gart.pages_addr[i] = rdev->dummy_page.addr;
257 }
226 return 0; 258 return 0;
227} 259}
228 260
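Editor's note: unbound entries now point at the dummy page rather than 0, and radeon_gart_restore() can replay the whole table after a reset or resume. A hedged sketch of how an ASIC's GART-enable path might use it (register programming omitted; this is illustrative, not a quote of any specific rX00 function):

static int example_gart_enable(struct radeon_device *rdev)
{
	int r;

	r = radeon_gart_table_vram_pin(rdev);	/* map the page-table BO */
	if (r)
		return r;
	/* ... program the ASIC's GART aperture registers here ... */
	radeon_gart_restore(rdev);		/* rewrite every PTE and flush the TLB */
	rdev->gart.ready = true;
	return 0;
}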
diff --git a/drivers/gpu/drm/radeon/radeon_gem.c b/drivers/gpu/drm/radeon/radeon_gem.c
index d880edf254db..ef92d147d8f0 100644
--- a/drivers/gpu/drm/radeon/radeon_gem.c
+++ b/drivers/gpu/drm/radeon/radeon_gem.c
@@ -38,22 +38,21 @@ int radeon_gem_object_init(struct drm_gem_object *obj)
38 38
39void radeon_gem_object_free(struct drm_gem_object *gobj) 39void radeon_gem_object_free(struct drm_gem_object *gobj)
40{ 40{
41 struct radeon_object *robj = gobj->driver_private; 41 struct radeon_bo *robj = gobj->driver_private;
42 42
43 gobj->driver_private = NULL; 43 gobj->driver_private = NULL;
44 if (robj) { 44 if (robj) {
45 radeon_object_unref(&robj); 45 radeon_bo_unref(&robj);
46 } 46 }
47} 47}
48 48
49int radeon_gem_object_create(struct radeon_device *rdev, int size, 49int radeon_gem_object_create(struct radeon_device *rdev, int size,
50 int alignment, int initial_domain, 50 int alignment, int initial_domain,
51 bool discardable, bool kernel, 51 bool discardable, bool kernel,
52 bool interruptible, 52 struct drm_gem_object **obj)
53 struct drm_gem_object **obj)
54{ 53{
55 struct drm_gem_object *gobj; 54 struct drm_gem_object *gobj;
56 struct radeon_object *robj; 55 struct radeon_bo *robj;
57 int r; 56 int r;
58 57
59 *obj = NULL; 58 *obj = NULL;
@@ -65,14 +64,12 @@ int radeon_gem_object_create(struct radeon_device *rdev, int size,
65 if (alignment < PAGE_SIZE) { 64 if (alignment < PAGE_SIZE) {
66 alignment = PAGE_SIZE; 65 alignment = PAGE_SIZE;
67 } 66 }
68 r = radeon_object_create(rdev, gobj, size, kernel, initial_domain, 67 r = radeon_bo_create(rdev, gobj, size, kernel, initial_domain, &robj);
69 interruptible, &robj);
70 if (r) { 68 if (r) {
71 DRM_ERROR("Failed to allocate GEM object (%d, %d, %u)\n", 69 if (r != -ERESTARTSYS)
72 size, initial_domain, alignment); 70 DRM_ERROR("Failed to allocate GEM object (%d, %d, %u, %d)\n",
73 mutex_lock(&rdev->ddev->struct_mutex); 71 size, initial_domain, alignment, r);
74 drm_gem_object_unreference(gobj); 72 drm_gem_object_unreference_unlocked(gobj);
75 mutex_unlock(&rdev->ddev->struct_mutex);
76 return r; 73 return r;
77 } 74 }
78 gobj->driver_private = robj; 75 gobj->driver_private = robj;
@@ -83,33 +80,33 @@ int radeon_gem_object_create(struct radeon_device *rdev, int size,
83int radeon_gem_object_pin(struct drm_gem_object *obj, uint32_t pin_domain, 80int radeon_gem_object_pin(struct drm_gem_object *obj, uint32_t pin_domain,
84 uint64_t *gpu_addr) 81 uint64_t *gpu_addr)
85{ 82{
86 struct radeon_object *robj = obj->driver_private; 83 struct radeon_bo *robj = obj->driver_private;
87 uint32_t flags; 84 int r;
88 85
89 switch (pin_domain) { 86 r = radeon_bo_reserve(robj, false);
90 case RADEON_GEM_DOMAIN_VRAM: 87 if (unlikely(r != 0))
91 flags = TTM_PL_FLAG_VRAM; 88 return r;
92 break; 89 r = radeon_bo_pin(robj, pin_domain, gpu_addr);
93 case RADEON_GEM_DOMAIN_GTT: 90 radeon_bo_unreserve(robj);
94 flags = TTM_PL_FLAG_TT; 91 return r;
95 break;
96 default:
97 flags = TTM_PL_FLAG_SYSTEM;
98 break;
99 }
100 return radeon_object_pin(robj, flags, gpu_addr);
101} 92}
102 93
103void radeon_gem_object_unpin(struct drm_gem_object *obj) 94void radeon_gem_object_unpin(struct drm_gem_object *obj)
104{ 95{
105 struct radeon_object *robj = obj->driver_private; 96 struct radeon_bo *robj = obj->driver_private;
106 radeon_object_unpin(robj); 97 int r;
98
99 r = radeon_bo_reserve(robj, false);
100 if (likely(r == 0)) {
101 radeon_bo_unpin(robj);
102 radeon_bo_unreserve(robj);
103 }
107} 104}
108 105
109int radeon_gem_set_domain(struct drm_gem_object *gobj, 106int radeon_gem_set_domain(struct drm_gem_object *gobj,
110 uint32_t rdomain, uint32_t wdomain) 107 uint32_t rdomain, uint32_t wdomain)
111{ 108{
112 struct radeon_object *robj; 109 struct radeon_bo *robj;
113 uint32_t domain; 110 uint32_t domain;
114 int r; 111 int r;
115 112
@@ -127,7 +124,7 @@ int radeon_gem_set_domain(struct drm_gem_object *gobj,
127 } 124 }
128 if (domain == RADEON_GEM_DOMAIN_CPU) { 125 if (domain == RADEON_GEM_DOMAIN_CPU) {
129 /* Asking for cpu access wait for object idle */ 126 /* Asking for cpu access wait for object idle */
130 r = radeon_object_wait(robj); 127 r = radeon_bo_wait(robj, NULL, false);
131 if (r) { 128 if (r) {
132 printk(KERN_ERR "Failed to wait for object !\n"); 129 printk(KERN_ERR "Failed to wait for object !\n");
133 return r; 130 return r;
@@ -144,7 +141,7 @@ int radeon_gem_init(struct radeon_device *rdev)
144 141
145void radeon_gem_fini(struct radeon_device *rdev) 142void radeon_gem_fini(struct radeon_device *rdev)
146{ 143{
147 radeon_object_force_delete(rdev); 144 radeon_bo_force_delete(rdev);
148} 145}
149 146
150 147
@@ -158,9 +155,13 @@ int radeon_gem_info_ioctl(struct drm_device *dev, void *data,
158 struct drm_radeon_gem_info *args = data; 155 struct drm_radeon_gem_info *args = data;
159 156
160 args->vram_size = rdev->mc.real_vram_size; 157 args->vram_size = rdev->mc.real_vram_size;
161 /* FIXME: report somethings that makes sense */ 158 args->vram_visible = rdev->mc.real_vram_size;
162 args->vram_visible = rdev->mc.real_vram_size - (4 * 1024 * 1024); 159 if (rdev->stollen_vga_memory)
163 args->gart_size = rdev->mc.gtt_size; 160 args->vram_visible -= radeon_bo_size(rdev->stollen_vga_memory);
161 if (rdev->fbdev_rbo)
162 args->vram_visible -= radeon_bo_size(rdev->fbdev_rbo);
163 args->gart_size = rdev->mc.gtt_size - rdev->cp.ring_size - 4096 -
164 RADEON_IB_POOL_SIZE*64*1024;
164 return 0; 165 return 0;
165} 166}
166 167
@@ -192,21 +193,17 @@ int radeon_gem_create_ioctl(struct drm_device *dev, void *data,
192 /* create a gem object to contain this object in */ 193 /* create a gem object to contain this object in */
193 args->size = roundup(args->size, PAGE_SIZE); 194 args->size = roundup(args->size, PAGE_SIZE);
194 r = radeon_gem_object_create(rdev, args->size, args->alignment, 195 r = radeon_gem_object_create(rdev, args->size, args->alignment,
195 args->initial_domain, false, 196 args->initial_domain, false,
196 false, true, &gobj); 197 false, &gobj);
197 if (r) { 198 if (r) {
198 return r; 199 return r;
199 } 200 }
200 r = drm_gem_handle_create(filp, gobj, &handle); 201 r = drm_gem_handle_create(filp, gobj, &handle);
201 if (r) { 202 if (r) {
202 mutex_lock(&dev->struct_mutex); 203 drm_gem_object_unreference_unlocked(gobj);
203 drm_gem_object_unreference(gobj);
204 mutex_unlock(&dev->struct_mutex);
205 return r; 204 return r;
206 } 205 }
207 mutex_lock(&dev->struct_mutex); 206 drm_gem_object_handle_unreference_unlocked(gobj);
208 drm_gem_object_handle_unreference(gobj);
209 mutex_unlock(&dev->struct_mutex);
210 args->handle = handle; 207 args->handle = handle;
211 return 0; 208 return 0;
212} 209}
@@ -218,7 +215,7 @@ int radeon_gem_set_domain_ioctl(struct drm_device *dev, void *data,
218 * just validate the BO into a certain domain */ 215 * just validate the BO into a certain domain */
219 struct drm_radeon_gem_set_domain *args = data; 216 struct drm_radeon_gem_set_domain *args = data;
220 struct drm_gem_object *gobj; 217 struct drm_gem_object *gobj;
221 struct radeon_object *robj; 218 struct radeon_bo *robj;
222 int r; 219 int r;
223 220
224 /* for now if someone requests domain CPU - 221 /* for now if someone requests domain CPU -
@@ -233,9 +230,7 @@ int radeon_gem_set_domain_ioctl(struct drm_device *dev, void *data,
233 230
234 r = radeon_gem_set_domain(gobj, args->read_domains, args->write_domain); 231 r = radeon_gem_set_domain(gobj, args->read_domains, args->write_domain);
235 232
236 mutex_lock(&dev->struct_mutex); 233 drm_gem_object_unreference_unlocked(gobj);
237 drm_gem_object_unreference(gobj);
238 mutex_unlock(&dev->struct_mutex);
239 return r; 234 return r;
240} 235}
241 236
@@ -244,19 +239,16 @@ int radeon_gem_mmap_ioctl(struct drm_device *dev, void *data,
244{ 239{
245 struct drm_radeon_gem_mmap *args = data; 240 struct drm_radeon_gem_mmap *args = data;
246 struct drm_gem_object *gobj; 241 struct drm_gem_object *gobj;
247 struct radeon_object *robj; 242 struct radeon_bo *robj;
248 int r;
249 243
250 gobj = drm_gem_object_lookup(dev, filp, args->handle); 244 gobj = drm_gem_object_lookup(dev, filp, args->handle);
251 if (gobj == NULL) { 245 if (gobj == NULL) {
252 return -EINVAL; 246 return -EINVAL;
253 } 247 }
254 robj = gobj->driver_private; 248 robj = gobj->driver_private;
255 r = radeon_object_mmap(robj, &args->addr_ptr); 249 args->addr_ptr = radeon_bo_mmap_offset(robj);
256 mutex_lock(&dev->struct_mutex); 250 drm_gem_object_unreference_unlocked(gobj);
257 drm_gem_object_unreference(gobj); 251 return 0;
258 mutex_unlock(&dev->struct_mutex);
259 return r;
260} 252}
261 253
262int radeon_gem_busy_ioctl(struct drm_device *dev, void *data, 254int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
@@ -264,16 +256,16 @@ int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
264{ 256{
265 struct drm_radeon_gem_busy *args = data; 257 struct drm_radeon_gem_busy *args = data;
266 struct drm_gem_object *gobj; 258 struct drm_gem_object *gobj;
267 struct radeon_object *robj; 259 struct radeon_bo *robj;
268 int r; 260 int r;
269 uint32_t cur_placement; 261 uint32_t cur_placement = 0;
270 262
271 gobj = drm_gem_object_lookup(dev, filp, args->handle); 263 gobj = drm_gem_object_lookup(dev, filp, args->handle);
272 if (gobj == NULL) { 264 if (gobj == NULL) {
273 return -EINVAL; 265 return -EINVAL;
274 } 266 }
275 robj = gobj->driver_private; 267 robj = gobj->driver_private;
276 r = radeon_object_busy_domain(robj, &cur_placement); 268 r = radeon_bo_wait(robj, &cur_placement, true);
277 switch (cur_placement) { 269 switch (cur_placement) {
278 case TTM_PL_VRAM: 270 case TTM_PL_VRAM:
279 args->domain = RADEON_GEM_DOMAIN_VRAM; 271 args->domain = RADEON_GEM_DOMAIN_VRAM;
@@ -286,9 +278,7 @@ int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
286 default: 278 default:
287 break; 279 break;
288 } 280 }
289 mutex_lock(&dev->struct_mutex); 281 drm_gem_object_unreference_unlocked(gobj);
290 drm_gem_object_unreference(gobj);
291 mutex_unlock(&dev->struct_mutex);
292 return r; 282 return r;
293} 283}
294 284
@@ -297,7 +287,7 @@ int radeon_gem_wait_idle_ioctl(struct drm_device *dev, void *data,
297{ 287{
298 struct drm_radeon_gem_wait_idle *args = data; 288 struct drm_radeon_gem_wait_idle *args = data;
299 struct drm_gem_object *gobj; 289 struct drm_gem_object *gobj;
300 struct radeon_object *robj; 290 struct radeon_bo *robj;
301 int r; 291 int r;
302 292
303 gobj = drm_gem_object_lookup(dev, filp, args->handle); 293 gobj = drm_gem_object_lookup(dev, filp, args->handle);
@@ -305,10 +295,11 @@ int radeon_gem_wait_idle_ioctl(struct drm_device *dev, void *data,
305 return -EINVAL; 295 return -EINVAL;
306 } 296 }
307 robj = gobj->driver_private; 297 robj = gobj->driver_private;
308 r = radeon_object_wait(robj); 298 r = radeon_bo_wait(robj, NULL, false);
309 mutex_lock(&dev->struct_mutex); 299 /* callback hw specific functions if any */
310 drm_gem_object_unreference(gobj); 300 if (robj->rdev->asic->ioctl_wait_idle)
311 mutex_unlock(&dev->struct_mutex); 301 robj->rdev->asic->ioctl_wait_idle(robj->rdev, robj);
302 drm_gem_object_unreference_unlocked(gobj);
312 return r; 303 return r;
313} 304}
314 305
@@ -317,7 +308,7 @@ int radeon_gem_set_tiling_ioctl(struct drm_device *dev, void *data,
317{ 308{
318 struct drm_radeon_gem_set_tiling *args = data; 309 struct drm_radeon_gem_set_tiling *args = data;
319 struct drm_gem_object *gobj; 310 struct drm_gem_object *gobj;
320 struct radeon_object *robj; 311 struct radeon_bo *robj;
321 int r = 0; 312 int r = 0;
322 313
323 DRM_DEBUG("%d \n", args->handle); 314 DRM_DEBUG("%d \n", args->handle);
@@ -325,10 +316,8 @@ int radeon_gem_set_tiling_ioctl(struct drm_device *dev, void *data,
325 if (gobj == NULL) 316 if (gobj == NULL)
326 return -EINVAL; 317 return -EINVAL;
327 robj = gobj->driver_private; 318 robj = gobj->driver_private;
328 radeon_object_set_tiling_flags(robj, args->tiling_flags, args->pitch); 319 r = radeon_bo_set_tiling_flags(robj, args->tiling_flags, args->pitch);
329 mutex_lock(&dev->struct_mutex); 320 drm_gem_object_unreference_unlocked(gobj);
330 drm_gem_object_unreference(gobj);
331 mutex_unlock(&dev->struct_mutex);
332 return r; 321 return r;
333} 322}
334 323
@@ -337,18 +326,20 @@ int radeon_gem_get_tiling_ioctl(struct drm_device *dev, void *data,
337{ 326{
338 struct drm_radeon_gem_get_tiling *args = data; 327 struct drm_radeon_gem_get_tiling *args = data;
339 struct drm_gem_object *gobj; 328 struct drm_gem_object *gobj;
340 struct radeon_object *robj; 329 struct radeon_bo *rbo;
341 int r = 0; 330 int r = 0;
342 331
343 DRM_DEBUG("\n"); 332 DRM_DEBUG("\n");
344 gobj = drm_gem_object_lookup(dev, filp, args->handle); 333 gobj = drm_gem_object_lookup(dev, filp, args->handle);
345 if (gobj == NULL) 334 if (gobj == NULL)
346 return -EINVAL; 335 return -EINVAL;
347 robj = gobj->driver_private; 336 rbo = gobj->driver_private;
348 radeon_object_get_tiling_flags(robj, &args->tiling_flags, 337 r = radeon_bo_reserve(rbo, false);
349 &args->pitch); 338 if (unlikely(r != 0))
350 mutex_lock(&dev->struct_mutex); 339 goto out;
351 drm_gem_object_unreference(gobj); 340 radeon_bo_get_tiling_flags(rbo, &args->tiling_flags, &args->pitch);
352 mutex_unlock(&dev->struct_mutex); 341 radeon_bo_unreserve(rbo);
342out:
343 drm_gem_object_unreference_unlocked(gobj);
353 return r; 344 return r;
354} 345}
diff --git a/drivers/gpu/drm/radeon/radeon_i2c.c b/drivers/gpu/drm/radeon/radeon_i2c.c
index dd438d32e5c0..5def6f5dff38 100644
--- a/drivers/gpu/drm/radeon/radeon_i2c.c
+++ b/drivers/gpu/drm/radeon/radeon_i2c.c
@@ -26,6 +26,7 @@
26#include "drmP.h" 26#include "drmP.h"
27#include "radeon_drm.h" 27#include "radeon_drm.h"
28#include "radeon.h" 28#include "radeon.h"
29#include "atom.h"
29 30
30/** 31/**
31 * radeon_ddc_probe 32 * radeon_ddc_probe
@@ -58,36 +59,57 @@ bool radeon_ddc_probe(struct radeon_connector *radeon_connector)
58 return false; 59 return false;
59} 60}
60 61
62/* bit banging i2c */
61 63
62void radeon_i2c_do_lock(struct radeon_connector *radeon_connector, int lock_state) 64static void radeon_i2c_do_lock(struct radeon_i2c_chan *i2c, int lock_state)
63{ 65{
64 struct radeon_device *rdev = radeon_connector->base.dev->dev_private; 66 struct radeon_device *rdev = i2c->dev->dev_private;
67 struct radeon_i2c_bus_rec *rec = &i2c->rec;
65 uint32_t temp; 68 uint32_t temp;
66 struct radeon_i2c_bus_rec *rec = &radeon_connector->ddc_bus->rec;
67 69
68 /* RV410 appears to have a bug where the hw i2c in reset 70 /* RV410 appears to have a bug where the hw i2c in reset
69 * holds the i2c port in a bad state - switch hw i2c away before 71 * holds the i2c port in a bad state - switch hw i2c away before
70 * doing DDC - do this for all r200s/r300s/r400s for safety sake 72 * doing DDC - do this for all r200s/r300s/r400s for safety sake
71 */ 73 */
72 if ((rdev->family >= CHIP_R200) && !ASIC_IS_AVIVO(rdev)) { 74 if (rec->hw_capable) {
73 if (rec->a_clk_reg == RADEON_GPIO_MONID) { 75 if ((rdev->family >= CHIP_R200) && !ASIC_IS_AVIVO(rdev)) {
74 WREG32(RADEON_DVI_I2C_CNTL_0, (RADEON_I2C_SOFT_RST | 76 u32 reg;
75 R200_DVI_I2C_PIN_SEL(R200_SEL_DDC1))); 77
76 } else { 78 if (rdev->family >= CHIP_RV350)
77 WREG32(RADEON_DVI_I2C_CNTL_0, (RADEON_I2C_SOFT_RST | 79 reg = RADEON_GPIO_MONID;
78 R200_DVI_I2C_PIN_SEL(R200_SEL_DDC3))); 80 else if ((rdev->family == CHIP_R300) ||
81 (rdev->family == CHIP_R350))
82 reg = RADEON_GPIO_DVI_DDC;
83 else
84 reg = RADEON_GPIO_CRT2_DDC;
85
86 mutex_lock(&rdev->dc_hw_i2c_mutex);
87 if (rec->a_clk_reg == reg) {
88 WREG32(RADEON_DVI_I2C_CNTL_0, (RADEON_I2C_SOFT_RST |
89 R200_DVI_I2C_PIN_SEL(R200_SEL_DDC1)));
90 } else {
91 WREG32(RADEON_DVI_I2C_CNTL_0, (RADEON_I2C_SOFT_RST |
92 R200_DVI_I2C_PIN_SEL(R200_SEL_DDC3)));
93 }
94 mutex_unlock(&rdev->dc_hw_i2c_mutex);
79 } 95 }
80 } 96 }
81 if (lock_state) {
82 temp = RREG32(rec->a_clk_reg);
83 temp &= ~(rec->a_clk_mask);
84 WREG32(rec->a_clk_reg, temp);
85 97
86 temp = RREG32(rec->a_data_reg); 98 /* clear the output pin values */
87 temp &= ~(rec->a_data_mask); 99 temp = RREG32(rec->a_clk_reg) & ~rec->a_clk_mask;
88 WREG32(rec->a_data_reg, temp); 100 WREG32(rec->a_clk_reg, temp);
89 } 101
102 temp = RREG32(rec->a_data_reg) & ~rec->a_data_mask;
103 WREG32(rec->a_data_reg, temp);
104
105 /* set the pins to input */
106 temp = RREG32(rec->en_clk_reg) & ~rec->en_clk_mask;
107 WREG32(rec->en_clk_reg, temp);
90 108
109 temp = RREG32(rec->en_data_reg) & ~rec->en_data_mask;
110 WREG32(rec->en_data_reg, temp);
111
112 /* mask the gpio pins for software use */
91 temp = RREG32(rec->mask_clk_reg); 113 temp = RREG32(rec->mask_clk_reg);
92 if (lock_state) 114 if (lock_state)
93 temp |= rec->mask_clk_mask; 115 temp |= rec->mask_clk_mask;
@@ -112,8 +134,9 @@ static int get_clock(void *i2c_priv)
112 struct radeon_i2c_bus_rec *rec = &i2c->rec; 134 struct radeon_i2c_bus_rec *rec = &i2c->rec;
113 uint32_t val; 135 uint32_t val;
114 136
115 val = RREG32(rec->get_clk_reg); 137 /* read the value off the pin */
116 val &= rec->get_clk_mask; 138 val = RREG32(rec->y_clk_reg);
139 val &= rec->y_clk_mask;
117 140
118 return (val != 0); 141 return (val != 0);
119} 142}
@@ -126,8 +149,10 @@ static int get_data(void *i2c_priv)
126 struct radeon_i2c_bus_rec *rec = &i2c->rec; 149 struct radeon_i2c_bus_rec *rec = &i2c->rec;
127 uint32_t val; 150 uint32_t val;
128 151
129 val = RREG32(rec->get_data_reg); 152 /* read the value off the pin */
130 val &= rec->get_data_mask; 153 val = RREG32(rec->y_data_reg);
154 val &= rec->y_data_mask;
155
131 return (val != 0); 156 return (val != 0);
132} 157}
133 158
@@ -138,9 +163,10 @@ static void set_clock(void *i2c_priv, int clock)
138 struct radeon_i2c_bus_rec *rec = &i2c->rec; 163 struct radeon_i2c_bus_rec *rec = &i2c->rec;
139 uint32_t val; 164 uint32_t val;
140 165
141 val = RREG32(rec->put_clk_reg) & (uint32_t)~(rec->put_clk_mask); 166 /* set pin direction */
142 val |= clock ? 0 : rec->put_clk_mask; 167 val = RREG32(rec->en_clk_reg) & ~rec->en_clk_mask;
143 WREG32(rec->put_clk_reg, val); 168 val |= clock ? 0 : rec->en_clk_mask;
169 WREG32(rec->en_clk_reg, val);
144} 170}
145 171
146static void set_data(void *i2c_priv, int data) 172static void set_data(void *i2c_priv, int data)
@@ -150,15 +176,698 @@ static void set_data(void *i2c_priv, int data)
150 struct radeon_i2c_bus_rec *rec = &i2c->rec; 176 struct radeon_i2c_bus_rec *rec = &i2c->rec;
151 uint32_t val; 177 uint32_t val;
152 178
153 val = RREG32(rec->put_data_reg) & (uint32_t)~(rec->put_data_mask); 179 /* set pin direction */
154 val |= data ? 0 : rec->put_data_mask; 180 val = RREG32(rec->en_data_reg) & ~rec->en_data_mask;
155 WREG32(rec->put_data_reg, val); 181 val |= data ? 0 : rec->en_data_mask;
182 WREG32(rec->en_data_reg, val);
183}
184
185static int pre_xfer(struct i2c_adapter *i2c_adap)
186{
187 struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
188
189 radeon_i2c_do_lock(i2c, 1);
190
191 return 0;
192}
193
194static void post_xfer(struct i2c_adapter *i2c_adap)
195{
196 struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
197
198 radeon_i2c_do_lock(i2c, 0);
199}
200
201/* hw i2c */
202
203static u32 radeon_get_i2c_prescale(struct radeon_device *rdev)
204{
205 u32 sclk = radeon_get_engine_clock(rdev);
206 u32 prescale = 0;
207 u32 nm;
208 u8 n, m, loop;
209 int i2c_clock;
210
211 switch (rdev->family) {
212 case CHIP_R100:
213 case CHIP_RV100:
214 case CHIP_RS100:
215 case CHIP_RV200:
216 case CHIP_RS200:
217 case CHIP_R200:
218 case CHIP_RV250:
219 case CHIP_RS300:
220 case CHIP_RV280:
221 case CHIP_R300:
222 case CHIP_R350:
223 case CHIP_RV350:
224 i2c_clock = 60;
225 nm = (sclk * 10) / (i2c_clock * 4);
226 for (loop = 1; loop < 255; loop++) {
227 if ((nm / loop) < loop)
228 break;
229 }
230 n = loop - 1;
231 m = loop - 2;
232 prescale = m | (n << 8);
233 break;
234 case CHIP_RV380:
235 case CHIP_RS400:
236 case CHIP_RS480:
237 case CHIP_R420:
238 case CHIP_R423:
239 case CHIP_RV410:
240 prescale = (((sclk * 10)/(4 * 128 * 100) + 1) << 8) + 128;
241 break;
242 case CHIP_RS600:
243 case CHIP_RS690:
244 case CHIP_RS740:
245 /* todo */
246 break;
247 case CHIP_RV515:
248 case CHIP_R520:
249 case CHIP_RV530:
250 case CHIP_RV560:
251 case CHIP_RV570:
252 case CHIP_R580:
253 i2c_clock = 50;
254 if (rdev->family == CHIP_R520)
255 prescale = (127 << 8) + ((sclk * 10) / (4 * 127 * i2c_clock));
256 else
257 prescale = (((sclk * 10)/(4 * 128 * 100) + 1) << 8) + 128;
258 break;
259 case CHIP_R600:
260 case CHIP_RV610:
261 case CHIP_RV630:
262 case CHIP_RV670:
263 /* todo */
264 break;
265 case CHIP_RV620:
266 case CHIP_RV635:
267 case CHIP_RS780:
268 case CHIP_RS880:
269 case CHIP_RV770:
270 case CHIP_RV730:
271 case CHIP_RV710:
272 case CHIP_RV740:
273 /* todo */
274 break;
275 case CHIP_CEDAR:
276 case CHIP_REDWOOD:
277 case CHIP_JUNIPER:
278 case CHIP_CYPRESS:
279 case CHIP_HEMLOCK:
280 /* todo */
281 break;
282 default:
283 DRM_ERROR("i2c: unhandled radeon chip\n");
284 break;
285 }
286 return prescale;
287}
288
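Editor's worked example for radeon_get_i2c_prescale() above (not from the patch): taking sclk == 20000, i.e. a 200 MHz engine clock reported in 10 kHz units, the R1xx-R3xx branch computes:

/*
 *   nm = (20000 * 10) / (60 * 4) = 833
 *   the loop stops at loop == 29, the first value with nm/loop < loop
 *   n = 28, m = 27
 *   prescale = 27 | (28 << 8) = 0x1c1b
 */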
289
290/* hw i2c engine for r1xx-4xx hardware
291 * hw can buffer up to 15 bytes
292 */
293static int r100_hw_i2c_xfer(struct i2c_adapter *i2c_adap,
294 struct i2c_msg *msgs, int num)
295{
296 struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
297 struct radeon_device *rdev = i2c->dev->dev_private;
298 struct radeon_i2c_bus_rec *rec = &i2c->rec;
299 struct i2c_msg *p;
300 int i, j, k, ret = num;
301 u32 prescale;
302 u32 i2c_cntl_0, i2c_cntl_1, i2c_data;
303 u32 tmp, reg;
304
305 mutex_lock(&rdev->dc_hw_i2c_mutex);
306 /* take the pm lock since we need a constant sclk */
307 mutex_lock(&rdev->pm.mutex);
308
309 prescale = radeon_get_i2c_prescale(rdev);
310
311 reg = ((prescale << RADEON_I2C_PRESCALE_SHIFT) |
312 RADEON_I2C_DRIVE_EN |
313 RADEON_I2C_START |
314 RADEON_I2C_STOP |
315 RADEON_I2C_GO);
316
317 if (rdev->is_atom_bios) {
318 tmp = RREG32(RADEON_BIOS_6_SCRATCH);
319 WREG32(RADEON_BIOS_6_SCRATCH, tmp | ATOM_S6_HW_I2C_BUSY_STATE);
320 }
321
322 if (rec->mm_i2c) {
323 i2c_cntl_0 = RADEON_I2C_CNTL_0;
324 i2c_cntl_1 = RADEON_I2C_CNTL_1;
325 i2c_data = RADEON_I2C_DATA;
326 } else {
327 i2c_cntl_0 = RADEON_DVI_I2C_CNTL_0;
328 i2c_cntl_1 = RADEON_DVI_I2C_CNTL_1;
329 i2c_data = RADEON_DVI_I2C_DATA;
330
331 switch (rdev->family) {
332 case CHIP_R100:
333 case CHIP_RV100:
334 case CHIP_RS100:
335 case CHIP_RV200:
336 case CHIP_RS200:
337 case CHIP_RS300:
338 switch (rec->mask_clk_reg) {
339 case RADEON_GPIO_DVI_DDC:
340 /* no gpio select bit */
341 break;
342 default:
343 DRM_ERROR("gpio not supported with hw i2c\n");
344 ret = -EINVAL;
345 goto done;
346 }
347 break;
348 case CHIP_R200:
349 /* only bit 4 on r200 */
350 switch (rec->mask_clk_reg) {
351 case RADEON_GPIO_DVI_DDC:
352 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC1);
353 break;
354 case RADEON_GPIO_MONID:
355 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC3);
356 break;
357 default:
358 DRM_ERROR("gpio not supported with hw i2c\n");
359 ret = -EINVAL;
360 goto done;
361 }
362 break;
363 case CHIP_RV250:
364 case CHIP_RV280:
365 /* bits 3 and 4 */
366 switch (rec->mask_clk_reg) {
367 case RADEON_GPIO_DVI_DDC:
368 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC1);
369 break;
370 case RADEON_GPIO_VGA_DDC:
371 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC2);
372 break;
373 case RADEON_GPIO_CRT2_DDC:
374 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC3);
375 break;
376 default:
377 DRM_ERROR("gpio not supported with hw i2c\n");
378 ret = -EINVAL;
379 goto done;
380 }
381 break;
382 case CHIP_R300:
383 case CHIP_R350:
384 /* only bit 4 on r300/r350 */
385 switch (rec->mask_clk_reg) {
386 case RADEON_GPIO_VGA_DDC:
387 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC1);
388 break;
389 case RADEON_GPIO_DVI_DDC:
390 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC3);
391 break;
392 default:
393 DRM_ERROR("gpio not supported with hw i2c\n");
394 ret = -EINVAL;
395 goto done;
396 }
397 break;
398 case CHIP_RV350:
399 case CHIP_RV380:
400 case CHIP_R420:
401 case CHIP_R423:
402 case CHIP_RV410:
403 case CHIP_RS400:
404 case CHIP_RS480:
405 /* bits 3 and 4 */
406 switch (rec->mask_clk_reg) {
407 case RADEON_GPIO_VGA_DDC:
408 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC1);
409 break;
410 case RADEON_GPIO_DVI_DDC:
411 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC2);
412 break;
413 case RADEON_GPIO_MONID:
414 reg |= R200_DVI_I2C_PIN_SEL(R200_SEL_DDC3);
415 break;
416 default:
417 DRM_ERROR("gpio not supported with hw i2c\n");
418 ret = -EINVAL;
419 goto done;
420 }
421 break;
422 default:
423 DRM_ERROR("unsupported asic\n");
424 ret = -EINVAL;
425 goto done;
426 break;
427 }
428 }
429
430 /* check for bus probe */
431 p = &msgs[0];
432 if ((num == 1) && (p->len == 0)) {
433 WREG32(i2c_cntl_0, (RADEON_I2C_DONE |
434 RADEON_I2C_NACK |
435 RADEON_I2C_HALT |
436 RADEON_I2C_SOFT_RST));
437 WREG32(i2c_data, (p->addr << 1) & 0xff);
438 WREG32(i2c_data, 0);
439 WREG32(i2c_cntl_1, ((1 << RADEON_I2C_DATA_COUNT_SHIFT) |
440 (1 << RADEON_I2C_ADDR_COUNT_SHIFT) |
441 RADEON_I2C_EN |
442 (48 << RADEON_I2C_TIME_LIMIT_SHIFT)));
443 WREG32(i2c_cntl_0, reg);
444 for (k = 0; k < 32; k++) {
445 udelay(10);
446 tmp = RREG32(i2c_cntl_0);
447 if (tmp & RADEON_I2C_GO)
448 continue;
449 tmp = RREG32(i2c_cntl_0);
450 if (tmp & RADEON_I2C_DONE)
451 break;
452 else {
453 DRM_DEBUG("i2c write error 0x%08x\n", tmp);
454 WREG32(i2c_cntl_0, tmp | RADEON_I2C_ABORT);
455 ret = -EIO;
456 goto done;
457 }
458 }
459 goto done;
460 }
461
462 for (i = 0; i < num; i++) {
463 p = &msgs[i];
464 for (j = 0; j < p->len; j++) {
465 if (p->flags & I2C_M_RD) {
466 WREG32(i2c_cntl_0, (RADEON_I2C_DONE |
467 RADEON_I2C_NACK |
468 RADEON_I2C_HALT |
469 RADEON_I2C_SOFT_RST));
470 WREG32(i2c_data, ((p->addr << 1) & 0xff) | 0x1);
471 WREG32(i2c_cntl_1, ((1 << RADEON_I2C_DATA_COUNT_SHIFT) |
472 (1 << RADEON_I2C_ADDR_COUNT_SHIFT) |
473 RADEON_I2C_EN |
474 (48 << RADEON_I2C_TIME_LIMIT_SHIFT)));
475 WREG32(i2c_cntl_0, reg | RADEON_I2C_RECEIVE);
476 for (k = 0; k < 32; k++) {
477 udelay(10);
478 tmp = RREG32(i2c_cntl_0);
479 if (tmp & RADEON_I2C_GO)
480 continue;
481 tmp = RREG32(i2c_cntl_0);
482 if (tmp & RADEON_I2C_DONE)
483 break;
484 else {
485 DRM_DEBUG("i2c read error 0x%08x\n", tmp);
486 WREG32(i2c_cntl_0, tmp | RADEON_I2C_ABORT);
487 ret = -EIO;
488 goto done;
489 }
490 }
491 p->buf[j] = RREG32(i2c_data) & 0xff;
492 } else {
493 WREG32(i2c_cntl_0, (RADEON_I2C_DONE |
494 RADEON_I2C_NACK |
495 RADEON_I2C_HALT |
496 RADEON_I2C_SOFT_RST));
497 WREG32(i2c_data, (p->addr << 1) & 0xff);
498 WREG32(i2c_data, p->buf[j]);
499 WREG32(i2c_cntl_1, ((1 << RADEON_I2C_DATA_COUNT_SHIFT) |
500 (1 << RADEON_I2C_ADDR_COUNT_SHIFT) |
501 RADEON_I2C_EN |
502 (48 << RADEON_I2C_TIME_LIMIT_SHIFT)));
503 WREG32(i2c_cntl_0, reg);
504 for (k = 0; k < 32; k++) {
505 udelay(10);
506 tmp = RREG32(i2c_cntl_0);
507 if (tmp & RADEON_I2C_GO)
508 continue;
509 tmp = RREG32(i2c_cntl_0);
510 if (tmp & RADEON_I2C_DONE)
511 break;
512 else {
513 DRM_DEBUG("i2c write error 0x%08x\n", tmp);
514 WREG32(i2c_cntl_0, tmp | RADEON_I2C_ABORT);
515 ret = -EIO;
516 goto done;
517 }
518 }
519 }
520 }
521 }
522
523done:
524 WREG32(i2c_cntl_0, 0);
525 WREG32(i2c_cntl_1, 0);
526 WREG32(i2c_cntl_0, (RADEON_I2C_DONE |
527 RADEON_I2C_NACK |
528 RADEON_I2C_HALT |
529 RADEON_I2C_SOFT_RST));
530
531 if (rdev->is_atom_bios) {
532 tmp = RREG32(RADEON_BIOS_6_SCRATCH);
533 tmp &= ~ATOM_S6_HW_I2C_BUSY_STATE;
534 WREG32(RADEON_BIOS_6_SCRATCH, tmp);
535 }
536
537 mutex_unlock(&rdev->pm.mutex);
538 mutex_unlock(&rdev->dc_hw_i2c_mutex);
539
540 return ret;
541}
542
543/* hw i2c engine for r5xx hardware
544 * hw can buffer up to 15 bytes
545 */
546static int r500_hw_i2c_xfer(struct i2c_adapter *i2c_adap,
547 struct i2c_msg *msgs, int num)
548{
549 struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
550 struct radeon_device *rdev = i2c->dev->dev_private;
551 struct radeon_i2c_bus_rec *rec = &i2c->rec;
552 struct i2c_msg *p;
553 int i, j, remaining, current_count, buffer_offset, ret = num;
554 u32 prescale;
555 u32 tmp, reg;
556 u32 saved1, saved2;
557
558 mutex_lock(&rdev->dc_hw_i2c_mutex);
559 /* take the pm lock since we need a constant sclk */
560 mutex_lock(&rdev->pm.mutex);
561
562 prescale = radeon_get_i2c_prescale(rdev);
563
564 /* clear gpio mask bits */
565 tmp = RREG32(rec->mask_clk_reg);
566 tmp &= ~rec->mask_clk_mask;
567 WREG32(rec->mask_clk_reg, tmp);
568 tmp = RREG32(rec->mask_clk_reg);
569
570 tmp = RREG32(rec->mask_data_reg);
571 tmp &= ~rec->mask_data_mask;
572 WREG32(rec->mask_data_reg, tmp);
573 tmp = RREG32(rec->mask_data_reg);
574
575 /* clear pin values */
576 tmp = RREG32(rec->a_clk_reg);
577 tmp &= ~rec->a_clk_mask;
578 WREG32(rec->a_clk_reg, tmp);
579 tmp = RREG32(rec->a_clk_reg);
580
581 tmp = RREG32(rec->a_data_reg);
582 tmp &= ~rec->a_data_mask;
583 WREG32(rec->a_data_reg, tmp);
584 tmp = RREG32(rec->a_data_reg);
585
586 /* set the pins to input */
587 tmp = RREG32(rec->en_clk_reg);
588 tmp &= ~rec->en_clk_mask;
589 WREG32(rec->en_clk_reg, tmp);
590 tmp = RREG32(rec->en_clk_reg);
591
592 tmp = RREG32(rec->en_data_reg);
593 tmp &= ~rec->en_data_mask;
594 WREG32(rec->en_data_reg, tmp);
595 tmp = RREG32(rec->en_data_reg);
596
597 /* */
598 tmp = RREG32(RADEON_BIOS_6_SCRATCH);
599 WREG32(RADEON_BIOS_6_SCRATCH, tmp | ATOM_S6_HW_I2C_BUSY_STATE);
600 saved1 = RREG32(AVIVO_DC_I2C_CONTROL1);
601 saved2 = RREG32(0x494);
602 WREG32(0x494, saved2 | 0x1);
603
604 WREG32(AVIVO_DC_I2C_ARBITRATION, AVIVO_DC_I2C_SW_WANTS_TO_USE_I2C);
605 for (i = 0; i < 50; i++) {
606 udelay(1);
607 if (RREG32(AVIVO_DC_I2C_ARBITRATION) & AVIVO_DC_I2C_SW_CAN_USE_I2C)
608 break;
609 }
610 if (i == 50) {
611 DRM_ERROR("failed to get i2c bus\n");
612 ret = -EBUSY;
613 goto done;
614 }
615
616 reg = AVIVO_DC_I2C_START | AVIVO_DC_I2C_STOP | AVIVO_DC_I2C_EN;
617 switch (rec->mask_clk_reg) {
618 case AVIVO_DC_GPIO_DDC1_MASK:
619 reg |= AVIVO_DC_I2C_PIN_SELECT(AVIVO_SEL_DDC1);
620 break;
621 case AVIVO_DC_GPIO_DDC2_MASK:
622 reg |= AVIVO_DC_I2C_PIN_SELECT(AVIVO_SEL_DDC2);
623 break;
624 case AVIVO_DC_GPIO_DDC3_MASK:
625 reg |= AVIVO_DC_I2C_PIN_SELECT(AVIVO_SEL_DDC3);
626 break;
627 default:
628 DRM_ERROR("gpio not supported with hw i2c\n");
629 ret = -EINVAL;
630 goto done;
631 }
632
633 /* check for bus probe */
634 p = &msgs[0];
635 if ((num == 1) && (p->len == 0)) {
636 WREG32(AVIVO_DC_I2C_STATUS1, (AVIVO_DC_I2C_DONE |
637 AVIVO_DC_I2C_NACK |
638 AVIVO_DC_I2C_HALT));
639 WREG32(AVIVO_DC_I2C_RESET, AVIVO_DC_I2C_SOFT_RESET);
640 udelay(1);
641 WREG32(AVIVO_DC_I2C_RESET, 0);
642
643 WREG32(AVIVO_DC_I2C_DATA, (p->addr << 1) & 0xff);
644 WREG32(AVIVO_DC_I2C_DATA, 0);
645
646 WREG32(AVIVO_DC_I2C_CONTROL3, AVIVO_DC_I2C_TIME_LIMIT(48));
647 WREG32(AVIVO_DC_I2C_CONTROL2, (AVIVO_DC_I2C_ADDR_COUNT(1) |
648 AVIVO_DC_I2C_DATA_COUNT(1) |
649 (prescale << 16)));
650 WREG32(AVIVO_DC_I2C_CONTROL1, reg);
651 WREG32(AVIVO_DC_I2C_STATUS1, AVIVO_DC_I2C_GO);
652 for (j = 0; j < 200; j++) {
653 udelay(50);
654 tmp = RREG32(AVIVO_DC_I2C_STATUS1);
655 if (tmp & AVIVO_DC_I2C_GO)
656 continue;
657 tmp = RREG32(AVIVO_DC_I2C_STATUS1);
658 if (tmp & AVIVO_DC_I2C_DONE)
659 break;
660 else {
661 DRM_DEBUG("i2c write error 0x%08x\n", tmp);
662 WREG32(AVIVO_DC_I2C_RESET, AVIVO_DC_I2C_ABORT);
663 ret = -EIO;
664 goto done;
665 }
666 }
667 goto done;
668 }
669
670 for (i = 0; i < num; i++) {
671 p = &msgs[i];
672 remaining = p->len;
673 buffer_offset = 0;
674 if (p->flags & I2C_M_RD) {
675 while (remaining) {
676 if (remaining > 15)
677 current_count = 15;
678 else
679 current_count = remaining;
680 WREG32(AVIVO_DC_I2C_STATUS1, (AVIVO_DC_I2C_DONE |
681 AVIVO_DC_I2C_NACK |
682 AVIVO_DC_I2C_HALT));
683 WREG32(AVIVO_DC_I2C_RESET, AVIVO_DC_I2C_SOFT_RESET);
684 udelay(1);
685 WREG32(AVIVO_DC_I2C_RESET, 0);
686
687 WREG32(AVIVO_DC_I2C_DATA, ((p->addr << 1) & 0xff) | 0x1);
688 WREG32(AVIVO_DC_I2C_CONTROL3, AVIVO_DC_I2C_TIME_LIMIT(48));
689 WREG32(AVIVO_DC_I2C_CONTROL2, (AVIVO_DC_I2C_ADDR_COUNT(1) |
690 AVIVO_DC_I2C_DATA_COUNT(current_count) |
691 (prescale << 16)));
692 WREG32(AVIVO_DC_I2C_CONTROL1, reg | AVIVO_DC_I2C_RECEIVE);
693 WREG32(AVIVO_DC_I2C_STATUS1, AVIVO_DC_I2C_GO);
694 for (j = 0; j < 200; j++) {
695 udelay(50);
696 tmp = RREG32(AVIVO_DC_I2C_STATUS1);
697 if (tmp & AVIVO_DC_I2C_GO)
698 continue;
699 tmp = RREG32(AVIVO_DC_I2C_STATUS1);
700 if (tmp & AVIVO_DC_I2C_DONE)
701 break;
702 else {
703 DRM_DEBUG("i2c read error 0x%08x\n", tmp);
704 WREG32(AVIVO_DC_I2C_RESET, AVIVO_DC_I2C_ABORT);
705 ret = -EIO;
706 goto done;
707 }
708 }
709 for (j = 0; j < current_count; j++)
710 p->buf[buffer_offset + j] = RREG32(AVIVO_DC_I2C_DATA) & 0xff;
711 remaining -= current_count;
712 buffer_offset += current_count;
713 }
714 } else {
715 while (remaining) {
716 if (remaining > 15)
717 current_count = 15;
718 else
719 current_count = remaining;
720 WREG32(AVIVO_DC_I2C_STATUS1, (AVIVO_DC_I2C_DONE |
721 AVIVO_DC_I2C_NACK |
722 AVIVO_DC_I2C_HALT));
723 WREG32(AVIVO_DC_I2C_RESET, AVIVO_DC_I2C_SOFT_RESET);
724 udelay(1);
725 WREG32(AVIVO_DC_I2C_RESET, 0);
726
727 WREG32(AVIVO_DC_I2C_DATA, (p->addr << 1) & 0xff);
728 for (j = 0; j < current_count; j++)
729 WREG32(AVIVO_DC_I2C_DATA, p->buf[buffer_offset + j]);
730
731 WREG32(AVIVO_DC_I2C_CONTROL3, AVIVO_DC_I2C_TIME_LIMIT(48));
732 WREG32(AVIVO_DC_I2C_CONTROL2, (AVIVO_DC_I2C_ADDR_COUNT(1) |
733 AVIVO_DC_I2C_DATA_COUNT(current_count) |
734 (prescale << 16)));
735 WREG32(AVIVO_DC_I2C_CONTROL1, reg);
736 WREG32(AVIVO_DC_I2C_STATUS1, AVIVO_DC_I2C_GO);
737 for (j = 0; j < 200; j++) {
738 udelay(50);
739 tmp = RREG32(AVIVO_DC_I2C_STATUS1);
740 if (tmp & AVIVO_DC_I2C_GO)
741 continue;
742 tmp = RREG32(AVIVO_DC_I2C_STATUS1);
743 if (tmp & AVIVO_DC_I2C_DONE)
744 break;
745 else {
746 DRM_DEBUG("i2c write error 0x%08x\n", tmp);
747 WREG32(AVIVO_DC_I2C_RESET, AVIVO_DC_I2C_ABORT);
748 ret = -EIO;
749 goto done;
750 }
751 }
752 remaining -= current_count;
753 buffer_offset += current_count;
754 }
755 }
756 }
757
758done:
759 WREG32(AVIVO_DC_I2C_STATUS1, (AVIVO_DC_I2C_DONE |
760 AVIVO_DC_I2C_NACK |
761 AVIVO_DC_I2C_HALT));
762 WREG32(AVIVO_DC_I2C_RESET, AVIVO_DC_I2C_SOFT_RESET);
763 udelay(1);
764 WREG32(AVIVO_DC_I2C_RESET, 0);
765
766 WREG32(AVIVO_DC_I2C_ARBITRATION, AVIVO_DC_I2C_SW_DONE_USING_I2C);
767 WREG32(AVIVO_DC_I2C_CONTROL1, saved1);
768 WREG32(0x494, saved2);
769 tmp = RREG32(RADEON_BIOS_6_SCRATCH);
770 tmp &= ~ATOM_S6_HW_I2C_BUSY_STATE;
771 WREG32(RADEON_BIOS_6_SCRATCH, tmp);
772
773 mutex_unlock(&rdev->pm.mutex);
774 mutex_unlock(&rdev->dc_hw_i2c_mutex);
775
776 return ret;
777}
778
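The hardware engine driven above moves at most 15 data bytes per transaction, so longer reads and writes are chunked by the driver, and a single zero-length message is treated as a bus probe rather than a data transfer. A minimal sketch of a caller exercising that probe path, assuming only a generic i2c_adapter; example_i2c_probe() is illustrative and not part of this driver:

#include <linux/errno.h>
#include <linux/i2c.h>

/* Illustrative only: probe for a device at 7-bit address addr by issuing
 * one zero-length write, which the hw path above treats as a bus probe
 * (num == 1 && len == 0) instead of a data transfer. */
static int example_i2c_probe(struct i2c_adapter *adap, u8 addr)
{
	struct i2c_msg msg = {
		.addr  = addr,
		.flags = 0,
		.len   = 0,	/* zero-length: probe only */
		.buf   = NULL,
	};

	/* i2c_transfer() returns the number of messages handled */
	return (i2c_transfer(adap, &msg, 1) == 1) ? 0 : -ENODEV;
}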
779static int radeon_hw_i2c_xfer(struct i2c_adapter *i2c_adap,
780 struct i2c_msg *msgs, int num)
781{
782 struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
783 struct radeon_device *rdev = i2c->dev->dev_private;
784 struct radeon_i2c_bus_rec *rec = &i2c->rec;
785 int ret = 0;
786
787 switch (rdev->family) {
788 case CHIP_R100:
789 case CHIP_RV100:
790 case CHIP_RS100:
791 case CHIP_RV200:
792 case CHIP_RS200:
793 case CHIP_R200:
794 case CHIP_RV250:
795 case CHIP_RS300:
796 case CHIP_RV280:
797 case CHIP_R300:
798 case CHIP_R350:
799 case CHIP_RV350:
800 case CHIP_RV380:
801 case CHIP_R420:
802 case CHIP_R423:
803 case CHIP_RV410:
804 case CHIP_RS400:
805 case CHIP_RS480:
806 ret = r100_hw_i2c_xfer(i2c_adap, msgs, num);
807 break;
808 case CHIP_RS600:
809 case CHIP_RS690:
810 case CHIP_RS740:
811 /* XXX fill in hw i2c implementation */
812 break;
813 case CHIP_RV515:
814 case CHIP_R520:
815 case CHIP_RV530:
816 case CHIP_RV560:
817 case CHIP_RV570:
818 case CHIP_R580:
819 if (rec->mm_i2c)
820 ret = r100_hw_i2c_xfer(i2c_adap, msgs, num);
821 else
822 ret = r500_hw_i2c_xfer(i2c_adap, msgs, num);
823 break;
824 case CHIP_R600:
825 case CHIP_RV610:
826 case CHIP_RV630:
827 case CHIP_RV670:
828 /* XXX fill in hw i2c implementation */
829 break;
830 case CHIP_RV620:
831 case CHIP_RV635:
832 case CHIP_RS780:
833 case CHIP_RS880:
834 case CHIP_RV770:
835 case CHIP_RV730:
836 case CHIP_RV710:
837 case CHIP_RV740:
838 /* XXX fill in hw i2c implementation */
839 break;
840 case CHIP_CEDAR:
841 case CHIP_REDWOOD:
842 case CHIP_JUNIPER:
843 case CHIP_CYPRESS:
844 case CHIP_HEMLOCK:
845 /* XXX fill in hw i2c implementation */
846 break;
847 default:
848 DRM_ERROR("i2c: unhandled radeon chip\n");
849 ret = -EIO;
850 break;
851 }
852
853 return ret;
854}
855
856static u32 radeon_hw_i2c_func(struct i2c_adapter *adap)
857{
858 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL;
156} 859}
157 860
861static const struct i2c_algorithm radeon_i2c_algo = {
862 .master_xfer = radeon_hw_i2c_xfer,
863 .functionality = radeon_hw_i2c_func,
864};
865
158struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev, 866struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev,
159 struct radeon_i2c_bus_rec *rec, 867 struct radeon_i2c_bus_rec *rec,
160 const char *name) 868 const char *name)
161{ 869{
870 struct radeon_device *rdev = dev->dev_private;
162 struct radeon_i2c_chan *i2c; 871 struct radeon_i2c_chan *i2c;
163 int ret; 872 int ret;
164 873
@@ -166,22 +875,71 @@ struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev,
166 if (i2c == NULL) 875 if (i2c == NULL)
167 return NULL; 876 return NULL;
168 877
878 i2c->rec = *rec;
169 i2c->adapter.owner = THIS_MODULE; 879 i2c->adapter.owner = THIS_MODULE;
170 i2c->adapter.algo_data = &i2c->algo;
171 i2c->dev = dev; 880 i2c->dev = dev;
172 i2c->algo.setsda = set_data;
173 i2c->algo.setscl = set_clock;
174 i2c->algo.getsda = get_data;
175 i2c->algo.getscl = get_clock;
176 i2c->algo.udelay = 20;
177 /* vesa says 2.2 ms is enough, 1 jiffy doesn't seem to always
178 * make this, 2 jiffies is a lot more reliable */
179 i2c->algo.timeout = 2;
180 i2c->algo.data = i2c;
181 i2c->rec = *rec;
182 i2c_set_adapdata(&i2c->adapter, i2c); 881 i2c_set_adapdata(&i2c->adapter, i2c);
882 if (rec->mm_i2c ||
883 (rec->hw_capable &&
884 radeon_hw_i2c &&
885 ((rdev->family <= CHIP_RS480) ||
886 ((rdev->family >= CHIP_RV515) && (rdev->family <= CHIP_R580))))) {
887 /* set the radeon hw i2c adapter */
888 sprintf(i2c->adapter.name, "Radeon i2c hw bus %s", name);
889 i2c->adapter.algo = &radeon_i2c_algo;
890 ret = i2c_add_adapter(&i2c->adapter);
891 if (ret) {
892 DRM_ERROR("Failed to register hw i2c %s\n", name);
893 goto out_free;
894 }
895 } else {
896 /* set the radeon bit adapter */
897 sprintf(i2c->adapter.name, "Radeon i2c bit bus %s", name);
898 i2c->adapter.algo_data = &i2c->algo.bit;
899 i2c->algo.bit.pre_xfer = pre_xfer;
900 i2c->algo.bit.post_xfer = post_xfer;
901 i2c->algo.bit.setsda = set_data;
902 i2c->algo.bit.setscl = set_clock;
903 i2c->algo.bit.getsda = get_data;
904 i2c->algo.bit.getscl = get_clock;
905 i2c->algo.bit.udelay = 20;
906		/* vesa says 2.2 ms is enough, but 1 jiffy doesn't always cover
907		 * that; 2 jiffies is a lot more reliable */
908 i2c->algo.bit.timeout = 2;
909 i2c->algo.bit.data = i2c;
910 ret = i2c_bit_add_bus(&i2c->adapter);
911 if (ret) {
912 DRM_ERROR("Failed to register bit i2c %s\n", name);
913 goto out_free;
914 }
915 }
916
917 return i2c;
918out_free:
919 kfree(i2c);
920 return NULL;
921
922}
183 923
184 ret = i2c_bit_add_bus(&i2c->adapter); 924struct radeon_i2c_chan *radeon_i2c_create_dp(struct drm_device *dev,
925 struct radeon_i2c_bus_rec *rec,
926 const char *name)
927{
928 struct radeon_i2c_chan *i2c;
929 int ret;
930
931 i2c = kzalloc(sizeof(struct radeon_i2c_chan), GFP_KERNEL);
932 if (i2c == NULL)
933 return NULL;
934
935 i2c->rec = *rec;
936 i2c->adapter.owner = THIS_MODULE;
937 i2c->dev = dev;
938 i2c_set_adapdata(&i2c->adapter, i2c);
939 i2c->adapter.algo_data = &i2c->algo.dp;
940 i2c->algo.dp.aux_ch = radeon_dp_i2c_aux_ch;
941 i2c->algo.dp.address = 0;
942 ret = i2c_dp_aux_add_bus(&i2c->adapter);
185 if (ret) { 943 if (ret) {
186 DRM_INFO("Failed to register i2c %s\n", name); 944 DRM_INFO("Failed to register i2c %s\n", name);
187 goto out_free; 945 goto out_free;
@@ -198,7 +956,6 @@ void radeon_i2c_destroy(struct radeon_i2c_chan *i2c)
198{ 956{
199 if (!i2c) 957 if (!i2c)
200 return; 958 return;
201
202 i2c_del_adapter(&i2c->adapter); 959 i2c_del_adapter(&i2c->adapter);
203 kfree(i2c); 960 kfree(i2c);
204} 961}
@@ -207,3 +964,59 @@ struct drm_encoder *radeon_best_encoder(struct drm_connector *connector)
207{ 964{
208 return NULL; 965 return NULL;
209} 966}
967
968void radeon_i2c_get_byte(struct radeon_i2c_chan *i2c_bus,
969 u8 slave_addr,
970 u8 addr,
971 u8 *val)
972{
973 u8 out_buf[2];
974 u8 in_buf[2];
975 struct i2c_msg msgs[] = {
976 {
977 .addr = slave_addr,
978 .flags = 0,
979 .len = 1,
980 .buf = out_buf,
981 },
982 {
983 .addr = slave_addr,
984 .flags = I2C_M_RD,
985 .len = 1,
986 .buf = in_buf,
987 }
988 };
989
990 out_buf[0] = addr;
991 out_buf[1] = 0;
992
993 if (i2c_transfer(&i2c_bus->adapter, msgs, 2) == 2) {
994 *val = in_buf[0];
995 DRM_DEBUG("val = 0x%02x\n", *val);
996 } else {
997 DRM_ERROR("i2c 0x%02x 0x%02x read failed\n",
998 addr, *val);
999 }
1000}
1001
1002void radeon_i2c_put_byte(struct radeon_i2c_chan *i2c_bus,
1003 u8 slave_addr,
1004 u8 addr,
1005 u8 val)
1006{
1007 uint8_t out_buf[2];
1008 struct i2c_msg msg = {
1009 .addr = slave_addr,
1010 .flags = 0,
1011 .len = 2,
1012 .buf = out_buf,
1013 };
1014
1015 out_buf[0] = addr;
1016 out_buf[1] = val;
1017
1018 if (i2c_transfer(&i2c_bus->adapter, &msg, 1) != 1)
1019 DRM_ERROR("i2c 0x%02x 0x%02x write failed\n",
1020 addr, val);
1021}
1022
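radeon_i2c_get_byte()/radeon_i2c_put_byte() wrap i2c_transfer() into a register-style access: write the register offset, then read or write one byte. A minimal usage sketch, assuming a bus already created with radeon_i2c_create(); the slave address 0x70, offset 0x04 and example_rmw() are illustrative only:

/* Illustrative read-modify-write of one register on an external chip */
static void example_rmw(struct radeon_i2c_chan *i2c_bus)
{
	u8 val;

	radeon_i2c_get_byte(i2c_bus, 0x70, 0x04, &val);
	radeon_i2c_put_byte(i2c_bus, 0x70, 0x04, val | 0x01);
}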
diff --git a/drivers/gpu/drm/radeon/radeon_ioc32.c b/drivers/gpu/drm/radeon/radeon_ioc32.c
index a1bf11de308a..48b7cea31e08 100644
--- a/drivers/gpu/drm/radeon/radeon_ioc32.c
+++ b/drivers/gpu/drm/radeon/radeon_ioc32.c
@@ -92,8 +92,7 @@ static int compat_radeon_cp_init(struct file *file, unsigned int cmd,
92 &init->gart_textures_offset)) 92 &init->gart_textures_offset))
93 return -EFAULT; 93 return -EFAULT;
94 94
95 return drm_ioctl(file->f_path.dentry->d_inode, file, 95 return drm_ioctl(file, DRM_IOCTL_RADEON_CP_INIT, (unsigned long)init);
96 DRM_IOCTL_RADEON_CP_INIT, (unsigned long)init);
97} 96}
98 97
99typedef struct drm_radeon_clear32 { 98typedef struct drm_radeon_clear32 {
@@ -125,8 +124,7 @@ static int compat_radeon_cp_clear(struct file *file, unsigned int cmd,
125 &clr->depth_boxes)) 124 &clr->depth_boxes))
126 return -EFAULT; 125 return -EFAULT;
127 126
128 return drm_ioctl(file->f_path.dentry->d_inode, file, 127 return drm_ioctl(file, DRM_IOCTL_RADEON_CLEAR, (unsigned long)clr);
129 DRM_IOCTL_RADEON_CLEAR, (unsigned long)clr);
130} 128}
131 129
132typedef struct drm_radeon_stipple32 { 130typedef struct drm_radeon_stipple32 {
@@ -149,8 +147,7 @@ static int compat_radeon_cp_stipple(struct file *file, unsigned int cmd,
149 &request->mask)) 147 &request->mask))
150 return -EFAULT; 148 return -EFAULT;
151 149
152 return drm_ioctl(file->f_path.dentry->d_inode, file, 150 return drm_ioctl(file, DRM_IOCTL_RADEON_STIPPLE, (unsigned long)request);
153 DRM_IOCTL_RADEON_STIPPLE, (unsigned long)request);
154} 151}
155 152
156typedef struct drm_radeon_tex_image32 { 153typedef struct drm_radeon_tex_image32 {
@@ -204,8 +201,7 @@ static int compat_radeon_cp_texture(struct file *file, unsigned int cmd,
204 &image->data)) 201 &image->data))
205 return -EFAULT; 202 return -EFAULT;
206 203
207 return drm_ioctl(file->f_path.dentry->d_inode, file, 204 return drm_ioctl(file, DRM_IOCTL_RADEON_TEXTURE, (unsigned long)request);
208 DRM_IOCTL_RADEON_TEXTURE, (unsigned long)request);
209} 205}
210 206
211typedef struct drm_radeon_vertex2_32 { 207typedef struct drm_radeon_vertex2_32 {
@@ -238,8 +234,7 @@ static int compat_radeon_cp_vertex2(struct file *file, unsigned int cmd,
238 &request->prim)) 234 &request->prim))
239 return -EFAULT; 235 return -EFAULT;
240 236
241 return drm_ioctl(file->f_path.dentry->d_inode, file, 237 return drm_ioctl(file, DRM_IOCTL_RADEON_VERTEX2, (unsigned long)request);
242 DRM_IOCTL_RADEON_VERTEX2, (unsigned long)request);
243} 238}
244 239
245typedef struct drm_radeon_cmd_buffer32 { 240typedef struct drm_radeon_cmd_buffer32 {
@@ -268,8 +263,7 @@ static int compat_radeon_cp_cmdbuf(struct file *file, unsigned int cmd,
268 &request->boxes)) 263 &request->boxes))
269 return -EFAULT; 264 return -EFAULT;
270 265
271 return drm_ioctl(file->f_path.dentry->d_inode, file, 266 return drm_ioctl(file, DRM_IOCTL_RADEON_CMDBUF, (unsigned long)request);
272 DRM_IOCTL_RADEON_CMDBUF, (unsigned long)request);
273} 267}
274 268
275typedef struct drm_radeon_getparam32 { 269typedef struct drm_radeon_getparam32 {
@@ -293,8 +287,7 @@ static int compat_radeon_cp_getparam(struct file *file, unsigned int cmd,
293 &request->value)) 287 &request->value))
294 return -EFAULT; 288 return -EFAULT;
295 289
296 return drm_ioctl(file->f_path.dentry->d_inode, file, 290 return drm_ioctl(file, DRM_IOCTL_RADEON_GETPARAM, (unsigned long)request);
297 DRM_IOCTL_RADEON_GETPARAM, (unsigned long)request);
298} 291}
299 292
300typedef struct drm_radeon_mem_alloc32 { 293typedef struct drm_radeon_mem_alloc32 {
@@ -322,8 +315,7 @@ static int compat_radeon_mem_alloc(struct file *file, unsigned int cmd,
322 &request->region_offset)) 315 &request->region_offset))
323 return -EFAULT; 316 return -EFAULT;
324 317
325 return drm_ioctl(file->f_path.dentry->d_inode, file, 318 return drm_ioctl(file, DRM_IOCTL_RADEON_ALLOC, (unsigned long)request);
326 DRM_IOCTL_RADEON_ALLOC, (unsigned long)request);
327} 319}
328 320
329typedef struct drm_radeon_irq_emit32 { 321typedef struct drm_radeon_irq_emit32 {
@@ -345,8 +337,7 @@ static int compat_radeon_irq_emit(struct file *file, unsigned int cmd,
345 &request->irq_seq)) 337 &request->irq_seq))
346 return -EFAULT; 338 return -EFAULT;
347 339
348 return drm_ioctl(file->f_path.dentry->d_inode, file, 340 return drm_ioctl(file, DRM_IOCTL_RADEON_IRQ_EMIT, (unsigned long)request);
349 DRM_IOCTL_RADEON_IRQ_EMIT, (unsigned long)request);
350} 341}
351 342
352/* The two 64-bit arches where alignof(u64)==4 in 32-bit code */ 343/* The two 64-bit arches where alignof(u64)==4 in 32-bit code */
@@ -372,8 +363,7 @@ static int compat_radeon_cp_setparam(struct file *file, unsigned int cmd,
372 &request->value)) 363 &request->value))
373 return -EFAULT; 364 return -EFAULT;
374 365
375 return drm_ioctl(file->f_dentry->d_inode, file, 366 return drm_ioctl(file, DRM_IOCTL_RADEON_SETPARAM, (unsigned long) request);
376 DRM_IOCTL_RADEON_SETPARAM, (unsigned long) request);
377} 367}
378#else 368#else
379#define compat_radeon_cp_setparam NULL 369#define compat_radeon_cp_setparam NULL
@@ -413,12 +403,10 @@ long radeon_compat_ioctl(struct file *filp, unsigned int cmd, unsigned long arg)
413 if (nr < DRM_COMMAND_BASE + DRM_ARRAY_SIZE(radeon_compat_ioctls)) 403 if (nr < DRM_COMMAND_BASE + DRM_ARRAY_SIZE(radeon_compat_ioctls))
414 fn = radeon_compat_ioctls[nr - DRM_COMMAND_BASE]; 404 fn = radeon_compat_ioctls[nr - DRM_COMMAND_BASE];
415 405
416 lock_kernel(); /* XXX for now */
417 if (fn != NULL) 406 if (fn != NULL)
418 ret = (*fn) (filp, cmd, arg); 407 ret = (*fn) (filp, cmd, arg);
419 else 408 else
420 ret = drm_ioctl(filp->f_path.dentry->d_inode, filp, cmd, arg); 409 ret = drm_ioctl(filp, cmd, arg);
421 unlock_kernel();
422 410
423 return ret; 411 return ret;
424} 412}
@@ -431,9 +419,7 @@ long radeon_kms_compat_ioctl(struct file *filp, unsigned int cmd, unsigned long
431 if (nr < DRM_COMMAND_BASE) 419 if (nr < DRM_COMMAND_BASE)
432 return drm_compat_ioctl(filp, cmd, arg); 420 return drm_compat_ioctl(filp, cmd, arg);
433 421
434 lock_kernel(); /* XXX for now */ 422 ret = drm_ioctl(filp, cmd, arg);
435 ret = drm_ioctl(filp->f_path.dentry->d_inode, filp, cmd, arg);
436 unlock_kernel();
437 423
438 return ret; 424 return ret;
439} 425}
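Every hunk in this file makes the same change: drm_ioctl() no longer takes an inode, so each compat wrapper ends in a direct call on the struct file. A minimal sketch of that call shape, omitting the per-ioctl field translation; compat_example() and DRM_IOCTL_EXAMPLE are illustrative names, not real symbols:

#include <linux/compat.h>

static int compat_example(struct file *file, unsigned int cmd,
			  unsigned long arg)
{
	/* translate the 32-bit request into a native one here if the
	 * layouts differ, then hand the native pointer to drm_ioctl() */
	return drm_ioctl(file, DRM_IOCTL_EXAMPLE,
			 (unsigned long)compat_ptr(arg));
}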
diff --git a/drivers/gpu/drm/radeon/radeon_irq.c b/drivers/gpu/drm/radeon/radeon_irq.c
index b79ecc4a7cc4..2f349a300195 100644
--- a/drivers/gpu/drm/radeon/radeon_irq.c
+++ b/drivers/gpu/drm/radeon/radeon_irq.c
@@ -289,16 +289,16 @@ int radeon_irq_emit(struct drm_device *dev, void *data, struct drm_file *file_pr
289 drm_radeon_irq_emit_t *emit = data; 289 drm_radeon_irq_emit_t *emit = data;
290 int result; 290 int result;
291 291
292 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R600)
293 return -EINVAL;
294
295 LOCK_TEST_WITH_RETURN(dev, file_priv);
296
297 if (!dev_priv) { 292 if (!dev_priv) {
298 DRM_ERROR("called with no initialization\n"); 293 DRM_ERROR("called with no initialization\n");
299 return -EINVAL; 294 return -EINVAL;
300 } 295 }
301 296
297 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R600)
298 return -EINVAL;
299
300 LOCK_TEST_WITH_RETURN(dev, file_priv);
301
302 result = radeon_emit_irq(dev); 302 result = radeon_emit_irq(dev);
303 303
304 if (DRM_COPY_TO_USER(emit->irq_seq, &result, sizeof(int))) { 304 if (DRM_COPY_TO_USER(emit->irq_seq, &result, sizeof(int))) {
diff --git a/drivers/gpu/drm/radeon/radeon_irq_kms.c b/drivers/gpu/drm/radeon/radeon_irq_kms.c
index a0fe6232dcb6..a212041e8b0b 100644
--- a/drivers/gpu/drm/radeon/radeon_irq_kms.c
+++ b/drivers/gpu/drm/radeon/radeon_irq_kms.c
@@ -39,16 +39,38 @@ irqreturn_t radeon_driver_irq_handler_kms(DRM_IRQ_ARGS)
39 return radeon_irq_process(rdev); 39 return radeon_irq_process(rdev);
40} 40}
41 41
42/*
43 * Handle hotplug events outside the interrupt handler proper.
44 */
45static void radeon_hotplug_work_func(struct work_struct *work)
46{
47 struct radeon_device *rdev = container_of(work, struct radeon_device,
48 hotplug_work);
49 struct drm_device *dev = rdev->ddev;
50 struct drm_mode_config *mode_config = &dev->mode_config;
51 struct drm_connector *connector;
52
53 if (mode_config->num_connector) {
54 list_for_each_entry(connector, &mode_config->connector_list, head)
55 radeon_connector_hotplug(connector);
56 }
57 /* Just fire off a uevent and let userspace tell us what to do */
58 drm_sysfs_hotplug_event(dev);
59}
60
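The work item keeps connector re-detection and the uevent out of the interrupt handler itself; the IRQ path only has to queue it. A minimal sketch of that hand-off, assuming a hot-plug status bit has already been decoded; example_handle_hpd_irq() is illustrative:

#include <linux/workqueue.h>

static void example_handle_hpd_irq(struct radeon_device *rdev, bool hpd_pending)
{
	if (hpd_pending)
		schedule_work(&rdev->hotplug_work);	/* defer to process context */
}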
42void radeon_driver_irq_preinstall_kms(struct drm_device *dev) 61void radeon_driver_irq_preinstall_kms(struct drm_device *dev)
43{ 62{
44 struct radeon_device *rdev = dev->dev_private; 63 struct radeon_device *rdev = dev->dev_private;
45 unsigned i; 64 unsigned i;
46 65
66 INIT_WORK(&rdev->hotplug_work, radeon_hotplug_work_func);
67
47 /* Disable *all* interrupts */ 68 /* Disable *all* interrupts */
48 rdev->irq.sw_int = false; 69 rdev->irq.sw_int = false;
49 for (i = 0; i < 2; i++) { 70 for (i = 0; i < rdev->num_crtc; i++)
50 rdev->irq.crtc_vblank_int[i] = false; 71 rdev->irq.crtc_vblank_int[i] = false;
51 } 72 for (i = 0; i < 6; i++)
73 rdev->irq.hpd[i] = false;
52 radeon_irq_set(rdev); 74 radeon_irq_set(rdev);
53 /* Clear bits */ 75 /* Clear bits */
54 radeon_irq_process(rdev); 76 radeon_irq_process(rdev);
@@ -74,43 +96,78 @@ void radeon_driver_irq_uninstall_kms(struct drm_device *dev)
74 } 96 }
75 /* Disable *all* interrupts */ 97 /* Disable *all* interrupts */
76 rdev->irq.sw_int = false; 98 rdev->irq.sw_int = false;
77 for (i = 0; i < 2; i++) { 99 for (i = 0; i < rdev->num_crtc; i++)
78 rdev->irq.crtc_vblank_int[i] = false; 100 rdev->irq.crtc_vblank_int[i] = false;
79 } 101 for (i = 0; i < 6; i++)
102 rdev->irq.hpd[i] = false;
80 radeon_irq_set(rdev); 103 radeon_irq_set(rdev);
81} 104}
82 105
83int radeon_irq_kms_init(struct radeon_device *rdev) 106int radeon_irq_kms_init(struct radeon_device *rdev)
84{ 107{
85 int r = 0; 108 int r = 0;
86 int num_crtc = 2;
87
88 if (rdev->flags & RADEON_SINGLE_CRTC)
89 num_crtc = 1;
90 109
91 r = drm_vblank_init(rdev->ddev, num_crtc); 110 spin_lock_init(&rdev->irq.sw_lock);
111 r = drm_vblank_init(rdev->ddev, rdev->num_crtc);
92 if (r) { 112 if (r) {
93 return r; 113 return r;
94 } 114 }
95 /* enable msi */ 115 /* enable msi */
96 rdev->msi_enabled = 0; 116 rdev->msi_enabled = 0;
97 if (rdev->family >= CHIP_RV380) { 117 /* MSIs don't seem to work reliably on all IGP
118 * chips. Disable MSI on them for now.
119 */
120 if ((rdev->family >= CHIP_RV380) &&
121 (!(rdev->flags & RADEON_IS_IGP))) {
98 int ret = pci_enable_msi(rdev->pdev); 122 int ret = pci_enable_msi(rdev->pdev);
99 if (!ret) 123 if (!ret) {
100 rdev->msi_enabled = 1; 124 rdev->msi_enabled = 1;
125 DRM_INFO("radeon: using MSI.\n");
126 }
101 } 127 }
102 drm_irq_install(rdev->ddev);
103 rdev->irq.installed = true; 128 rdev->irq.installed = true;
129 r = drm_irq_install(rdev->ddev);
130 if (r) {
131 rdev->irq.installed = false;
132 return r;
133 }
104 DRM_INFO("radeon: irq initialized.\n"); 134 DRM_INFO("radeon: irq initialized.\n");
105 return 0; 135 return 0;
106} 136}
107 137
108void radeon_irq_kms_fini(struct radeon_device *rdev) 138void radeon_irq_kms_fini(struct radeon_device *rdev)
109{ 139{
140 drm_vblank_cleanup(rdev->ddev);
110 if (rdev->irq.installed) { 141 if (rdev->irq.installed) {
111 rdev->irq.installed = false;
112 drm_irq_uninstall(rdev->ddev); 142 drm_irq_uninstall(rdev->ddev);
143 rdev->irq.installed = false;
113 if (rdev->msi_enabled) 144 if (rdev->msi_enabled)
114 pci_disable_msi(rdev->pdev); 145 pci_disable_msi(rdev->pdev);
115 } 146 }
116} 147}
148
149void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev)
150{
151 unsigned long irqflags;
152
153 spin_lock_irqsave(&rdev->irq.sw_lock, irqflags);
154 if (rdev->ddev->irq_enabled && (++rdev->irq.sw_refcount == 1)) {
155 rdev->irq.sw_int = true;
156 radeon_irq_set(rdev);
157 }
158 spin_unlock_irqrestore(&rdev->irq.sw_lock, irqflags);
159}
160
161void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev)
162{
163 unsigned long irqflags;
164
165 spin_lock_irqsave(&rdev->irq.sw_lock, irqflags);
166 BUG_ON(rdev->ddev->irq_enabled && rdev->irq.sw_refcount <= 0);
167 if (rdev->ddev->irq_enabled && (--rdev->irq.sw_refcount == 0)) {
168 rdev->irq.sw_int = false;
169 radeon_irq_set(rdev);
170 }
171 spin_unlock_irqrestore(&rdev->irq.sw_lock, irqflags);
172}
173
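radeon_irq_kms_sw_irq_get()/put() refcount the software interrupt under sw_lock so several waiters can share it: the first get enables it, the last put disables it again. A minimal sketch of a bracketing caller; example_wait_for_gpu() and the elided wait are illustrative:

static void example_wait_for_gpu(struct radeon_device *rdev)
{
	radeon_irq_kms_sw_irq_get(rdev);	/* first caller enables the IRQ */
	/* ... block until the fence/interrupt signals completion ... */
	radeon_irq_kms_sw_irq_put(rdev);	/* last caller disables it again */
}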
diff --git a/drivers/gpu/drm/radeon/radeon_kms.c b/drivers/gpu/drm/radeon/radeon_kms.c
index ba128621057a..c633319f98ed 100644
--- a/drivers/gpu/drm/radeon/radeon_kms.c
+++ b/drivers/gpu/drm/radeon/radeon_kms.c
@@ -30,10 +30,22 @@
30#include "radeon.h" 30#include "radeon.h"
31#include "radeon_drm.h" 31#include "radeon_drm.h"
32 32
33#include <linux/vga_switcheroo.h>
34#include <linux/slab.h>
35
36int radeon_driver_unload_kms(struct drm_device *dev)
37{
38 struct radeon_device *rdev = dev->dev_private;
39
40 if (rdev == NULL)
41 return 0;
42 radeon_modeset_fini(rdev);
43 radeon_device_fini(rdev);
44 kfree(rdev);
45 dev->dev_private = NULL;
46 return 0;
47}
33 48
34/*
35 * Driver load/unload
36 */
37int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags) 49int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags)
38{ 50{
39 struct radeon_device *rdev; 51 struct radeon_device *rdev;
@@ -62,31 +74,20 @@ int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags)
62 */ 74 */
63 r = radeon_device_init(rdev, dev, dev->pdev, flags); 75 r = radeon_device_init(rdev, dev, dev->pdev, flags);
64 if (r) { 76 if (r) {
65 DRM_ERROR("Fatal error while trying to initialize radeon.\n"); 77 dev_err(&dev->pdev->dev, "Fatal error during GPU init\n");
66 return r; 78 goto out;
67 } 79 }
68 /* Again modeset_init should fail only on fatal error 80 /* Again modeset_init should fail only on fatal error
69 * otherwise it should provide enough functionalities 81 * otherwise it should provide enough functionalities
70 * for shadowfb to run 82 * for shadowfb to run
71 */ 83 */
72 r = radeon_modeset_init(rdev); 84 r = radeon_modeset_init(rdev);
73 if (r) { 85 if (r)
74 return r; 86 dev_err(&dev->pdev->dev, "Fatal error during modeset init\n");
75 } 87out:
76 return 0; 88 if (r)
77} 89 radeon_driver_unload_kms(dev);
78 90 return r;
79int radeon_driver_unload_kms(struct drm_device *dev)
80{
81 struct radeon_device *rdev = dev->dev_private;
82
83 if (rdev == NULL)
84 return 0;
85 radeon_modeset_fini(rdev);
86 radeon_device_fini(rdev);
87 kfree(rdev);
88 dev->dev_private = NULL;
89 return 0;
90} 91}
91 92
92 93
@@ -138,6 +139,7 @@ int radeon_driver_firstopen_kms(struct drm_device *dev)
138 139
139void radeon_driver_lastclose_kms(struct drm_device *dev) 140void radeon_driver_lastclose_kms(struct drm_device *dev)
140{ 141{
142 vga_switcheroo_process_delayed_switch();
141} 143}
142 144
143int radeon_driver_open_kms(struct drm_device *dev, struct drm_file *file_priv) 145int radeon_driver_open_kms(struct drm_device *dev, struct drm_file *file_priv)
@@ -163,7 +165,7 @@ u32 radeon_get_vblank_counter_kms(struct drm_device *dev, int crtc)
163{ 165{
164 struct radeon_device *rdev = dev->dev_private; 166 struct radeon_device *rdev = dev->dev_private;
165 167
166 if (crtc < 0 || crtc > 1) { 168 if (crtc < 0 || crtc >= rdev->num_crtc) {
167 DRM_ERROR("Invalid crtc %d\n", crtc); 169 DRM_ERROR("Invalid crtc %d\n", crtc);
168 return -EINVAL; 170 return -EINVAL;
169 } 171 }
@@ -175,7 +177,7 @@ int radeon_enable_vblank_kms(struct drm_device *dev, int crtc)
175{ 177{
176 struct radeon_device *rdev = dev->dev_private; 178 struct radeon_device *rdev = dev->dev_private;
177 179
178 if (crtc < 0 || crtc > 1) { 180 if (crtc < 0 || crtc >= rdev->num_crtc) {
179 DRM_ERROR("Invalid crtc %d\n", crtc); 181 DRM_ERROR("Invalid crtc %d\n", crtc);
180 return -EINVAL; 182 return -EINVAL;
181 } 183 }
@@ -189,7 +191,7 @@ void radeon_disable_vblank_kms(struct drm_device *dev, int crtc)
189{ 191{
190 struct radeon_device *rdev = dev->dev_private; 192 struct radeon_device *rdev = dev->dev_private;
191 193
192 if (crtc < 0 || crtc > 1) { 194 if (crtc < 0 || crtc >= rdev->num_crtc) {
193 DRM_ERROR("Invalid crtc %d\n", crtc); 195 DRM_ERROR("Invalid crtc %d\n", crtc);
194 return; 196 return;
195 } 197 }
@@ -278,17 +280,17 @@ struct drm_ioctl_desc radeon_ioctls_kms[] = {
278 DRM_IOCTL_DEF(DRM_RADEON_SURF_ALLOC, radeon_surface_alloc_kms, DRM_AUTH), 280 DRM_IOCTL_DEF(DRM_RADEON_SURF_ALLOC, radeon_surface_alloc_kms, DRM_AUTH),
279 DRM_IOCTL_DEF(DRM_RADEON_SURF_FREE, radeon_surface_free_kms, DRM_AUTH), 281 DRM_IOCTL_DEF(DRM_RADEON_SURF_FREE, radeon_surface_free_kms, DRM_AUTH),
280 /* KMS */ 282 /* KMS */
281 DRM_IOCTL_DEF(DRM_RADEON_GEM_INFO, radeon_gem_info_ioctl, DRM_AUTH), 283 DRM_IOCTL_DEF(DRM_RADEON_GEM_INFO, radeon_gem_info_ioctl, DRM_AUTH|DRM_UNLOCKED),
282 DRM_IOCTL_DEF(DRM_RADEON_GEM_CREATE, radeon_gem_create_ioctl, DRM_AUTH), 284 DRM_IOCTL_DEF(DRM_RADEON_GEM_CREATE, radeon_gem_create_ioctl, DRM_AUTH|DRM_UNLOCKED),
283 DRM_IOCTL_DEF(DRM_RADEON_GEM_MMAP, radeon_gem_mmap_ioctl, DRM_AUTH), 285 DRM_IOCTL_DEF(DRM_RADEON_GEM_MMAP, radeon_gem_mmap_ioctl, DRM_AUTH|DRM_UNLOCKED),
284 DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_DOMAIN, radeon_gem_set_domain_ioctl, DRM_AUTH), 286 DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_DOMAIN, radeon_gem_set_domain_ioctl, DRM_AUTH|DRM_UNLOCKED),
285 DRM_IOCTL_DEF(DRM_RADEON_GEM_PREAD, radeon_gem_pread_ioctl, DRM_AUTH), 287 DRM_IOCTL_DEF(DRM_RADEON_GEM_PREAD, radeon_gem_pread_ioctl, DRM_AUTH|DRM_UNLOCKED),
286 DRM_IOCTL_DEF(DRM_RADEON_GEM_PWRITE, radeon_gem_pwrite_ioctl, DRM_AUTH), 288 DRM_IOCTL_DEF(DRM_RADEON_GEM_PWRITE, radeon_gem_pwrite_ioctl, DRM_AUTH|DRM_UNLOCKED),
287 DRM_IOCTL_DEF(DRM_RADEON_GEM_WAIT_IDLE, radeon_gem_wait_idle_ioctl, DRM_AUTH), 289 DRM_IOCTL_DEF(DRM_RADEON_GEM_WAIT_IDLE, radeon_gem_wait_idle_ioctl, DRM_AUTH|DRM_UNLOCKED),
288 DRM_IOCTL_DEF(DRM_RADEON_CS, radeon_cs_ioctl, DRM_AUTH), 290 DRM_IOCTL_DEF(DRM_RADEON_CS, radeon_cs_ioctl, DRM_AUTH|DRM_UNLOCKED),
289 DRM_IOCTL_DEF(DRM_RADEON_INFO, radeon_info_ioctl, DRM_AUTH), 291 DRM_IOCTL_DEF(DRM_RADEON_INFO, radeon_info_ioctl, DRM_AUTH|DRM_UNLOCKED),
290 DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_TILING, radeon_gem_set_tiling_ioctl, DRM_AUTH), 292 DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_TILING, radeon_gem_set_tiling_ioctl, DRM_AUTH|DRM_UNLOCKED),
291 DRM_IOCTL_DEF(DRM_RADEON_GEM_GET_TILING, radeon_gem_get_tiling_ioctl, DRM_AUTH), 293 DRM_IOCTL_DEF(DRM_RADEON_GEM_GET_TILING, radeon_gem_get_tiling_ioctl, DRM_AUTH|DRM_UNLOCKED),
292 DRM_IOCTL_DEF(DRM_RADEON_GEM_BUSY, radeon_gem_busy_ioctl, DRM_AUTH), 294 DRM_IOCTL_DEF(DRM_RADEON_GEM_BUSY, radeon_gem_busy_ioctl, DRM_AUTH|DRM_UNLOCKED),
293}; 295};
294int radeon_max_kms_ioctl = DRM_ARRAY_SIZE(radeon_ioctls_kms); 296int radeon_max_kms_ioctl = DRM_ARRAY_SIZE(radeon_ioctls_kms);
diff --git a/drivers/gpu/drm/radeon/radeon_legacy_crtc.c b/drivers/gpu/drm/radeon/radeon_legacy_crtc.c
index 8d0b7aa87fa4..88865e38fe30 100644
--- a/drivers/gpu/drm/radeon/radeon_legacy_crtc.c
+++ b/drivers/gpu/drm/radeon/radeon_legacy_crtc.c
@@ -30,9 +30,20 @@
30#include "radeon.h" 30#include "radeon.h"
31#include "atom.h" 31#include "atom.h"
32 32
33static void radeon_overscan_setup(struct drm_crtc *crtc,
34 struct drm_display_mode *mode)
35{
36 struct drm_device *dev = crtc->dev;
37 struct radeon_device *rdev = dev->dev_private;
38 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
39
40 WREG32(RADEON_OVR_CLR + radeon_crtc->crtc_offset, 0);
41 WREG32(RADEON_OVR_WID_LEFT_RIGHT + radeon_crtc->crtc_offset, 0);
42 WREG32(RADEON_OVR_WID_TOP_BOTTOM + radeon_crtc->crtc_offset, 0);
43}
44
33static void radeon_legacy_rmx_mode_set(struct drm_crtc *crtc, 45static void radeon_legacy_rmx_mode_set(struct drm_crtc *crtc,
34 struct drm_display_mode *mode, 46 struct drm_display_mode *mode)
35 struct drm_display_mode *adjusted_mode)
36{ 47{
37 struct drm_device *dev = crtc->dev; 48 struct drm_device *dev = crtc->dev;
38 struct radeon_device *rdev = dev->dev_private; 49 struct radeon_device *rdev = dev->dev_private;
@@ -292,8 +303,7 @@ void radeon_crtc_dpms(struct drm_crtc *crtc, int mode)
292 uint32_t mask; 303 uint32_t mask;
293 304
294 if (radeon_crtc->crtc_id) 305 if (radeon_crtc->crtc_id)
295 mask = (RADEON_CRTC2_EN | 306 mask = (RADEON_CRTC2_DISP_DIS |
296 RADEON_CRTC2_DISP_DIS |
297 RADEON_CRTC2_VSYNC_DIS | 307 RADEON_CRTC2_VSYNC_DIS |
298 RADEON_CRTC2_HSYNC_DIS | 308 RADEON_CRTC2_HSYNC_DIS |
299 RADEON_CRTC2_DISP_REQ_EN_B); 309 RADEON_CRTC2_DISP_REQ_EN_B);
@@ -305,7 +315,7 @@ void radeon_crtc_dpms(struct drm_crtc *crtc, int mode)
305 switch (mode) { 315 switch (mode) {
306 case DRM_MODE_DPMS_ON: 316 case DRM_MODE_DPMS_ON:
307 if (radeon_crtc->crtc_id) 317 if (radeon_crtc->crtc_id)
308 WREG32_P(RADEON_CRTC2_GEN_CNTL, RADEON_CRTC2_EN, ~mask); 318 WREG32_P(RADEON_CRTC2_GEN_CNTL, RADEON_CRTC2_EN, ~(RADEON_CRTC2_EN | mask));
309 else { 319 else {
310 WREG32_P(RADEON_CRTC_GEN_CNTL, RADEON_CRTC_EN, ~(RADEON_CRTC_EN | 320 WREG32_P(RADEON_CRTC_GEN_CNTL, RADEON_CRTC_EN, ~(RADEON_CRTC_EN |
311 RADEON_CRTC_DISP_REQ_EN_B)); 321 RADEON_CRTC_DISP_REQ_EN_B));
@@ -319,7 +329,7 @@ void radeon_crtc_dpms(struct drm_crtc *crtc, int mode)
319 case DRM_MODE_DPMS_OFF: 329 case DRM_MODE_DPMS_OFF:
320 drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id); 330 drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
321 if (radeon_crtc->crtc_id) 331 if (radeon_crtc->crtc_id)
322 WREG32_P(RADEON_CRTC2_GEN_CNTL, mask, ~mask); 332 WREG32_P(RADEON_CRTC2_GEN_CNTL, mask, ~(RADEON_CRTC2_EN | mask));
323 else { 333 else {
324 WREG32_P(RADEON_CRTC_GEN_CNTL, RADEON_CRTC_DISP_REQ_EN_B, ~(RADEON_CRTC_EN | 334 WREG32_P(RADEON_CRTC_GEN_CNTL, RADEON_CRTC_DISP_REQ_EN_B, ~(RADEON_CRTC_EN |
325 RADEON_CRTC_DISP_REQ_EN_B)); 335 RADEON_CRTC_DISP_REQ_EN_B));
@@ -329,69 +339,6 @@ void radeon_crtc_dpms(struct drm_crtc *crtc, int mode)
329 } 339 }
330} 340}
331 341
332/* properly set crtc bpp when using atombios */
333void radeon_legacy_atom_set_surface(struct drm_crtc *crtc)
334{
335 struct drm_device *dev = crtc->dev;
336 struct radeon_device *rdev = dev->dev_private;
337 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
338 int format;
339 uint32_t crtc_gen_cntl;
340 uint32_t disp_merge_cntl;
341 uint32_t crtc_pitch;
342
343 switch (crtc->fb->bits_per_pixel) {
344 case 8:
345 format = 2;
346 break;
347 case 15: /* 555 */
348 format = 3;
349 break;
350 case 16: /* 565 */
351 format = 4;
352 break;
353 case 24: /* RGB */
354 format = 5;
355 break;
356 case 32: /* xRGB */
357 format = 6;
358 break;
359 default:
360 return;
361 }
362
363 crtc_pitch = ((((crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8)) * crtc->fb->bits_per_pixel) +
364 ((crtc->fb->bits_per_pixel * 8) - 1)) /
365 (crtc->fb->bits_per_pixel * 8));
366 crtc_pitch |= crtc_pitch << 16;
367
368 WREG32(RADEON_CRTC_PITCH + radeon_crtc->crtc_offset, crtc_pitch);
369
370 switch (radeon_crtc->crtc_id) {
371 case 0:
372 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
373 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
374 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
375
376 crtc_gen_cntl = RREG32(RADEON_CRTC_GEN_CNTL) & 0xfffff0ff;
377 crtc_gen_cntl |= (format << 8);
378 crtc_gen_cntl |= RADEON_CRTC_EXT_DISP_EN;
379 WREG32(RADEON_CRTC_GEN_CNTL, crtc_gen_cntl);
380 break;
381 case 1:
382 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
383 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
384 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
385
386 crtc_gen_cntl = RREG32(RADEON_CRTC2_GEN_CNTL) & 0xfffff0ff;
387 crtc_gen_cntl |= (format << 8);
388 WREG32(RADEON_CRTC2_GEN_CNTL, crtc_gen_cntl);
389 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
390 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
391 break;
392 }
393}
394
395int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y, 342int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y,
396 struct drm_framebuffer *old_fb) 343 struct drm_framebuffer *old_fb)
397{ 344{
@@ -400,14 +347,21 @@ int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y,
400 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 347 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
401 struct radeon_framebuffer *radeon_fb; 348 struct radeon_framebuffer *radeon_fb;
402 struct drm_gem_object *obj; 349 struct drm_gem_object *obj;
350 struct radeon_bo *rbo;
403 uint64_t base; 351 uint64_t base;
404 uint32_t crtc_offset, crtc_offset_cntl, crtc_tile_x0_y0 = 0; 352 uint32_t crtc_offset, crtc_offset_cntl, crtc_tile_x0_y0 = 0;
405 uint32_t crtc_pitch, pitch_pixels; 353 uint32_t crtc_pitch, pitch_pixels;
406 uint32_t tiling_flags; 354 uint32_t tiling_flags;
407 int format; 355 int format;
408 uint32_t gen_cntl_reg, gen_cntl_val; 356 uint32_t gen_cntl_reg, gen_cntl_val;
357 int r;
409 358
410 DRM_DEBUG("\n"); 359 DRM_DEBUG("\n");
360 /* no fb bound */
361 if (!crtc->fb) {
362 DRM_DEBUG("No FB bound\n");
363 return 0;
364 }
411 365
412 radeon_fb = to_radeon_framebuffer(crtc->fb); 366 radeon_fb = to_radeon_framebuffer(crtc->fb);
413 367
@@ -431,13 +385,25 @@ int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y,
431 return false; 385 return false;
432 } 386 }
433 387
388	/* Pin framebuffer & get tiling information */
434 obj = radeon_fb->obj; 389 obj = radeon_fb->obj;
435 if (radeon_gem_object_pin(obj, RADEON_GEM_DOMAIN_VRAM, &base)) { 390 rbo = obj->driver_private;
391 r = radeon_bo_reserve(rbo, false);
392 if (unlikely(r != 0))
393 return r;
394 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &base);
395 if (unlikely(r != 0)) {
396 radeon_bo_unreserve(rbo);
436 return -EINVAL; 397 return -EINVAL;
437 } 398 }
399 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
400 radeon_bo_unreserve(rbo);
401 if (tiling_flags & RADEON_TILING_MICRO)
402 DRM_ERROR("trying to scanout microtiled buffer\n");
403
438 /* if scanout was in GTT this really wouldn't work */ 404 /* if scanout was in GTT this really wouldn't work */
439 /* crtc offset is from display base addr not FB location */ 405 /* crtc offset is from display base addr not FB location */
440 radeon_crtc->legacy_display_base_addr = rdev->mc.vram_location; 406 radeon_crtc->legacy_display_base_addr = rdev->mc.vram_start;
441 407
442 base -= radeon_crtc->legacy_display_base_addr; 408 base -= radeon_crtc->legacy_display_base_addr;
443 409
@@ -449,10 +415,6 @@ int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y,
449 (crtc->fb->bits_per_pixel * 8)); 415 (crtc->fb->bits_per_pixel * 8));
450 crtc_pitch |= crtc_pitch << 16; 416 crtc_pitch |= crtc_pitch << 16;
451 417
452 radeon_object_get_tiling_flags(obj->driver_private,
453 &tiling_flags, NULL);
454 if (tiling_flags & RADEON_TILING_MICRO)
455 DRM_ERROR("trying to scanout microtiled buffer\n");
456 418
457 if (tiling_flags & RADEON_TILING_MACRO) { 419 if (tiling_flags & RADEON_TILING_MACRO) {
458 if (ASIC_IS_R300(rdev)) 420 if (ASIC_IS_R300(rdev))
@@ -530,7 +492,12 @@ int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y,
530 492
531 if (old_fb && old_fb != crtc->fb) { 493 if (old_fb && old_fb != crtc->fb) {
532 radeon_fb = to_radeon_framebuffer(old_fb); 494 radeon_fb = to_radeon_framebuffer(old_fb);
533 radeon_gem_object_unpin(radeon_fb->obj); 495 rbo = radeon_fb->obj->driver_private;
496 r = radeon_bo_reserve(rbo, false);
497 if (unlikely(r != 0))
498 return r;
499 radeon_bo_unpin(rbo);
500 radeon_bo_unreserve(rbo);
534 } 501 }
535 502
536 /* Bytes per pixel may have changed */ 503 /* Bytes per pixel may have changed */
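The set_base path now goes through the radeon_bo API: reserve the buffer, pin it into VRAM to obtain a GPU address, query the tiling flags, then unreserve. A minimal sketch of that idiom in isolation; example_pin_fb() is illustrative and skips the tiling query:

static int example_pin_fb(struct radeon_bo *rbo, u64 *gpu_addr)
{
	int r;

	r = radeon_bo_reserve(rbo, false);	/* take the bo lock */
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, gpu_addr);
	radeon_bo_unreserve(rbo);		/* drop the lock, keep the pin */
	return r;
}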
@@ -615,39 +582,12 @@ static bool radeon_set_crtc_timing(struct drm_crtc *crtc, struct drm_display_mod
615 ? RADEON_CRTC_V_SYNC_POL 582 ? RADEON_CRTC_V_SYNC_POL
616 : 0)); 583 : 0));
617 584
618 /* TODO -> Dell Server */
619 if (0) {
620 uint32_t disp_hw_debug = RREG32(RADEON_DISP_HW_DEBUG);
621 uint32_t tv_dac_cntl = RREG32(RADEON_TV_DAC_CNTL);
622 uint32_t dac2_cntl = RREG32(RADEON_DAC_CNTL2);
623 uint32_t crtc2_gen_cntl = RREG32(RADEON_CRTC2_GEN_CNTL);
624
625 dac2_cntl &= ~RADEON_DAC2_DAC_CLK_SEL;
626 dac2_cntl |= RADEON_DAC2_DAC2_CLK_SEL;
627
628 /* For CRT on DAC2, don't turn it on if BIOS didn't
629 enable it, even it's detected.
630 */
631 disp_hw_debug |= RADEON_CRT2_DISP1_SEL;
632 tv_dac_cntl &= ~((1<<2) | (3<<8) | (7<<24) | (0xff<<16));
633 tv_dac_cntl |= (0x03 | (2<<8) | (0x58<<16));
634
635 WREG32(RADEON_TV_DAC_CNTL, tv_dac_cntl);
636 WREG32(RADEON_DISP_HW_DEBUG, disp_hw_debug);
637 WREG32(RADEON_DAC_CNTL2, dac2_cntl);
638 WREG32(RADEON_CRTC2_GEN_CNTL, crtc2_gen_cntl);
639 }
640
641 if (radeon_crtc->crtc_id) { 585 if (radeon_crtc->crtc_id) {
642 uint32_t crtc2_gen_cntl; 586 uint32_t crtc2_gen_cntl;
643 uint32_t disp2_merge_cntl; 587 uint32_t disp2_merge_cntl;
644 588
645 		/* check to see if TV DAC is enabled for another crtc and keep it enabled */ 589 		/* if TV DAC is enabled for another crtc, keep it enabled */
646 if (RREG32(RADEON_CRTC2_GEN_CNTL) & RADEON_CRTC2_CRT2_ON) 590 crtc2_gen_cntl = RREG32(RADEON_CRTC2_GEN_CNTL) & 0x00718080;
647 crtc2_gen_cntl = RADEON_CRTC2_CRT2_ON;
648 else
649 crtc2_gen_cntl = 0;
650
651 crtc2_gen_cntl |= ((format << 8) 591 crtc2_gen_cntl |= ((format << 8)
652 | RADEON_CRTC2_VSYNC_DIS 592 | RADEON_CRTC2_VSYNC_DIS
653 | RADEON_CRTC2_HSYNC_DIS 593 | RADEON_CRTC2_HSYNC_DIS
@@ -663,6 +603,10 @@ static bool radeon_set_crtc_timing(struct drm_crtc *crtc, struct drm_display_mod
663 ? RADEON_CRTC2_INTERLACE_EN 603 ? RADEON_CRTC2_INTERLACE_EN
664 : 0)); 604 : 0));
665 605
606 /* rs4xx chips seem to like to have the crtc enabled when the timing is set */
607 if ((rdev->family == CHIP_RS400) || (rdev->family == CHIP_RS480))
608 crtc2_gen_cntl |= RADEON_CRTC2_EN;
609
666 disp2_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL); 610 disp2_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
667 disp2_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN; 611 disp2_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
668 612
@@ -676,7 +620,8 @@ static bool radeon_set_crtc_timing(struct drm_crtc *crtc, struct drm_display_mod
676 uint32_t crtc_ext_cntl; 620 uint32_t crtc_ext_cntl;
677 uint32_t disp_merge_cntl; 621 uint32_t disp_merge_cntl;
678 622
679 crtc_gen_cntl = (RADEON_CRTC_EXT_DISP_EN 623 crtc_gen_cntl = RREG32(RADEON_CRTC_GEN_CNTL) & 0x00718000;
624 crtc_gen_cntl |= (RADEON_CRTC_EXT_DISP_EN
680 | (format << 8) 625 | (format << 8)
681 | RADEON_CRTC_DISP_REQ_EN_B 626 | RADEON_CRTC_DISP_REQ_EN_B
682 | ((mode->flags & DRM_MODE_FLAG_DBLSCAN) 627 | ((mode->flags & DRM_MODE_FLAG_DBLSCAN)
@@ -689,6 +634,10 @@ static bool radeon_set_crtc_timing(struct drm_crtc *crtc, struct drm_display_mod
689 ? RADEON_CRTC_INTERLACE_EN 634 ? RADEON_CRTC_INTERLACE_EN
690 : 0)); 635 : 0));
691 636
637 /* rs4xx chips seem to like to have the crtc enabled when the timing is set */
638 if ((rdev->family == CHIP_RS400) || (rdev->family == CHIP_RS480))
639 crtc_gen_cntl |= RADEON_CRTC_EN;
640
692 crtc_ext_cntl = RREG32(RADEON_CRTC_EXT_CNTL); 641 crtc_ext_cntl = RREG32(RADEON_CRTC_EXT_CNTL);
693 crtc_ext_cntl |= (RADEON_XCRT_CNT_EN | 642 crtc_ext_cntl |= (RADEON_XCRT_CNT_EN |
694 RADEON_CRTC_VSYNC_DIS | 643 RADEON_CRTC_VSYNC_DIS |
@@ -728,7 +677,6 @@ static void radeon_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
728 uint32_t post_divider = 0; 677 uint32_t post_divider = 0;
729 uint32_t freq = 0; 678 uint32_t freq = 0;
730 uint8_t pll_gain; 679 uint8_t pll_gain;
731 int pll_flags = RADEON_PLL_LEGACY;
732 bool use_bios_divs = false; 680 bool use_bios_divs = false;
733 /* PLL registers */ 681 /* PLL registers */
734 uint32_t pll_ref_div = 0; 682 uint32_t pll_ref_div = 0;
@@ -762,10 +710,16 @@ static void radeon_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
762 else 710 else
763 pll = &rdev->clock.p1pll; 711 pll = &rdev->clock.p1pll;
764 712
713 pll->flags = RADEON_PLL_LEGACY;
714 if (radeon_new_pll == 1)
715 pll->algo = PLL_ALGO_NEW;
716 else
717 pll->algo = PLL_ALGO_LEGACY;
718
765 if (mode->clock > 200000) /* range limits??? */ 719 if (mode->clock > 200000) /* range limits??? */
766 pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV; 720 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
767 else 721 else
768 pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV; 722 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
769 723
770 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 724 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
771 if (encoder->crtc == crtc) { 725 if (encoder->crtc == crtc) {
@@ -777,20 +731,22 @@ static void radeon_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
777 } 731 }
778 732
779 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC) 733 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
780 pll_flags |= RADEON_PLL_NO_ODD_POST_DIV; 734 pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
781 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS) { 735 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS) {
782 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 736 if (!rdev->is_atom_bios) {
783 struct radeon_encoder_lvds *lvds = (struct radeon_encoder_lvds *)radeon_encoder->enc_priv; 737 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
784 if (lvds) { 738 struct radeon_encoder_lvds *lvds = (struct radeon_encoder_lvds *)radeon_encoder->enc_priv;
785 if (lvds->use_bios_dividers) { 739 if (lvds) {
786 pll_ref_div = lvds->panel_ref_divider; 740 if (lvds->use_bios_dividers) {
787 pll_fb_post_div = (lvds->panel_fb_divider | 741 pll_ref_div = lvds->panel_ref_divider;
788 (lvds->panel_post_divider << 16)); 742 pll_fb_post_div = (lvds->panel_fb_divider |
789 htotal_cntl = 0; 743 (lvds->panel_post_divider << 16));
790 use_bios_divs = true; 744 htotal_cntl = 0;
745 use_bios_divs = true;
746 }
791 } 747 }
792 } 748 }
793 pll_flags |= RADEON_PLL_USE_REF_DIV; 749 pll->flags |= RADEON_PLL_USE_REF_DIV;
794 } 750 }
795 } 751 }
796 } 752 }
@@ -800,8 +756,7 @@ static void radeon_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
800 if (!use_bios_divs) { 756 if (!use_bios_divs) {
801 radeon_compute_pll(pll, mode->clock, 757 radeon_compute_pll(pll, mode->clock,
802 &freq, &feedback_div, &frac_fb_div, 758 &freq, &feedback_div, &frac_fb_div,
803 &reference_div, &post_divider, 759 &reference_div, &post_divider);
804 pll_flags);
805 760
806 for (post_div = &post_divs[0]; post_div->divider; ++post_div) { 761 for (post_div = &post_divs[0]; post_div->divider; ++post_div) {
807 if (post_div->divider == post_divider) 762 if (post_div->divider == post_divider)
@@ -1027,8 +982,9 @@ static int radeon_crtc_mode_set(struct drm_crtc *crtc,
1027 radeon_crtc_set_base(crtc, x, y, old_fb); 982 radeon_crtc_set_base(crtc, x, y, old_fb);
1028 radeon_set_crtc_timing(crtc, adjusted_mode); 983 radeon_set_crtc_timing(crtc, adjusted_mode);
1029 radeon_set_pll(crtc, adjusted_mode); 984 radeon_set_pll(crtc, adjusted_mode);
985 radeon_overscan_setup(crtc, adjusted_mode);
1030 if (radeon_crtc->crtc_id == 0) { 986 if (radeon_crtc->crtc_id == 0) {
1031 radeon_legacy_rmx_mode_set(crtc, mode, adjusted_mode); 987 radeon_legacy_rmx_mode_set(crtc, adjusted_mode);
1032 } else { 988 } else {
1033 if (radeon_crtc->rmx_type != RMX_OFF) { 989 if (radeon_crtc->rmx_type != RMX_OFF) {
1034 /* FIXME: only first crtc has rmx what should we 990 /* FIXME: only first crtc has rmx what should we
@@ -1042,12 +998,29 @@ static int radeon_crtc_mode_set(struct drm_crtc *crtc,
1042 998
1043static void radeon_crtc_prepare(struct drm_crtc *crtc) 999static void radeon_crtc_prepare(struct drm_crtc *crtc)
1044{ 1000{
1045 radeon_crtc_dpms(crtc, DRM_MODE_DPMS_OFF); 1001 struct drm_device *dev = crtc->dev;
1002 struct drm_crtc *crtci;
1003
1004 /*
1005 * The hardware wedges sometimes if you reconfigure one CRTC
1006 * whilst another is running (see fdo bug #24611).
1007 */
1008 list_for_each_entry(crtci, &dev->mode_config.crtc_list, head)
1009 radeon_crtc_dpms(crtci, DRM_MODE_DPMS_OFF);
1046} 1010}
1047 1011
1048static void radeon_crtc_commit(struct drm_crtc *crtc) 1012static void radeon_crtc_commit(struct drm_crtc *crtc)
1049{ 1013{
1050 radeon_crtc_dpms(crtc, DRM_MODE_DPMS_ON); 1014 struct drm_device *dev = crtc->dev;
1015 struct drm_crtc *crtci;
1016
1017 /*
1018 * Reenable the CRTCs that should be running.
1019 */
1020 list_for_each_entry(crtci, &dev->mode_config.crtc_list, head) {
1021 if (crtci->enabled)
1022 radeon_crtc_dpms(crtci, DRM_MODE_DPMS_ON);
1023 }
1051} 1024}
1052 1025
1053static const struct drm_crtc_helper_funcs legacy_helper_funcs = { 1026static const struct drm_crtc_helper_funcs legacy_helper_funcs = {
diff --git a/drivers/gpu/drm/radeon/radeon_legacy_encoders.c b/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
index 00382122869b..0274abe17ad9 100644
--- a/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
+++ b/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
@@ -46,6 +46,7 @@ static void radeon_legacy_lvds_dpms(struct drm_encoder *encoder, int mode)
46 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 46 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
47 uint32_t lvds_gen_cntl, lvds_pll_cntl, pixclks_cntl, disp_pwr_man; 47 uint32_t lvds_gen_cntl, lvds_pll_cntl, pixclks_cntl, disp_pwr_man;
48 int panel_pwr_delay = 2000; 48 int panel_pwr_delay = 2000;
49 bool is_mac = false;
49 DRM_DEBUG("\n"); 50 DRM_DEBUG("\n");
50 51
51 if (radeon_encoder->enc_priv) { 52 if (radeon_encoder->enc_priv) {
@@ -58,6 +59,15 @@ static void radeon_legacy_lvds_dpms(struct drm_encoder *encoder, int mode)
58 } 59 }
59 } 60 }
60 61
62 /* macs (and possibly some x86 oem systems?) wire up LVDS strangely
63 * Taken from radeonfb.
64 */
65 if ((rdev->mode_info.connector_table == CT_IBOOK) ||
66 (rdev->mode_info.connector_table == CT_POWERBOOK_EXTERNAL) ||
67 (rdev->mode_info.connector_table == CT_POWERBOOK_INTERNAL) ||
68 (rdev->mode_info.connector_table == CT_POWERBOOK_VGA))
69 is_mac = true;
70
61 switch (mode) { 71 switch (mode) {
62 case DRM_MODE_DPMS_ON: 72 case DRM_MODE_DPMS_ON:
63 disp_pwr_man = RREG32(RADEON_DISP_PWR_MAN); 73 disp_pwr_man = RREG32(RADEON_DISP_PWR_MAN);
@@ -74,6 +84,8 @@ static void radeon_legacy_lvds_dpms(struct drm_encoder *encoder, int mode)
74 84
75 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL); 85 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL);
76 lvds_gen_cntl |= (RADEON_LVDS_ON | RADEON_LVDS_EN | RADEON_LVDS_DIGON | RADEON_LVDS_BLON); 86 lvds_gen_cntl |= (RADEON_LVDS_ON | RADEON_LVDS_EN | RADEON_LVDS_DIGON | RADEON_LVDS_BLON);
87 if (is_mac)
88 lvds_gen_cntl |= RADEON_LVDS_BL_MOD_EN;
77 lvds_gen_cntl &= ~(RADEON_LVDS_DISPLAY_DIS); 89 lvds_gen_cntl &= ~(RADEON_LVDS_DISPLAY_DIS);
78 udelay(panel_pwr_delay * 1000); 90 udelay(panel_pwr_delay * 1000);
79 WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl); 91 WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl);
@@ -85,7 +97,14 @@ static void radeon_legacy_lvds_dpms(struct drm_encoder *encoder, int mode)
85 WREG32_PLL_P(RADEON_PIXCLKS_CNTL, 0, ~RADEON_PIXCLK_LVDS_ALWAYS_ONb); 97 WREG32_PLL_P(RADEON_PIXCLKS_CNTL, 0, ~RADEON_PIXCLK_LVDS_ALWAYS_ONb);
86 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL); 98 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL);
87 lvds_gen_cntl |= RADEON_LVDS_DISPLAY_DIS; 99 lvds_gen_cntl |= RADEON_LVDS_DISPLAY_DIS;
88 lvds_gen_cntl &= ~(RADEON_LVDS_ON | RADEON_LVDS_BLON | RADEON_LVDS_EN | RADEON_LVDS_DIGON); 100 if (is_mac) {
101 lvds_gen_cntl &= ~RADEON_LVDS_BL_MOD_EN;
102 WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl);
103 lvds_gen_cntl &= ~(RADEON_LVDS_ON | RADEON_LVDS_EN);
104 } else {
105 WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl);
106 lvds_gen_cntl &= ~(RADEON_LVDS_ON | RADEON_LVDS_BLON | RADEON_LVDS_EN | RADEON_LVDS_DIGON);
107 }
89 udelay(panel_pwr_delay * 1000); 108 udelay(panel_pwr_delay * 1000);
90 WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl); 109 WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl);
91 WREG32_PLL(RADEON_PIXCLKS_CNTL, pixclks_cntl); 110 WREG32_PLL(RADEON_PIXCLKS_CNTL, pixclks_cntl);
@@ -96,6 +115,9 @@ static void radeon_legacy_lvds_dpms(struct drm_encoder *encoder, int mode)
96 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 115 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
97 else 116 else
98 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 117 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
118
119 /* adjust pm to dpms change */
120 radeon_pm_compute_clocks(rdev);
99} 121}
100 122
101static void radeon_legacy_lvds_prepare(struct drm_encoder *encoder) 123static void radeon_legacy_lvds_prepare(struct drm_encoder *encoder)
@@ -136,7 +158,14 @@ static void radeon_legacy_lvds_mode_set(struct drm_encoder *encoder,
136 lvds_pll_cntl &= ~RADEON_LVDS_PLL_EN; 158 lvds_pll_cntl &= ~RADEON_LVDS_PLL_EN;
137 159
138 lvds_ss_gen_cntl = RREG32(RADEON_LVDS_SS_GEN_CNTL); 160 lvds_ss_gen_cntl = RREG32(RADEON_LVDS_SS_GEN_CNTL);
139 if ((!rdev->is_atom_bios)) { 161 if (rdev->is_atom_bios) {
162 /* LVDS_GEN_CNTL parameters are computed in LVDSEncoderControl
163		 * so it needs to be called on resume to set up the reg properly.
164 */
165 radeon_encoder->pixel_clock = adjusted_mode->clock;
166 atombios_digital_setup(encoder, PANEL_ENCODER_ACTION_ENABLE);
167 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL);
168 } else {
140 struct radeon_encoder_lvds *lvds = (struct radeon_encoder_lvds *)radeon_encoder->enc_priv; 169 struct radeon_encoder_lvds *lvds = (struct radeon_encoder_lvds *)radeon_encoder->enc_priv;
141 if (lvds) { 170 if (lvds) {
142 DRM_DEBUG("bios LVDS_GEN_CNTL: 0x%x\n", lvds->lvds_gen_cntl); 171 DRM_DEBUG("bios LVDS_GEN_CNTL: 0x%x\n", lvds->lvds_gen_cntl);
@@ -147,8 +176,7 @@ static void radeon_legacy_lvds_mode_set(struct drm_encoder *encoder,
147 (lvds->panel_blon_delay << RADEON_LVDS_PWRSEQ_DELAY2_SHIFT)); 176 (lvds->panel_blon_delay << RADEON_LVDS_PWRSEQ_DELAY2_SHIFT));
148 } else 177 } else
149 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL); 178 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL);
150 } else 179 }
151 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL);
152 lvds_gen_cntl |= RADEON_LVDS_DISPLAY_DIS; 180 lvds_gen_cntl |= RADEON_LVDS_DISPLAY_DIS;
153 lvds_gen_cntl &= ~(RADEON_LVDS_ON | 181 lvds_gen_cntl &= ~(RADEON_LVDS_ON |
154 RADEON_LVDS_BLON | 182 RADEON_LVDS_BLON |
@@ -184,25 +212,31 @@ static void radeon_legacy_lvds_mode_set(struct drm_encoder *encoder,
184 radeon_combios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id); 212 radeon_combios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id);
185} 213}
186 214
187static bool radeon_legacy_lvds_mode_fixup(struct drm_encoder *encoder, 215static bool radeon_legacy_mode_fixup(struct drm_encoder *encoder,
188 struct drm_display_mode *mode, 216 struct drm_display_mode *mode,
189 struct drm_display_mode *adjusted_mode) 217 struct drm_display_mode *adjusted_mode)
190{ 218{
191 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 219 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
220 struct drm_device *dev = encoder->dev;
221 struct radeon_device *rdev = dev->dev_private;
222
223 /* adjust pm to upcoming mode change */
224 radeon_pm_compute_clocks(rdev);
192 225
193 /* set the active encoder to connector routing */ 226 /* set the active encoder to connector routing */
194 radeon_encoder_set_active_device(encoder); 227 radeon_encoder_set_active_device(encoder);
195 drm_mode_set_crtcinfo(adjusted_mode, 0); 228 drm_mode_set_crtcinfo(adjusted_mode, 0);
196 229
197 if (radeon_encoder->rmx_type != RMX_OFF) 230 /* get the native mode for LVDS */
198 radeon_rmx_mode_fixup(encoder, mode, adjusted_mode); 231 if (radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
232 radeon_panel_mode_fixup(encoder, adjusted_mode);
199 233
200 return true; 234 return true;
201} 235}
202 236
203static const struct drm_encoder_helper_funcs radeon_legacy_lvds_helper_funcs = { 237static const struct drm_encoder_helper_funcs radeon_legacy_lvds_helper_funcs = {
204 .dpms = radeon_legacy_lvds_dpms, 238 .dpms = radeon_legacy_lvds_dpms,
205 .mode_fixup = radeon_legacy_lvds_mode_fixup, 239 .mode_fixup = radeon_legacy_mode_fixup,
206 .prepare = radeon_legacy_lvds_prepare, 240 .prepare = radeon_legacy_lvds_prepare,
207 .mode_set = radeon_legacy_lvds_mode_set, 241 .mode_set = radeon_legacy_lvds_mode_set,
208 .commit = radeon_legacy_lvds_commit, 242 .commit = radeon_legacy_lvds_commit,
@@ -214,17 +248,6 @@ static const struct drm_encoder_funcs radeon_legacy_lvds_enc_funcs = {
214 .destroy = radeon_enc_destroy, 248 .destroy = radeon_enc_destroy,
215}; 249};
216 250
217static bool radeon_legacy_primary_dac_mode_fixup(struct drm_encoder *encoder,
218 struct drm_display_mode *mode,
219 struct drm_display_mode *adjusted_mode)
220{
221 /* set the active encoder to connector routing */
222 radeon_encoder_set_active_device(encoder);
223 drm_mode_set_crtcinfo(adjusted_mode, 0);
224
225 return true;
226}
227
228static void radeon_legacy_primary_dac_dpms(struct drm_encoder *encoder, int mode) 251static void radeon_legacy_primary_dac_dpms(struct drm_encoder *encoder, int mode)
229{ 252{
230 struct drm_device *dev = encoder->dev; 253 struct drm_device *dev = encoder->dev;
@@ -262,6 +285,9 @@ static void radeon_legacy_primary_dac_dpms(struct drm_encoder *encoder, int mode
262 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 285 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
263 else 286 else
264 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 287 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
288
289 /* adjust pm to dpms change */
290 radeon_pm_compute_clocks(rdev);
265} 291}
266 292
267static void radeon_legacy_primary_dac_prepare(struct drm_encoder *encoder) 293static void radeon_legacy_primary_dac_prepare(struct drm_encoder *encoder)
@@ -410,7 +436,7 @@ static enum drm_connector_status radeon_legacy_primary_dac_detect(struct drm_enc
410 436
411static const struct drm_encoder_helper_funcs radeon_legacy_primary_dac_helper_funcs = { 437static const struct drm_encoder_helper_funcs radeon_legacy_primary_dac_helper_funcs = {
412 .dpms = radeon_legacy_primary_dac_dpms, 438 .dpms = radeon_legacy_primary_dac_dpms,
413 .mode_fixup = radeon_legacy_primary_dac_mode_fixup, 439 .mode_fixup = radeon_legacy_mode_fixup,
414 .prepare = radeon_legacy_primary_dac_prepare, 440 .prepare = radeon_legacy_primary_dac_prepare,
415 .mode_set = radeon_legacy_primary_dac_mode_set, 441 .mode_set = radeon_legacy_primary_dac_mode_set,
416 .commit = radeon_legacy_primary_dac_commit, 442 .commit = radeon_legacy_primary_dac_commit,
@@ -423,16 +449,6 @@ static const struct drm_encoder_funcs radeon_legacy_primary_dac_enc_funcs = {
423 .destroy = radeon_enc_destroy, 449 .destroy = radeon_enc_destroy,
424}; 450};
425 451
426static bool radeon_legacy_tmds_int_mode_fixup(struct drm_encoder *encoder,
427 struct drm_display_mode *mode,
428 struct drm_display_mode *adjusted_mode)
429{
430
431 drm_mode_set_crtcinfo(adjusted_mode, 0);
432
433 return true;
434}
435
436static void radeon_legacy_tmds_int_dpms(struct drm_encoder *encoder, int mode) 452static void radeon_legacy_tmds_int_dpms(struct drm_encoder *encoder, int mode)
437{ 453{
438 struct drm_device *dev = encoder->dev; 454 struct drm_device *dev = encoder->dev;
@@ -457,6 +473,9 @@ static void radeon_legacy_tmds_int_dpms(struct drm_encoder *encoder, int mode)
457 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 473 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
458 else 474 else
459 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 475 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
476
477 /* adjust pm to dpms change */
478 radeon_pm_compute_clocks(rdev);
460} 479}
461 480
462static void radeon_legacy_tmds_int_prepare(struct drm_encoder *encoder) 481static void radeon_legacy_tmds_int_prepare(struct drm_encoder *encoder)
@@ -584,7 +603,7 @@ static void radeon_legacy_tmds_int_mode_set(struct drm_encoder *encoder,
584 603
585static const struct drm_encoder_helper_funcs radeon_legacy_tmds_int_helper_funcs = { 604static const struct drm_encoder_helper_funcs radeon_legacy_tmds_int_helper_funcs = {
586 .dpms = radeon_legacy_tmds_int_dpms, 605 .dpms = radeon_legacy_tmds_int_dpms,
587 .mode_fixup = radeon_legacy_tmds_int_mode_fixup, 606 .mode_fixup = radeon_legacy_mode_fixup,
588 .prepare = radeon_legacy_tmds_int_prepare, 607 .prepare = radeon_legacy_tmds_int_prepare,
589 .mode_set = radeon_legacy_tmds_int_mode_set, 608 .mode_set = radeon_legacy_tmds_int_mode_set,
590 .commit = radeon_legacy_tmds_int_commit, 609 .commit = radeon_legacy_tmds_int_commit,
@@ -596,17 +615,6 @@ static const struct drm_encoder_funcs radeon_legacy_tmds_int_enc_funcs = {
596 .destroy = radeon_enc_destroy, 615 .destroy = radeon_enc_destroy,
597}; 616};
598 617
599static bool radeon_legacy_tmds_ext_mode_fixup(struct drm_encoder *encoder,
600 struct drm_display_mode *mode,
601 struct drm_display_mode *adjusted_mode)
602{
603 /* set the active encoder to connector routing */
604 radeon_encoder_set_active_device(encoder);
605 drm_mode_set_crtcinfo(adjusted_mode, 0);
606
607 return true;
608}
609
610static void radeon_legacy_tmds_ext_dpms(struct drm_encoder *encoder, int mode) 618static void radeon_legacy_tmds_ext_dpms(struct drm_encoder *encoder, int mode)
611{ 619{
612 struct drm_device *dev = encoder->dev; 620 struct drm_device *dev = encoder->dev;
@@ -633,6 +641,9 @@ static void radeon_legacy_tmds_ext_dpms(struct drm_encoder *encoder, int mode)
633 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 641 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
634 else 642 else
635 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 643 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
644
645 /* adjust pm to dpms change */
646 radeon_pm_compute_clocks(rdev);
636} 647}
637 648
638static void radeon_legacy_tmds_ext_prepare(struct drm_encoder *encoder) 649static void radeon_legacy_tmds_ext_prepare(struct drm_encoder *encoder)
@@ -697,6 +708,8 @@ static void radeon_legacy_tmds_ext_mode_set(struct drm_encoder *encoder,
697 /*if (mode->clock > 165000) 708 /*if (mode->clock > 165000)
698 fp2_gen_cntl |= R300_FP2_DVO_DUAL_CHANNEL_EN;*/ 709 fp2_gen_cntl |= R300_FP2_DVO_DUAL_CHANNEL_EN;*/
699 } 710 }
711 if (!radeon_combios_external_tmds_setup(encoder))
712 radeon_external_tmds_setup(encoder);
700 } 713 }
701 714
702 if (radeon_crtc->crtc_id == 0) { 715 if (radeon_crtc->crtc_id == 0) {
@@ -724,9 +737,22 @@ static void radeon_legacy_tmds_ext_mode_set(struct drm_encoder *encoder,
724 radeon_combios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id); 737 radeon_combios_encoder_crtc_scratch_regs(encoder, radeon_crtc->crtc_id);
725} 738}
726 739
740static void radeon_ext_tmds_enc_destroy(struct drm_encoder *encoder)
741{
742 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
743 struct radeon_encoder_ext_tmds *tmds = radeon_encoder->enc_priv;
744 if (tmds) {
745 if (tmds->i2c_bus)
746 radeon_i2c_destroy(tmds->i2c_bus);
747 }
748 kfree(radeon_encoder->enc_priv);
749 drm_encoder_cleanup(encoder);
750 kfree(radeon_encoder);
751}
752
727static const struct drm_encoder_helper_funcs radeon_legacy_tmds_ext_helper_funcs = { 753static const struct drm_encoder_helper_funcs radeon_legacy_tmds_ext_helper_funcs = {
728 .dpms = radeon_legacy_tmds_ext_dpms, 754 .dpms = radeon_legacy_tmds_ext_dpms,
729 .mode_fixup = radeon_legacy_tmds_ext_mode_fixup, 755 .mode_fixup = radeon_legacy_mode_fixup,
730 .prepare = radeon_legacy_tmds_ext_prepare, 756 .prepare = radeon_legacy_tmds_ext_prepare,
731 .mode_set = radeon_legacy_tmds_ext_mode_set, 757 .mode_set = radeon_legacy_tmds_ext_mode_set,
732 .commit = radeon_legacy_tmds_ext_commit, 758 .commit = radeon_legacy_tmds_ext_commit,
@@ -735,20 +761,9 @@ static const struct drm_encoder_helper_funcs radeon_legacy_tmds_ext_helper_funcs
735 761
736 762
737static const struct drm_encoder_funcs radeon_legacy_tmds_ext_enc_funcs = { 763static const struct drm_encoder_funcs radeon_legacy_tmds_ext_enc_funcs = {
738 .destroy = radeon_enc_destroy, 764 .destroy = radeon_ext_tmds_enc_destroy,
739}; 765};
740 766
741static bool radeon_legacy_tv_dac_mode_fixup(struct drm_encoder *encoder,
742 struct drm_display_mode *mode,
743 struct drm_display_mode *adjusted_mode)
744{
745 /* set the active encoder to connector routing */
746 radeon_encoder_set_active_device(encoder);
747 drm_mode_set_crtcinfo(adjusted_mode, 0);
748
749 return true;
750}
751
752static void radeon_legacy_tv_dac_dpms(struct drm_encoder *encoder, int mode) 767static void radeon_legacy_tv_dac_dpms(struct drm_encoder *encoder, int mode)
753{ 768{
754 struct drm_device *dev = encoder->dev; 769 struct drm_device *dev = encoder->dev;
@@ -807,8 +822,8 @@ static void radeon_legacy_tv_dac_dpms(struct drm_encoder *encoder, int mode)
807 crtc2_gen_cntl &= ~RADEON_CRTC2_CRT2_ON; 822 crtc2_gen_cntl &= ~RADEON_CRTC2_CRT2_ON;
808 823
809 if (rdev->family == CHIP_R420 || 824 if (rdev->family == CHIP_R420 ||
810 rdev->family == CHIP_R423 || 825 rdev->family == CHIP_R423 ||
811 rdev->family == CHIP_RV410) 826 rdev->family == CHIP_RV410)
812 tv_dac_cntl |= (R420_TV_DAC_RDACPD | 827 tv_dac_cntl |= (R420_TV_DAC_RDACPD |
813 R420_TV_DAC_GDACPD | 828 R420_TV_DAC_GDACPD |
814 R420_TV_DAC_BDACPD | 829 R420_TV_DAC_BDACPD |
@@ -836,6 +851,9 @@ static void radeon_legacy_tv_dac_dpms(struct drm_encoder *encoder, int mode)
836 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 851 radeon_atombios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
837 else 852 else
838 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false); 853 radeon_combios_encoder_dpms_scratch_regs(encoder, (mode == DRM_MODE_DPMS_ON) ? true : false);
854
855 /* adjust pm to dpms change */
856 radeon_pm_compute_clocks(rdev);
839} 857}
840 858
841static void radeon_legacy_tv_dac_prepare(struct drm_encoder *encoder) 859static void radeon_legacy_tv_dac_prepare(struct drm_encoder *encoder)
@@ -881,35 +899,43 @@ static void radeon_legacy_tv_dac_mode_set(struct drm_encoder *encoder,
881 if (rdev->family != CHIP_R200) { 899 if (rdev->family != CHIP_R200) {
882 tv_dac_cntl = RREG32(RADEON_TV_DAC_CNTL); 900 tv_dac_cntl = RREG32(RADEON_TV_DAC_CNTL);
883 if (rdev->family == CHIP_R420 || 901 if (rdev->family == CHIP_R420 ||
884 rdev->family == CHIP_R423 || 902 rdev->family == CHIP_R423 ||
885 rdev->family == CHIP_RV410) { 903 rdev->family == CHIP_RV410) {
886 tv_dac_cntl &= ~(RADEON_TV_DAC_STD_MASK | 904 tv_dac_cntl &= ~(RADEON_TV_DAC_STD_MASK |
887 RADEON_TV_DAC_BGADJ_MASK | 905 RADEON_TV_DAC_BGADJ_MASK |
888 R420_TV_DAC_DACADJ_MASK | 906 R420_TV_DAC_DACADJ_MASK |
889 R420_TV_DAC_RDACPD | 907 R420_TV_DAC_RDACPD |
890 R420_TV_DAC_GDACPD | 908 R420_TV_DAC_GDACPD |
891 R420_TV_DAC_BDACPD | 909 R420_TV_DAC_BDACPD |
892 R420_TV_DAC_TVENABLE); 910 R420_TV_DAC_TVENABLE);
893 } else { 911 } else {
894 tv_dac_cntl &= ~(RADEON_TV_DAC_STD_MASK | 912 tv_dac_cntl &= ~(RADEON_TV_DAC_STD_MASK |
895 RADEON_TV_DAC_BGADJ_MASK | 913 RADEON_TV_DAC_BGADJ_MASK |
896 RADEON_TV_DAC_DACADJ_MASK | 914 RADEON_TV_DAC_DACADJ_MASK |
897 RADEON_TV_DAC_RDACPD | 915 RADEON_TV_DAC_RDACPD |
898 RADEON_TV_DAC_GDACPD | 916 RADEON_TV_DAC_GDACPD |
899 RADEON_TV_DAC_BDACPD); 917 RADEON_TV_DAC_BDACPD);
900 } 918 }
901 919
902 /* FIXME TV */ 920 tv_dac_cntl |= RADEON_TV_DAC_NBLANK | RADEON_TV_DAC_NHOLD;
903 if (tv_dac) { 921
904 struct radeon_encoder_tv_dac *tv_dac = radeon_encoder->enc_priv; 922 if (is_tv) {
905 tv_dac_cntl |= (RADEON_TV_DAC_NBLANK | 923 if (tv_dac->tv_std == TV_STD_NTSC ||
906 RADEON_TV_DAC_NHOLD | 924 tv_dac->tv_std == TV_STD_NTSC_J ||
907 RADEON_TV_DAC_STD_PS2 | 925 tv_dac->tv_std == TV_STD_PAL_M ||
908 tv_dac->ps2_tvdac_adj); 926 tv_dac->tv_std == TV_STD_PAL_60)
927 tv_dac_cntl |= tv_dac->ntsc_tvdac_adj;
928 else
929 tv_dac_cntl |= tv_dac->pal_tvdac_adj;
930
931 if (tv_dac->tv_std == TV_STD_NTSC ||
932 tv_dac->tv_std == TV_STD_NTSC_J)
933 tv_dac_cntl |= RADEON_TV_DAC_STD_NTSC;
934 else
935 tv_dac_cntl |= RADEON_TV_DAC_STD_PAL;
909 } else 936 } else
910 tv_dac_cntl |= (RADEON_TV_DAC_NBLANK | 937 tv_dac_cntl |= (RADEON_TV_DAC_STD_PS2 |
911 RADEON_TV_DAC_NHOLD | 938 tv_dac->ps2_tvdac_adj);
912 RADEON_TV_DAC_STD_PS2);
913 939
914 WREG32(RADEON_TV_DAC_CNTL, tv_dac_cntl); 940 WREG32(RADEON_TV_DAC_CNTL, tv_dac_cntl);
915 } 941 }
@@ -1265,7 +1291,7 @@ static enum drm_connector_status radeon_legacy_tv_dac_detect(struct drm_encoder
1265 1291
1266static const struct drm_encoder_helper_funcs radeon_legacy_tv_dac_helper_funcs = { 1292static const struct drm_encoder_helper_funcs radeon_legacy_tv_dac_helper_funcs = {
1267 .dpms = radeon_legacy_tv_dac_dpms, 1293 .dpms = radeon_legacy_tv_dac_dpms,
1268 .mode_fixup = radeon_legacy_tv_dac_mode_fixup, 1294 .mode_fixup = radeon_legacy_mode_fixup,
1269 .prepare = radeon_legacy_tv_dac_prepare, 1295 .prepare = radeon_legacy_tv_dac_prepare,
1270 .mode_set = radeon_legacy_tv_dac_mode_set, 1296 .mode_set = radeon_legacy_tv_dac_mode_set,
1271 .commit = radeon_legacy_tv_dac_commit, 1297 .commit = radeon_legacy_tv_dac_commit,
@@ -1302,6 +1328,29 @@ static struct radeon_encoder_int_tmds *radeon_legacy_get_tmds_info(struct radeon
1302 return tmds; 1328 return tmds;
1303} 1329}
1304 1330
1331static struct radeon_encoder_ext_tmds *radeon_legacy_get_ext_tmds_info(struct radeon_encoder *encoder)
1332{
1333 struct drm_device *dev = encoder->base.dev;
1334 struct radeon_device *rdev = dev->dev_private;
1335 struct radeon_encoder_ext_tmds *tmds = NULL;
1336 bool ret;
1337
1338 if (rdev->is_atom_bios)
1339 return NULL;
1340
1341 tmds = kzalloc(sizeof(struct radeon_encoder_ext_tmds), GFP_KERNEL);
1342
1343 if (!tmds)
1344 return NULL;
1345
1346 ret = radeon_legacy_get_ext_tmds_info_from_combios(encoder, tmds);
1347
1348 if (ret == false)
1349 radeon_legacy_get_ext_tmds_info_from_table(encoder, tmds);
1350
1351 return tmds;
1352}
1353
1305void 1354void
1306radeon_add_legacy_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t supported_device) 1355radeon_add_legacy_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t supported_device)
1307{ 1356{
@@ -1329,7 +1378,6 @@ radeon_add_legacy_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t
1329 encoder->possible_crtcs = 0x1; 1378 encoder->possible_crtcs = 0x1;
1330 else 1379 else
1331 encoder->possible_crtcs = 0x3; 1380 encoder->possible_crtcs = 0x3;
1332 encoder->possible_clones = 0;
1333 1381
1334 radeon_encoder->enc_priv = NULL; 1382 radeon_encoder->enc_priv = NULL;
1335 1383
@@ -1373,7 +1421,7 @@ radeon_add_legacy_encoder(struct drm_device *dev, uint32_t encoder_id, uint32_t
1373 drm_encoder_init(dev, encoder, &radeon_legacy_tmds_ext_enc_funcs, DRM_MODE_ENCODER_TMDS); 1421 drm_encoder_init(dev, encoder, &radeon_legacy_tmds_ext_enc_funcs, DRM_MODE_ENCODER_TMDS);
1374 drm_encoder_helper_add(encoder, &radeon_legacy_tmds_ext_helper_funcs); 1422 drm_encoder_helper_add(encoder, &radeon_legacy_tmds_ext_helper_funcs);
1375 if (!rdev->is_atom_bios) 1423 if (!rdev->is_atom_bios)
1376 radeon_combios_get_ext_tmds_info(radeon_encoder); 1424 radeon_encoder->enc_priv = radeon_legacy_get_ext_tmds_info(radeon_encoder);
1377 break; 1425 break;
1378 } 1426 }
1379} 1427}
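
The four per-encoder mode_fixup callbacks removed above (primary DAC, internal and external TMDS, TV DAC) all performed the same steps, so every helper-funcs table now points at a single radeon_legacy_mode_fixup. That shared helper is defined elsewhere in radeon_legacy_encoders.c and is not shown in these hunks; the following is only a minimal sketch reconstructed from the removed bodies, and the LVDS branch is an assumption based on the radeon_panel_mode_fixup() declaration added to radeon_mode.h later in this diff:

static bool radeon_legacy_mode_fixup(struct drm_encoder *encoder,
                                     struct drm_display_mode *mode,
                                     struct drm_display_mode *adjusted_mode)
{
        struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);

        /* set the active encoder to connector routing */
        radeon_encoder_set_active_device(encoder);
        drm_mode_set_crtcinfo(adjusted_mode, 0);

        /* assumption: panels additionally get their native mode applied via
         * the radeon_panel_mode_fixup() helper declared in radeon_mode.h */
        if (radeon_encoder->active_device & ATOM_DEVICE_LCD_SUPPORT)
                radeon_panel_mode_fixup(encoder, adjusted_mode);

        return true;
}
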
diff --git a/drivers/gpu/drm/radeon/radeon_legacy_tv.c b/drivers/gpu/drm/radeon/radeon_legacy_tv.c
index 3a12bb0c0563..f2ed27c8055b 100644
--- a/drivers/gpu/drm/radeon/radeon_legacy_tv.c
+++ b/drivers/gpu/drm/radeon/radeon_legacy_tv.c
@@ -57,6 +57,10 @@
57#define NTSC_TV_PLL_N_14 693 57#define NTSC_TV_PLL_N_14 693
58#define NTSC_TV_PLL_P_14 7 58#define NTSC_TV_PLL_P_14 7
59 59
60#define PAL_TV_PLL_M_14 19
61#define PAL_TV_PLL_N_14 353
62#define PAL_TV_PLL_P_14 5
63
60#define VERT_LEAD_IN_LINES 2 64#define VERT_LEAD_IN_LINES 2
61#define FRAC_BITS 0xe 65#define FRAC_BITS 0xe
62#define FRAC_MASK 0x3fff 66#define FRAC_MASK 0x3fff
@@ -77,7 +81,7 @@ struct radeon_tv_mode_constants {
77 unsigned pix_to_tv; 81 unsigned pix_to_tv;
78}; 82};
79 83
80static const uint16_t hor_timing_NTSC[] = { 84static const uint16_t hor_timing_NTSC[MAX_H_CODE_TIMING_LEN] = {
81 0x0007, 85 0x0007,
82 0x003f, 86 0x003f,
83 0x0263, 87 0x0263,
@@ -98,7 +102,7 @@ static const uint16_t hor_timing_NTSC[] = {
98 0 102 0
99}; 103};
100 104
101static const uint16_t vert_timing_NTSC[] = { 105static const uint16_t vert_timing_NTSC[MAX_V_CODE_TIMING_LEN] = {
102 0x2001, 106 0x2001,
103 0x200d, 107 0x200d,
104 0x1006, 108 0x1006,
@@ -115,7 +119,7 @@ static const uint16_t vert_timing_NTSC[] = {
115 0 119 0
116}; 120};
117 121
118static const uint16_t hor_timing_PAL[] = { 122static const uint16_t hor_timing_PAL[MAX_H_CODE_TIMING_LEN] = {
119 0x0007, 123 0x0007,
120 0x0058, 124 0x0058,
121 0x027c, 125 0x027c,
@@ -136,7 +140,7 @@ static const uint16_t hor_timing_PAL[] = {
136 0 140 0
137}; 141};
138 142
139static const uint16_t vert_timing_PAL[] = { 143static const uint16_t vert_timing_PAL[MAX_V_CODE_TIMING_LEN] = {
140 0x2001, 144 0x2001,
141 0x200c, 145 0x200c,
142 0x1005, 146 0x1005,
@@ -205,9 +209,24 @@ static const struct radeon_tv_mode_constants available_tv_modes[] = {
205 630627, /* defRestart */ 209 630627, /* defRestart */
206 347, /* crtcPLL_N */ 210 347, /* crtcPLL_N */
207 14, /* crtcPLL_M */ 211 14, /* crtcPLL_M */
208 8, /* crtcPLL_postDiv */ 212 8, /* crtcPLL_postDiv */
209 1022, /* pixToTV */ 213 1022, /* pixToTV */
210 }, 214 },
215 { /* PAL timing for 14 Mhz ref clk */
216 800, /* horResolution */
217 600, /* verResolution */
218 TV_STD_PAL, /* standard */
219 1131, /* horTotal */
220 742, /* verTotal */
221 813, /* horStart */
222 840, /* horSyncStart */
223 633, /* verSyncStart */
224 708369, /* defRestart */
225 211, /* crtcPLL_N */
226 9, /* crtcPLL_M */
227 8, /* crtcPLL_postDiv */
228 759, /* pixToTV */
229 },
211}; 230};
212 231
213#define N_AVAILABLE_MODES ARRAY_SIZE(available_tv_modes) 232#define N_AVAILABLE_MODES ARRAY_SIZE(available_tv_modes)
@@ -242,7 +261,7 @@ static const struct radeon_tv_mode_constants *radeon_legacy_tv_get_std_mode(stru
242 if (pll->reference_freq == 2700) 261 if (pll->reference_freq == 2700)
243 const_ptr = &available_tv_modes[1]; 262 const_ptr = &available_tv_modes[1];
244 else 263 else
245 const_ptr = &available_tv_modes[1]; /* FIX ME */ 264 const_ptr = &available_tv_modes[3];
246 } 265 }
247 return const_ptr; 266 return const_ptr;
248} 267}
@@ -623,9 +642,9 @@ void radeon_legacy_tv_mode_set(struct drm_encoder *encoder,
623 } 642 }
624 flicker_removal = (tmp + 500) / 1000; 643 flicker_removal = (tmp + 500) / 1000;
625 644
626 if (flicker_removal < 3) 645 if (flicker_removal < 2)
627 flicker_removal = 3; 646 flicker_removal = 2;
628 for (i = 0; i < 6; ++i) { 647 for (i = 0; i < ARRAY_SIZE(SLOPE_limit); ++i) {
629 if (flicker_removal == SLOPE_limit[i]) 648 if (flicker_removal == SLOPE_limit[i])
630 break; 649 break;
631 } 650 }
@@ -685,9 +704,9 @@ void radeon_legacy_tv_mode_set(struct drm_encoder *encoder,
685 n = PAL_TV_PLL_N_27; 704 n = PAL_TV_PLL_N_27;
686 p = PAL_TV_PLL_P_27; 705 p = PAL_TV_PLL_P_27;
687 } else { 706 } else {
688 m = PAL_TV_PLL_M_27; 707 m = PAL_TV_PLL_M_14;
689 n = PAL_TV_PLL_N_27; 708 n = PAL_TV_PLL_N_14;
690 p = PAL_TV_PLL_P_27; 709 p = PAL_TV_PLL_P_14;
691 } 710 }
692 } 711 }
693 712
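
Previously the non-27 MHz branch fell back to the 27 MHz PAL PLL constants, and the old "/* FIX ME */" path likewise returned the 27 MHz PAL mode table. With the PAL_TV_PLL_*_14 defines and the 800x600 PAL entry added above, both the mode-constant lookup and the TV PLL programming now distinguish the two reference clocks. A hedged sketch of the resulting selection, using only constants visible in this file (the helper name is hypothetical; reference_freq is in units of 10 kHz):

static void sketch_pick_pal_tv_pll(const struct radeon_pll *pll,
                                   unsigned *m, unsigned *n, unsigned *p)
{
        if (pll->reference_freq == 2700) {
                /* 27.00 MHz reference clock: keep the existing constants */
                *m = PAL_TV_PLL_M_27;
                *n = PAL_TV_PLL_N_27;
                *p = PAL_TV_PLL_P_27;
        } else {
                /* otherwise assume the 14.318 MHz reference and use the new set */
                *m = PAL_TV_PLL_M_14;
                *n = PAL_TV_PLL_N_14;
                *p = PAL_TV_PLL_P_14;
        }
}
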
diff --git a/drivers/gpu/drm/radeon/radeon_mode.h b/drivers/gpu/drm/radeon/radeon_mode.h
index ace726aa0d76..5413fcd63086 100644
--- a/drivers/gpu/drm/radeon/radeon_mode.h
+++ b/drivers/gpu/drm/radeon/radeon_mode.h
@@ -33,6 +33,7 @@
33#include <drm_crtc.h> 33#include <drm_crtc.h>
34#include <drm_mode.h> 34#include <drm_mode.h>
35#include <drm_edid.h> 35#include <drm_edid.h>
36#include <drm_dp_helper.h>
36#include <linux/i2c.h> 37#include <linux/i2c.h>
37#include <linux/i2c-id.h> 38#include <linux/i2c-id.h>
38#include <linux/i2c-algo-bit.h> 39#include <linux/i2c-algo-bit.h>
@@ -45,32 +46,6 @@ struct radeon_device;
45#define to_radeon_encoder(x) container_of(x, struct radeon_encoder, base) 46#define to_radeon_encoder(x) container_of(x, struct radeon_encoder, base)
46#define to_radeon_framebuffer(x) container_of(x, struct radeon_framebuffer, base) 47#define to_radeon_framebuffer(x) container_of(x, struct radeon_framebuffer, base)
47 48
48enum radeon_connector_type {
49 CONNECTOR_NONE,
50 CONNECTOR_VGA,
51 CONNECTOR_DVI_I,
52 CONNECTOR_DVI_D,
53 CONNECTOR_DVI_A,
54 CONNECTOR_STV,
55 CONNECTOR_CTV,
56 CONNECTOR_LVDS,
57 CONNECTOR_DIGITAL,
58 CONNECTOR_SCART,
59 CONNECTOR_HDMI_TYPE_A,
60 CONNECTOR_HDMI_TYPE_B,
61 CONNECTOR_0XC,
62 CONNECTOR_0XD,
63 CONNECTOR_DIN,
64 CONNECTOR_DISPLAY_PORT,
65 CONNECTOR_UNSUPPORTED
66};
67
68enum radeon_dvi_type {
69 DVI_AUTO,
70 DVI_DIGITAL,
71 DVI_ANALOG
72};
73
74enum radeon_rmx_type { 49enum radeon_rmx_type {
75 RMX_OFF, 50 RMX_OFF,
76 RMX_FULL, 51 RMX_FULL,
@@ -87,26 +62,50 @@ enum radeon_tv_std {
87 TV_STD_SCART_PAL, 62 TV_STD_SCART_PAL,
88 TV_STD_SECAM, 63 TV_STD_SECAM,
89 TV_STD_PAL_CN, 64 TV_STD_PAL_CN,
65 TV_STD_PAL_N,
90}; 66};
91 67
68/* radeon gpio-based i2c
69 * 1. "mask" reg and bits
70 * grabs the gpio pins for software use
71 * 0=not held 1=held
72 * 2. "a" reg and bits
73 * output pin value
74 * 0=low 1=high
75 * 3. "en" reg and bits
76 * sets the pin direction
77 * 0=input 1=output
78 * 4. "y" reg and bits
79 * input pin value
80 * 0=low 1=high
81 */
92struct radeon_i2c_bus_rec { 82struct radeon_i2c_bus_rec {
93 bool valid; 83 bool valid;
84 /* id used by atom */
85 uint8_t i2c_id;
86 /* id used by atom */
87 uint8_t hpd_id;
88 /* can be used with hw i2c engine */
89 bool hw_capable;
90 /* uses multi-media i2c engine */
91 bool mm_i2c;
92 /* regs and bits */
94 uint32_t mask_clk_reg; 93 uint32_t mask_clk_reg;
95 uint32_t mask_data_reg; 94 uint32_t mask_data_reg;
96 uint32_t a_clk_reg; 95 uint32_t a_clk_reg;
97 uint32_t a_data_reg; 96 uint32_t a_data_reg;
98 uint32_t put_clk_reg; 97 uint32_t en_clk_reg;
99 uint32_t put_data_reg; 98 uint32_t en_data_reg;
100 uint32_t get_clk_reg; 99 uint32_t y_clk_reg;
101 uint32_t get_data_reg; 100 uint32_t y_data_reg;
102 uint32_t mask_clk_mask; 101 uint32_t mask_clk_mask;
103 uint32_t mask_data_mask; 102 uint32_t mask_data_mask;
104 uint32_t put_clk_mask;
105 uint32_t put_data_mask;
106 uint32_t get_clk_mask;
107 uint32_t get_data_mask;
108 uint32_t a_clk_mask; 103 uint32_t a_clk_mask;
109 uint32_t a_data_mask; 104 uint32_t a_data_mask;
105 uint32_t en_clk_mask;
106 uint32_t en_data_mask;
107 uint32_t y_clk_mask;
108 uint32_t y_data_mask;
110}; 109};
111 110
112struct radeon_tmds_pll { 111struct radeon_tmds_pll {
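
The comment block above documents the four register/bit groups that describe one GPIO i2c pin pair (mask = grab the pin for software use, a = output level, en = direction, y = input level), and the bus record is renamed from the old put/get fields to en/y accordingly. As a hedged illustration only, not the driver's actual routine: an open-drain style clock driver built on this record could toggle just the direction bit and let the external pull-up supply the high level. reg_read()/reg_write() below are placeholder MMIO accessors, not real radeon functions, and sketch_set_scl is hypothetical:

static void sketch_set_scl(const struct radeon_i2c_bus_rec *rec, int level)
{
        u32 val;

        /* keep every other bit in the direction register untouched */
        val = reg_read(rec->en_clk_reg) & ~rec->en_clk_mask;
        if (!level)
                val |= rec->en_clk_mask;  /* pin becomes an output, driven low */
        /* level != 0: pin stays an input and the pull-up pulls SCL high */
        reg_write(rec->en_clk_reg, val);
}
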
@@ -116,6 +115,7 @@ struct radeon_tmds_pll {
116 115
117#define RADEON_MAX_BIOS_CONNECTOR 16 116#define RADEON_MAX_BIOS_CONNECTOR 16
118 117
118/* pll flags */
119#define RADEON_PLL_USE_BIOS_DIVS (1 << 0) 119#define RADEON_PLL_USE_BIOS_DIVS (1 << 0)
120#define RADEON_PLL_NO_ODD_POST_DIV (1 << 1) 120#define RADEON_PLL_NO_ODD_POST_DIV (1 << 1)
121#define RADEON_PLL_USE_REF_DIV (1 << 2) 121#define RADEON_PLL_USE_REF_DIV (1 << 2)
@@ -128,16 +128,33 @@ struct radeon_tmds_pll {
128#define RADEON_PLL_PREFER_HIGH_POST_DIV (1 << 9) 128#define RADEON_PLL_PREFER_HIGH_POST_DIV (1 << 9)
129#define RADEON_PLL_USE_FRAC_FB_DIV (1 << 10) 129#define RADEON_PLL_USE_FRAC_FB_DIV (1 << 10)
130#define RADEON_PLL_PREFER_CLOSEST_LOWER (1 << 11) 130#define RADEON_PLL_PREFER_CLOSEST_LOWER (1 << 11)
131#define RADEON_PLL_USE_POST_DIV (1 << 12)
132#define RADEON_PLL_IS_LCD (1 << 13)
133
134/* pll algo */
135enum radeon_pll_algo {
136 PLL_ALGO_LEGACY,
137 PLL_ALGO_NEW
138};
131 139
132struct radeon_pll { 140struct radeon_pll {
133 uint16_t reference_freq; 141 /* reference frequency */
134 uint16_t reference_div; 142 uint32_t reference_freq;
143
144 /* fixed dividers */
145 uint32_t reference_div;
146 uint32_t post_div;
147
148 /* pll in/out limits */
135 uint32_t pll_in_min; 149 uint32_t pll_in_min;
136 uint32_t pll_in_max; 150 uint32_t pll_in_max;
137 uint32_t pll_out_min; 151 uint32_t pll_out_min;
138 uint32_t pll_out_max; 152 uint32_t pll_out_max;
139 uint16_t xclk; 153 uint32_t lcd_pll_out_min;
154 uint32_t lcd_pll_out_max;
155 uint32_t best_vco;
140 156
157 /* divider limits */
141 uint32_t min_ref_div; 158 uint32_t min_ref_div;
142 uint32_t max_ref_div; 159 uint32_t max_ref_div;
143 uint32_t min_post_div; 160 uint32_t min_post_div;
@@ -146,13 +163,23 @@ struct radeon_pll {
146 uint32_t max_feedback_div; 163 uint32_t max_feedback_div;
147 uint32_t min_frac_feedback_div; 164 uint32_t min_frac_feedback_div;
148 uint32_t max_frac_feedback_div; 165 uint32_t max_frac_feedback_div;
149 uint32_t best_vco; 166
167 /* flags for the current clock */
168 uint32_t flags;
169
170 /* pll id */
171 uint32_t id;
172 /* pll algo */
173 enum radeon_pll_algo algo;
150}; 174};
151 175
152struct radeon_i2c_chan { 176struct radeon_i2c_chan {
153 struct drm_device *dev;
154 struct i2c_adapter adapter; 177 struct i2c_adapter adapter;
155 struct i2c_algo_bit_data algo; 178 struct drm_device *dev;
179 union {
180 struct i2c_algo_bit_data bit;
181 struct i2c_algo_dp_aux_data dp;
182 } algo;
156 struct radeon_i2c_bus_rec rec; 183 struct radeon_i2c_bus_rec rec;
157}; 184};
158 185
@@ -170,12 +197,17 @@ enum radeon_connector_table {
170 CT_EMAC, 197 CT_EMAC,
171}; 198};
172 199
200enum radeon_dvo_chip {
201 DVO_SIL164,
202 DVO_SIL1178,
203};
204
173struct radeon_mode_info { 205struct radeon_mode_info {
174 struct atom_context *atom_context; 206 struct atom_context *atom_context;
175 struct card_info *atom_card_info; 207 struct card_info *atom_card_info;
176 enum radeon_connector_table connector_table; 208 enum radeon_connector_table connector_table;
177 bool mode_config_initialized; 209 bool mode_config_initialized;
178 struct radeon_crtc *crtcs[2]; 210 struct radeon_crtc *crtcs[6];
179 /* DVI-I properties */ 211 /* DVI-I properties */
180 struct drm_property *coherent_mode_property; 212 struct drm_property *coherent_mode_property;
181 /* DAC enable load detect */ 213 /* DAC enable load detect */
@@ -184,7 +216,8 @@ struct radeon_mode_info {
184 struct drm_property *tv_std_property; 216 struct drm_property *tv_std_property;
185 /* legacy TMDS PLL detect */ 217 /* legacy TMDS PLL detect */
186 struct drm_property *tmds_pll_property; 218 struct drm_property *tmds_pll_property;
187 219 /* hardcoded DFP edid from BIOS */
220 struct edid *bios_hardcoded_edid;
188}; 221};
189 222
190#define MAX_H_CODE_TIMING_LEN 32 223#define MAX_H_CODE_TIMING_LEN 32
@@ -219,6 +252,7 @@ struct radeon_crtc {
219 fixed20_12 vsc; 252 fixed20_12 vsc;
220 fixed20_12 hsc; 253 fixed20_12 hsc;
221 struct drm_display_mode native_mode; 254 struct drm_display_mode native_mode;
255 int pll_id;
222}; 256};
223 257
224struct radeon_encoder_primary_dac { 258struct radeon_encoder_primary_dac {
@@ -261,6 +295,13 @@ struct radeon_encoder_int_tmds {
261 struct radeon_tmds_pll tmds_pll[4]; 295 struct radeon_tmds_pll tmds_pll[4];
262}; 296};
263 297
298struct radeon_encoder_ext_tmds {
299 /* tmds over dvo */
300 struct radeon_i2c_chan *i2c_bus;
301 uint8_t slave_addr;
302 enum radeon_dvo_chip dvo_chip;
303};
304
264/* spread spectrum */ 305/* spread spectrum */
265struct radeon_atom_ss { 306struct radeon_atom_ss {
266 uint16_t percentage; 307 uint16_t percentage;
@@ -274,10 +315,11 @@ struct radeon_atom_ss {
274struct radeon_encoder_atom_dig { 315struct radeon_encoder_atom_dig {
275 /* atom dig */ 316 /* atom dig */
276 bool coherent_mode; 317 bool coherent_mode;
277 int dig_block; 318 int dig_encoder; /* -1 disabled, 0 DIGA, 1 DIGB */
278 /* atom lvds */ 319 /* atom lvds */
279 uint32_t lvds_misc; 320 uint32_t lvds_misc;
280 uint16_t panel_pwr_delay; 321 uint16_t panel_pwr_delay;
322 enum radeon_pll_algo pll_algo;
281 struct radeon_atom_ss *ss; 323 struct radeon_atom_ss *ss;
282 /* panel mode */ 324 /* panel mode */
283 struct drm_display_mode native_mode; 325 struct drm_display_mode native_mode;
@@ -297,11 +339,44 @@ struct radeon_encoder {
297 enum radeon_rmx_type rmx_type; 339 enum radeon_rmx_type rmx_type;
298 struct drm_display_mode native_mode; 340 struct drm_display_mode native_mode;
299 void *enc_priv; 341 void *enc_priv;
342 int hdmi_offset;
343 int hdmi_config_offset;
344 int hdmi_audio_workaround;
345 int hdmi_buffer_status;
300}; 346};
301 347
302struct radeon_connector_atom_dig { 348struct radeon_connector_atom_dig {
303 uint32_t igp_lane_info; 349 uint32_t igp_lane_info;
304 bool linkb; 350 bool linkb;
351 /* displayport */
352 struct radeon_i2c_chan *dp_i2c_bus;
353 u8 dpcd[8];
354 u8 dp_sink_type;
355 int dp_clock;
356 int dp_lane_count;
357};
358
359struct radeon_gpio_rec {
360 bool valid;
361 u8 id;
362 u32 reg;
363 u32 mask;
364};
365
366enum radeon_hpd_id {
367 RADEON_HPD_NONE = 0,
368 RADEON_HPD_1,
369 RADEON_HPD_2,
370 RADEON_HPD_3,
371 RADEON_HPD_4,
372 RADEON_HPD_5,
373 RADEON_HPD_6,
374};
375
376struct radeon_hpd {
377 enum radeon_hpd_id hpd;
378 u8 plugged_state;
379 struct radeon_gpio_rec gpio;
305}; 380};
306 381
307struct radeon_connector { 382struct radeon_connector {
@@ -318,6 +393,7 @@ struct radeon_connector {
318 void *con_priv; 393 void *con_priv;
319 bool dac_load_detect; 394 bool dac_load_detect;
320 uint16_t connector_object_id; 395 uint16_t connector_object_id;
396 struct radeon_hpd hpd;
321}; 397};
322 398
323struct radeon_framebuffer { 399struct radeon_framebuffer {
@@ -325,10 +401,43 @@ struct radeon_framebuffer {
325 struct drm_gem_object *obj; 401 struct drm_gem_object *obj;
326}; 402};
327 403
404extern enum radeon_tv_std
405radeon_combios_get_tv_info(struct radeon_device *rdev);
406extern enum radeon_tv_std
407radeon_atombios_get_tv_info(struct radeon_device *rdev);
408
409extern void radeon_connector_hotplug(struct drm_connector *connector);
410extern bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector);
411extern int radeon_dp_mode_valid_helper(struct radeon_connector *radeon_connector,
412 struct drm_display_mode *mode);
413extern void radeon_dp_set_link_config(struct drm_connector *connector,
414 struct drm_display_mode *mode);
415extern void dp_link_train(struct drm_encoder *encoder,
416 struct drm_connector *connector);
417extern u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector);
418extern bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector);
419extern void atombios_dig_encoder_setup(struct drm_encoder *encoder, int action);
420extern void atombios_dig_transmitter_setup(struct drm_encoder *encoder,
421 int action, uint8_t lane_num,
422 uint8_t lane_set);
423extern int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
424 uint8_t write_byte, uint8_t *read_byte);
425
426extern struct radeon_i2c_chan *radeon_i2c_create_dp(struct drm_device *dev,
427 struct radeon_i2c_bus_rec *rec,
428 const char *name);
328extern struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev, 429extern struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev,
329 struct radeon_i2c_bus_rec *rec, 430 struct radeon_i2c_bus_rec *rec,
330 const char *name); 431 const char *name);
331extern void radeon_i2c_destroy(struct radeon_i2c_chan *i2c); 432extern void radeon_i2c_destroy(struct radeon_i2c_chan *i2c);
433extern void radeon_i2c_get_byte(struct radeon_i2c_chan *i2c_bus,
434 u8 slave_addr,
435 u8 addr,
436 u8 *val);
437extern void radeon_i2c_put_byte(struct radeon_i2c_chan *i2c,
438 u8 slave_addr,
439 u8 addr,
440 u8 val);
332extern bool radeon_ddc_probe(struct radeon_connector *radeon_connector); 441extern bool radeon_ddc_probe(struct radeon_connector *radeon_connector);
333extern int radeon_ddc_get_modes(struct radeon_connector *radeon_connector); 442extern int radeon_ddc_get_modes(struct radeon_connector *radeon_connector);
334 443
@@ -340,8 +449,9 @@ extern void radeon_compute_pll(struct radeon_pll *pll,
340 uint32_t *fb_div_p, 449 uint32_t *fb_div_p,
341 uint32_t *frac_fb_div_p, 450 uint32_t *frac_fb_div_p,
342 uint32_t *ref_div_p, 451 uint32_t *ref_div_p,
343 uint32_t *post_div_p, 452 uint32_t *post_div_p);
344 int flags); 453
454extern void radeon_setup_encoder_clones(struct drm_device *dev);
345 455
346struct drm_encoder *radeon_encoder_legacy_lvds_add(struct drm_device *dev, int bios_index); 456struct drm_encoder *radeon_encoder_legacy_lvds_add(struct drm_device *dev, int bios_index);
347struct drm_encoder *radeon_encoder_legacy_primary_dac_add(struct drm_device *dev, int bios_index, int with_tv); 457struct drm_encoder *radeon_encoder_legacy_primary_dac_add(struct drm_device *dev, int bios_index, int with_tv);
@@ -349,6 +459,7 @@ struct drm_encoder *radeon_encoder_legacy_tv_dac_add(struct drm_device *dev, int
349struct drm_encoder *radeon_encoder_legacy_tmds_int_add(struct drm_device *dev, int bios_index); 459struct drm_encoder *radeon_encoder_legacy_tmds_int_add(struct drm_device *dev, int bios_index);
350struct drm_encoder *radeon_encoder_legacy_tmds_ext_add(struct drm_device *dev, int bios_index); 460struct drm_encoder *radeon_encoder_legacy_tmds_ext_add(struct drm_device *dev, int bios_index);
351extern void atombios_external_tmds_setup(struct drm_encoder *encoder, int action); 461extern void atombios_external_tmds_setup(struct drm_encoder *encoder, int action);
462extern void atombios_digital_setup(struct drm_encoder *encoder, int action);
352extern int atombios_get_encoder_mode(struct drm_encoder *encoder); 463extern int atombios_get_encoder_mode(struct drm_encoder *encoder);
353extern void radeon_encoder_set_active_device(struct drm_encoder *encoder); 464extern void radeon_encoder_set_active_device(struct drm_encoder *encoder);
354 465
@@ -364,7 +475,6 @@ extern void atombios_crtc_dpms(struct drm_crtc *crtc, int mode);
364 475
365extern int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y, 476extern int radeon_crtc_set_base(struct drm_crtc *crtc, int x, int y,
366 struct drm_framebuffer *old_fb); 477 struct drm_framebuffer *old_fb);
367extern void radeon_legacy_atom_set_surface(struct drm_crtc *crtc);
368 478
369extern int radeon_crtc_cursor_set(struct drm_crtc *crtc, 479extern int radeon_crtc_cursor_set(struct drm_crtc *crtc,
370 struct drm_file *file_priv, 480 struct drm_file *file_priv,
@@ -374,16 +484,23 @@ extern int radeon_crtc_cursor_set(struct drm_crtc *crtc,
374extern int radeon_crtc_cursor_move(struct drm_crtc *crtc, 484extern int radeon_crtc_cursor_move(struct drm_crtc *crtc,
375 int x, int y); 485 int x, int y);
376 486
487extern bool radeon_combios_check_hardcoded_edid(struct radeon_device *rdev);
488extern struct edid *
489radeon_combios_get_hardcoded_edid(struct radeon_device *rdev);
377extern bool radeon_atom_get_clock_info(struct drm_device *dev); 490extern bool radeon_atom_get_clock_info(struct drm_device *dev);
378extern bool radeon_combios_get_clock_info(struct drm_device *dev); 491extern bool radeon_combios_get_clock_info(struct drm_device *dev);
379extern struct radeon_encoder_atom_dig * 492extern struct radeon_encoder_atom_dig *
380radeon_atombios_get_lvds_info(struct radeon_encoder *encoder); 493radeon_atombios_get_lvds_info(struct radeon_encoder *encoder);
381bool radeon_atombios_get_tmds_info(struct radeon_encoder *encoder, 494extern bool radeon_atombios_get_tmds_info(struct radeon_encoder *encoder,
382 struct radeon_encoder_int_tmds *tmds); 495 struct radeon_encoder_int_tmds *tmds);
383bool radeon_legacy_get_tmds_info_from_combios(struct radeon_encoder *encoder, 496extern bool radeon_legacy_get_tmds_info_from_combios(struct radeon_encoder *encoder,
384 struct radeon_encoder_int_tmds *tmds); 497 struct radeon_encoder_int_tmds *tmds);
385bool radeon_legacy_get_tmds_info_from_table(struct radeon_encoder *encoder, 498extern bool radeon_legacy_get_tmds_info_from_table(struct radeon_encoder *encoder,
386 struct radeon_encoder_int_tmds *tmds); 499 struct radeon_encoder_int_tmds *tmds);
500extern bool radeon_legacy_get_ext_tmds_info_from_combios(struct radeon_encoder *encoder,
501 struct radeon_encoder_ext_tmds *tmds);
502extern bool radeon_legacy_get_ext_tmds_info_from_table(struct radeon_encoder *encoder,
503 struct radeon_encoder_ext_tmds *tmds);
387extern struct radeon_encoder_primary_dac * 504extern struct radeon_encoder_primary_dac *
388radeon_atombios_get_primary_dac_info(struct radeon_encoder *encoder); 505radeon_atombios_get_primary_dac_info(struct radeon_encoder *encoder);
389extern struct radeon_encoder_tv_dac * 506extern struct radeon_encoder_tv_dac *
@@ -395,6 +512,8 @@ extern struct radeon_encoder_tv_dac *
395radeon_combios_get_tv_dac_info(struct radeon_encoder *encoder); 512radeon_combios_get_tv_dac_info(struct radeon_encoder *encoder);
396extern struct radeon_encoder_primary_dac * 513extern struct radeon_encoder_primary_dac *
397radeon_combios_get_primary_dac_info(struct radeon_encoder *encoder); 514radeon_combios_get_primary_dac_info(struct radeon_encoder *encoder);
515extern bool radeon_combios_external_tmds_setup(struct drm_encoder *encoder);
516extern void radeon_external_tmds_setup(struct drm_encoder *encoder);
398extern void radeon_combios_output_lock(struct drm_encoder *encoder, bool lock); 517extern void radeon_combios_output_lock(struct drm_encoder *encoder, bool lock);
399extern void radeon_combios_initialize_bios_scratch_regs(struct drm_device *dev); 518extern void radeon_combios_initialize_bios_scratch_regs(struct drm_device *dev);
400extern void radeon_atom_output_lock(struct drm_encoder *encoder, bool lock); 519extern void radeon_atom_output_lock(struct drm_encoder *encoder, bool lock);
@@ -426,16 +545,12 @@ void radeon_atombios_init_crtc(struct drm_device *dev,
426 struct radeon_crtc *radeon_crtc); 545 struct radeon_crtc *radeon_crtc);
427void radeon_legacy_init_crtc(struct drm_device *dev, 546void radeon_legacy_init_crtc(struct drm_device *dev,
428 struct radeon_crtc *radeon_crtc); 547 struct radeon_crtc *radeon_crtc);
429void radeon_i2c_do_lock(struct radeon_connector *radeon_connector, int lock_state);
430 548
431void radeon_get_clock_info(struct drm_device *dev); 549void radeon_get_clock_info(struct drm_device *dev);
432 550
433extern bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev); 551extern bool radeon_get_atom_connector_info_from_object_table(struct drm_device *dev);
434extern bool radeon_get_atom_connector_info_from_supported_devices_table(struct drm_device *dev); 552extern bool radeon_get_atom_connector_info_from_supported_devices_table(struct drm_device *dev);
435 553
436void radeon_rmx_mode_fixup(struct drm_encoder *encoder,
437 struct drm_display_mode *mode,
438 struct drm_display_mode *adjusted_mode);
439void radeon_enc_destroy(struct drm_encoder *encoder); 554void radeon_enc_destroy(struct drm_encoder *encoder);
440void radeon_copy_fb(struct drm_device *dev, struct drm_gem_object *dst_obj); 555void radeon_copy_fb(struct drm_device *dev, struct drm_gem_object *dst_obj);
441void radeon_combios_asic_init(struct drm_device *dev); 556void radeon_combios_asic_init(struct drm_device *dev);
@@ -443,6 +558,8 @@ extern int radeon_static_clocks_init(struct drm_device *dev);
443bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc, 558bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
444 struct drm_display_mode *mode, 559 struct drm_display_mode *mode,
445 struct drm_display_mode *adjusted_mode); 560 struct drm_display_mode *adjusted_mode);
561void radeon_panel_mode_fixup(struct drm_encoder *encoder,
562 struct drm_display_mode *adjusted_mode);
446void atom_rv515_force_tv_scaler(struct radeon_device *rdev, struct radeon_crtc *radeon_crtc); 563void atom_rv515_force_tv_scaler(struct radeon_device *rdev, struct radeon_crtc *radeon_crtc);
447 564
448/* legacy tv */ 565/* legacy tv */
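
Two related API notes fall out of this header rework: radeon_compute_pll() no longer takes a flags argument, and struct radeon_pll now carries the per-mode-set options itself (flags, the fixed post_div, the LCD output limits, and the algo selector). A hedged sketch of how a caller would now parameterize a clock computation, using only fields and flag bits defined above; the helper name is hypothetical:

static void sketch_setup_lcd_pll(struct radeon_pll *pll)
{
        /* options travel inside the struct rather than as a separate argument */
        pll->flags = RADEON_PLL_USE_FRAC_FB_DIV |  /* allow a fractional feedback divider */
                     RADEON_PLL_IS_LCD;            /* steer the solver to the lcd_pll_out_* limits above */
        pll->algo = PLL_ALGO_NEW;                  /* or PLL_ALGO_LEGACY */
}

radeon_compute_pll() then reads pll->flags internally instead of receiving them on every call.
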
diff --git a/drivers/gpu/drm/radeon/radeon_object.c b/drivers/gpu/drm/radeon/radeon_object.c
index 1f056dadc5c2..122774742bd5 100644
--- a/drivers/gpu/drm/radeon/radeon_object.c
+++ b/drivers/gpu/drm/radeon/radeon_object.c
@@ -30,104 +30,67 @@
30 * Dave Airlie 30 * Dave Airlie
31 */ 31 */
32#include <linux/list.h> 32#include <linux/list.h>
33#include <linux/slab.h>
33#include <drm/drmP.h> 34#include <drm/drmP.h>
34#include "radeon_drm.h" 35#include "radeon_drm.h"
35#include "radeon.h" 36#include "radeon.h"
36 37
37struct radeon_object {
38 struct ttm_buffer_object tobj;
39 struct list_head list;
40 struct radeon_device *rdev;
41 struct drm_gem_object *gobj;
42 struct ttm_bo_kmap_obj kmap;
43 unsigned pin_count;
44 uint64_t gpu_addr;
45 void *kptr;
46 bool is_iomem;
47 uint32_t tiling_flags;
48 uint32_t pitch;
49 int surface_reg;
50};
51 38
52int radeon_ttm_init(struct radeon_device *rdev); 39int radeon_ttm_init(struct radeon_device *rdev);
53void radeon_ttm_fini(struct radeon_device *rdev); 40void radeon_ttm_fini(struct radeon_device *rdev);
41static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
54 42
55/* 43/*
56 * To exclude mutual BO access we rely on bo_reserve exclusion, as all 44 * To exclude mutual BO access we rely on bo_reserve exclusion, as all
57 * function are calling it. 45 * function are calling it.
58 */ 46 */
59 47
60static int radeon_object_reserve(struct radeon_object *robj, bool interruptible) 48static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)
61{ 49{
62 return ttm_bo_reserve(&robj->tobj, interruptible, false, false, 0); 50 struct radeon_bo *bo;
63}
64 51
65static void radeon_object_unreserve(struct radeon_object *robj) 52 bo = container_of(tbo, struct radeon_bo, tbo);
66{ 53 mutex_lock(&bo->rdev->gem.mutex);
67 ttm_bo_unreserve(&robj->tobj); 54 list_del_init(&bo->list);
55 mutex_unlock(&bo->rdev->gem.mutex);
56 radeon_bo_clear_surface_reg(bo);
57 kfree(bo);
68} 58}
69 59
70static void radeon_ttm_object_object_destroy(struct ttm_buffer_object *tobj) 60bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo)
71{ 61{
72 struct radeon_object *robj; 62 if (bo->destroy == &radeon_ttm_bo_destroy)
73 63 return true;
74 robj = container_of(tobj, struct radeon_object, tobj); 64 return false;
75 list_del_init(&robj->list);
76 radeon_object_clear_surface_reg(robj);
77 kfree(robj);
78} 65}
79 66
80static inline void radeon_object_gpu_addr(struct radeon_object *robj) 67void radeon_ttm_placement_from_domain(struct radeon_bo *rbo, u32 domain)
81{ 68{
82 /* Default gpu address */ 69 u32 c = 0;
83 robj->gpu_addr = 0xFFFFFFFFFFFFFFFFULL;
84 if (robj->tobj.mem.mm_node == NULL) {
85 return;
86 }
87 robj->gpu_addr = ((u64)robj->tobj.mem.mm_node->start) << PAGE_SHIFT;
88 switch (robj->tobj.mem.mem_type) {
89 case TTM_PL_VRAM:
90 robj->gpu_addr += (u64)robj->rdev->mc.vram_location;
91 break;
92 case TTM_PL_TT:
93 robj->gpu_addr += (u64)robj->rdev->mc.gtt_location;
94 break;
95 default:
96 DRM_ERROR("Unknown placement %d\n", robj->tobj.mem.mem_type);
97 robj->gpu_addr = 0xFFFFFFFFFFFFFFFFULL;
98 return;
99 }
100}
101 70
102static inline uint32_t radeon_object_flags_from_domain(uint32_t domain) 71 rbo->placement.fpfn = 0;
103{ 72 rbo->placement.lpfn = 0;
104 uint32_t flags = 0; 73 rbo->placement.placement = rbo->placements;
105 if (domain & RADEON_GEM_DOMAIN_VRAM) { 74 rbo->placement.busy_placement = rbo->placements;
106 flags |= TTM_PL_FLAG_VRAM | TTM_PL_FLAG_WC | TTM_PL_FLAG_UNCACHED; 75 if (domain & RADEON_GEM_DOMAIN_VRAM)
107 } 76 rbo->placements[c++] = TTM_PL_FLAG_WC | TTM_PL_FLAG_UNCACHED |
108 if (domain & RADEON_GEM_DOMAIN_GTT) { 77 TTM_PL_FLAG_VRAM;
109 flags |= TTM_PL_FLAG_TT | TTM_PL_MASK_CACHING; 78 if (domain & RADEON_GEM_DOMAIN_GTT)
110 } 79 rbo->placements[c++] = TTM_PL_MASK_CACHING | TTM_PL_FLAG_TT;
111 if (domain & RADEON_GEM_DOMAIN_CPU) { 80 if (domain & RADEON_GEM_DOMAIN_CPU)
112 flags |= TTM_PL_FLAG_SYSTEM | TTM_PL_MASK_CACHING; 81 rbo->placements[c++] = TTM_PL_MASK_CACHING | TTM_PL_FLAG_SYSTEM;
113 } 82 if (!c)
114 if (!flags) { 83 rbo->placements[c++] = TTM_PL_MASK_CACHING | TTM_PL_FLAG_SYSTEM;
115 flags |= TTM_PL_FLAG_SYSTEM | TTM_PL_MASK_CACHING; 84 rbo->placement.num_placement = c;
116 } 85 rbo->placement.num_busy_placement = c;
117 return flags;
118} 86}
119 87
120int radeon_object_create(struct radeon_device *rdev, 88int radeon_bo_create(struct radeon_device *rdev, struct drm_gem_object *gobj,
121 struct drm_gem_object *gobj, 89 unsigned long size, bool kernel, u32 domain,
122 unsigned long size, 90 struct radeon_bo **bo_ptr)
123 bool kernel,
124 uint32_t domain,
125 bool interruptible,
126 struct radeon_object **robj_ptr)
127{ 91{
128 struct radeon_object *robj; 92 struct radeon_bo *bo;
129 enum ttm_bo_type type; 93 enum ttm_bo_type type;
130 uint32_t flags;
131 int r; 94 int r;
132 95
133 if (unlikely(rdev->mman.bdev.dev_mapping == NULL)) { 96 if (unlikely(rdev->mman.bdev.dev_mapping == NULL)) {
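
radeon_ttm_placement_from_domain() above replaces the old flags-from-domain helper: each requested domain appends one entry to the placements array, in VRAM, GTT, CPU order, with a cached system placement as the fallback when no domain bit is set. A small hedged illustration of what a combined request produces (the helper name is hypothetical; the comments restate the mapping in the function body):

static void sketch_prefer_vram_then_gtt(struct radeon_bo *rbo)
{
        radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_VRAM |
                                              RADEON_GEM_DOMAIN_GTT);
        /* placements[0]: TTM_PL_FLAG_WC | TTM_PL_FLAG_UNCACHED | TTM_PL_FLAG_VRAM */
        /* placements[1]: TTM_PL_MASK_CACHING | TTM_PL_FLAG_TT */
        /* num_placement == num_busy_placement == 2, so TTM tries VRAM first */
}
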
@@ -138,238 +101,164 @@ int radeon_object_create(struct radeon_device *rdev,
138 } else { 101 } else {
139 type = ttm_bo_type_device; 102 type = ttm_bo_type_device;
140 } 103 }
141 *robj_ptr = NULL; 104 *bo_ptr = NULL;
142 robj = kzalloc(sizeof(struct radeon_object), GFP_KERNEL); 105 bo = kzalloc(sizeof(struct radeon_bo), GFP_KERNEL);
143 if (robj == NULL) { 106 if (bo == NULL)
144 return -ENOMEM; 107 return -ENOMEM;
145 } 108 bo->rdev = rdev;
146 robj->rdev = rdev; 109 bo->gobj = gobj;
147 robj->gobj = gobj; 110 bo->surface_reg = -1;
148 robj->surface_reg = -1; 111 INIT_LIST_HEAD(&bo->list);
149 INIT_LIST_HEAD(&robj->list); 112
150 113 radeon_ttm_placement_from_domain(bo, domain);
151 flags = radeon_object_flags_from_domain(domain); 114 /* Kernel allocation are uninterruptible */
152 r = ttm_buffer_object_init(&rdev->mman.bdev, &robj->tobj, size, type, flags, 115 r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type,
153 0, 0, false, NULL, size, 116 &bo->placement, 0, 0, !kernel, NULL, size,
154 &radeon_ttm_object_object_destroy); 117 &radeon_ttm_bo_destroy);
155 if (unlikely(r != 0)) { 118 if (unlikely(r != 0)) {
156 /* ttm call radeon_ttm_object_object_destroy if error happen */ 119 if (r != -ERESTARTSYS)
157 DRM_ERROR("Failed to allocate TTM object (%ld, 0x%08X, %u)\n", 120 dev_err(rdev->dev,
158 size, flags, 0); 121 "object_init failed for (%lu, 0x%08X)\n",
122 size, domain);
159 return r; 123 return r;
160 } 124 }
161 *robj_ptr = robj; 125 *bo_ptr = bo;
162 if (gobj) { 126 if (gobj) {
163 list_add_tail(&robj->list, &rdev->gem.objects); 127 mutex_lock(&bo->rdev->gem.mutex);
128 list_add_tail(&bo->list, &rdev->gem.objects);
129 mutex_unlock(&bo->rdev->gem.mutex);
164 } 130 }
165 return 0; 131 return 0;
166} 132}
167 133
168int radeon_object_kmap(struct radeon_object *robj, void **ptr) 134int radeon_bo_kmap(struct radeon_bo *bo, void **ptr)
169{ 135{
136 bool is_iomem;
170 int r; 137 int r;
171 138
172 spin_lock(&robj->tobj.lock); 139 if (bo->kptr) {
173 if (robj->kptr) {
174 if (ptr) { 140 if (ptr) {
175 *ptr = robj->kptr; 141 *ptr = bo->kptr;
176 } 142 }
177 spin_unlock(&robj->tobj.lock);
178 return 0; 143 return 0;
179 } 144 }
180 spin_unlock(&robj->tobj.lock); 145 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
181 r = ttm_bo_kmap(&robj->tobj, 0, robj->tobj.num_pages, &robj->kmap);
182 if (r) { 146 if (r) {
183 return r; 147 return r;
184 } 148 }
185 spin_lock(&robj->tobj.lock); 149 bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem);
186 robj->kptr = ttm_kmap_obj_virtual(&robj->kmap, &robj->is_iomem);
187 spin_unlock(&robj->tobj.lock);
188 if (ptr) { 150 if (ptr) {
189 *ptr = robj->kptr; 151 *ptr = bo->kptr;
190 } 152 }
191 radeon_object_check_tiling(robj, 0, 0); 153 radeon_bo_check_tiling(bo, 0, 0);
192 return 0; 154 return 0;
193} 155}
194 156
195void radeon_object_kunmap(struct radeon_object *robj) 157void radeon_bo_kunmap(struct radeon_bo *bo)
196{ 158{
197 spin_lock(&robj->tobj.lock); 159 if (bo->kptr == NULL)
198 if (robj->kptr == NULL) {
199 spin_unlock(&robj->tobj.lock);
200 return; 160 return;
201 } 161 bo->kptr = NULL;
202 robj->kptr = NULL; 162 radeon_bo_check_tiling(bo, 0, 0);
203 spin_unlock(&robj->tobj.lock); 163 ttm_bo_kunmap(&bo->kmap);
204 radeon_object_check_tiling(robj, 0, 0);
205 ttm_bo_kunmap(&robj->kmap);
206} 164}
207 165
208void radeon_object_unref(struct radeon_object **robj) 166void radeon_bo_unref(struct radeon_bo **bo)
209{ 167{
210 struct ttm_buffer_object *tobj; 168 struct ttm_buffer_object *tbo;
211 169
212 if ((*robj) == NULL) { 170 if ((*bo) == NULL)
213 return; 171 return;
214 } 172 tbo = &((*bo)->tbo);
215 tobj = &((*robj)->tobj); 173 ttm_bo_unref(&tbo);
216 ttm_bo_unref(&tobj); 174 if (tbo == NULL)
217 if (tobj == NULL) { 175 *bo = NULL;
218 *robj = NULL;
219 }
220}
221
222int radeon_object_mmap(struct radeon_object *robj, uint64_t *offset)
223{
224 *offset = robj->tobj.addr_space_offset;
225 return 0;
226} 176}
227 177
228int radeon_object_pin(struct radeon_object *robj, uint32_t domain, 178int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr)
229 uint64_t *gpu_addr)
230{ 179{
231 uint32_t flags; 180 int r, i;
232 uint32_t tmp;
233 int r;
234 181
235 flags = radeon_object_flags_from_domain(domain); 182 if (bo->pin_count) {
236 spin_lock(&robj->tobj.lock); 183 bo->pin_count++;
237 if (robj->pin_count) { 184 if (gpu_addr)
238 robj->pin_count++; 185 *gpu_addr = radeon_bo_gpu_offset(bo);
239 if (gpu_addr != NULL) {
240 *gpu_addr = robj->gpu_addr;
241 }
242 spin_unlock(&robj->tobj.lock);
243 return 0; 186 return 0;
244 } 187 }
245 spin_unlock(&robj->tobj.lock); 188 radeon_ttm_placement_from_domain(bo, domain);
246 r = radeon_object_reserve(robj, false); 189 if (domain == RADEON_GEM_DOMAIN_VRAM) {
247 if (unlikely(r != 0)) { 190 /* force to pin into visible video ram */
248 DRM_ERROR("radeon: failed to reserve object for pinning it.\n"); 191 bo->placement.lpfn = bo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
249 return r; 192 }
250 } 193 for (i = 0; i < bo->placement.num_placement; i++)
251 tmp = robj->tobj.mem.placement; 194 bo->placements[i] |= TTM_PL_FLAG_NO_EVICT;
252 ttm_flag_masked(&tmp, flags, TTM_PL_MASK_MEM); 195 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);
253 robj->tobj.proposed_placement = tmp | TTM_PL_FLAG_NO_EVICT | TTM_PL_MASK_CACHING; 196 if (likely(r == 0)) {
254 r = ttm_buffer_object_validate(&robj->tobj, 197 bo->pin_count = 1;
255 robj->tobj.proposed_placement, 198 if (gpu_addr != NULL)
256 false, false); 199 *gpu_addr = radeon_bo_gpu_offset(bo);
257 radeon_object_gpu_addr(robj); 200 }
258 if (gpu_addr != NULL) { 201 if (unlikely(r != 0))
259 *gpu_addr = robj->gpu_addr; 202 dev_err(bo->rdev->dev, "%p pin failed\n", bo);
260 }
261 robj->pin_count = 1;
262 if (unlikely(r != 0)) {
263 DRM_ERROR("radeon: failed to pin object.\n");
264 }
265 radeon_object_unreserve(robj);
266 return r; 203 return r;
267} 204}
268 205
269void radeon_object_unpin(struct radeon_object *robj) 206int radeon_bo_unpin(struct radeon_bo *bo)
270{ 207{
271 uint32_t flags; 208 int r, i;
272 int r;
273 209
274 spin_lock(&robj->tobj.lock); 210 if (!bo->pin_count) {
275 if (!robj->pin_count) { 211 dev_warn(bo->rdev->dev, "%p unpin not necessary\n", bo);
276 spin_unlock(&robj->tobj.lock); 212 return 0;
277 printk(KERN_WARNING "Unpin not necessary for %p !\n", robj);
278 return;
279 }
280 robj->pin_count--;
281 if (robj->pin_count) {
282 spin_unlock(&robj->tobj.lock);
283 return;
284 }
285 spin_unlock(&robj->tobj.lock);
286 r = radeon_object_reserve(robj, false);
287 if (unlikely(r != 0)) {
288 DRM_ERROR("radeon: failed to reserve object for unpinning it.\n");
289 return;
290 }
291 flags = robj->tobj.mem.placement;
292 robj->tobj.proposed_placement = flags & ~TTM_PL_FLAG_NO_EVICT;
293 r = ttm_buffer_object_validate(&robj->tobj,
294 robj->tobj.proposed_placement,
295 false, false);
296 if (unlikely(r != 0)) {
297 DRM_ERROR("radeon: failed to unpin buffer.\n");
298 }
299 radeon_object_unreserve(robj);
300}
301
302int radeon_object_wait(struct radeon_object *robj)
303{
304 int r = 0;
305
306 /* FIXME: should use block reservation instead */
307 r = radeon_object_reserve(robj, true);
308 if (unlikely(r != 0)) {
309 DRM_ERROR("radeon: failed to reserve object for waiting.\n");
310 return r;
311 }
312 spin_lock(&robj->tobj.lock);
313 if (robj->tobj.sync_obj) {
314 r = ttm_bo_wait(&robj->tobj, true, true, false);
315 }
316 spin_unlock(&robj->tobj.lock);
317 radeon_object_unreserve(robj);
318 return r;
319}
320
321int radeon_object_busy_domain(struct radeon_object *robj, uint32_t *cur_placement)
322{
323 int r = 0;
324
325 r = radeon_object_reserve(robj, true);
326 if (unlikely(r != 0)) {
327 DRM_ERROR("radeon: failed to reserve object for waiting.\n");
328 return r;
329 }
330 spin_lock(&robj->tobj.lock);
331 *cur_placement = robj->tobj.mem.mem_type;
332 if (robj->tobj.sync_obj) {
333 r = ttm_bo_wait(&robj->tobj, true, true, true);
334 } 213 }
335 spin_unlock(&robj->tobj.lock); 214 bo->pin_count--;
336 radeon_object_unreserve(robj); 215 if (bo->pin_count)
216 return 0;
217 for (i = 0; i < bo->placement.num_placement; i++)
218 bo->placements[i] &= ~TTM_PL_FLAG_NO_EVICT;
219 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);
220 if (unlikely(r != 0))
221 dev_err(bo->rdev->dev, "%p validate failed for unpin\n", bo);
337 return r; 222 return r;
338} 223}
339 224
340int radeon_object_evict_vram(struct radeon_device *rdev) 225int radeon_bo_evict_vram(struct radeon_device *rdev)
341{ 226{
342 if (rdev->flags & RADEON_IS_IGP) { 227 /* late 2.6.33 fix IGP hibernate - we need pm ops to do this correct */
343 /* Useless to evict on IGP chips */ 228 if (0 && (rdev->flags & RADEON_IS_IGP)) {
344 return 0; 229 if (rdev->mc.igp_sideport_enabled == false)
230 /* Useless to evict on IGP chips */
231 return 0;
345 } 232 }
346 return ttm_bo_evict_mm(&rdev->mman.bdev, TTM_PL_VRAM); 233 return ttm_bo_evict_mm(&rdev->mman.bdev, TTM_PL_VRAM);
347} 234}
348 235
349void radeon_object_force_delete(struct radeon_device *rdev) 236void radeon_bo_force_delete(struct radeon_device *rdev)
350{ 237{
351 struct radeon_object *robj, *n; 238 struct radeon_bo *bo, *n;
352 struct drm_gem_object *gobj; 239 struct drm_gem_object *gobj;
353 240
354 if (list_empty(&rdev->gem.objects)) { 241 if (list_empty(&rdev->gem.objects)) {
355 return; 242 return;
356 } 243 }
357 DRM_ERROR("Userspace still has active objects !\n"); 244 dev_err(rdev->dev, "Userspace still has active objects !\n");
358 list_for_each_entry_safe(robj, n, &rdev->gem.objects, list) { 245 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {
359 mutex_lock(&rdev->ddev->struct_mutex); 246 mutex_lock(&rdev->ddev->struct_mutex);
360 gobj = robj->gobj; 247 gobj = bo->gobj;
361 DRM_ERROR("Force free for (%p,%p,%lu,%lu)\n", 248 dev_err(rdev->dev, "%p %p %lu %lu force free\n",
362 gobj, robj, (unsigned long)gobj->size, 249 gobj, bo, (unsigned long)gobj->size,
363 *((unsigned long *)&gobj->refcount)); 250 *((unsigned long *)&gobj->refcount));
364 list_del_init(&robj->list); 251 mutex_lock(&bo->rdev->gem.mutex);
365 radeon_object_unref(&robj); 252 list_del_init(&bo->list);
253 mutex_unlock(&bo->rdev->gem.mutex);
254 radeon_bo_unref(&bo);
366 gobj->driver_private = NULL; 255 gobj->driver_private = NULL;
367 drm_gem_object_unreference(gobj); 256 drm_gem_object_unreference(gobj);
368 mutex_unlock(&rdev->ddev->struct_mutex); 257 mutex_unlock(&rdev->ddev->struct_mutex);
369 } 258 }
370} 259}
371 260
372int radeon_object_init(struct radeon_device *rdev) 261int radeon_bo_init(struct radeon_device *rdev)
373{ 262{
374 /* Add an MTRR for the VRAM */ 263 /* Add an MTRR for the VRAM */
375 rdev->mc.vram_mtrr = mtrr_add(rdev->mc.aper_base, rdev->mc.aper_size, 264 rdev->mc.vram_mtrr = mtrr_add(rdev->mc.aper_base, rdev->mc.aper_size,
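
Taken together, the radeon_bo_* functions in the hunk above replace the old radeon_object_* API. A hedged usage sketch modeled on typical in-driver callers, with error unwinding trimmed for brevity and a hypothetical helper name: the buffer is created, reserved, then pinned, because pinning revalidates the BO and ttm_bo_validate() must run with the reservation held.

static int sketch_alloc_pinned_vram_bo(struct radeon_device *rdev,
                                       unsigned long size,
                                       struct radeon_bo **bo, u64 *gpu_addr)
{
        int r;

        /* kernel == true: uninterruptible allocation, kernel-type BO */
        r = radeon_bo_create(rdev, NULL, size, true,
                             RADEON_GEM_DOMAIN_VRAM, bo);
        if (r)
                return r;
        /* pinning validates the BO, which requires holding the reservation */
        r = radeon_bo_reserve(*bo, false);
        if (r) {
                radeon_bo_unref(bo);
                return r;
        }
        r = radeon_bo_pin(*bo, RADEON_GEM_DOMAIN_VRAM, gpu_addr);
        radeon_bo_unreserve(*bo);
        if (r)
                radeon_bo_unref(bo);
        return r;
}
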
@@ -382,13 +271,13 @@ int radeon_object_init(struct radeon_device *rdev)
382 return radeon_ttm_init(rdev); 271 return radeon_ttm_init(rdev);
383} 272}
384 273
385void radeon_object_fini(struct radeon_device *rdev) 274void radeon_bo_fini(struct radeon_device *rdev)
386{ 275{
387 radeon_ttm_fini(rdev); 276 radeon_ttm_fini(rdev);
388} 277}
389 278
390void radeon_object_list_add_object(struct radeon_object_list *lobj, 279void radeon_bo_list_add_object(struct radeon_bo_list *lobj,
391 struct list_head *head) 280 struct list_head *head)
392{ 281{
393 if (lobj->wdomain) { 282 if (lobj->wdomain) {
394 list_add(&lobj->list, head); 283 list_add(&lobj->list, head);
@@ -397,125 +286,102 @@ void radeon_object_list_add_object(struct radeon_object_list *lobj,
397 } 286 }
398} 287}
399 288
400int radeon_object_list_reserve(struct list_head *head) 289int radeon_bo_list_reserve(struct list_head *head)
401{ 290{
402 struct radeon_object_list *lobj; 291 struct radeon_bo_list *lobj;
403 int r; 292 int r;
404 293
405 list_for_each_entry(lobj, head, list){ 294 list_for_each_entry(lobj, head, list){
406 if (!lobj->robj->pin_count) { 295 r = radeon_bo_reserve(lobj->bo, false);
407 r = radeon_object_reserve(lobj->robj, true); 296 if (unlikely(r != 0))
408 if (unlikely(r != 0)) { 297 return r;
409 DRM_ERROR("radeon: failed to reserve object.\n");
410 return r;
411 }
412 } else {
413 }
414 } 298 }
415 return 0; 299 return 0;
416} 300}
417 301
418void radeon_object_list_unreserve(struct list_head *head) 302void radeon_bo_list_unreserve(struct list_head *head)
419{ 303{
420 struct radeon_object_list *lobj; 304 struct radeon_bo_list *lobj;
421 305
422 list_for_each_entry(lobj, head, list) { 306 list_for_each_entry(lobj, head, list) {
423 if (!lobj->robj->pin_count) { 307 /* only unreserve object we successfully reserved */
424 radeon_object_unreserve(lobj->robj); 308 if (radeon_bo_is_reserved(lobj->bo))
425 } 309 radeon_bo_unreserve(lobj->bo);
426 } 310 }
427} 311}
428 312
429int radeon_object_list_validate(struct list_head *head, void *fence) 313int radeon_bo_list_validate(struct list_head *head)
430{ 314{
431 struct radeon_object_list *lobj; 315 struct radeon_bo_list *lobj;
432 struct radeon_object *robj; 316 struct radeon_bo *bo;
433 struct radeon_fence *old_fence = NULL;
434 int r; 317 int r;
435 318
436 r = radeon_object_list_reserve(head); 319 r = radeon_bo_list_reserve(head);
437 if (unlikely(r != 0)) { 320 if (unlikely(r != 0)) {
438 radeon_object_list_unreserve(head);
439 return r; 321 return r;
440 } 322 }
441 list_for_each_entry(lobj, head, list) { 323 list_for_each_entry(lobj, head, list) {
442 robj = lobj->robj; 324 bo = lobj->bo;
443 if (!robj->pin_count) { 325 if (!bo->pin_count) {
444 if (lobj->wdomain) { 326 if (lobj->wdomain) {
445 robj->tobj.proposed_placement = 327 radeon_ttm_placement_from_domain(bo,
446 radeon_object_flags_from_domain(lobj->wdomain); 328 lobj->wdomain);
447 } else { 329 } else {
448 robj->tobj.proposed_placement = 330 radeon_ttm_placement_from_domain(bo,
449 radeon_object_flags_from_domain(lobj->rdomain); 331 lobj->rdomain);
450 } 332 }
451 r = ttm_buffer_object_validate(&robj->tobj, 333 r = ttm_bo_validate(&bo->tbo, &bo->placement,
452 robj->tobj.proposed_placement, 334 true, false);
453 true, false); 335 if (unlikely(r))
454 if (unlikely(r)) {
455 DRM_ERROR("radeon: failed to validate.\n");
456 return r; 336 return r;
457 }
458 radeon_object_gpu_addr(robj);
459 }
460 lobj->gpu_offset = robj->gpu_addr;
461 lobj->tiling_flags = robj->tiling_flags;
462 if (fence) {
463 old_fence = (struct radeon_fence *)robj->tobj.sync_obj;
464 robj->tobj.sync_obj = radeon_fence_ref(fence);
465 robj->tobj.sync_obj_arg = NULL;
466 }
467 if (old_fence) {
468 radeon_fence_unref(&old_fence);
469 } 337 }
338 lobj->gpu_offset = radeon_bo_gpu_offset(bo);
339 lobj->tiling_flags = bo->tiling_flags;
470 } 340 }
471 return 0; 341 return 0;
472} 342}
473 343
474void radeon_object_list_unvalidate(struct list_head *head) 344void radeon_bo_list_fence(struct list_head *head, void *fence)
475{ 345{
476 struct radeon_object_list *lobj; 346 struct radeon_bo_list *lobj;
347 struct radeon_bo *bo;
477 struct radeon_fence *old_fence = NULL; 348 struct radeon_fence *old_fence = NULL;
478 349
479 list_for_each_entry(lobj, head, list) { 350 list_for_each_entry(lobj, head, list) {
480 old_fence = (struct radeon_fence *)lobj->robj->tobj.sync_obj; 351 bo = lobj->bo;
481 lobj->robj->tobj.sync_obj = NULL; 352 spin_lock(&bo->tbo.lock);
353 old_fence = (struct radeon_fence *)bo->tbo.sync_obj;
354 bo->tbo.sync_obj = radeon_fence_ref(fence);
355 bo->tbo.sync_obj_arg = NULL;
356 spin_unlock(&bo->tbo.lock);
482 if (old_fence) { 357 if (old_fence) {
483 radeon_fence_unref(&old_fence); 358 radeon_fence_unref(&old_fence);
484 } 359 }
485 } 360 }
486 radeon_object_list_unreserve(head);
487} 361}
488 362
489void radeon_object_list_clean(struct list_head *head) 363int radeon_bo_fbdev_mmap(struct radeon_bo *bo,
490{
491 radeon_object_list_unreserve(head);
492}
493
494int radeon_object_fbdev_mmap(struct radeon_object *robj,
495 struct vm_area_struct *vma) 364 struct vm_area_struct *vma)
496{ 365{
497 return ttm_fbdev_mmap(vma, &robj->tobj); 366 return ttm_fbdev_mmap(vma, &bo->tbo);
498}
499
500unsigned long radeon_object_size(struct radeon_object *robj)
501{
502 return robj->tobj.num_pages << PAGE_SHIFT;
503} 367}
504 368
505int radeon_object_get_surface_reg(struct radeon_object *robj) 369int radeon_bo_get_surface_reg(struct radeon_bo *bo)
506{ 370{
507 struct radeon_device *rdev = robj->rdev; 371 struct radeon_device *rdev = bo->rdev;
508 struct radeon_surface_reg *reg; 372 struct radeon_surface_reg *reg;
509 struct radeon_object *old_object; 373 struct radeon_bo *old_object;
510 int steal; 374 int steal;
511 int i; 375 int i;
512 376
513 if (!robj->tiling_flags) 377 BUG_ON(!atomic_read(&bo->tbo.reserved));
378
379 if (!bo->tiling_flags)
514 return 0; 380 return 0;
515 381
516 if (robj->surface_reg >= 0) { 382 if (bo->surface_reg >= 0) {
517 reg = &rdev->surface_regs[robj->surface_reg]; 383 reg = &rdev->surface_regs[bo->surface_reg];
518 i = robj->surface_reg; 384 i = bo->surface_reg;
519 goto out; 385 goto out;
520 } 386 }
521 387
@@ -523,10 +389,10 @@ int radeon_object_get_surface_reg(struct radeon_object *robj)
523 for (i = 0; i < RADEON_GEM_MAX_SURFACES; i++) { 389 for (i = 0; i < RADEON_GEM_MAX_SURFACES; i++) {
524 390
525 reg = &rdev->surface_regs[i]; 391 reg = &rdev->surface_regs[i];
526 if (!reg->robj) 392 if (!reg->bo)
527 break; 393 break;
528 394
529 old_object = reg->robj; 395 old_object = reg->bo;
530 if (old_object->pin_count == 0) 396 if (old_object->pin_count == 0)
531 steal = i; 397 steal = i;
532 } 398 }
@@ -537,91 +403,107 @@ int radeon_object_get_surface_reg(struct radeon_object *robj)
537 return -ENOMEM; 403 return -ENOMEM;
538 /* find someone with a surface reg and nuke their BO */ 404 /* find someone with a surface reg and nuke their BO */
539 reg = &rdev->surface_regs[steal]; 405 reg = &rdev->surface_regs[steal];
540 old_object = reg->robj; 406 old_object = reg->bo;
541 /* blow away the mapping */ 407 /* blow away the mapping */
542 DRM_DEBUG("stealing surface reg %d from %p\n", steal, old_object); 408 DRM_DEBUG("stealing surface reg %d from %p\n", steal, old_object);
543 ttm_bo_unmap_virtual(&old_object->tobj); 409 ttm_bo_unmap_virtual(&old_object->tbo);
544 old_object->surface_reg = -1; 410 old_object->surface_reg = -1;
545 i = steal; 411 i = steal;
546 } 412 }
547 413
548 robj->surface_reg = i; 414 bo->surface_reg = i;
549 reg->robj = robj; 415 reg->bo = bo;
550 416
551out: 417out:
552 radeon_set_surface_reg(rdev, i, robj->tiling_flags, robj->pitch, 418 radeon_set_surface_reg(rdev, i, bo->tiling_flags, bo->pitch,
553 robj->tobj.mem.mm_node->start << PAGE_SHIFT, 419 bo->tbo.mem.mm_node->start << PAGE_SHIFT,
554 robj->tobj.num_pages << PAGE_SHIFT); 420 bo->tbo.num_pages << PAGE_SHIFT);
555 return 0; 421 return 0;
556} 422}
557 423
558void radeon_object_clear_surface_reg(struct radeon_object *robj) 424static void radeon_bo_clear_surface_reg(struct radeon_bo *bo)
559{ 425{
560 struct radeon_device *rdev = robj->rdev; 426 struct radeon_device *rdev = bo->rdev;
561 struct radeon_surface_reg *reg; 427 struct radeon_surface_reg *reg;
562 428
563 if (robj->surface_reg == -1) 429 if (bo->surface_reg == -1)
564 return; 430 return;
565 431
566 reg = &rdev->surface_regs[robj->surface_reg]; 432 reg = &rdev->surface_regs[bo->surface_reg];
567 radeon_clear_surface_reg(rdev, robj->surface_reg); 433 radeon_clear_surface_reg(rdev, bo->surface_reg);
568 434
569 reg->robj = NULL; 435 reg->bo = NULL;
570 robj->surface_reg = -1; 436 bo->surface_reg = -1;
571} 437}
572 438
573void radeon_object_set_tiling_flags(struct radeon_object *robj, 439int radeon_bo_set_tiling_flags(struct radeon_bo *bo,
574 uint32_t tiling_flags, uint32_t pitch) 440 uint32_t tiling_flags, uint32_t pitch)
575{ 441{
576 robj->tiling_flags = tiling_flags; 442 int r;
577 robj->pitch = pitch; 443
444 r = radeon_bo_reserve(bo, false);
445 if (unlikely(r != 0))
446 return r;
447 bo->tiling_flags = tiling_flags;
448 bo->pitch = pitch;
449 radeon_bo_unreserve(bo);
450 return 0;
578} 451}
579 452
580void radeon_object_get_tiling_flags(struct radeon_object *robj, 453void radeon_bo_get_tiling_flags(struct radeon_bo *bo,
581 uint32_t *tiling_flags, 454 uint32_t *tiling_flags,
582 uint32_t *pitch) 455 uint32_t *pitch)
583{ 456{
457 BUG_ON(!atomic_read(&bo->tbo.reserved));
584 if (tiling_flags) 458 if (tiling_flags)
585 *tiling_flags = robj->tiling_flags; 459 *tiling_flags = bo->tiling_flags;
586 if (pitch) 460 if (pitch)
587 *pitch = robj->pitch; 461 *pitch = bo->pitch;
588} 462}
589 463
590int radeon_object_check_tiling(struct radeon_object *robj, bool has_moved, 464int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved,
591 bool force_drop) 465 bool force_drop)
592{ 466{
593 if (!(robj->tiling_flags & RADEON_TILING_SURFACE)) 467 BUG_ON(!atomic_read(&bo->tbo.reserved));
468
469 if (!(bo->tiling_flags & RADEON_TILING_SURFACE))
594 return 0; 470 return 0;
595 471
596 if (force_drop) { 472 if (force_drop) {
597 radeon_object_clear_surface_reg(robj); 473 radeon_bo_clear_surface_reg(bo);
598 return 0; 474 return 0;
599 } 475 }
600 476
601 if (robj->tobj.mem.mem_type != TTM_PL_VRAM) { 477 if (bo->tbo.mem.mem_type != TTM_PL_VRAM) {
602 if (!has_moved) 478 if (!has_moved)
603 return 0; 479 return 0;
604 480
605 if (robj->surface_reg >= 0) 481 if (bo->surface_reg >= 0)
606 radeon_object_clear_surface_reg(robj); 482 radeon_bo_clear_surface_reg(bo);
607 return 0; 483 return 0;
608 } 484 }
609 485
610 if ((robj->surface_reg >= 0) && !has_moved) 486 if ((bo->surface_reg >= 0) && !has_moved)
611 return 0; 487 return 0;
612 488
613 return radeon_object_get_surface_reg(robj); 489 return radeon_bo_get_surface_reg(bo);
614} 490}
615 491
616void radeon_bo_move_notify(struct ttm_buffer_object *bo, 492void radeon_bo_move_notify(struct ttm_buffer_object *bo,
617 struct ttm_mem_reg *mem) 493 struct ttm_mem_reg *mem)
618{ 494{
619 struct radeon_object *robj = container_of(bo, struct radeon_object, tobj); 495 struct radeon_bo *rbo;
620 radeon_object_check_tiling(robj, 0, 1); 496 if (!radeon_ttm_bo_is_radeon_bo(bo))
497 return;
498 rbo = container_of(bo, struct radeon_bo, tbo);
499 radeon_bo_check_tiling(rbo, 0, 1);
621} 500}
622 501
623void radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo) 502void radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo)
624{ 503{
625 struct radeon_object *robj = container_of(bo, struct radeon_object, tobj); 504 struct radeon_bo *rbo;
626 radeon_object_check_tiling(robj, 0, 0); 505 if (!radeon_ttm_bo_is_radeon_bo(bo))
506 return;
507 rbo = container_of(bo, struct radeon_bo, tbo);
508 radeon_bo_check_tiling(rbo, 0, 0);
627} 509}
diff --git a/drivers/gpu/drm/radeon/radeon_object.h b/drivers/gpu/drm/radeon/radeon_object.h
index 10e8af6bb456..7ab43de1e244 100644
--- a/drivers/gpu/drm/radeon/radeon_object.h
+++ b/drivers/gpu/drm/radeon/radeon_object.h
@@ -28,19 +28,146 @@
28#ifndef __RADEON_OBJECT_H__ 28#ifndef __RADEON_OBJECT_H__
29#define __RADEON_OBJECT_H__ 29#define __RADEON_OBJECT_H__
30 30
31#include <ttm/ttm_bo_api.h> 31#include <drm/radeon_drm.h>
32#include <ttm/ttm_bo_driver.h> 32#include "radeon.h"
33#include <ttm/ttm_placement.h>
34#include <ttm/ttm_module.h>
35 33
36/* 34/**
37 * TTM. 35 * radeon_mem_type_to_domain - return domain corresponding to mem_type
36 * @mem_type: ttm memory type
37 *
38 * Returns corresponding domain of the ttm mem_type
39 */
40static inline unsigned radeon_mem_type_to_domain(u32 mem_type)
41{
42 switch (mem_type) {
43 case TTM_PL_VRAM:
44 return RADEON_GEM_DOMAIN_VRAM;
45 case TTM_PL_TT:
46 return RADEON_GEM_DOMAIN_GTT;
47 case TTM_PL_SYSTEM:
48 return RADEON_GEM_DOMAIN_CPU;
49 default:
50 break;
51 }
52 return 0;
53}
54
55/**
56 * radeon_bo_reserve - reserve bo
57 * @bo: bo structure
58 * @no_wait: don't sleep while trying to reserve (return -EBUSY)
59 *
60 * Returns:
61 * -EBUSY: buffer is busy and @no_wait is true
62 * -ERESTARTSYS: A wait for the buffer to become unreserved was interrupted by
63 * a signal. Release all buffer reservations and return to user-space.
38 */ 64 */
39struct radeon_mman { 65static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_wait)
40 struct ttm_bo_global_ref bo_global_ref; 66{
41 struct ttm_global_reference mem_global_ref; 67 int r;
42 bool mem_global_referenced; 68
43 struct ttm_bo_device bdev; 69 r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);
44}; 70 if (unlikely(r != 0)) {
71 if (r != -ERESTARTSYS)
72 dev_err(bo->rdev->dev, "%p reserve failed\n", bo);
73 return r;
74 }
75 return 0;
76}
77
78static inline void radeon_bo_unreserve(struct radeon_bo *bo)
79{
80 ttm_bo_unreserve(&bo->tbo);
81}
82
83/**
84 * radeon_bo_gpu_offset - return GPU offset of bo
85 * @bo: radeon object for which we query the offset
86 *
87 * Returns current GPU offset of the object.
88 *
89 * Note: the object should be either pinned or reserved when calling this
90 * function; it might be useful to add a check for this for debugging.
91 */
92static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo)
93{
94 return bo->tbo.offset;
95}
96
97static inline unsigned long radeon_bo_size(struct radeon_bo *bo)
98{
99 return bo->tbo.num_pages << PAGE_SHIFT;
100}
101
102static inline bool radeon_bo_is_reserved(struct radeon_bo *bo)
103{
104 return !!atomic_read(&bo->tbo.reserved);
105}
106
107/**
108 * radeon_bo_mmap_offset - return mmap offset of bo
109 * @bo: radeon object for which we query the offset
110 *
111 * Returns mmap offset of the object.
112 *
113 * Note: addr_space_offset is constant after ttm bo init thus isn't protected
114 * by any lock.
115 */
116static inline u64 radeon_bo_mmap_offset(struct radeon_bo *bo)
117{
118 return bo->tbo.addr_space_offset;
119}
120
121static inline int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type,
122 bool no_wait)
123{
124 int r;
125
126 r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);
127 if (unlikely(r != 0)) {
128 if (r != -ERESTARTSYS)
129 dev_err(bo->rdev->dev, "%p reserve failed for wait\n", bo);
130 return r;
131 }
132 spin_lock(&bo->tbo.lock);
133 if (mem_type)
134 *mem_type = bo->tbo.mem.mem_type;
135 if (bo->tbo.sync_obj)
136 r = ttm_bo_wait(&bo->tbo, true, true, no_wait);
137 spin_unlock(&bo->tbo.lock);
138 ttm_bo_unreserve(&bo->tbo);
139 return r;
140}
45 141
142extern int radeon_bo_create(struct radeon_device *rdev,
143 struct drm_gem_object *gobj, unsigned long size,
144 bool kernel, u32 domain,
145 struct radeon_bo **bo_ptr);
146extern int radeon_bo_kmap(struct radeon_bo *bo, void **ptr);
147extern void radeon_bo_kunmap(struct radeon_bo *bo);
148extern void radeon_bo_unref(struct radeon_bo **bo);
149extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
150extern int radeon_bo_unpin(struct radeon_bo *bo);
151extern int radeon_bo_evict_vram(struct radeon_device *rdev);
152extern void radeon_bo_force_delete(struct radeon_device *rdev);
153extern int radeon_bo_init(struct radeon_device *rdev);
154extern void radeon_bo_fini(struct radeon_device *rdev);
155extern void radeon_bo_list_add_object(struct radeon_bo_list *lobj,
156 struct list_head *head);
157extern int radeon_bo_list_reserve(struct list_head *head);
158extern void radeon_bo_list_unreserve(struct list_head *head);
159extern int radeon_bo_list_validate(struct list_head *head);
160extern void radeon_bo_list_fence(struct list_head *head, void *fence);
161extern int radeon_bo_fbdev_mmap(struct radeon_bo *bo,
162 struct vm_area_struct *vma);
163extern int radeon_bo_set_tiling_flags(struct radeon_bo *bo,
164 u32 tiling_flags, u32 pitch);
165extern void radeon_bo_get_tiling_flags(struct radeon_bo *bo,
166 u32 *tiling_flags, u32 *pitch);
167extern int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved,
168 bool force_drop);
169extern void radeon_bo_move_notify(struct ttm_buffer_object *bo,
170 struct ttm_mem_reg *mem);
171extern void radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo);
172extern int radeon_bo_get_surface_reg(struct radeon_bo *bo);
46#endif 173#endif
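
The header above also fixes the calling convention the rest of the patch follows: pin, kmap and tiling operations happen between radeon_bo_reserve() and radeon_bo_unreserve(). The fragment below strings the declared calls together in that order, much as the IB pool and ring buffer setup later in this patch do. It assumes a valid rdev in scope; the local names (pool_bo, gpu_addr, cpu_ptr) are made up and error unwinding is kept minimal, so treat it as a sketch of the ordering rather than additional driver code.

	struct radeon_bo *pool_bo;
	u64 gpu_addr;
	void *cpu_ptr;
	int r;

	r = radeon_bo_create(rdev, NULL, 64 * 1024, true,
			     RADEON_GEM_DOMAIN_GTT, &pool_bo);
	if (r)
		return r;
	r = radeon_bo_reserve(pool_bo, false);        /* take the reservation */
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(pool_bo, RADEON_GEM_DOMAIN_GTT, &gpu_addr);
	if (r) {
		radeon_bo_unreserve(pool_bo);
		radeon_bo_unref(&pool_bo);
		return r;
	}
	r = radeon_bo_kmap(pool_bo, &cpu_ptr);
	radeon_bo_unreserve(pool_bo);                 /* drop it before using the mapping */
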
diff --git a/drivers/gpu/drm/radeon/radeon_pm.c b/drivers/gpu/drm/radeon/radeon_pm.c
index 46146c6a2a06..a4b57493aa78 100644
--- a/drivers/gpu/drm/radeon/radeon_pm.c
+++ b/drivers/gpu/drm/radeon/radeon_pm.c
@@ -18,21 +18,441 @@
18 * OTHER DEALINGS IN THE SOFTWARE. 18 * OTHER DEALINGS IN THE SOFTWARE.
19 * 19 *
20 * Authors: Rafał Miłecki <zajec5@gmail.com> 20 * Authors: Rafał Miłecki <zajec5@gmail.com>
21 * Alex Deucher <alexdeucher@gmail.com>
21 */ 22 */
22#include "drmP.h" 23#include "drmP.h"
23#include "radeon.h" 24#include "radeon.h"
25#include "avivod.h"
24 26
25int radeon_debugfs_pm_init(struct radeon_device *rdev); 27#define RADEON_IDLE_LOOP_MS 100
28#define RADEON_RECLOCK_DELAY_MS 200
29#define RADEON_WAIT_VBLANK_TIMEOUT 200
30
31static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish);
32static void radeon_pm_set_clocks_locked(struct radeon_device *rdev);
33static void radeon_pm_set_clocks(struct radeon_device *rdev);
34static void radeon_pm_idle_work_handler(struct work_struct *work);
35static int radeon_debugfs_pm_init(struct radeon_device *rdev);
36
37static const char *pm_state_names[4] = {
38 "PM_STATE_DISABLED",
39 "PM_STATE_MINIMUM",
40 "PM_STATE_PAUSED",
41 "PM_STATE_ACTIVE"
42};
43
44static const char *pm_state_types[5] = {
45 "Default",
46 "Powersave",
47 "Battery",
48 "Balanced",
49 "Performance",
50};
51
52static void radeon_print_power_mode_info(struct radeon_device *rdev)
53{
54 int i, j;
55 bool is_default;
56
57 DRM_INFO("%d Power State(s)\n", rdev->pm.num_power_states);
58 for (i = 0; i < rdev->pm.num_power_states; i++) {
59 if (rdev->pm.default_power_state == &rdev->pm.power_state[i])
60 is_default = true;
61 else
62 is_default = false;
63 DRM_INFO("State %d %s %s\n", i,
64 pm_state_types[rdev->pm.power_state[i].type],
65 is_default ? "(default)" : "");
66 if ((rdev->flags & RADEON_IS_PCIE) && !(rdev->flags & RADEON_IS_IGP))
67 DRM_INFO("\t%d PCIE Lanes\n", rdev->pm.power_state[i].non_clock_info.pcie_lanes);
68 DRM_INFO("\t%d Clock Mode(s)\n", rdev->pm.power_state[i].num_clock_modes);
69 for (j = 0; j < rdev->pm.power_state[i].num_clock_modes; j++) {
70 if (rdev->flags & RADEON_IS_IGP)
71 DRM_INFO("\t\t%d engine: %d\n",
72 j,
73 rdev->pm.power_state[i].clock_info[j].sclk * 10);
74 else
75 DRM_INFO("\t\t%d engine/memory: %d/%d\n",
76 j,
77 rdev->pm.power_state[i].clock_info[j].sclk * 10,
78 rdev->pm.power_state[i].clock_info[j].mclk * 10);
79 }
80 }
81}
82
83static struct radeon_power_state * radeon_pick_power_state(struct radeon_device *rdev,
84 enum radeon_pm_state_type type)
85{
86 int i, j;
87 enum radeon_pm_state_type wanted_types[2];
88 int wanted_count;
89
90 switch (type) {
91 case POWER_STATE_TYPE_DEFAULT:
92 default:
93 return rdev->pm.default_power_state;
94 case POWER_STATE_TYPE_POWERSAVE:
95 if (rdev->flags & RADEON_IS_MOBILITY) {
96 wanted_types[0] = POWER_STATE_TYPE_POWERSAVE;
97 wanted_types[1] = POWER_STATE_TYPE_BATTERY;
98 wanted_count = 2;
99 } else {
100 wanted_types[0] = POWER_STATE_TYPE_PERFORMANCE;
101 wanted_count = 1;
102 }
103 break;
104 case POWER_STATE_TYPE_BATTERY:
105 if (rdev->flags & RADEON_IS_MOBILITY) {
106 wanted_types[0] = POWER_STATE_TYPE_BATTERY;
107 wanted_types[1] = POWER_STATE_TYPE_POWERSAVE;
108 wanted_count = 2;
109 } else {
110 wanted_types[0] = POWER_STATE_TYPE_PERFORMANCE;
111 wanted_count = 1;
112 }
113 break;
114 case POWER_STATE_TYPE_BALANCED:
115 case POWER_STATE_TYPE_PERFORMANCE:
116 wanted_types[0] = type;
117 wanted_count = 1;
118 break;
119 }
120
121 for (i = 0; i < wanted_count; i++) {
122 for (j = 0; j < rdev->pm.num_power_states; j++) {
123 if (rdev->pm.power_state[j].type == wanted_types[i])
124 return &rdev->pm.power_state[j];
125 }
126 }
127
128 return rdev->pm.default_power_state;
129}
130
131static struct radeon_pm_clock_info * radeon_pick_clock_mode(struct radeon_device *rdev,
132 struct radeon_power_state *power_state,
133 enum radeon_pm_clock_mode_type type)
134{
135 switch (type) {
136 case POWER_MODE_TYPE_DEFAULT:
137 default:
138 return power_state->default_clock_mode;
139 case POWER_MODE_TYPE_LOW:
140 return &power_state->clock_info[0];
141 case POWER_MODE_TYPE_MID:
142 if (power_state->num_clock_modes > 2)
143 return &power_state->clock_info[1];
144 else
145 return &power_state->clock_info[0];
146 break;
147 case POWER_MODE_TYPE_HIGH:
148 return &power_state->clock_info[power_state->num_clock_modes - 1];
149 }
150
151}
152
153static void radeon_get_power_state(struct radeon_device *rdev,
154 enum radeon_pm_action action)
155{
156 switch (action) {
157 case PM_ACTION_MINIMUM:
158 rdev->pm.requested_power_state = radeon_pick_power_state(rdev, POWER_STATE_TYPE_BATTERY);
159 rdev->pm.requested_clock_mode =
160 radeon_pick_clock_mode(rdev, rdev->pm.requested_power_state, POWER_MODE_TYPE_LOW);
161 break;
162 case PM_ACTION_DOWNCLOCK:
163 rdev->pm.requested_power_state = radeon_pick_power_state(rdev, POWER_STATE_TYPE_POWERSAVE);
164 rdev->pm.requested_clock_mode =
165 radeon_pick_clock_mode(rdev, rdev->pm.requested_power_state, POWER_MODE_TYPE_MID);
166 break;
167 case PM_ACTION_UPCLOCK:
168 rdev->pm.requested_power_state = radeon_pick_power_state(rdev, POWER_STATE_TYPE_DEFAULT);
169 rdev->pm.requested_clock_mode =
170 radeon_pick_clock_mode(rdev, rdev->pm.requested_power_state, POWER_MODE_TYPE_HIGH);
171 break;
172 case PM_ACTION_NONE:
173 default:
 174 DRM_ERROR("Requested mode for undefined action\n");
175 return;
176 }
177 DRM_INFO("Requested: e: %d m: %d p: %d\n",
178 rdev->pm.requested_clock_mode->sclk,
179 rdev->pm.requested_clock_mode->mclk,
180 rdev->pm.requested_power_state->non_clock_info.pcie_lanes);
181}
182
183static inline void radeon_sync_with_vblank(struct radeon_device *rdev)
184{
185 if (rdev->pm.active_crtcs) {
186 rdev->pm.vblank_sync = false;
187 wait_event_timeout(
188 rdev->irq.vblank_queue, rdev->pm.vblank_sync,
189 msecs_to_jiffies(RADEON_WAIT_VBLANK_TIMEOUT));
190 }
191}
192
193static void radeon_set_power_state(struct radeon_device *rdev)
194{
195 /* if *_clock_mode are the same, *_power_state are as well */
196 if (rdev->pm.requested_clock_mode == rdev->pm.current_clock_mode)
197 return;
198
199 DRM_INFO("Setting: e: %d m: %d p: %d\n",
200 rdev->pm.requested_clock_mode->sclk,
201 rdev->pm.requested_clock_mode->mclk,
202 rdev->pm.requested_power_state->non_clock_info.pcie_lanes);
203
204 /* set pcie lanes */
205 /* TODO */
206
207 /* set voltage */
208 /* TODO */
209
210 /* set engine clock */
211 radeon_sync_with_vblank(rdev);
212 radeon_pm_debug_check_in_vbl(rdev, false);
213 radeon_set_engine_clock(rdev, rdev->pm.requested_clock_mode->sclk);
214 radeon_pm_debug_check_in_vbl(rdev, true);
215
216#if 0
217 /* set memory clock */
218 if (rdev->asic->set_memory_clock) {
219 radeon_sync_with_vblank(rdev);
220 radeon_pm_debug_check_in_vbl(rdev, false);
221 radeon_set_memory_clock(rdev, rdev->pm.requested_clock_mode->mclk);
222 radeon_pm_debug_check_in_vbl(rdev, true);
223 }
224#endif
225
226 rdev->pm.current_power_state = rdev->pm.requested_power_state;
227 rdev->pm.current_clock_mode = rdev->pm.requested_clock_mode;
228}
26 229
27int radeon_pm_init(struct radeon_device *rdev) 230int radeon_pm_init(struct radeon_device *rdev)
28{ 231{
232 rdev->pm.state = PM_STATE_DISABLED;
233 rdev->pm.planned_action = PM_ACTION_NONE;
234 rdev->pm.downclocked = false;
235
236 if (rdev->bios) {
237 if (rdev->is_atom_bios)
238 radeon_atombios_get_power_modes(rdev);
239 else
240 radeon_combios_get_power_modes(rdev);
241 radeon_print_power_mode_info(rdev);
242 }
243
29 if (radeon_debugfs_pm_init(rdev)) { 244 if (radeon_debugfs_pm_init(rdev)) {
30 DRM_ERROR("Failed to register debugfs file for CP !\n"); 245 DRM_ERROR("Failed to register debugfs file for PM!\n");
246 }
247
248 INIT_DELAYED_WORK(&rdev->pm.idle_work, radeon_pm_idle_work_handler);
249
250 if (radeon_dynpm != -1 && radeon_dynpm) {
251 rdev->pm.state = PM_STATE_PAUSED;
252 DRM_INFO("radeon: dynamic power management enabled\n");
31 } 253 }
32 254
255 DRM_INFO("radeon: power management initialized\n");
256
33 return 0; 257 return 0;
34} 258}
35 259
260void radeon_pm_fini(struct radeon_device *rdev)
261{
262 if (rdev->pm.i2c_bus)
263 radeon_i2c_destroy(rdev->pm.i2c_bus);
264}
265
266void radeon_pm_compute_clocks(struct radeon_device *rdev)
267{
268 struct drm_device *ddev = rdev->ddev;
269 struct drm_connector *connector;
270 struct radeon_crtc *radeon_crtc;
271 int count = 0;
272
273 if (rdev->pm.state == PM_STATE_DISABLED)
274 return;
275
276 mutex_lock(&rdev->pm.mutex);
277
278 rdev->pm.active_crtcs = 0;
279 list_for_each_entry(connector,
280 &ddev->mode_config.connector_list, head) {
281 if (connector->encoder &&
282 connector->encoder->crtc &&
283 connector->dpms != DRM_MODE_DPMS_OFF) {
284 radeon_crtc = to_radeon_crtc(connector->encoder->crtc);
285 rdev->pm.active_crtcs |= (1 << radeon_crtc->crtc_id);
286 ++count;
287 }
288 }
289
290 if (count > 1) {
291 if (rdev->pm.state == PM_STATE_ACTIVE) {
292 cancel_delayed_work(&rdev->pm.idle_work);
293
294 rdev->pm.state = PM_STATE_PAUSED;
295 rdev->pm.planned_action = PM_ACTION_UPCLOCK;
296 if (rdev->pm.downclocked)
297 radeon_pm_set_clocks(rdev);
298
299 DRM_DEBUG("radeon: dynamic power management deactivated\n");
300 }
301 } else if (count == 1) {
302 /* TODO: Increase clocks if needed for current mode */
303
304 if (rdev->pm.state == PM_STATE_MINIMUM) {
305 rdev->pm.state = PM_STATE_ACTIVE;
306 rdev->pm.planned_action = PM_ACTION_UPCLOCK;
307 radeon_pm_set_clocks(rdev);
308
309 queue_delayed_work(rdev->wq, &rdev->pm.idle_work,
310 msecs_to_jiffies(RADEON_IDLE_LOOP_MS));
311 }
312 else if (rdev->pm.state == PM_STATE_PAUSED) {
313 rdev->pm.state = PM_STATE_ACTIVE;
314 queue_delayed_work(rdev->wq, &rdev->pm.idle_work,
315 msecs_to_jiffies(RADEON_IDLE_LOOP_MS));
316 DRM_DEBUG("radeon: dynamic power management activated\n");
317 }
318 }
319 else { /* count == 0 */
320 if (rdev->pm.state != PM_STATE_MINIMUM) {
321 cancel_delayed_work(&rdev->pm.idle_work);
322
323 rdev->pm.state = PM_STATE_MINIMUM;
324 rdev->pm.planned_action = PM_ACTION_MINIMUM;
325 radeon_pm_set_clocks(rdev);
326 }
327 }
328
329 mutex_unlock(&rdev->pm.mutex);
330}
331
332static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish)
333{
334 u32 stat_crtc1 = 0, stat_crtc2 = 0;
335 bool in_vbl = true;
336
337 if (ASIC_IS_AVIVO(rdev)) {
338 if (rdev->pm.active_crtcs & (1 << 0)) {
339 stat_crtc1 = RREG32(D1CRTC_STATUS);
340 if (!(stat_crtc1 & 1))
341 in_vbl = false;
342 }
343 if (rdev->pm.active_crtcs & (1 << 1)) {
344 stat_crtc2 = RREG32(D2CRTC_STATUS);
345 if (!(stat_crtc2 & 1))
346 in_vbl = false;
347 }
348 }
349 if (in_vbl == false)
350 DRM_INFO("not in vbl for pm change %08x %08x at %s\n", stat_crtc1,
351 stat_crtc2, finish ? "exit" : "entry");
352 return in_vbl;
353}
354static void radeon_pm_set_clocks_locked(struct radeon_device *rdev)
355{
356 /*radeon_fence_wait_last(rdev);*/
357 switch (rdev->pm.planned_action) {
358 case PM_ACTION_UPCLOCK:
359 rdev->pm.downclocked = false;
360 break;
361 case PM_ACTION_DOWNCLOCK:
362 rdev->pm.downclocked = true;
363 break;
364 case PM_ACTION_MINIMUM:
365 break;
366 case PM_ACTION_NONE:
367 DRM_ERROR("%s: PM_ACTION_NONE\n", __func__);
368 break;
369 }
370
371 radeon_set_power_state(rdev);
372 rdev->pm.planned_action = PM_ACTION_NONE;
373}
374
375static void radeon_pm_set_clocks(struct radeon_device *rdev)
376{
377 radeon_get_power_state(rdev, rdev->pm.planned_action);
378 mutex_lock(&rdev->cp.mutex);
379
380 if (rdev->pm.active_crtcs & (1 << 0)) {
381 rdev->pm.req_vblank |= (1 << 0);
382 drm_vblank_get(rdev->ddev, 0);
383 }
384 if (rdev->pm.active_crtcs & (1 << 1)) {
385 rdev->pm.req_vblank |= (1 << 1);
386 drm_vblank_get(rdev->ddev, 1);
387 }
388 radeon_pm_set_clocks_locked(rdev);
389 if (rdev->pm.req_vblank & (1 << 0)) {
390 rdev->pm.req_vblank &= ~(1 << 0);
391 drm_vblank_put(rdev->ddev, 0);
392 }
393 if (rdev->pm.req_vblank & (1 << 1)) {
394 rdev->pm.req_vblank &= ~(1 << 1);
395 drm_vblank_put(rdev->ddev, 1);
396 }
397
398 mutex_unlock(&rdev->cp.mutex);
399}
400
401static void radeon_pm_idle_work_handler(struct work_struct *work)
402{
403 struct radeon_device *rdev;
404 rdev = container_of(work, struct radeon_device,
405 pm.idle_work.work);
406
407 mutex_lock(&rdev->pm.mutex);
408 if (rdev->pm.state == PM_STATE_ACTIVE) {
409 unsigned long irq_flags;
410 int not_processed = 0;
411
412 read_lock_irqsave(&rdev->fence_drv.lock, irq_flags);
413 if (!list_empty(&rdev->fence_drv.emited)) {
414 struct list_head *ptr;
415 list_for_each(ptr, &rdev->fence_drv.emited) {
 416 /* count up to 3, that's enough info */
417 if (++not_processed >= 3)
418 break;
419 }
420 }
421 read_unlock_irqrestore(&rdev->fence_drv.lock, irq_flags);
422
423 if (not_processed >= 3) { /* should upclock */
424 if (rdev->pm.planned_action == PM_ACTION_DOWNCLOCK) {
425 rdev->pm.planned_action = PM_ACTION_NONE;
426 } else if (rdev->pm.planned_action == PM_ACTION_NONE &&
427 rdev->pm.downclocked) {
428 rdev->pm.planned_action =
429 PM_ACTION_UPCLOCK;
430 rdev->pm.action_timeout = jiffies +
431 msecs_to_jiffies(RADEON_RECLOCK_DELAY_MS);
432 }
433 } else if (not_processed == 0) { /* should downclock */
434 if (rdev->pm.planned_action == PM_ACTION_UPCLOCK) {
435 rdev->pm.planned_action = PM_ACTION_NONE;
436 } else if (rdev->pm.planned_action == PM_ACTION_NONE &&
437 !rdev->pm.downclocked) {
438 rdev->pm.planned_action =
439 PM_ACTION_DOWNCLOCK;
440 rdev->pm.action_timeout = jiffies +
441 msecs_to_jiffies(RADEON_RECLOCK_DELAY_MS);
442 }
443 }
444
445 if (rdev->pm.planned_action != PM_ACTION_NONE &&
446 jiffies > rdev->pm.action_timeout) {
447 radeon_pm_set_clocks(rdev);
448 }
449 }
450 mutex_unlock(&rdev->pm.mutex);
451
452 queue_delayed_work(rdev->wq, &rdev->pm.idle_work,
453 msecs_to_jiffies(RADEON_IDLE_LOOP_MS));
454}
455
36/* 456/*
37 * Debugfs info 457 * Debugfs info
38 */ 458 */
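
The idle work handler above decides on reclocking from the fence backlog: three or more unprocessed fences means the GPU is busy enough to upclock, an empty list means it can be downclocked, and either decision only fires once the RADEON_RECLOCK_DELAY_MS grace period has passed. The standalone sketch below models only the up/down decision, leaving out the pending-action bookkeeping and the delay timer; every name in it is made up.

#include <stdbool.h>
#include <stdio.h>

enum action { ACT_NONE, ACT_UPCLOCK, ACT_DOWNCLOCK };

/* Decide from the fence backlog whether clocks should be raised,
 * lowered, or left alone. */
static enum action pick_reclock_action(int pending_fences, bool downclocked)
{
	if (pending_fences >= 3)                  /* busy: raise clocks */
		return downclocked ? ACT_UPCLOCK : ACT_NONE;
	if (pending_fences == 0)                  /* idle: lower clocks */
		return downclocked ? ACT_NONE : ACT_DOWNCLOCK;
	return ACT_NONE;                          /* light load: keep state */
}

int main(void)
{
	printf("%d %d %d\n",
	       pick_reclock_action(5, true),   /* busy while downclocked -> upclock */
	       pick_reclock_action(0, false),  /* idle at full clocks -> downclock */
	       pick_reclock_action(1, false)); /* light load -> no change */
	return 0;
}
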
@@ -44,8 +464,14 @@ static int radeon_debugfs_pm_info(struct seq_file *m, void *data)
44 struct drm_device *dev = node->minor->dev; 464 struct drm_device *dev = node->minor->dev;
45 struct radeon_device *rdev = dev->dev_private; 465 struct radeon_device *rdev = dev->dev_private;
46 466
47 seq_printf(m, "engine clock: %u0 Hz\n", radeon_get_engine_clock(rdev)); 467 seq_printf(m, "state: %s\n", pm_state_names[rdev->pm.state]);
48 seq_printf(m, "memory clock: %u0 Hz\n", radeon_get_memory_clock(rdev)); 468 seq_printf(m, "default engine clock: %u0 kHz\n", rdev->clock.default_sclk);
469 seq_printf(m, "current engine clock: %u0 kHz\n", radeon_get_engine_clock(rdev));
470 seq_printf(m, "default memory clock: %u0 kHz\n", rdev->clock.default_mclk);
471 if (rdev->asic->get_memory_clock)
472 seq_printf(m, "current memory clock: %u0 kHz\n", radeon_get_memory_clock(rdev));
473 if (rdev->asic->get_pcie_lanes)
474 seq_printf(m, "PCIE lanes: %d\n", radeon_get_pcie_lanes(rdev));
49 475
50 return 0; 476 return 0;
51} 477}
@@ -55,7 +481,7 @@ static struct drm_info_list radeon_pm_info_list[] = {
55}; 481};
56#endif 482#endif
57 483
58int radeon_debugfs_pm_init(struct radeon_device *rdev) 484static int radeon_debugfs_pm_init(struct radeon_device *rdev)
59{ 485{
60#if defined(CONFIG_DEBUG_FS) 486#if defined(CONFIG_DEBUG_FS)
61 return radeon_debugfs_add_files(rdev, radeon_pm_info_list, ARRAY_SIZE(radeon_pm_info_list)); 487 return radeon_debugfs_add_files(rdev, radeon_pm_info_list, ARRAY_SIZE(radeon_pm_info_list));
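
radeon_pick_power_state() above searches the power-state table by type with a chip-dependent fallback order: on mobility parts POWERSAVE and BATTERY fall back to each other, desktop parts fall back to PERFORMANCE, and anything unmatched lands on the default state. A compact standalone sketch of that ordered search follows; the enum and arrays are stand-ins, not the driver's structures.

#include <stdio.h>

enum ps_type { PS_DEFAULT, PS_POWERSAVE, PS_BATTERY, PS_BALANCED, PS_PERFORMANCE };

/* Return the index of the first state whose type matches the wanted
 * list, or -1 so the caller can fall back to the default state. */
static int pick_state(const enum ps_type *states, int n,
		      const enum ps_type *wanted, int wanted_count)
{
	int i, j;

	for (i = 0; i < wanted_count; i++)
		for (j = 0; j < n; j++)
			if (states[j] == wanted[i])
				return j;
	return -1;
}

int main(void)
{
	enum ps_type states[] = { PS_DEFAULT, PS_PERFORMANCE, PS_BATTERY };
	/* a mobility chip asking for powersave tries POWERSAVE, then BATTERY */
	enum ps_type wanted[] = { PS_POWERSAVE, PS_BATTERY };

	printf("picked state index %d\n", pick_state(states, 3, wanted, 2));
	return 0;
}
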
diff --git a/drivers/gpu/drm/radeon/radeon_reg.h b/drivers/gpu/drm/radeon/radeon_reg.h
index 29ab75903ec1..eabbc9cf30a7 100644
--- a/drivers/gpu/drm/radeon/radeon_reg.h
+++ b/drivers/gpu/drm/radeon/radeon_reg.h
@@ -54,7 +54,7 @@
54#include "r300_reg.h" 54#include "r300_reg.h"
55#include "r500_reg.h" 55#include "r500_reg.h"
56#include "r600_reg.h" 56#include "r600_reg.h"
57 57#include "evergreen_reg.h"
58 58
59#define RADEON_MC_AGP_LOCATION 0x014c 59#define RADEON_MC_AGP_LOCATION 0x014c
60#define RADEON_MC_AGP_START_MASK 0x0000FFFF 60#define RADEON_MC_AGP_START_MASK 0x0000FFFF
@@ -346,6 +346,7 @@
346# define RADEON_TVPLL_PWRMGT_OFF (1 << 30) 346# define RADEON_TVPLL_PWRMGT_OFF (1 << 30)
347# define RADEON_TVCLK_TURNOFF (1 << 31) 347# define RADEON_TVCLK_TURNOFF (1 << 31)
348#define RADEON_PLL_PWRMGT_CNTL 0x0015 /* PLL */ 348#define RADEON_PLL_PWRMGT_CNTL 0x0015 /* PLL */
349# define RADEON_PM_MODE_SEL (1 << 13)
349# define RADEON_TCL_BYPASS_DISABLE (1 << 20) 350# define RADEON_TCL_BYPASS_DISABLE (1 << 20)
350#define RADEON_CLR_CMP_CLR_3D 0x1a24 351#define RADEON_CLR_CMP_CLR_3D 0x1a24
351#define RADEON_CLR_CMP_CLR_DST 0x15c8 352#define RADEON_CLR_CMP_CLR_DST 0x15c8
@@ -887,6 +888,7 @@
887# define RADEON_FP_PANEL_FORMAT (1 << 3) 888# define RADEON_FP_PANEL_FORMAT (1 << 3)
888# define RADEON_FP_EN_TMDS (1 << 7) 889# define RADEON_FP_EN_TMDS (1 << 7)
889# define RADEON_FP_DETECT_SENSE (1 << 8) 890# define RADEON_FP_DETECT_SENSE (1 << 8)
891# define RADEON_FP_DETECT_INT_POL (1 << 9)
890# define R200_FP_SOURCE_SEL_MASK (3 << 10) 892# define R200_FP_SOURCE_SEL_MASK (3 << 10)
891# define R200_FP_SOURCE_SEL_CRTC1 (0 << 10) 893# define R200_FP_SOURCE_SEL_CRTC1 (0 << 10)
892# define R200_FP_SOURCE_SEL_CRTC2 (1 << 10) 894# define R200_FP_SOURCE_SEL_CRTC2 (1 << 10)
@@ -894,6 +896,7 @@
894# define R200_FP_SOURCE_SEL_TRANS (3 << 10) 896# define R200_FP_SOURCE_SEL_TRANS (3 << 10)
895# define RADEON_FP_SEL_CRTC1 (0 << 13) 897# define RADEON_FP_SEL_CRTC1 (0 << 13)
896# define RADEON_FP_SEL_CRTC2 (1 << 13) 898# define RADEON_FP_SEL_CRTC2 (1 << 13)
899# define R300_HPD_SEL(x) ((x) << 13)
897# define RADEON_FP_CRTC_DONT_SHADOW_HPAR (1 << 15) 900# define RADEON_FP_CRTC_DONT_SHADOW_HPAR (1 << 15)
898# define RADEON_FP_CRTC_DONT_SHADOW_VPAR (1 << 16) 901# define RADEON_FP_CRTC_DONT_SHADOW_VPAR (1 << 16)
899# define RADEON_FP_CRTC_DONT_SHADOW_HEND (1 << 17) 902# define RADEON_FP_CRTC_DONT_SHADOW_HEND (1 << 17)
@@ -909,6 +912,7 @@
909# define RADEON_FP2_ON (1 << 2) 912# define RADEON_FP2_ON (1 << 2)
910# define RADEON_FP2_PANEL_FORMAT (1 << 3) 913# define RADEON_FP2_PANEL_FORMAT (1 << 3)
911# define RADEON_FP2_DETECT_SENSE (1 << 8) 914# define RADEON_FP2_DETECT_SENSE (1 << 8)
915# define RADEON_FP2_DETECT_INT_POL (1 << 9)
912# define R200_FP2_SOURCE_SEL_MASK (3 << 10) 916# define R200_FP2_SOURCE_SEL_MASK (3 << 10)
913# define R200_FP2_SOURCE_SEL_CRTC1 (0 << 10) 917# define R200_FP2_SOURCE_SEL_CRTC1 (0 << 10)
914# define R200_FP2_SOURCE_SEL_CRTC2 (1 << 10) 918# define R200_FP2_SOURCE_SEL_CRTC2 (1 << 10)
@@ -988,14 +992,20 @@
988 992
989#define RADEON_GEN_INT_CNTL 0x0040 993#define RADEON_GEN_INT_CNTL 0x0040
990# define RADEON_CRTC_VBLANK_MASK (1 << 0) 994# define RADEON_CRTC_VBLANK_MASK (1 << 0)
995# define RADEON_FP_DETECT_MASK (1 << 4)
991# define RADEON_CRTC2_VBLANK_MASK (1 << 9) 996# define RADEON_CRTC2_VBLANK_MASK (1 << 9)
997# define RADEON_FP2_DETECT_MASK (1 << 10)
992# define RADEON_SW_INT_ENABLE (1 << 25) 998# define RADEON_SW_INT_ENABLE (1 << 25)
993#define RADEON_GEN_INT_STATUS 0x0044 999#define RADEON_GEN_INT_STATUS 0x0044
994# define AVIVO_DISPLAY_INT_STATUS (1 << 0) 1000# define AVIVO_DISPLAY_INT_STATUS (1 << 0)
995# define RADEON_CRTC_VBLANK_STAT (1 << 0) 1001# define RADEON_CRTC_VBLANK_STAT (1 << 0)
996# define RADEON_CRTC_VBLANK_STAT_ACK (1 << 0) 1002# define RADEON_CRTC_VBLANK_STAT_ACK (1 << 0)
1003# define RADEON_FP_DETECT_STAT (1 << 4)
1004# define RADEON_FP_DETECT_STAT_ACK (1 << 4)
997# define RADEON_CRTC2_VBLANK_STAT (1 << 9) 1005# define RADEON_CRTC2_VBLANK_STAT (1 << 9)
998# define RADEON_CRTC2_VBLANK_STAT_ACK (1 << 9) 1006# define RADEON_CRTC2_VBLANK_STAT_ACK (1 << 9)
1007# define RADEON_FP2_DETECT_STAT (1 << 10)
1008# define RADEON_FP2_DETECT_STAT_ACK (1 << 10)
999# define RADEON_SW_INT_FIRE (1 << 26) 1009# define RADEON_SW_INT_FIRE (1 << 26)
1000# define RADEON_SW_INT_TEST (1 << 25) 1010# define RADEON_SW_INT_TEST (1 << 25)
1001# define RADEON_SW_INT_TEST_ACK (1 << 25) 1011# define RADEON_SW_INT_TEST_ACK (1 << 25)
@@ -1051,28 +1061,39 @@
1051 1061
1052 /* Multimedia I2C bus */ 1062 /* Multimedia I2C bus */
1053#define RADEON_I2C_CNTL_0 0x0090 1063#define RADEON_I2C_CNTL_0 0x0090
1054#define RADEON_I2C_DONE (1<<0) 1064# define RADEON_I2C_DONE (1 << 0)
1055#define RADEON_I2C_NACK (1<<1) 1065# define RADEON_I2C_NACK (1 << 1)
1056#define RADEON_I2C_HALT (1<<2) 1066# define RADEON_I2C_HALT (1 << 2)
1057#define RADEON_I2C_SOFT_RST (1<<5) 1067# define RADEON_I2C_SOFT_RST (1 << 5)
1058#define RADEON_I2C_DRIVE_EN (1<<6) 1068# define RADEON_I2C_DRIVE_EN (1 << 6)
1059#define RADEON_I2C_DRIVE_SEL (1<<7) 1069# define RADEON_I2C_DRIVE_SEL (1 << 7)
1060#define RADEON_I2C_START (1<<8) 1070# define RADEON_I2C_START (1 << 8)
1061#define RADEON_I2C_STOP (1<<9) 1071# define RADEON_I2C_STOP (1 << 9)
1062#define RADEON_I2C_RECEIVE (1<<10) 1072# define RADEON_I2C_RECEIVE (1 << 10)
1063#define RADEON_I2C_ABORT (1<<11) 1073# define RADEON_I2C_ABORT (1 << 11)
1064#define RADEON_I2C_GO (1<<12) 1074# define RADEON_I2C_GO (1 << 12)
1075# define RADEON_I2C_PRESCALE_SHIFT 16
1065#define RADEON_I2C_CNTL_1 0x0094 1076#define RADEON_I2C_CNTL_1 0x0094
1066#define RADEON_I2C_SEL (1<<16) 1077# define RADEON_I2C_DATA_COUNT_SHIFT 0
1067#define RADEON_I2C_EN (1<<17) 1078# define RADEON_I2C_ADDR_COUNT_SHIFT 4
1079# define RADEON_I2C_INTRA_BYTE_DELAY_SHIFT 8
1080# define RADEON_I2C_SEL (1 << 16)
1081# define RADEON_I2C_EN (1 << 17)
1082# define RADEON_I2C_TIME_LIMIT_SHIFT 24
1068#define RADEON_I2C_DATA 0x0098 1083#define RADEON_I2C_DATA 0x0098
1069 1084
1070#define RADEON_DVI_I2C_CNTL_0 0x02e0 1085#define RADEON_DVI_I2C_CNTL_0 0x02e0
1071# define R200_DVI_I2C_PIN_SEL(x) ((x) << 3) 1086# define R200_DVI_I2C_PIN_SEL(x) ((x) << 3)
1072# define R200_SEL_DDC1 0 /* 0x60 - VGA_DDC */ 1087# define R200_SEL_DDC1 0 /* depends on asic */
1073# define R200_SEL_DDC2 1 /* 0x64 - DVI_DDC */ 1088# define R200_SEL_DDC2 1 /* depends on asic */
1074# define R200_SEL_DDC3 2 /* 0x68 - MONID_DDC */ 1089# define R200_SEL_DDC3 2 /* depends on asic */
1075#define RADEON_DVI_I2C_CNTL_1 0x02e4 /* ? */ 1090# define RADEON_SW_WANTS_TO_USE_DVI_I2C (1 << 13)
1091# define RADEON_SW_CAN_USE_DVI_I2C (1 << 13)
1092# define RADEON_SW_DONE_USING_DVI_I2C (1 << 14)
1093# define RADEON_HW_NEEDS_DVI_I2C (1 << 14)
1094# define RADEON_ABORT_HW_DVI_I2C (1 << 15)
1095# define RADEON_HW_USING_DVI_I2C (1 << 15)
1096#define RADEON_DVI_I2C_CNTL_1 0x02e4
1076#define RADEON_DVI_I2C_DATA 0x02e8 1097#define RADEON_DVI_I2C_DATA 0x02e8
1077 1098
1078#define RADEON_INTERRUPT_LINE 0x0f3c /* PCI */ 1099#define RADEON_INTERRUPT_LINE 0x0f3c /* PCI */
@@ -1143,15 +1164,16 @@
1143# define RADEON_IO_MCLK_MAX_DYN_STOP_LAT (1 << 13) 1164# define RADEON_IO_MCLK_MAX_DYN_STOP_LAT (1 << 13)
1144# define RADEON_MC_MCLK_DYN_ENABLE (1 << 14) 1165# define RADEON_MC_MCLK_DYN_ENABLE (1 << 14)
1145# define RADEON_IO_MCLK_DYN_ENABLE (1 << 15) 1166# define RADEON_IO_MCLK_DYN_ENABLE (1 << 15)
1146#define RADEON_LCD_GPIO_MASK 0x01a0 1167
1147#define RADEON_GPIOPAD_EN 0x01a0
1148#define RADEON_LCD_GPIO_Y_REG 0x01a4
1149#define RADEON_MDGPIO_A_REG 0x01ac
1150#define RADEON_MDGPIO_EN_REG 0x01b0
1151#define RADEON_MDGPIO_MASK 0x0198
1152#define RADEON_GPIOPAD_MASK 0x0198 1168#define RADEON_GPIOPAD_MASK 0x0198
1153#define RADEON_GPIOPAD_A 0x019c 1169#define RADEON_GPIOPAD_A 0x019c
1154#define RADEON_MDGPIO_Y_REG 0x01b4 1170#define RADEON_GPIOPAD_EN 0x01a0
1171#define RADEON_GPIOPAD_Y 0x01a4
1172#define RADEON_MDGPIO_MASK 0x01a8
1173#define RADEON_MDGPIO_A 0x01ac
1174#define RADEON_MDGPIO_EN 0x01b0
1175#define RADEON_MDGPIO_Y 0x01b4
1176
1155#define RADEON_MEM_ADDR_CONFIG 0x0148 1177#define RADEON_MEM_ADDR_CONFIG 0x0148
1156#define RADEON_MEM_BASE 0x0f10 /* PCI */ 1178#define RADEON_MEM_BASE 0x0f10 /* PCI */
1157#define RADEON_MEM_CNTL 0x0140 1179#define RADEON_MEM_CNTL 0x0140
@@ -1360,6 +1382,9 @@
1360#define RADEON_OVR_CLR 0x0230 1382#define RADEON_OVR_CLR 0x0230
1361#define RADEON_OVR_WID_LEFT_RIGHT 0x0234 1383#define RADEON_OVR_WID_LEFT_RIGHT 0x0234
1362#define RADEON_OVR_WID_TOP_BOTTOM 0x0238 1384#define RADEON_OVR_WID_TOP_BOTTOM 0x0238
1385#define RADEON_OVR2_CLR 0x0330
1386#define RADEON_OVR2_WID_LEFT_RIGHT 0x0334
1387#define RADEON_OVR2_WID_TOP_BOTTOM 0x0338
1363 1388
1364/* first capture unit */ 1389/* first capture unit */
1365 1390
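
The new RADEON_FP_DETECT_* and RADEON_FP2_DETECT_* bits above pair a status flag with an ack flag in RADEON_GEN_INT_STATUS, plus matching mask bits in RADEON_GEN_INT_CNTL. The fragment below shows one plausible way such a bit is consumed, following the usual read-status-then-ack pattern; the handler shape is illustrative only and is not the driver's actual interrupt code.

	/* inside an interrupt handler, with rdev in scope */
	u32 status = RREG32(RADEON_GEN_INT_STATUS);

	if (status & RADEON_FP_DETECT_STAT) {
		/* DVI/FP hotplug detected: queue a connector re-probe */
		WREG32(RADEON_GEN_INT_STATUS, RADEON_FP_DETECT_STAT_ACK);
	}
	if (status & RADEON_FP2_DETECT_STAT) {
		WREG32(RADEON_GEN_INT_STATUS, RADEON_FP2_DETECT_STAT_ACK);
	}
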
diff --git a/drivers/gpu/drm/radeon/radeon_ring.c b/drivers/gpu/drm/radeon/radeon_ring.c
index 747b4bffb84b..f6e1e8d4d986 100644
--- a/drivers/gpu/drm/radeon/radeon_ring.c
+++ b/drivers/gpu/drm/radeon/radeon_ring.c
@@ -26,6 +26,7 @@
26 * Jerome Glisse 26 * Jerome Glisse
27 */ 27 */
28#include <linux/seq_file.h> 28#include <linux/seq_file.h>
29#include <linux/slab.h>
29#include "drmP.h" 30#include "drmP.h"
30#include "radeon_drm.h" 31#include "radeon_drm.h"
31#include "radeon_reg.h" 32#include "radeon_reg.h"
@@ -34,6 +35,36 @@
34 35
35int radeon_debugfs_ib_init(struct radeon_device *rdev); 36int radeon_debugfs_ib_init(struct radeon_device *rdev);
36 37
38void radeon_ib_bogus_cleanup(struct radeon_device *rdev)
39{
40 struct radeon_ib *ib, *n;
41
42 list_for_each_entry_safe(ib, n, &rdev->ib_pool.bogus_ib, list) {
43 list_del(&ib->list);
44 vfree(ib->ptr);
45 kfree(ib);
46 }
47}
48
49void radeon_ib_bogus_add(struct radeon_device *rdev, struct radeon_ib *ib)
50{
51 struct radeon_ib *bib;
52
53 bib = kmalloc(sizeof(*bib), GFP_KERNEL);
54 if (bib == NULL)
55 return;
56 bib->ptr = vmalloc(ib->length_dw * 4);
57 if (bib->ptr == NULL) {
58 kfree(bib);
59 return;
60 }
61 memcpy(bib->ptr, ib->ptr, ib->length_dw * 4);
62 bib->length_dw = ib->length_dw;
63 mutex_lock(&rdev->ib_pool.mutex);
64 list_add_tail(&bib->list, &rdev->ib_pool.bogus_ib);
65 mutex_unlock(&rdev->ib_pool.mutex);
66}
67
37/* 68/*
38 * IB. 69 * IB.
39 */ 70 */
@@ -41,68 +72,55 @@ int radeon_ib_get(struct radeon_device *rdev, struct radeon_ib **ib)
41{ 72{
42 struct radeon_fence *fence; 73 struct radeon_fence *fence;
43 struct radeon_ib *nib; 74 struct radeon_ib *nib;
44 unsigned long i; 75 int r = 0, i, c;
45 int r = 0;
46 76
47 *ib = NULL; 77 *ib = NULL;
48 r = radeon_fence_create(rdev, &fence); 78 r = radeon_fence_create(rdev, &fence);
49 if (r) { 79 if (r) {
50 DRM_ERROR("failed to create fence for new IB\n"); 80 dev_err(rdev->dev, "failed to create fence for new IB\n");
51 return r; 81 return r;
52 } 82 }
53 mutex_lock(&rdev->ib_pool.mutex); 83 mutex_lock(&rdev->ib_pool.mutex);
54 i = find_first_zero_bit(rdev->ib_pool.alloc_bm, RADEON_IB_POOL_SIZE); 84 for (i = rdev->ib_pool.head_id, c = 0, nib = NULL; c < RADEON_IB_POOL_SIZE; c++, i++) {
55 if (i < RADEON_IB_POOL_SIZE) { 85 i &= (RADEON_IB_POOL_SIZE - 1);
56 set_bit(i, rdev->ib_pool.alloc_bm); 86 if (rdev->ib_pool.ibs[i].free) {
57 rdev->ib_pool.ibs[i].length_dw = 0; 87 nib = &rdev->ib_pool.ibs[i];
58 *ib = &rdev->ib_pool.ibs[i]; 88 break;
59 mutex_unlock(&rdev->ib_pool.mutex); 89 }
60 goto out;
61 } 90 }
62 if (list_empty(&rdev->ib_pool.scheduled_ibs)) { 91 if (nib == NULL) {
63 /* we go do nothings here */ 92 /* This should never happen, it means we allocated all
 93 * IBs and haven't scheduled any yet; return EBUSY to
 94 * userspace, hoping we have better luck when the ioctl
 95 * is retried.
96 */
 97 dev_err(rdev->dev, "no free indirect buffer!\n");
64 mutex_unlock(&rdev->ib_pool.mutex); 98 mutex_unlock(&rdev->ib_pool.mutex);
65 DRM_ERROR("all IB allocated none scheduled.\n"); 99 radeon_fence_unref(&fence);
66 r = -EINVAL; 100 return -EBUSY;
67 goto out;
68 } 101 }
69 /* get the first ib on the scheduled list */ 102 rdev->ib_pool.head_id = (nib->idx + 1) & (RADEON_IB_POOL_SIZE - 1);
70 nib = list_entry(rdev->ib_pool.scheduled_ibs.next, 103 nib->free = false;
71 struct radeon_ib, list); 104 if (nib->fence) {
72 if (nib->fence == NULL) {
73 /* we go do nothings here */
74 mutex_unlock(&rdev->ib_pool.mutex); 105 mutex_unlock(&rdev->ib_pool.mutex);
75 DRM_ERROR("IB %lu scheduled without a fence.\n", nib->idx); 106 r = radeon_fence_wait(nib->fence, false);
76 r = -EINVAL; 107 if (r) {
77 goto out; 108 dev_err(rdev->dev, "error waiting fence of IB(%u:0x%016lX:%u)\n",
78 } 109 nib->idx, (unsigned long)nib->gpu_addr, nib->length_dw);
79 mutex_unlock(&rdev->ib_pool.mutex); 110 mutex_lock(&rdev->ib_pool.mutex);
80 111 nib->free = true;
81 r = radeon_fence_wait(nib->fence, false); 112 mutex_unlock(&rdev->ib_pool.mutex);
82 if (r) { 113 radeon_fence_unref(&fence);
83 DRM_ERROR("radeon: IB(%lu:0x%016lX:%u)\n", nib->idx, 114 return r;
84 (unsigned long)nib->gpu_addr, nib->length_dw); 115 }
85 DRM_ERROR("radeon: GPU lockup detected, fail to get a IB\n"); 116 mutex_lock(&rdev->ib_pool.mutex);
86 goto out;
87 } 117 }
88 radeon_fence_unref(&nib->fence); 118 radeon_fence_unref(&nib->fence);
89 119 nib->fence = fence;
90 nib->length_dw = 0; 120 nib->length_dw = 0;
91
92 /* scheduled list is accessed here */
93 mutex_lock(&rdev->ib_pool.mutex);
94 list_del(&nib->list);
95 INIT_LIST_HEAD(&nib->list);
96 mutex_unlock(&rdev->ib_pool.mutex); 121 mutex_unlock(&rdev->ib_pool.mutex);
97
98 *ib = nib; 122 *ib = nib;
99out: 123 return 0;
100 if (r) {
101 radeon_fence_unref(&fence);
102 } else {
103 (*ib)->fence = fence;
104 }
105 return r;
106} 124}
107 125
108void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib **ib) 126void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib **ib)
@@ -113,19 +131,10 @@ void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib **ib)
113 if (tmp == NULL) { 131 if (tmp == NULL) {
114 return; 132 return;
115 } 133 }
116 mutex_lock(&rdev->ib_pool.mutex); 134 if (!tmp->fence->emited)
117 if (!list_empty(&tmp->list) && !radeon_fence_signaled(tmp->fence)) {
118 /* IB is scheduled & not signaled don't do anythings */
119 mutex_unlock(&rdev->ib_pool.mutex);
120 return;
121 }
122 list_del(&tmp->list);
123 INIT_LIST_HEAD(&tmp->list);
124 if (tmp->fence)
125 radeon_fence_unref(&tmp->fence); 135 radeon_fence_unref(&tmp->fence);
126 136 mutex_lock(&rdev->ib_pool.mutex);
127 tmp->length_dw = 0; 137 tmp->free = true;
128 clear_bit(tmp->idx, rdev->ib_pool.alloc_bm);
129 mutex_unlock(&rdev->ib_pool.mutex); 138 mutex_unlock(&rdev->ib_pool.mutex);
130} 139}
131 140
@@ -135,7 +144,7 @@ int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib)
135 144
136 if (!ib->length_dw || !rdev->cp.ready) { 145 if (!ib->length_dw || !rdev->cp.ready) {
137 /* TODO: Nothing in the IB we should report. */ 146 /* TODO: Nothing in the IB we should report. */
138 DRM_ERROR("radeon: couldn't schedule IB(%lu).\n", ib->idx); 147 DRM_ERROR("radeon: couldn't schedule IB(%u).\n", ib->idx);
139 return -EINVAL; 148 return -EINVAL;
140 } 149 }
141 150
@@ -148,7 +157,8 @@ int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib)
148 radeon_ring_ib_execute(rdev, ib); 157 radeon_ring_ib_execute(rdev, ib);
149 radeon_fence_emit(rdev, ib->fence); 158 radeon_fence_emit(rdev, ib->fence);
150 mutex_lock(&rdev->ib_pool.mutex); 159 mutex_lock(&rdev->ib_pool.mutex);
151 list_add_tail(&ib->list, &rdev->ib_pool.scheduled_ibs); 160 /* once scheduled, an IB is considered free and is protected by its fence */
161 ib->free = true;
152 mutex_unlock(&rdev->ib_pool.mutex); 162 mutex_unlock(&rdev->ib_pool.mutex);
153 radeon_ring_unlock_commit(rdev); 163 radeon_ring_unlock_commit(rdev);
154 return 0; 164 return 0;
@@ -163,21 +173,26 @@ int radeon_ib_pool_init(struct radeon_device *rdev)
163 173
164 if (rdev->ib_pool.robj) 174 if (rdev->ib_pool.robj)
165 return 0; 175 return 0;
176 INIT_LIST_HEAD(&rdev->ib_pool.bogus_ib);
166 /* Allocate 1M object buffer */ 177 /* Allocate 1M object buffer */
167 INIT_LIST_HEAD(&rdev->ib_pool.scheduled_ibs); 178 r = radeon_bo_create(rdev, NULL, RADEON_IB_POOL_SIZE*64*1024,
168 r = radeon_object_create(rdev, NULL, RADEON_IB_POOL_SIZE*64*1024, 179 true, RADEON_GEM_DOMAIN_GTT,
169 true, RADEON_GEM_DOMAIN_GTT, 180 &rdev->ib_pool.robj);
170 false, &rdev->ib_pool.robj);
171 if (r) { 181 if (r) {
172 DRM_ERROR("radeon: failed to create ib pool (%d).\n", r); 182 DRM_ERROR("radeon: failed to create ib pool (%d).\n", r);
173 return r; 183 return r;
174 } 184 }
175 r = radeon_object_pin(rdev->ib_pool.robj, RADEON_GEM_DOMAIN_GTT, &gpu_addr); 185 r = radeon_bo_reserve(rdev->ib_pool.robj, false);
186 if (unlikely(r != 0))
187 return r;
188 r = radeon_bo_pin(rdev->ib_pool.robj, RADEON_GEM_DOMAIN_GTT, &gpu_addr);
176 if (r) { 189 if (r) {
190 radeon_bo_unreserve(rdev->ib_pool.robj);
177 DRM_ERROR("radeon: failed to pin ib pool (%d).\n", r); 191 DRM_ERROR("radeon: failed to pin ib pool (%d).\n", r);
178 return r; 192 return r;
179 } 193 }
180 r = radeon_object_kmap(rdev->ib_pool.robj, &ptr); 194 r = radeon_bo_kmap(rdev->ib_pool.robj, &ptr);
195 radeon_bo_unreserve(rdev->ib_pool.robj);
181 if (r) { 196 if (r) {
182 DRM_ERROR("radeon: failed to map ib pool (%d).\n", r); 197 DRM_ERROR("radeon: failed to map ib pool (%d).\n", r);
183 return r; 198 return r;
@@ -190,9 +205,9 @@ int radeon_ib_pool_init(struct radeon_device *rdev)
190 rdev->ib_pool.ibs[i].ptr = ptr + offset; 205 rdev->ib_pool.ibs[i].ptr = ptr + offset;
191 rdev->ib_pool.ibs[i].idx = i; 206 rdev->ib_pool.ibs[i].idx = i;
192 rdev->ib_pool.ibs[i].length_dw = 0; 207 rdev->ib_pool.ibs[i].length_dw = 0;
193 INIT_LIST_HEAD(&rdev->ib_pool.ibs[i].list); 208 rdev->ib_pool.ibs[i].free = true;
194 } 209 }
195 bitmap_zero(rdev->ib_pool.alloc_bm, RADEON_IB_POOL_SIZE); 210 rdev->ib_pool.head_id = 0;
196 rdev->ib_pool.ready = true; 211 rdev->ib_pool.ready = true;
197 DRM_INFO("radeon: ib pool ready.\n"); 212 DRM_INFO("radeon: ib pool ready.\n");
198 if (radeon_debugfs_ib_init(rdev)) { 213 if (radeon_debugfs_ib_init(rdev)) {
@@ -203,14 +218,22 @@ int radeon_ib_pool_init(struct radeon_device *rdev)
203 218
204void radeon_ib_pool_fini(struct radeon_device *rdev) 219void radeon_ib_pool_fini(struct radeon_device *rdev)
205{ 220{
221 int r;
222
206 if (!rdev->ib_pool.ready) { 223 if (!rdev->ib_pool.ready) {
207 return; 224 return;
208 } 225 }
209 mutex_lock(&rdev->ib_pool.mutex); 226 mutex_lock(&rdev->ib_pool.mutex);
210 bitmap_zero(rdev->ib_pool.alloc_bm, RADEON_IB_POOL_SIZE); 227 radeon_ib_bogus_cleanup(rdev);
228
211 if (rdev->ib_pool.robj) { 229 if (rdev->ib_pool.robj) {
212 radeon_object_kunmap(rdev->ib_pool.robj); 230 r = radeon_bo_reserve(rdev->ib_pool.robj, false);
213 radeon_object_unref(&rdev->ib_pool.robj); 231 if (likely(r == 0)) {
232 radeon_bo_kunmap(rdev->ib_pool.robj);
233 radeon_bo_unpin(rdev->ib_pool.robj);
234 radeon_bo_unreserve(rdev->ib_pool.robj);
235 }
236 radeon_bo_unref(&rdev->ib_pool.robj);
214 rdev->ib_pool.robj = NULL; 237 rdev->ib_pool.robj = NULL;
215 } 238 }
216 mutex_unlock(&rdev->ib_pool.mutex); 239 mutex_unlock(&rdev->ib_pool.mutex);
@@ -288,29 +311,28 @@ int radeon_ring_init(struct radeon_device *rdev, unsigned ring_size)
288 rdev->cp.ring_size = ring_size; 311 rdev->cp.ring_size = ring_size;
289 /* Allocate ring buffer */ 312 /* Allocate ring buffer */
290 if (rdev->cp.ring_obj == NULL) { 313 if (rdev->cp.ring_obj == NULL) {
291 r = radeon_object_create(rdev, NULL, rdev->cp.ring_size, 314 r = radeon_bo_create(rdev, NULL, rdev->cp.ring_size, true,
292 true, 315 RADEON_GEM_DOMAIN_GTT,
293 RADEON_GEM_DOMAIN_GTT, 316 &rdev->cp.ring_obj);
294 false,
295 &rdev->cp.ring_obj);
296 if (r) { 317 if (r) {
297 DRM_ERROR("radeon: failed to create ring buffer (%d).\n", r); 318 dev_err(rdev->dev, "(%d) ring create failed\n", r);
298 mutex_unlock(&rdev->cp.mutex);
299 return r; 319 return r;
300 } 320 }
301 r = radeon_object_pin(rdev->cp.ring_obj, 321 r = radeon_bo_reserve(rdev->cp.ring_obj, false);
302 RADEON_GEM_DOMAIN_GTT, 322 if (unlikely(r != 0))
303 &rdev->cp.gpu_addr); 323 return r;
324 r = radeon_bo_pin(rdev->cp.ring_obj, RADEON_GEM_DOMAIN_GTT,
325 &rdev->cp.gpu_addr);
304 if (r) { 326 if (r) {
305 DRM_ERROR("radeon: failed to pin ring buffer (%d).\n", r); 327 radeon_bo_unreserve(rdev->cp.ring_obj);
306 mutex_unlock(&rdev->cp.mutex); 328 dev_err(rdev->dev, "(%d) ring pin failed\n", r);
307 return r; 329 return r;
308 } 330 }
309 r = radeon_object_kmap(rdev->cp.ring_obj, 331 r = radeon_bo_kmap(rdev->cp.ring_obj,
310 (void **)&rdev->cp.ring); 332 (void **)&rdev->cp.ring);
333 radeon_bo_unreserve(rdev->cp.ring_obj);
311 if (r) { 334 if (r) {
312 DRM_ERROR("radeon: failed to map ring buffer (%d).\n", r); 335 dev_err(rdev->dev, "(%d) ring map failed\n", r);
313 mutex_unlock(&rdev->cp.mutex);
314 return r; 336 return r;
315 } 337 }
316 } 338 }
@@ -321,11 +343,17 @@ int radeon_ring_init(struct radeon_device *rdev, unsigned ring_size)
321 343
322void radeon_ring_fini(struct radeon_device *rdev) 344void radeon_ring_fini(struct radeon_device *rdev)
323{ 345{
346 int r;
347
324 mutex_lock(&rdev->cp.mutex); 348 mutex_lock(&rdev->cp.mutex);
325 if (rdev->cp.ring_obj) { 349 if (rdev->cp.ring_obj) {
326 radeon_object_kunmap(rdev->cp.ring_obj); 350 r = radeon_bo_reserve(rdev->cp.ring_obj, false);
327 radeon_object_unpin(rdev->cp.ring_obj); 351 if (likely(r == 0)) {
328 radeon_object_unref(&rdev->cp.ring_obj); 352 radeon_bo_kunmap(rdev->cp.ring_obj);
353 radeon_bo_unpin(rdev->cp.ring_obj);
354 radeon_bo_unreserve(rdev->cp.ring_obj);
355 }
356 radeon_bo_unref(&rdev->cp.ring_obj);
329 rdev->cp.ring = NULL; 357 rdev->cp.ring = NULL;
330 rdev->cp.ring_obj = NULL; 358 rdev->cp.ring_obj = NULL;
331 } 359 }
@@ -346,7 +374,7 @@ static int radeon_debugfs_ib_info(struct seq_file *m, void *data)
346 if (ib == NULL) { 374 if (ib == NULL) {
347 return 0; 375 return 0;
348 } 376 }
349 seq_printf(m, "IB %04lu\n", ib->idx); 377 seq_printf(m, "IB %04u\n", ib->idx);
350 seq_printf(m, "IB fence %p\n", ib->fence); 378 seq_printf(m, "IB fence %p\n", ib->fence);
351 seq_printf(m, "IB size %05u dwords\n", ib->length_dw); 379 seq_printf(m, "IB size %05u dwords\n", ib->length_dw);
352 for (i = 0; i < ib->length_dw; i++) { 380 for (i = 0; i < ib->length_dw; i++) {
@@ -355,15 +383,49 @@ static int radeon_debugfs_ib_info(struct seq_file *m, void *data)
355 return 0; 383 return 0;
356} 384}
357 385
386static int radeon_debugfs_ib_bogus_info(struct seq_file *m, void *data)
387{
388 struct drm_info_node *node = (struct drm_info_node *) m->private;
389 struct radeon_device *rdev = node->info_ent->data;
390 struct radeon_ib *ib;
391 unsigned i;
392
393 mutex_lock(&rdev->ib_pool.mutex);
394 if (list_empty(&rdev->ib_pool.bogus_ib)) {
395 mutex_unlock(&rdev->ib_pool.mutex);
396 seq_printf(m, "no bogus IB recorded\n");
397 return 0;
398 }
399 ib = list_first_entry(&rdev->ib_pool.bogus_ib, struct radeon_ib, list);
400 list_del_init(&ib->list);
401 mutex_unlock(&rdev->ib_pool.mutex);
402 seq_printf(m, "IB size %05u dwords\n", ib->length_dw);
403 for (i = 0; i < ib->length_dw; i++) {
404 seq_printf(m, "[%05u]=0x%08X\n", i, ib->ptr[i]);
405 }
406 vfree(ib->ptr);
407 kfree(ib);
408 return 0;
409}
410
358static struct drm_info_list radeon_debugfs_ib_list[RADEON_IB_POOL_SIZE]; 411static struct drm_info_list radeon_debugfs_ib_list[RADEON_IB_POOL_SIZE];
359static char radeon_debugfs_ib_names[RADEON_IB_POOL_SIZE][32]; 412static char radeon_debugfs_ib_names[RADEON_IB_POOL_SIZE][32];
413
414static struct drm_info_list radeon_debugfs_ib_bogus_info_list[] = {
415 {"radeon_ib_bogus", radeon_debugfs_ib_bogus_info, 0, NULL},
416};
360#endif 417#endif
361 418
362int radeon_debugfs_ib_init(struct radeon_device *rdev) 419int radeon_debugfs_ib_init(struct radeon_device *rdev)
363{ 420{
364#if defined(CONFIG_DEBUG_FS) 421#if defined(CONFIG_DEBUG_FS)
365 unsigned i; 422 unsigned i;
423 int r;
366 424
425 radeon_debugfs_ib_bogus_info_list[0].data = rdev;
426 r = radeon_debugfs_add_files(rdev, radeon_debugfs_ib_bogus_info_list, 1);
427 if (r)
428 return r;
367 for (i = 0; i < RADEON_IB_POOL_SIZE; i++) { 429 for (i = 0; i < RADEON_IB_POOL_SIZE; i++) {
368 sprintf(radeon_debugfs_ib_names[i], "radeon_ib_%04u", i); 430 sprintf(radeon_debugfs_ib_names[i], "radeon_ib_%04u", i);
369 radeon_debugfs_ib_list[i].name = radeon_debugfs_ib_names[i]; 431 radeon_debugfs_ib_list[i].name = radeon_debugfs_ib_names[i];
diff --git a/drivers/gpu/drm/radeon/radeon_state.c b/drivers/gpu/drm/radeon/radeon_state.c
index 38537d971a3e..cc5316dcf580 100644
--- a/drivers/gpu/drm/radeon/radeon_state.c
+++ b/drivers/gpu/drm/radeon/radeon_state.c
@@ -29,6 +29,7 @@
29 29
30#include "drmP.h" 30#include "drmP.h"
31#include "drm.h" 31#include "drm.h"
32#include "drm_buffer.h"
32#include "drm_sarea.h" 33#include "drm_sarea.h"
33#include "radeon_drm.h" 34#include "radeon_drm.h"
34#include "radeon_drv.h" 35#include "radeon_drv.h"
@@ -91,21 +92,27 @@ static __inline__ int radeon_check_and_fixup_offset(drm_radeon_private_t *
91static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t * 92static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t *
92 dev_priv, 93 dev_priv,
93 struct drm_file *file_priv, 94 struct drm_file *file_priv,
94 int id, u32 *data) 95 int id, struct drm_buffer *buf)
95{ 96{
97 u32 *data;
96 switch (id) { 98 switch (id) {
97 99
98 case RADEON_EMIT_PP_MISC: 100 case RADEON_EMIT_PP_MISC:
99 if (radeon_check_and_fixup_offset(dev_priv, file_priv, 101 data = drm_buffer_pointer_to_dword(buf,
100 &data[(RADEON_RB3D_DEPTHOFFSET - RADEON_PP_MISC) / 4])) { 102 (RADEON_RB3D_DEPTHOFFSET - RADEON_PP_MISC) / 4);
103
104 if (radeon_check_and_fixup_offset(dev_priv, file_priv, data)) {
101 DRM_ERROR("Invalid depth buffer offset\n"); 105 DRM_ERROR("Invalid depth buffer offset\n");
102 return -EINVAL; 106 return -EINVAL;
103 } 107 }
108 dev_priv->have_z_offset = 1;
104 break; 109 break;
105 110
106 case RADEON_EMIT_PP_CNTL: 111 case RADEON_EMIT_PP_CNTL:
107 if (radeon_check_and_fixup_offset(dev_priv, file_priv, 112 data = drm_buffer_pointer_to_dword(buf,
108 &data[(RADEON_RB3D_COLOROFFSET - RADEON_PP_CNTL) / 4])) { 113 (RADEON_RB3D_COLOROFFSET - RADEON_PP_CNTL) / 4);
114
115 if (radeon_check_and_fixup_offset(dev_priv, file_priv, data)) {
109 DRM_ERROR("Invalid colour buffer offset\n"); 116 DRM_ERROR("Invalid colour buffer offset\n");
110 return -EINVAL; 117 return -EINVAL;
111 } 118 }
@@ -117,8 +124,8 @@ static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t *
117 case R200_EMIT_PP_TXOFFSET_3: 124 case R200_EMIT_PP_TXOFFSET_3:
118 case R200_EMIT_PP_TXOFFSET_4: 125 case R200_EMIT_PP_TXOFFSET_4:
119 case R200_EMIT_PP_TXOFFSET_5: 126 case R200_EMIT_PP_TXOFFSET_5:
120 if (radeon_check_and_fixup_offset(dev_priv, file_priv, 127 data = drm_buffer_pointer_to_dword(buf, 0);
121 &data[0])) { 128 if (radeon_check_and_fixup_offset(dev_priv, file_priv, data)) {
122 DRM_ERROR("Invalid R200 texture offset\n"); 129 DRM_ERROR("Invalid R200 texture offset\n");
123 return -EINVAL; 130 return -EINVAL;
124 } 131 }
@@ -127,8 +134,9 @@ static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t *
127 case RADEON_EMIT_PP_TXFILTER_0: 134 case RADEON_EMIT_PP_TXFILTER_0:
128 case RADEON_EMIT_PP_TXFILTER_1: 135 case RADEON_EMIT_PP_TXFILTER_1:
129 case RADEON_EMIT_PP_TXFILTER_2: 136 case RADEON_EMIT_PP_TXFILTER_2:
130 if (radeon_check_and_fixup_offset(dev_priv, file_priv, 137 data = drm_buffer_pointer_to_dword(buf,
131 &data[(RADEON_PP_TXOFFSET_0 - RADEON_PP_TXFILTER_0) / 4])) { 138 (RADEON_PP_TXOFFSET_0 - RADEON_PP_TXFILTER_0) / 4);
139 if (radeon_check_and_fixup_offset(dev_priv, file_priv, data)) {
132 DRM_ERROR("Invalid R100 texture offset\n"); 140 DRM_ERROR("Invalid R100 texture offset\n");
133 return -EINVAL; 141 return -EINVAL;
134 } 142 }
@@ -142,9 +150,10 @@ static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t *
142 case R200_EMIT_PP_CUBIC_OFFSETS_5:{ 150 case R200_EMIT_PP_CUBIC_OFFSETS_5:{
143 int i; 151 int i;
144 for (i = 0; i < 5; i++) { 152 for (i = 0; i < 5; i++) {
153 data = drm_buffer_pointer_to_dword(buf, i);
145 if (radeon_check_and_fixup_offset(dev_priv, 154 if (radeon_check_and_fixup_offset(dev_priv,
146 file_priv, 155 file_priv,
147 &data[i])) { 156 data)) {
148 DRM_ERROR 157 DRM_ERROR
149 ("Invalid R200 cubic texture offset\n"); 158 ("Invalid R200 cubic texture offset\n");
150 return -EINVAL; 159 return -EINVAL;
@@ -158,9 +167,10 @@ static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t *
158 case RADEON_EMIT_PP_CUBIC_OFFSETS_T2:{ 167 case RADEON_EMIT_PP_CUBIC_OFFSETS_T2:{
159 int i; 168 int i;
160 for (i = 0; i < 5; i++) { 169 for (i = 0; i < 5; i++) {
170 data = drm_buffer_pointer_to_dword(buf, i);
161 if (radeon_check_and_fixup_offset(dev_priv, 171 if (radeon_check_and_fixup_offset(dev_priv,
162 file_priv, 172 file_priv,
163 &data[i])) { 173 data)) {
164 DRM_ERROR 174 DRM_ERROR
165 ("Invalid R100 cubic texture offset\n"); 175 ("Invalid R100 cubic texture offset\n");
166 return -EINVAL; 176 return -EINVAL;
@@ -269,23 +279,24 @@ static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
269 cmdbuf, 279 cmdbuf,
270 unsigned int *cmdsz) 280 unsigned int *cmdsz)
271{ 281{
272 u32 *cmd = (u32 *) cmdbuf->buf; 282 u32 *cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 0);
273 u32 offset, narrays; 283 u32 offset, narrays;
274 int count, i, k; 284 int count, i, k;
275 285
276 *cmdsz = 2 + ((cmd[0] & RADEON_CP_PACKET_COUNT_MASK) >> 16); 286 count = ((*cmd & RADEON_CP_PACKET_COUNT_MASK) >> 16);
287 *cmdsz = 2 + count;
277 288
278 if ((cmd[0] & 0xc0000000) != RADEON_CP_PACKET3) { 289 if ((*cmd & 0xc0000000) != RADEON_CP_PACKET3) {
279 DRM_ERROR("Not a type 3 packet\n"); 290 DRM_ERROR("Not a type 3 packet\n");
280 return -EINVAL; 291 return -EINVAL;
281 } 292 }
282 293
283 if (4 * *cmdsz > cmdbuf->bufsz) { 294 if (4 * *cmdsz > drm_buffer_unprocessed(cmdbuf->buffer)) {
284 DRM_ERROR("Packet size larger than size of data provided\n"); 295 DRM_ERROR("Packet size larger than size of data provided\n");
285 return -EINVAL; 296 return -EINVAL;
286 } 297 }
287 298
288 switch(cmd[0] & 0xff00) { 299 switch (*cmd & 0xff00) {
289 /* XXX Are there old drivers needing other packets? */ 300 /* XXX Are there old drivers needing other packets? */
290 301
291 case RADEON_3D_DRAW_IMMD: 302 case RADEON_3D_DRAW_IMMD:
@@ -312,7 +323,6 @@ static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
312 break; 323 break;
313 324
314 case RADEON_3D_LOAD_VBPNTR: 325 case RADEON_3D_LOAD_VBPNTR:
315 count = (cmd[0] >> 16) & 0x3fff;
316 326
317 if (count > 18) { /* 12 arrays max */ 327 if (count > 18) { /* 12 arrays max */
318 DRM_ERROR("Too large payload in 3D_LOAD_VBPNTR (count=%d)\n", 328 DRM_ERROR("Too large payload in 3D_LOAD_VBPNTR (count=%d)\n",
@@ -321,13 +331,16 @@ static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
321 } 331 }
322 332
323 /* carefully check packet contents */ 333 /* carefully check packet contents */
324 narrays = cmd[1] & ~0xc000; 334 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 1);
335
336 narrays = *cmd & ~0xc000;
325 k = 0; 337 k = 0;
326 i = 2; 338 i = 2;
327 while ((k < narrays) && (i < (count + 2))) { 339 while ((k < narrays) && (i < (count + 2))) {
328 i++; /* skip attribute field */ 340 i++; /* skip attribute field */
341 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, i);
329 if (radeon_check_and_fixup_offset(dev_priv, file_priv, 342 if (radeon_check_and_fixup_offset(dev_priv, file_priv,
330 &cmd[i])) { 343 cmd)) {
331 DRM_ERROR 344 DRM_ERROR
332 ("Invalid offset (k=%d i=%d) in 3D_LOAD_VBPNTR packet.\n", 345 ("Invalid offset (k=%d i=%d) in 3D_LOAD_VBPNTR packet.\n",
333 k, i); 346 k, i);
@@ -338,8 +351,10 @@ static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
338 if (k == narrays) 351 if (k == narrays)
339 break; 352 break;
340 /* have one more to process, they come in pairs */ 353 /* have one more to process, they come in pairs */
354 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, i);
355
341 if (radeon_check_and_fixup_offset(dev_priv, 356 if (radeon_check_and_fixup_offset(dev_priv,
342 file_priv, &cmd[i])) 357 file_priv, cmd))
343 { 358 {
344 DRM_ERROR 359 DRM_ERROR
345 ("Invalid offset (k=%d i=%d) in 3D_LOAD_VBPNTR packet.\n", 360 ("Invalid offset (k=%d i=%d) in 3D_LOAD_VBPNTR packet.\n",
@@ -363,7 +378,9 @@ static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
363 DRM_ERROR("Invalid 3d packet for r200-class chip\n"); 378 DRM_ERROR("Invalid 3d packet for r200-class chip\n");
364 return -EINVAL; 379 return -EINVAL;
365 } 380 }
366 if (radeon_check_and_fixup_offset(dev_priv, file_priv, &cmd[1])) { 381
382 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 1);
383 if (radeon_check_and_fixup_offset(dev_priv, file_priv, cmd)) {
367 DRM_ERROR("Invalid rndr_gen_indx offset\n"); 384 DRM_ERROR("Invalid rndr_gen_indx offset\n");
368 return -EINVAL; 385 return -EINVAL;
369 } 386 }
@@ -374,12 +391,15 @@ static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
374 DRM_ERROR("Invalid 3d packet for r100-class chip\n"); 391 DRM_ERROR("Invalid 3d packet for r100-class chip\n");
375 return -EINVAL; 392 return -EINVAL;
376 } 393 }
377 if ((cmd[1] & 0x8000ffff) != 0x80000810) { 394
378 DRM_ERROR("Invalid indx_buffer reg address %08X\n", cmd[1]); 395 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 1);
396 if ((*cmd & 0x8000ffff) != 0x80000810) {
397 DRM_ERROR("Invalid indx_buffer reg address %08X\n", *cmd);
379 return -EINVAL; 398 return -EINVAL;
380 } 399 }
381 if (radeon_check_and_fixup_offset(dev_priv, file_priv, &cmd[2])) { 400 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 2);
382 DRM_ERROR("Invalid indx_buffer offset is %08X\n", cmd[2]); 401 if (radeon_check_and_fixup_offset(dev_priv, file_priv, cmd)) {
402 DRM_ERROR("Invalid indx_buffer offset is %08X\n", *cmd);
383 return -EINVAL; 403 return -EINVAL;
384 } 404 }
385 break; 405 break;
@@ -388,31 +408,34 @@ static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
388 case RADEON_CNTL_PAINT_MULTI: 408 case RADEON_CNTL_PAINT_MULTI:
389 case RADEON_CNTL_BITBLT_MULTI: 409 case RADEON_CNTL_BITBLT_MULTI:
390 /* MSB of opcode: next DWORD GUI_CNTL */ 410 /* MSB of opcode: next DWORD GUI_CNTL */
391 if (cmd[1] & (RADEON_GMC_SRC_PITCH_OFFSET_CNTL 411 cmd = drm_buffer_pointer_to_dword(cmdbuf->buffer, 1);
412 if (*cmd & (RADEON_GMC_SRC_PITCH_OFFSET_CNTL
392 | RADEON_GMC_DST_PITCH_OFFSET_CNTL)) { 413 | RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
393 offset = cmd[2] << 10; 414 u32 *cmd2 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 2);
415 offset = *cmd2 << 10;
394 if (radeon_check_and_fixup_offset 416 if (radeon_check_and_fixup_offset
395 (dev_priv, file_priv, &offset)) { 417 (dev_priv, file_priv, &offset)) {
396 DRM_ERROR("Invalid first packet offset\n"); 418 DRM_ERROR("Invalid first packet offset\n");
397 return -EINVAL; 419 return -EINVAL;
398 } 420 }
399 cmd[2] = (cmd[2] & 0xffc00000) | offset >> 10; 421 *cmd2 = (*cmd2 & 0xffc00000) | offset >> 10;
400 } 422 }
401 423
402 if ((cmd[1] & RADEON_GMC_SRC_PITCH_OFFSET_CNTL) && 424 if ((*cmd & RADEON_GMC_SRC_PITCH_OFFSET_CNTL) &&
403 (cmd[1] & RADEON_GMC_DST_PITCH_OFFSET_CNTL)) { 425 (*cmd & RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
404 offset = cmd[3] << 10; 426 u32 *cmd3 = drm_buffer_pointer_to_dword(cmdbuf->buffer, 3);
427 offset = *cmd3 << 10;
405 if (radeon_check_and_fixup_offset 428 if (radeon_check_and_fixup_offset
406 (dev_priv, file_priv, &offset)) { 429 (dev_priv, file_priv, &offset)) {
407 DRM_ERROR("Invalid second packet offset\n"); 430 DRM_ERROR("Invalid second packet offset\n");
408 return -EINVAL; 431 return -EINVAL;
409 } 432 }
410 cmd[3] = (cmd[3] & 0xffc00000) | offset >> 10; 433 *cmd3 = (*cmd3 & 0xffc00000) | offset >> 10;
411 } 434 }
412 break; 435 break;
413 436
414 default: 437 default:
415 DRM_ERROR("Invalid packet type %x\n", cmd[0] & 0xff00); 438 DRM_ERROR("Invalid packet type %x\n", *cmd & 0xff00);
416 return -EINVAL; 439 return -EINVAL;
417 } 440 }
418 441
@@ -876,6 +899,11 @@ static void radeon_cp_dispatch_clear(struct drm_device * dev,
876 if (tmp & RADEON_BACK) 899 if (tmp & RADEON_BACK)
877 flags |= RADEON_FRONT; 900 flags |= RADEON_FRONT;
878 } 901 }
902 if (flags & (RADEON_DEPTH|RADEON_STENCIL)) {
903 if (!dev_priv->have_z_offset)
904 printk_once(KERN_ERR "radeon: illegal depth clear request. Buggy mesa detected - please update.\n");
905 flags &= ~(RADEON_DEPTH | RADEON_STENCIL);
906 }
879 907
880 if (flags & (RADEON_FRONT | RADEON_BACK)) { 908 if (flags & (RADEON_FRONT | RADEON_BACK)) {
881 909
@@ -1065,7 +1093,7 @@ static void radeon_cp_dispatch_clear(struct drm_device * dev,
1065 /* judging by the first tile offset needed, could possibly 1093 /* judging by the first tile offset needed, could possibly
1066 directly address/clear 4x4 tiles instead of 8x2 * 4x4 1094 directly address/clear 4x4 tiles instead of 8x2 * 4x4
1067 macro tiles, though would still need clear mask for 1095 macro tiles, though would still need clear mask for
1068 right/bottom if truely 4x4 granularity is desired ? */ 1096 right/bottom if truly 4x4 granularity is desired ? */
1069 OUT_RING(tileoffset * 16); 1097 OUT_RING(tileoffset * 16);
1070 /* the number of tiles to clear */ 1098 /* the number of tiles to clear */
1071 OUT_RING(nrtilesx + 1); 1099 OUT_RING(nrtilesx + 1);
@@ -1950,7 +1978,7 @@ static void radeon_apply_surface_regs(int surf_index,
1950 * Note that refcount can be at most 2, since during a free refcount=3 1978 * Note that refcount can be at most 2, since during a free refcount=3
1951 * might mean we have to allocate a new surface which might not always 1979 * might mean we have to allocate a new surface which might not always
1952 * be available. 1980 * be available.
1953 * For example : we allocate three contigous surfaces ABC. If B is 1981 * For example : we allocate three contiguous surfaces ABC. If B is
1954 * freed, we suddenly need two surfaces to store A and C, which might 1982 * freed, we suddenly need two surfaces to store A and C, which might
1955 * not always be available. 1983 * not always be available.
1956 */ 1984 */
@@ -2611,7 +2639,6 @@ static int radeon_emit_packets(drm_radeon_private_t * dev_priv,
2611{ 2639{
2612 int id = (int)header.packet.packet_id; 2640 int id = (int)header.packet.packet_id;
2613 int sz, reg; 2641 int sz, reg;
2614 int *data = (int *)cmdbuf->buf;
2615 RING_LOCALS; 2642 RING_LOCALS;
2616 2643
2617 if (id >= RADEON_MAX_STATE_PACKETS) 2644 if (id >= RADEON_MAX_STATE_PACKETS)
@@ -2620,23 +2647,22 @@ static int radeon_emit_packets(drm_radeon_private_t * dev_priv,
2620 sz = packet[id].len; 2647 sz = packet[id].len;
2621 reg = packet[id].start; 2648 reg = packet[id].start;
2622 2649
2623 if (sz * sizeof(int) > cmdbuf->bufsz) { 2650 if (sz * sizeof(u32) > drm_buffer_unprocessed(cmdbuf->buffer)) {
2624 DRM_ERROR("Packet size provided larger than data provided\n"); 2651 DRM_ERROR("Packet size provided larger than data provided\n");
2625 return -EINVAL; 2652 return -EINVAL;
2626 } 2653 }
2627 2654
2628 if (radeon_check_and_fixup_packets(dev_priv, file_priv, id, data)) { 2655 if (radeon_check_and_fixup_packets(dev_priv, file_priv, id,
2656 cmdbuf->buffer)) {
2629 DRM_ERROR("Packet verification failed\n"); 2657 DRM_ERROR("Packet verification failed\n");
2630 return -EINVAL; 2658 return -EINVAL;
2631 } 2659 }
2632 2660
2633 BEGIN_RING(sz + 1); 2661 BEGIN_RING(sz + 1);
2634 OUT_RING(CP_PACKET0(reg, (sz - 1))); 2662 OUT_RING(CP_PACKET0(reg, (sz - 1)));
2635 OUT_RING_TABLE(data, sz); 2663 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz);
2636 ADVANCE_RING(); 2664 ADVANCE_RING();
2637 2665
2638 cmdbuf->buf += sz * sizeof(int);
2639 cmdbuf->bufsz -= sz * sizeof(int);
2640 return 0; 2666 return 0;
2641} 2667}
2642 2668
@@ -2653,10 +2679,8 @@ static __inline__ int radeon_emit_scalars(drm_radeon_private_t *dev_priv,
2653 OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0)); 2679 OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0));
2654 OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT)); 2680 OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT));
2655 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1)); 2681 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1));
2656 OUT_RING_TABLE(cmdbuf->buf, sz); 2682 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz);
2657 ADVANCE_RING(); 2683 ADVANCE_RING();
2658 cmdbuf->buf += sz * sizeof(int);
2659 cmdbuf->bufsz -= sz * sizeof(int);
2660 return 0; 2684 return 0;
2661} 2685}
2662 2686
@@ -2675,10 +2699,8 @@ static __inline__ int radeon_emit_scalars2(drm_radeon_private_t *dev_priv,
2675 OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0)); 2699 OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0));
2676 OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT)); 2700 OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT));
2677 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1)); 2701 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1));
2678 OUT_RING_TABLE(cmdbuf->buf, sz); 2702 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz);
2679 ADVANCE_RING(); 2703 ADVANCE_RING();
2680 cmdbuf->buf += sz * sizeof(int);
2681 cmdbuf->bufsz -= sz * sizeof(int);
2682 return 0; 2704 return 0;
2683} 2705}
2684 2706
@@ -2696,11 +2718,9 @@ static __inline__ int radeon_emit_vectors(drm_radeon_private_t *dev_priv,
2696 OUT_RING(CP_PACKET0(RADEON_SE_TCL_VECTOR_INDX_REG, 0)); 2718 OUT_RING(CP_PACKET0(RADEON_SE_TCL_VECTOR_INDX_REG, 0));
2697 OUT_RING(start | (stride << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT)); 2719 OUT_RING(start | (stride << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT));
2698 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_VECTOR_DATA_REG, (sz - 1))); 2720 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_VECTOR_DATA_REG, (sz - 1)));
2699 OUT_RING_TABLE(cmdbuf->buf, sz); 2721 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz);
2700 ADVANCE_RING(); 2722 ADVANCE_RING();
2701 2723
2702 cmdbuf->buf += sz * sizeof(int);
2703 cmdbuf->bufsz -= sz * sizeof(int);
2704 return 0; 2724 return 0;
2705} 2725}
2706 2726
@@ -2714,7 +2734,7 @@ static __inline__ int radeon_emit_veclinear(drm_radeon_private_t *dev_priv,
2714 2734
2715 if (!sz) 2735 if (!sz)
2716 return 0; 2736 return 0;
2717 if (sz * 4 > cmdbuf->bufsz) 2737 if (sz * 4 > drm_buffer_unprocessed(cmdbuf->buffer))
2718 return -EINVAL; 2738 return -EINVAL;
2719 2739
2720 BEGIN_RING(5 + sz); 2740 BEGIN_RING(5 + sz);
@@ -2722,11 +2742,9 @@ static __inline__ int radeon_emit_veclinear(drm_radeon_private_t *dev_priv,
2722 OUT_RING(CP_PACKET0(RADEON_SE_TCL_VECTOR_INDX_REG, 0)); 2742 OUT_RING(CP_PACKET0(RADEON_SE_TCL_VECTOR_INDX_REG, 0));
2723 OUT_RING(start | (1 << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT)); 2743 OUT_RING(start | (1 << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT));
2724 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_VECTOR_DATA_REG, (sz - 1))); 2744 OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_VECTOR_DATA_REG, (sz - 1)));
2725 OUT_RING_TABLE(cmdbuf->buf, sz); 2745 OUT_RING_DRM_BUFFER(cmdbuf->buffer, sz);
2726 ADVANCE_RING(); 2746 ADVANCE_RING();
2727 2747
2728 cmdbuf->buf += sz * sizeof(int);
2729 cmdbuf->bufsz -= sz * sizeof(int);
2730 return 0; 2748 return 0;
2731} 2749}
2732 2750
@@ -2748,11 +2766,9 @@ static int radeon_emit_packet3(struct drm_device * dev,
2748 } 2766 }
2749 2767
2750 BEGIN_RING(cmdsz); 2768 BEGIN_RING(cmdsz);
2751 OUT_RING_TABLE(cmdbuf->buf, cmdsz); 2769 OUT_RING_DRM_BUFFER(cmdbuf->buffer, cmdsz);
2752 ADVANCE_RING(); 2770 ADVANCE_RING();
2753 2771
2754 cmdbuf->buf += cmdsz * 4;
2755 cmdbuf->bufsz -= cmdsz * 4;
2756 return 0; 2772 return 0;
2757} 2773}
2758 2774
@@ -2805,16 +2821,16 @@ static int radeon_emit_packet3_cliprect(struct drm_device *dev,
2805 } 2821 }
2806 2822
2807 BEGIN_RING(cmdsz); 2823 BEGIN_RING(cmdsz);
2808 OUT_RING_TABLE(cmdbuf->buf, cmdsz); 2824 OUT_RING_DRM_BUFFER(cmdbuf->buffer, cmdsz);
2809 ADVANCE_RING(); 2825 ADVANCE_RING();
2810 2826
2811 } while (++i < cmdbuf->nbox); 2827 } while (++i < cmdbuf->nbox);
2812 if (cmdbuf->nbox == 1) 2828 if (cmdbuf->nbox == 1)
2813 cmdbuf->nbox = 0; 2829 cmdbuf->nbox = 0;
2814 2830
2831 return 0;
2815 out: 2832 out:
2816 cmdbuf->buf += cmdsz * 4; 2833 drm_buffer_advance(cmdbuf->buffer, cmdsz * 4);
2817 cmdbuf->bufsz -= cmdsz * 4;
2818 return 0; 2834 return 0;
2819} 2835}
2820 2836
@@ -2847,16 +2863,16 @@ static int radeon_emit_wait(struct drm_device * dev, int flags)
2847 return 0; 2863 return 0;
2848} 2864}
2849 2865
2850static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file *file_priv) 2866static int radeon_cp_cmdbuf(struct drm_device *dev, void *data,
2867 struct drm_file *file_priv)
2851{ 2868{
2852 drm_radeon_private_t *dev_priv = dev->dev_private; 2869 drm_radeon_private_t *dev_priv = dev->dev_private;
2853 struct drm_device_dma *dma = dev->dma; 2870 struct drm_device_dma *dma = dev->dma;
2854 struct drm_buf *buf = NULL; 2871 struct drm_buf *buf = NULL;
2872 drm_radeon_cmd_header_t stack_header;
2855 int idx; 2873 int idx;
2856 drm_radeon_kcmd_buffer_t *cmdbuf = data; 2874 drm_radeon_kcmd_buffer_t *cmdbuf = data;
2857 drm_radeon_cmd_header_t header; 2875 int orig_nbox;
2858 int orig_nbox, orig_bufsz;
2859 char *kbuf = NULL;
2860 2876
2861 LOCK_TEST_WITH_RETURN(dev, file_priv); 2877 LOCK_TEST_WITH_RETURN(dev, file_priv);
2862 2878
@@ -2871,18 +2887,20 @@ static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file
2871 * races between checking values and using those values in other code, 2887 * races between checking values and using those values in other code,
2872 * and simply to avoid a lot of function calls to copy in data. 2888 * and simply to avoid a lot of function calls to copy in data.
2873 */ 2889 */
2874 orig_bufsz = cmdbuf->bufsz; 2890 if (cmdbuf->bufsz != 0) {
2875 if (orig_bufsz != 0) { 2891 int rv;
2876 kbuf = kmalloc(cmdbuf->bufsz, GFP_KERNEL); 2892 void __user *buffer = cmdbuf->buffer;
2877 if (kbuf == NULL) 2893 rv = drm_buffer_alloc(&cmdbuf->buffer, cmdbuf->bufsz);
2878 return -ENOMEM; 2894 if (rv)
2879 if (DRM_COPY_FROM_USER(kbuf, (void __user *)cmdbuf->buf, 2895 return rv;
2880 cmdbuf->bufsz)) { 2896 rv = drm_buffer_copy_from_user(cmdbuf->buffer, buffer,
2881 kfree(kbuf); 2897 cmdbuf->bufsz);
2882 return -EFAULT; 2898 if (rv) {
2899 drm_buffer_free(cmdbuf->buffer);
2900 return rv;
2883 } 2901 }
2884 cmdbuf->buf = kbuf; 2902 } else
2885 } 2903 goto done;
2886 2904
2887 orig_nbox = cmdbuf->nbox; 2905 orig_nbox = cmdbuf->nbox;
2888 2906
@@ -2890,24 +2908,23 @@ static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file
2890 int temp; 2908 int temp;
2891 temp = r300_do_cp_cmdbuf(dev, file_priv, cmdbuf); 2909 temp = r300_do_cp_cmdbuf(dev, file_priv, cmdbuf);
2892 2910
2893 if (orig_bufsz != 0) 2911 drm_buffer_free(cmdbuf->buffer);
2894 kfree(kbuf);
2895 2912
2896 return temp; 2913 return temp;
2897 } 2914 }
2898 2915
2899 /* microcode_version != r300 */ 2916 /* microcode_version != r300 */
2900 while (cmdbuf->bufsz >= sizeof(header)) { 2917 while (drm_buffer_unprocessed(cmdbuf->buffer) >= sizeof(stack_header)) {
2901 2918
2902 header.i = *(int *)cmdbuf->buf; 2919 drm_radeon_cmd_header_t *header;
2903 cmdbuf->buf += sizeof(header); 2920 header = drm_buffer_read_object(cmdbuf->buffer,
2904 cmdbuf->bufsz -= sizeof(header); 2921 sizeof(stack_header), &stack_header);
2905 2922
2906 switch (header.header.cmd_type) { 2923 switch (header->header.cmd_type) {
2907 case RADEON_CMD_PACKET: 2924 case RADEON_CMD_PACKET:
2908 DRM_DEBUG("RADEON_CMD_PACKET\n"); 2925 DRM_DEBUG("RADEON_CMD_PACKET\n");
2909 if (radeon_emit_packets 2926 if (radeon_emit_packets
2910 (dev_priv, file_priv, header, cmdbuf)) { 2927 (dev_priv, file_priv, *header, cmdbuf)) {
2911 DRM_ERROR("radeon_emit_packets failed\n"); 2928 DRM_ERROR("radeon_emit_packets failed\n");
2912 goto err; 2929 goto err;
2913 } 2930 }
@@ -2915,7 +2932,7 @@ static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file
2915 2932
2916 case RADEON_CMD_SCALARS: 2933 case RADEON_CMD_SCALARS:
2917 DRM_DEBUG("RADEON_CMD_SCALARS\n"); 2934 DRM_DEBUG("RADEON_CMD_SCALARS\n");
2918 if (radeon_emit_scalars(dev_priv, header, cmdbuf)) { 2935 if (radeon_emit_scalars(dev_priv, *header, cmdbuf)) {
2919 DRM_ERROR("radeon_emit_scalars failed\n"); 2936 DRM_ERROR("radeon_emit_scalars failed\n");
2920 goto err; 2937 goto err;
2921 } 2938 }
@@ -2923,7 +2940,7 @@ static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file
2923 2940
2924 case RADEON_CMD_VECTORS: 2941 case RADEON_CMD_VECTORS:
2925 DRM_DEBUG("RADEON_CMD_VECTORS\n"); 2942 DRM_DEBUG("RADEON_CMD_VECTORS\n");
2926 if (radeon_emit_vectors(dev_priv, header, cmdbuf)) { 2943 if (radeon_emit_vectors(dev_priv, *header, cmdbuf)) {
2927 DRM_ERROR("radeon_emit_vectors failed\n"); 2944 DRM_ERROR("radeon_emit_vectors failed\n");
2928 goto err; 2945 goto err;
2929 } 2946 }
@@ -2931,7 +2948,7 @@ static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file
2931 2948
2932 case RADEON_CMD_DMA_DISCARD: 2949 case RADEON_CMD_DMA_DISCARD:
2933 DRM_DEBUG("RADEON_CMD_DMA_DISCARD\n"); 2950 DRM_DEBUG("RADEON_CMD_DMA_DISCARD\n");
2934 idx = header.dma.buf_idx; 2951 idx = header->dma.buf_idx;
2935 if (idx < 0 || idx >= dma->buf_count) { 2952 if (idx < 0 || idx >= dma->buf_count) {
2936 DRM_ERROR("buffer index %d (of %d max)\n", 2953 DRM_ERROR("buffer index %d (of %d max)\n",
2937 idx, dma->buf_count - 1); 2954 idx, dma->buf_count - 1);
@@ -2968,7 +2985,7 @@ static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file
2968 2985
2969 case RADEON_CMD_SCALARS2: 2986 case RADEON_CMD_SCALARS2:
2970 DRM_DEBUG("RADEON_CMD_SCALARS2\n"); 2987 DRM_DEBUG("RADEON_CMD_SCALARS2\n");
2971 if (radeon_emit_scalars2(dev_priv, header, cmdbuf)) { 2988 if (radeon_emit_scalars2(dev_priv, *header, cmdbuf)) {
2972 DRM_ERROR("radeon_emit_scalars2 failed\n"); 2989 DRM_ERROR("radeon_emit_scalars2 failed\n");
2973 goto err; 2990 goto err;
2974 } 2991 }
@@ -2976,37 +2993,36 @@ static int radeon_cp_cmdbuf(struct drm_device *dev, void *data, struct drm_file
2976 2993
2977 case RADEON_CMD_WAIT: 2994 case RADEON_CMD_WAIT:
2978 DRM_DEBUG("RADEON_CMD_WAIT\n"); 2995 DRM_DEBUG("RADEON_CMD_WAIT\n");
2979 if (radeon_emit_wait(dev, header.wait.flags)) { 2996 if (radeon_emit_wait(dev, header->wait.flags)) {
2980 DRM_ERROR("radeon_emit_wait failed\n"); 2997 DRM_ERROR("radeon_emit_wait failed\n");
2981 goto err; 2998 goto err;
2982 } 2999 }
2983 break; 3000 break;
2984 case RADEON_CMD_VECLINEAR: 3001 case RADEON_CMD_VECLINEAR:
2985 DRM_DEBUG("RADEON_CMD_VECLINEAR\n"); 3002 DRM_DEBUG("RADEON_CMD_VECLINEAR\n");
2986 if (radeon_emit_veclinear(dev_priv, header, cmdbuf)) { 3003 if (radeon_emit_veclinear(dev_priv, *header, cmdbuf)) {
2987 DRM_ERROR("radeon_emit_veclinear failed\n"); 3004 DRM_ERROR("radeon_emit_veclinear failed\n");
2988 goto err; 3005 goto err;
2989 } 3006 }
2990 break; 3007 break;
2991 3008
2992 default: 3009 default:
2993 DRM_ERROR("bad cmd_type %d at %p\n", 3010 DRM_ERROR("bad cmd_type %d at byte %d\n",
2994 header.header.cmd_type, 3011 header->header.cmd_type,
2995 cmdbuf->buf - sizeof(header)); 3012 cmdbuf->buffer->iterator);
2996 goto err; 3013 goto err;
2997 } 3014 }
2998 } 3015 }
2999 3016
3000 if (orig_bufsz != 0) 3017 drm_buffer_free(cmdbuf->buffer);
3001 kfree(kbuf);
3002 3018
3019 done:
3003 DRM_DEBUG("DONE\n"); 3020 DRM_DEBUG("DONE\n");
3004 COMMIT_RING(); 3021 COMMIT_RING();
3005 return 0; 3022 return 0;
3006 3023
3007 err: 3024 err:
3008 if (orig_bufsz != 0) 3025 drm_buffer_free(cmdbuf->buffer);
3009 kfree(kbuf);
3010 return -EINVAL; 3026 return -EINVAL;
3011} 3027}
3012 3028
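
The radeon_state.c hunks above replace raw cmdbuf->buf pointer arithmetic with the drm_buffer helpers (drm_buffer_alloc, drm_buffer_copy_from_user, drm_buffer_pointer_to_dword, drm_buffer_unprocessed, drm_buffer_advance, drm_buffer_free). A minimal sketch of that access pattern, using only the calls that appear in the patch; the wrapper function and the "verify or patch" step are illustrative, not driver code:

    /* Minimal sketch of the drm_buffer walk used above; copy_and_walk_cmdbuf()
     * is an illustrative helper, not a driver function. */
    #include "drm_buffer.h"   /* drm_buffer helpers, as included by the drm code */

    static int copy_and_walk_cmdbuf(void __user *user_buf, int bufsz)
    {
            struct drm_buffer *buffer;
            int rv;

            rv = drm_buffer_alloc(&buffer, bufsz);            /* kernel-side copy */
            if (rv)
                    return rv;
            rv = drm_buffer_copy_from_user(buffer, user_buf, bufsz);
            if (rv) {
                    drm_buffer_free(buffer);
                    return rv;
            }

            while (drm_buffer_unprocessed(buffer) >= sizeof(u32)) {
                    /* Peek at the next unprocessed dword without consuming it. */
                    u32 *dword = drm_buffer_pointer_to_dword(buffer, 0);

                    /* ... verify or patch *dword in place, as the checks above do ... */
                    (void)dword;
                    drm_buffer_advance(buffer, sizeof(u32));  /* consume it */
            }

            drm_buffer_free(buffer);
            return 0;
    }

As the comment kept in radeon_cp_cmdbuf() says, the whole buffer is copied into kernel space before parsing to avoid races between checking values and later using them; the drm_buffer conversion preserves that, it only changes how the copy is allocated and walked.
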
diff --git a/drivers/gpu/drm/radeon/radeon_test.c b/drivers/gpu/drm/radeon/radeon_test.c
index f8a465d9a1cf..313c96bc09da 100644
--- a/drivers/gpu/drm/radeon/radeon_test.c
+++ b/drivers/gpu/drm/radeon/radeon_test.c
@@ -30,8 +30,8 @@
30/* Test BO GTT->VRAM and VRAM->GTT GPU copies across the whole GTT aperture */ 30/* Test BO GTT->VRAM and VRAM->GTT GPU copies across the whole GTT aperture */
31void radeon_test_moves(struct radeon_device *rdev) 31void radeon_test_moves(struct radeon_device *rdev)
32{ 32{
33 struct radeon_object *vram_obj = NULL; 33 struct radeon_bo *vram_obj = NULL;
34 struct radeon_object **gtt_obj = NULL; 34 struct radeon_bo **gtt_obj = NULL;
35 struct radeon_fence *fence = NULL; 35 struct radeon_fence *fence = NULL;
36 uint64_t gtt_addr, vram_addr; 36 uint64_t gtt_addr, vram_addr;
37 unsigned i, n, size; 37 unsigned i, n, size;
@@ -42,8 +42,8 @@ void radeon_test_moves(struct radeon_device *rdev)
42 /* Number of tests = 42 /* Number of tests =
43 * (Total GTT - IB pool - writeback page - ring buffer) / test size 43 * (Total GTT - IB pool - writeback page - ring buffer) / test size
44 */ 44 */
45 n = (rdev->mc.gtt_size - RADEON_IB_POOL_SIZE*64*1024 - RADEON_GPU_PAGE_SIZE - 45 n = ((u32)(rdev->mc.gtt_size - RADEON_IB_POOL_SIZE*64*1024 - RADEON_GPU_PAGE_SIZE -
46 rdev->cp.ring_size) / size; 46 rdev->cp.ring_size)) / size;
47 47
48 gtt_obj = kzalloc(n * sizeof(*gtt_obj), GFP_KERNEL); 48 gtt_obj = kzalloc(n * sizeof(*gtt_obj), GFP_KERNEL);
49 if (!gtt_obj) { 49 if (!gtt_obj) {
@@ -52,38 +52,42 @@ void radeon_test_moves(struct radeon_device *rdev)
52 goto out_cleanup; 52 goto out_cleanup;
53 } 53 }
54 54
55 r = radeon_object_create(rdev, NULL, size, true, RADEON_GEM_DOMAIN_VRAM, 55 r = radeon_bo_create(rdev, NULL, size, true, RADEON_GEM_DOMAIN_VRAM,
56 false, &vram_obj); 56 &vram_obj);
57 if (r) { 57 if (r) {
58 DRM_ERROR("Failed to create VRAM object\n"); 58 DRM_ERROR("Failed to create VRAM object\n");
59 goto out_cleanup; 59 goto out_cleanup;
60 } 60 }
61 61 r = radeon_bo_reserve(vram_obj, false);
62 r = radeon_object_pin(vram_obj, RADEON_GEM_DOMAIN_VRAM, &vram_addr); 62 if (unlikely(r != 0))
63 goto out_cleanup;
64 r = radeon_bo_pin(vram_obj, RADEON_GEM_DOMAIN_VRAM, &vram_addr);
63 if (r) { 65 if (r) {
64 DRM_ERROR("Failed to pin VRAM object\n"); 66 DRM_ERROR("Failed to pin VRAM object\n");
65 goto out_cleanup; 67 goto out_cleanup;
66 } 68 }
67
68 for (i = 0; i < n; i++) { 69 for (i = 0; i < n; i++) {
69 void *gtt_map, *vram_map; 70 void *gtt_map, *vram_map;
70 void **gtt_start, **gtt_end; 71 void **gtt_start, **gtt_end;
71 void **vram_start, **vram_end; 72 void **vram_start, **vram_end;
72 73
73 r = radeon_object_create(rdev, NULL, size, true, 74 r = radeon_bo_create(rdev, NULL, size, true,
74 RADEON_GEM_DOMAIN_GTT, false, gtt_obj + i); 75 RADEON_GEM_DOMAIN_GTT, gtt_obj + i);
75 if (r) { 76 if (r) {
76 DRM_ERROR("Failed to create GTT object %d\n", i); 77 DRM_ERROR("Failed to create GTT object %d\n", i);
77 goto out_cleanup; 78 goto out_cleanup;
78 } 79 }
79 80
80 r = radeon_object_pin(gtt_obj[i], RADEON_GEM_DOMAIN_GTT, &gtt_addr); 81 r = radeon_bo_reserve(gtt_obj[i], false);
82 if (unlikely(r != 0))
83 goto out_cleanup;
84 r = radeon_bo_pin(gtt_obj[i], RADEON_GEM_DOMAIN_GTT, &gtt_addr);
81 if (r) { 85 if (r) {
82 DRM_ERROR("Failed to pin GTT object %d\n", i); 86 DRM_ERROR("Failed to pin GTT object %d\n", i);
83 goto out_cleanup; 87 goto out_cleanup;
84 } 88 }
85 89
86 r = radeon_object_kmap(gtt_obj[i], &gtt_map); 90 r = radeon_bo_kmap(gtt_obj[i], &gtt_map);
87 if (r) { 91 if (r) {
88 DRM_ERROR("Failed to map GTT object %d\n", i); 92 DRM_ERROR("Failed to map GTT object %d\n", i);
89 goto out_cleanup; 93 goto out_cleanup;
@@ -94,7 +98,7 @@ void radeon_test_moves(struct radeon_device *rdev)
94 gtt_start++) 98 gtt_start++)
95 *gtt_start = gtt_start; 99 *gtt_start = gtt_start;
96 100
97 radeon_object_kunmap(gtt_obj[i]); 101 radeon_bo_kunmap(gtt_obj[i]);
98 102
99 r = radeon_fence_create(rdev, &fence); 103 r = radeon_fence_create(rdev, &fence);
100 if (r) { 104 if (r) {
@@ -116,7 +120,7 @@ void radeon_test_moves(struct radeon_device *rdev)
116 120
117 radeon_fence_unref(&fence); 121 radeon_fence_unref(&fence);
118 122
119 r = radeon_object_kmap(vram_obj, &vram_map); 123 r = radeon_bo_kmap(vram_obj, &vram_map);
120 if (r) { 124 if (r) {
121 DRM_ERROR("Failed to map VRAM object after copy %d\n", i); 125 DRM_ERROR("Failed to map VRAM object after copy %d\n", i);
122 goto out_cleanup; 126 goto out_cleanup;
@@ -131,13 +135,13 @@ void radeon_test_moves(struct radeon_device *rdev)
131 "expected 0x%p (GTT map 0x%p-0x%p)\n", 135 "expected 0x%p (GTT map 0x%p-0x%p)\n",
132 i, *vram_start, gtt_start, gtt_map, 136 i, *vram_start, gtt_start, gtt_map,
133 gtt_end); 137 gtt_end);
134 radeon_object_kunmap(vram_obj); 138 radeon_bo_kunmap(vram_obj);
135 goto out_cleanup; 139 goto out_cleanup;
136 } 140 }
137 *vram_start = vram_start; 141 *vram_start = vram_start;
138 } 142 }
139 143
140 radeon_object_kunmap(vram_obj); 144 radeon_bo_kunmap(vram_obj);
141 145
142 r = radeon_fence_create(rdev, &fence); 146 r = radeon_fence_create(rdev, &fence);
143 if (r) { 147 if (r) {
@@ -159,7 +163,7 @@ void radeon_test_moves(struct radeon_device *rdev)
159 163
160 radeon_fence_unref(&fence); 164 radeon_fence_unref(&fence);
161 165
162 r = radeon_object_kmap(gtt_obj[i], &gtt_map); 166 r = radeon_bo_kmap(gtt_obj[i], &gtt_map);
163 if (r) { 167 if (r) {
164 DRM_ERROR("Failed to map GTT object after copy %d\n", i); 168 DRM_ERROR("Failed to map GTT object after copy %d\n", i);
165 goto out_cleanup; 169 goto out_cleanup;
@@ -174,27 +178,33 @@ void radeon_test_moves(struct radeon_device *rdev)
174 "expected 0x%p (VRAM map 0x%p-0x%p)\n", 178 "expected 0x%p (VRAM map 0x%p-0x%p)\n",
175 i, *gtt_start, vram_start, vram_map, 179 i, *gtt_start, vram_start, vram_map,
176 vram_end); 180 vram_end);
177 radeon_object_kunmap(gtt_obj[i]); 181 radeon_bo_kunmap(gtt_obj[i]);
178 goto out_cleanup; 182 goto out_cleanup;
179 } 183 }
180 } 184 }
181 185
182 radeon_object_kunmap(gtt_obj[i]); 186 radeon_bo_kunmap(gtt_obj[i]);
183 187
184 DRM_INFO("Tested GTT->VRAM and VRAM->GTT copy for GTT offset 0x%llx\n", 188 DRM_INFO("Tested GTT->VRAM and VRAM->GTT copy for GTT offset 0x%llx\n",
185 gtt_addr - rdev->mc.gtt_location); 189 gtt_addr - rdev->mc.gtt_start);
186 } 190 }
187 191
188out_cleanup: 192out_cleanup:
189 if (vram_obj) { 193 if (vram_obj) {
190 radeon_object_unpin(vram_obj); 194 if (radeon_bo_is_reserved(vram_obj)) {
191 radeon_object_unref(&vram_obj); 195 radeon_bo_unpin(vram_obj);
196 radeon_bo_unreserve(vram_obj);
197 }
198 radeon_bo_unref(&vram_obj);
192 } 199 }
193 if (gtt_obj) { 200 if (gtt_obj) {
194 for (i = 0; i < n; i++) { 201 for (i = 0; i < n; i++) {
195 if (gtt_obj[i]) { 202 if (gtt_obj[i]) {
196 radeon_object_unpin(gtt_obj[i]); 203 if (radeon_bo_is_reserved(gtt_obj[i])) {
197 radeon_object_unref(&gtt_obj[i]); 204 radeon_bo_unpin(gtt_obj[i]);
205 radeon_bo_unreserve(gtt_obj[i]);
206 }
207 radeon_bo_unref(&gtt_obj[i]);
198 } 208 }
199 } 209 }
200 kfree(gtt_obj); 210 kfree(gtt_obj);
@@ -206,4 +216,3 @@ out_cleanup:
206 printk(KERN_WARNING "Error while testing BO move.\n"); 216 printk(KERN_WARNING "Error while testing BO move.\n");
207 } 217 }
208} 218}
209
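
The radeon_test.c hunks above (and the radeon_ttm.c hunks below) move from the old radeon_object_* calls to the radeon_bo_* API, where pinning and unpinning happen while the buffer object is reserved. A minimal sketch of the create/reserve/pin/unreserve sequence as it appears in this patch; pin_new_bo() is an illustrative helper, not a driver function:

    static int pin_new_bo(struct radeon_device *rdev, unsigned long size,
                          u32 domain, u64 *gpu_addr, struct radeon_bo **bo_out)
    {
            struct radeon_bo *bo;
            int r;

            r = radeon_bo_create(rdev, NULL, size, true, domain, &bo);
            if (r)
                    return r;

            r = radeon_bo_reserve(bo, false);   /* pin/unpin run under the reservation */
            if (unlikely(r != 0)) {
                    radeon_bo_unref(&bo);
                    return r;
            }
            r = radeon_bo_pin(bo, domain, gpu_addr);
            radeon_bo_unreserve(bo);
            if (r) {
                    radeon_bo_unref(&bo);
                    return r;
            }

            *bo_out = bo;
            return 0;
    }

The same pattern shows up in radeon_ttm_init() below for stollen_vga_memory, and the test's cleanup path now checks radeon_bo_is_reserved() before unpinning so that a failure between reserve and pin is unwound correctly.
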
diff --git a/drivers/gpu/drm/radeon/radeon_ttm.c b/drivers/gpu/drm/radeon/radeon_ttm.c
index 1381e06d6af3..d031b6863082 100644
--- a/drivers/gpu/drm/radeon/radeon_ttm.c
+++ b/drivers/gpu/drm/radeon/radeon_ttm.c
@@ -36,6 +36,7 @@
36#include <drm/drmP.h> 36#include <drm/drmP.h>
37#include <drm/radeon_drm.h> 37#include <drm/radeon_drm.h>
38#include <linux/seq_file.h> 38#include <linux/seq_file.h>
39#include <linux/slab.h>
39#include "radeon_reg.h" 40#include "radeon_reg.h"
40#include "radeon.h" 41#include "radeon.h"
41 42
@@ -150,7 +151,7 @@ static int radeon_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
150 man->default_caching = TTM_PL_FLAG_CACHED; 151 man->default_caching = TTM_PL_FLAG_CACHED;
151 break; 152 break;
152 case TTM_PL_TT: 153 case TTM_PL_TT:
153 man->gpu_offset = 0; 154 man->gpu_offset = rdev->mc.gtt_start;
154 man->available_caching = TTM_PL_MASK_CACHING; 155 man->available_caching = TTM_PL_MASK_CACHING;
155 man->default_caching = TTM_PL_FLAG_CACHED; 156 man->default_caching = TTM_PL_FLAG_CACHED;
156 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA; 157 man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA;
@@ -180,7 +181,7 @@ static int radeon_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
180 break; 181 break;
181 case TTM_PL_VRAM: 182 case TTM_PL_VRAM:
182 /* "On-card" video ram */ 183 /* "On-card" video ram */
183 man->gpu_offset = 0; 184 man->gpu_offset = rdev->mc.vram_start;
184 man->flags = TTM_MEMTYPE_FLAG_FIXED | 185 man->flags = TTM_MEMTYPE_FLAG_FIXED |
185 TTM_MEMTYPE_FLAG_NEEDS_IOREMAP | 186 TTM_MEMTYPE_FLAG_NEEDS_IOREMAP |
186 TTM_MEMTYPE_FLAG_MAPPABLE; 187 TTM_MEMTYPE_FLAG_MAPPABLE;
@@ -197,16 +198,34 @@ static int radeon_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
197 return 0; 198 return 0;
198} 199}
199 200
200static uint32_t radeon_evict_flags(struct ttm_buffer_object *bo) 201static void radeon_evict_flags(struct ttm_buffer_object *bo,
202 struct ttm_placement *placement)
201{ 203{
202 uint32_t cur_placement = bo->mem.placement & ~TTM_PL_MASK_MEMTYPE; 204 struct radeon_bo *rbo;
205 static u32 placements = TTM_PL_MASK_CACHING | TTM_PL_FLAG_SYSTEM;
203 206
207 if (!radeon_ttm_bo_is_radeon_bo(bo)) {
208 placement->fpfn = 0;
209 placement->lpfn = 0;
210 placement->placement = &placements;
211 placement->busy_placement = &placements;
212 placement->num_placement = 1;
213 placement->num_busy_placement = 1;
214 return;
215 }
216 rbo = container_of(bo, struct radeon_bo, tbo);
204 switch (bo->mem.mem_type) { 217 switch (bo->mem.mem_type) {
218 case TTM_PL_VRAM:
219 if (rbo->rdev->cp.ready == false)
220 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
221 else
222 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_GTT);
223 break;
224 case TTM_PL_TT:
205 default: 225 default:
206 return (cur_placement & ~TTM_PL_MASK_CACHING) | 226 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
207 TTM_PL_FLAG_SYSTEM |
208 TTM_PL_FLAG_CACHED;
209 } 227 }
228 *placement = rbo->placement;
210} 229}
211 230
212static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp) 231static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp)
@@ -244,10 +263,10 @@ static int radeon_move_blit(struct ttm_buffer_object *bo,
244 263
245 switch (old_mem->mem_type) { 264 switch (old_mem->mem_type) {
246 case TTM_PL_VRAM: 265 case TTM_PL_VRAM:
247 old_start += rdev->mc.vram_location; 266 old_start += rdev->mc.vram_start;
248 break; 267 break;
249 case TTM_PL_TT: 268 case TTM_PL_TT:
250 old_start += rdev->mc.gtt_location; 269 old_start += rdev->mc.gtt_start;
251 break; 270 break;
252 default: 271 default:
253 DRM_ERROR("Unknown placement %d\n", old_mem->mem_type); 272 DRM_ERROR("Unknown placement %d\n", old_mem->mem_type);
@@ -255,10 +274,10 @@ static int radeon_move_blit(struct ttm_buffer_object *bo,
255 } 274 }
256 switch (new_mem->mem_type) { 275 switch (new_mem->mem_type) {
257 case TTM_PL_VRAM: 276 case TTM_PL_VRAM:
258 new_start += rdev->mc.vram_location; 277 new_start += rdev->mc.vram_start;
259 break; 278 break;
260 case TTM_PL_TT: 279 case TTM_PL_TT:
261 new_start += rdev->mc.gtt_location; 280 new_start += rdev->mc.gtt_start;
262 break; 281 break;
263 default: 282 default:
264 DRM_ERROR("Unknown placement %d\n", old_mem->mem_type); 283 DRM_ERROR("Unknown placement %d\n", old_mem->mem_type);
@@ -283,14 +302,21 @@ static int radeon_move_vram_ram(struct ttm_buffer_object *bo,
283 struct radeon_device *rdev; 302 struct radeon_device *rdev;
284 struct ttm_mem_reg *old_mem = &bo->mem; 303 struct ttm_mem_reg *old_mem = &bo->mem;
285 struct ttm_mem_reg tmp_mem; 304 struct ttm_mem_reg tmp_mem;
286 uint32_t proposed_placement; 305 u32 placements;
306 struct ttm_placement placement;
287 int r; 307 int r;
288 308
289 rdev = radeon_get_rdev(bo->bdev); 309 rdev = radeon_get_rdev(bo->bdev);
290 tmp_mem = *new_mem; 310 tmp_mem = *new_mem;
291 tmp_mem.mm_node = NULL; 311 tmp_mem.mm_node = NULL;
292 proposed_placement = TTM_PL_FLAG_TT | TTM_PL_MASK_CACHING; 312 placement.fpfn = 0;
293 r = ttm_bo_mem_space(bo, proposed_placement, &tmp_mem, 313 placement.lpfn = 0;
314 placement.num_placement = 1;
315 placement.placement = &placements;
316 placement.num_busy_placement = 1;
317 placement.busy_placement = &placements;
318 placements = TTM_PL_MASK_CACHING | TTM_PL_FLAG_TT;
319 r = ttm_bo_mem_space(bo, &placement, &tmp_mem,
294 interruptible, no_wait); 320 interruptible, no_wait);
295 if (unlikely(r)) { 321 if (unlikely(r)) {
296 return r; 322 return r;
@@ -329,15 +355,21 @@ static int radeon_move_ram_vram(struct ttm_buffer_object *bo,
329 struct radeon_device *rdev; 355 struct radeon_device *rdev;
330 struct ttm_mem_reg *old_mem = &bo->mem; 356 struct ttm_mem_reg *old_mem = &bo->mem;
331 struct ttm_mem_reg tmp_mem; 357 struct ttm_mem_reg tmp_mem;
332 uint32_t proposed_flags; 358 struct ttm_placement placement;
359 u32 placements;
333 int r; 360 int r;
334 361
335 rdev = radeon_get_rdev(bo->bdev); 362 rdev = radeon_get_rdev(bo->bdev);
336 tmp_mem = *new_mem; 363 tmp_mem = *new_mem;
337 tmp_mem.mm_node = NULL; 364 tmp_mem.mm_node = NULL;
338 proposed_flags = TTM_PL_FLAG_TT | TTM_PL_MASK_CACHING; 365 placement.fpfn = 0;
339 r = ttm_bo_mem_space(bo, proposed_flags, &tmp_mem, 366 placement.lpfn = 0;
340 interruptible, no_wait); 367 placement.num_placement = 1;
368 placement.placement = &placements;
369 placement.num_busy_placement = 1;
370 placement.busy_placement = &placements;
371 placements = TTM_PL_MASK_CACHING | TTM_PL_FLAG_TT;
372 r = ttm_bo_mem_space(bo, &placement, &tmp_mem, interruptible, no_wait);
341 if (unlikely(r)) { 373 if (unlikely(r)) {
342 return r; 374 return r;
343 } 375 }
@@ -378,7 +410,7 @@ static int radeon_bo_move(struct ttm_buffer_object *bo,
378 new_mem->mem_type == TTM_PL_SYSTEM) || 410 new_mem->mem_type == TTM_PL_SYSTEM) ||
379 (old_mem->mem_type == TTM_PL_SYSTEM && 411 (old_mem->mem_type == TTM_PL_SYSTEM &&
380 new_mem->mem_type == TTM_PL_TT)) { 412 new_mem->mem_type == TTM_PL_TT)) {
381 /* bind is enought */ 413 /* bind is enough */
382 radeon_move_null(bo, new_mem); 414 radeon_move_null(bo, new_mem);
383 return 0; 415 return 0;
384 } 416 }
@@ -407,18 +439,6 @@ memcpy:
407 return r; 439 return r;
408} 440}
409 441
410const uint32_t radeon_mem_prios[] = {
411 TTM_PL_VRAM,
412 TTM_PL_TT,
413 TTM_PL_SYSTEM,
414};
415
416const uint32_t radeon_busy_prios[] = {
417 TTM_PL_TT,
418 TTM_PL_VRAM,
419 TTM_PL_SYSTEM,
420};
421
422static int radeon_sync_obj_wait(void *sync_obj, void *sync_arg, 442static int radeon_sync_obj_wait(void *sync_obj, void *sync_arg,
423 bool lazy, bool interruptible) 443 bool lazy, bool interruptible)
424{ 444{
@@ -446,10 +466,6 @@ static bool radeon_sync_obj_signaled(void *sync_obj, void *sync_arg)
446} 466}
447 467
448static struct ttm_bo_driver radeon_bo_driver = { 468static struct ttm_bo_driver radeon_bo_driver = {
449 .mem_type_prio = radeon_mem_prios,
450 .mem_busy_prio = radeon_busy_prios,
451 .num_mem_type_prio = ARRAY_SIZE(radeon_mem_prios),
452 .num_mem_busy_prio = ARRAY_SIZE(radeon_busy_prios),
453 .create_ttm_backend_entry = &radeon_create_ttm_backend_entry, 469 .create_ttm_backend_entry = &radeon_create_ttm_backend_entry,
454 .invalidate_caches = &radeon_invalidate_caches, 470 .invalidate_caches = &radeon_invalidate_caches,
455 .init_mem_type = &radeon_init_mem_type, 471 .init_mem_type = &radeon_init_mem_type,
@@ -482,27 +498,32 @@ int radeon_ttm_init(struct radeon_device *rdev)
482 DRM_ERROR("failed initializing buffer object driver(%d).\n", r); 498 DRM_ERROR("failed initializing buffer object driver(%d).\n", r);
483 return r; 499 return r;
484 } 500 }
485 r = ttm_bo_init_mm(&rdev->mman.bdev, TTM_PL_VRAM, 0, 501 rdev->mman.initialized = true;
486 ((rdev->mc.real_vram_size) >> PAGE_SHIFT)); 502 r = ttm_bo_init_mm(&rdev->mman.bdev, TTM_PL_VRAM,
503 rdev->mc.real_vram_size >> PAGE_SHIFT);
487 if (r) { 504 if (r) {
488 DRM_ERROR("Failed initializing VRAM heap.\n"); 505 DRM_ERROR("Failed initializing VRAM heap.\n");
489 return r; 506 return r;
490 } 507 }
491 r = radeon_object_create(rdev, NULL, 256 * 1024, true, 508 r = radeon_bo_create(rdev, NULL, 256 * 1024, true,
492 RADEON_GEM_DOMAIN_VRAM, false, 509 RADEON_GEM_DOMAIN_VRAM,
493 &rdev->stollen_vga_memory); 510 &rdev->stollen_vga_memory);
494 if (r) { 511 if (r) {
495 return r; 512 return r;
496 } 513 }
497 r = radeon_object_pin(rdev->stollen_vga_memory, RADEON_GEM_DOMAIN_VRAM, NULL); 514 r = radeon_bo_reserve(rdev->stollen_vga_memory, false);
515 if (r)
516 return r;
517 r = radeon_bo_pin(rdev->stollen_vga_memory, RADEON_GEM_DOMAIN_VRAM, NULL);
518 radeon_bo_unreserve(rdev->stollen_vga_memory);
498 if (r) { 519 if (r) {
499 radeon_object_unref(&rdev->stollen_vga_memory); 520 radeon_bo_unref(&rdev->stollen_vga_memory);
500 return r; 521 return r;
501 } 522 }
502 DRM_INFO("radeon: %uM of VRAM memory ready\n", 523 DRM_INFO("radeon: %uM of VRAM memory ready\n",
503 (unsigned)rdev->mc.real_vram_size / (1024 * 1024)); 524 (unsigned)rdev->mc.real_vram_size / (1024 * 1024));
504 r = ttm_bo_init_mm(&rdev->mman.bdev, TTM_PL_TT, 0, 525 r = ttm_bo_init_mm(&rdev->mman.bdev, TTM_PL_TT,
505 ((rdev->mc.gtt_size) >> PAGE_SHIFT)); 526 rdev->mc.gtt_size >> PAGE_SHIFT);
506 if (r) { 527 if (r) {
507 DRM_ERROR("Failed initializing GTT heap.\n"); 528 DRM_ERROR("Failed initializing GTT heap.\n");
508 return r; 529 return r;
@@ -523,15 +544,24 @@ int radeon_ttm_init(struct radeon_device *rdev)
523 544
524void radeon_ttm_fini(struct radeon_device *rdev) 545void radeon_ttm_fini(struct radeon_device *rdev)
525{ 546{
547 int r;
548
549 if (!rdev->mman.initialized)
550 return;
526 if (rdev->stollen_vga_memory) { 551 if (rdev->stollen_vga_memory) {
527 radeon_object_unpin(rdev->stollen_vga_memory); 552 r = radeon_bo_reserve(rdev->stollen_vga_memory, false);
528 radeon_object_unref(&rdev->stollen_vga_memory); 553 if (r == 0) {
554 radeon_bo_unpin(rdev->stollen_vga_memory);
555 radeon_bo_unreserve(rdev->stollen_vga_memory);
556 }
557 radeon_bo_unref(&rdev->stollen_vga_memory);
529 } 558 }
530 ttm_bo_clean_mm(&rdev->mman.bdev, TTM_PL_VRAM); 559 ttm_bo_clean_mm(&rdev->mman.bdev, TTM_PL_VRAM);
531 ttm_bo_clean_mm(&rdev->mman.bdev, TTM_PL_TT); 560 ttm_bo_clean_mm(&rdev->mman.bdev, TTM_PL_TT);
532 ttm_bo_device_release(&rdev->mman.bdev); 561 ttm_bo_device_release(&rdev->mman.bdev);
533 radeon_gart_fini(rdev); 562 radeon_gart_fini(rdev);
534 radeon_ttm_global_fini(rdev); 563 radeon_ttm_global_fini(rdev);
564 rdev->mman.initialized = false;
535 DRM_INFO("radeon: ttm finalized\n"); 565 DRM_INFO("radeon: ttm finalized\n");
536} 566}
537 567
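
In radeon_ttm.c, ttm_bo_mem_space() and the evict path now take a struct ttm_placement instead of a single flag word, so the move paths above build a one-entry placement on the stack. A condensed sketch of that construction; radeon_single_placement() is an illustrative name, not a driver function:

    static void radeon_single_placement(struct ttm_placement *placement,
                                        u32 *placements, u32 flags)
    {
            *placements = flags;    /* e.g. TTM_PL_MASK_CACHING | TTM_PL_FLAG_TT */
            placement->fpfn = 0;    /* no page-frame range restriction */
            placement->lpfn = 0;
            placement->placement = placements;
            placement->num_placement = 1;
            placement->busy_placement = placements;
            placement->num_busy_placement = 1;
    }

The u32 the placement points at has to outlive the ttm_bo_mem_space(bo, &placement, &tmp_mem, interruptible, no_wait) call, which is why radeon_move_vram_ram() and radeon_move_ram_vram() above keep it as a local next to the struct.
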
diff --git a/drivers/gpu/drm/radeon/reg_srcs/r200 b/drivers/gpu/drm/radeon/reg_srcs/r200
index 6021c8849a16..c29ac434ac9c 100644
--- a/drivers/gpu/drm/radeon/reg_srcs/r200
+++ b/drivers/gpu/drm/radeon/reg_srcs/r200
@@ -91,6 +91,8 @@ r200 0x3294
910x22b8 SE_TCL_TEX_CYL_WRAP_CTL 910x22b8 SE_TCL_TEX_CYL_WRAP_CTL
920x22c0 SE_TCL_UCP_VERT_BLEND_CNTL 920x22c0 SE_TCL_UCP_VERT_BLEND_CNTL
930x22c4 SE_TCL_POINT_SPRITE_CNTL 930x22c4 SE_TCL_POINT_SPRITE_CNTL
940x22d0 SE_PVS_CNTL
950x22d4 SE_PVS_CONST_CNTL
940x2648 RE_POINTSIZE 960x2648 RE_POINTSIZE
950x26c0 RE_TOP_LEFT 970x26c0 RE_TOP_LEFT
960x26c4 RE_MISC 980x26c4 RE_MISC
diff --git a/drivers/gpu/drm/radeon/reg_srcs/r300 b/drivers/gpu/drm/radeon/reg_srcs/r300
index 19c4663fa9c6..1e97b2d129fd 100644
--- a/drivers/gpu/drm/radeon/reg_srcs/r300
+++ b/drivers/gpu/drm/radeon/reg_srcs/r300
@@ -125,6 +125,8 @@ r300 0x4f60
1250x4000 GB_VAP_RASTER_VTX_FMT_0 1250x4000 GB_VAP_RASTER_VTX_FMT_0
1260x4004 GB_VAP_RASTER_VTX_FMT_1 1260x4004 GB_VAP_RASTER_VTX_FMT_1
1270x4008 GB_ENABLE 1270x4008 GB_ENABLE
1280x4010 GB_MSPOS0
1290x4014 GB_MSPOS1
1280x401C GB_SELECT 1300x401C GB_SELECT
1290x4020 GB_AA_CONFIG 1310x4020 GB_AA_CONFIG
1300x4024 GB_FIFO_SIZE 1320x4024 GB_FIFO_SIZE
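
The reg_srcs files touched above and added below are plain-text whitelists of registers the command-stream checker may let userspace write: the first line gives the chip name and the top register offset the table covers, and each following line pairs a register offset with its name. A rough, self-contained userspace sketch of reading that layout; the flat safe[] table is an assumption for the sketch, not what the driver's mkregtable actually generates:

    #include <stdio.h>
    #include <stdlib.h>

    int main(int argc, char **argv)
    {
            char chip[32];
            unsigned int top, offset, nregs, count = 0;
            unsigned char *safe;
            FILE *f;

            if (argc < 2 || !(f = fopen(argv[1], "r")))
                    return 1;
            if (fscanf(f, "%31s %x", chip, &top) != 2)
                    return 1;
            nregs = top / 4 + 1;            /* one 32-bit register every 4 bytes */
            safe = calloc(nregs, 1);
            if (!safe)
                    return 1;
            while (fscanf(f, "%x %*s", &offset) == 1) {
                    if (offset <= top) {
                            safe[offset / 4] = 1;
                            count++;
                    }
            }
            printf("%s: %u of %u registers whitelisted\n", chip, count, nregs);
            free(safe);
            fclose(f);
            return 0;
    }
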
diff --git a/drivers/gpu/drm/radeon/reg_srcs/r420 b/drivers/gpu/drm/radeon/reg_srcs/r420
new file mode 100644
index 000000000000..e958980d00f1
--- /dev/null
+++ b/drivers/gpu/drm/radeon/reg_srcs/r420
@@ -0,0 +1,797 @@
1r420 0x4f60
20x1434 SRC_Y_X
30x1438 DST_Y_X
40x143C DST_HEIGHT_WIDTH
50x146C DP_GUI_MASTER_CNTL
60x1474 BRUSH_Y_X
70x1478 DP_BRUSH_BKGD_CLR
80x147C DP_BRUSH_FRGD_CLR
90x1480 BRUSH_DATA0
100x1484 BRUSH_DATA1
110x1598 DST_WIDTH_HEIGHT
120x15C0 CLR_CMP_CNTL
130x15C4 CLR_CMP_CLR_SRC
140x15C8 CLR_CMP_CLR_DST
150x15CC CLR_CMP_MSK
160x15D8 DP_SRC_FRGD_CLR
170x15DC DP_SRC_BKGD_CLR
180x1600 DST_LINE_START
190x1604 DST_LINE_END
200x1608 DST_LINE_PATCOUNT
210x16C0 DP_CNTL
220x16CC DP_WRITE_MSK
230x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
240x16E8 DEFAULT_SC_BOTTOM_RIGHT
250x16EC SC_TOP_LEFT
260x16F0 SC_BOTTOM_RIGHT
270x16F4 SRC_SC_BOTTOM_RIGHT
280x1714 DSTCACHE_CTLSTAT
290x1720 WAIT_UNTIL
300x172C RBBM_GUICNTL
310x1D98 VAP_VPORT_XSCALE
320x1D9C VAP_VPORT_XOFFSET
330x1DA0 VAP_VPORT_YSCALE
340x1DA4 VAP_VPORT_YOFFSET
350x1DA8 VAP_VPORT_ZSCALE
360x1DAC VAP_VPORT_ZOFFSET
370x2080 VAP_CNTL
380x2090 VAP_OUT_VTX_FMT_0
390x2094 VAP_OUT_VTX_FMT_1
400x20B0 VAP_VTE_CNTL
410x2138 VAP_VF_MIN_VTX_INDX
420x2140 VAP_CNTL_STATUS
430x2150 VAP_PROG_STREAM_CNTL_0
440x2154 VAP_PROG_STREAM_CNTL_1
450x2158 VAP_PROG_STREAM_CNTL_2
460x215C VAP_PROG_STREAM_CNTL_3
470x2160 VAP_PROG_STREAM_CNTL_4
480x2164 VAP_PROG_STREAM_CNTL_5
490x2168 VAP_PROG_STREAM_CNTL_6
500x216C VAP_PROG_STREAM_CNTL_7
510x2180 VAP_VTX_STATE_CNTL
520x2184 VAP_VSM_VTX_ASSM
530x2188 VAP_VTX_STATE_IND_REG_0
540x218C VAP_VTX_STATE_IND_REG_1
550x2190 VAP_VTX_STATE_IND_REG_2
560x2194 VAP_VTX_STATE_IND_REG_3
570x2198 VAP_VTX_STATE_IND_REG_4
580x219C VAP_VTX_STATE_IND_REG_5
590x21A0 VAP_VTX_STATE_IND_REG_6
600x21A4 VAP_VTX_STATE_IND_REG_7
610x21A8 VAP_VTX_STATE_IND_REG_8
620x21AC VAP_VTX_STATE_IND_REG_9
630x21B0 VAP_VTX_STATE_IND_REG_10
640x21B4 VAP_VTX_STATE_IND_REG_11
650x21B8 VAP_VTX_STATE_IND_REG_12
660x21BC VAP_VTX_STATE_IND_REG_13
670x21C0 VAP_VTX_STATE_IND_REG_14
680x21C4 VAP_VTX_STATE_IND_REG_15
690x21DC VAP_PSC_SGN_NORM_CNTL
700x21E0 VAP_PROG_STREAM_CNTL_EXT_0
710x21E4 VAP_PROG_STREAM_CNTL_EXT_1
720x21E8 VAP_PROG_STREAM_CNTL_EXT_2
730x21EC VAP_PROG_STREAM_CNTL_EXT_3
740x21F0 VAP_PROG_STREAM_CNTL_EXT_4
750x21F4 VAP_PROG_STREAM_CNTL_EXT_5
760x21F8 VAP_PROG_STREAM_CNTL_EXT_6
770x21FC VAP_PROG_STREAM_CNTL_EXT_7
780x2200 VAP_PVS_VECTOR_INDX_REG
790x2204 VAP_PVS_VECTOR_DATA_REG
800x2208 VAP_PVS_VECTOR_DATA_REG_128
810x221C VAP_CLIP_CNTL
820x2220 VAP_GB_VERT_CLIP_ADJ
830x2224 VAP_GB_VERT_DISC_ADJ
840x2228 VAP_GB_HORZ_CLIP_ADJ
850x222C VAP_GB_HORZ_DISC_ADJ
860x2230 VAP_PVS_FLOW_CNTL_ADDRS_0
870x2234 VAP_PVS_FLOW_CNTL_ADDRS_1
880x2238 VAP_PVS_FLOW_CNTL_ADDRS_2
890x223C VAP_PVS_FLOW_CNTL_ADDRS_3
900x2240 VAP_PVS_FLOW_CNTL_ADDRS_4
910x2244 VAP_PVS_FLOW_CNTL_ADDRS_5
920x2248 VAP_PVS_FLOW_CNTL_ADDRS_6
930x224C VAP_PVS_FLOW_CNTL_ADDRS_7
940x2250 VAP_PVS_FLOW_CNTL_ADDRS_8
950x2254 VAP_PVS_FLOW_CNTL_ADDRS_9
960x2258 VAP_PVS_FLOW_CNTL_ADDRS_10
970x225C VAP_PVS_FLOW_CNTL_ADDRS_11
980x2260 VAP_PVS_FLOW_CNTL_ADDRS_12
990x2264 VAP_PVS_FLOW_CNTL_ADDRS_13
1000x2268 VAP_PVS_FLOW_CNTL_ADDRS_14
1010x226C VAP_PVS_FLOW_CNTL_ADDRS_15
1020x2284 VAP_PVS_STATE_FLUSH_REG
1030x2288 VAP_PVS_VTX_TIMEOUT_REG
1040x2290 VAP_PVS_FLOW_CNTL_LOOP_INDEX_0
1050x2294 VAP_PVS_FLOW_CNTL_LOOP_INDEX_1
1060x2298 VAP_PVS_FLOW_CNTL_LOOP_INDEX_2
1070x229C VAP_PVS_FLOW_CNTL_LOOP_INDEX_3
1080x22A0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_4
1090x22A4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_5
1100x22A8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_6
1110x22AC VAP_PVS_FLOW_CNTL_LOOP_INDEX_7
1120x22B0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_8
1130x22B4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_9
1140x22B8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_10
1150x22BC VAP_PVS_FLOW_CNTL_LOOP_INDEX_11
1160x22C0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_12
1170x22C4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_13
1180x22C8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_14
1190x22CC VAP_PVS_FLOW_CNTL_LOOP_INDEX_15
1200x22D0 VAP_PVS_CODE_CNTL_0
1210x22D4 VAP_PVS_CONST_CNTL
1220x22D8 VAP_PVS_CODE_CNTL_1
1230x22DC VAP_PVS_FLOW_CNTL_OPC
1240x342C RB2D_DSTCACHE_CTLSTAT
1250x4000 GB_VAP_RASTER_VTX_FMT_0
1260x4004 GB_VAP_RASTER_VTX_FMT_1
1270x4008 GB_ENABLE
1280x4010 GB_MSPOS0
1290x4014 GB_MSPOS1
1300x401C GB_SELECT
1310x4020 GB_AA_CONFIG
1320x4024 GB_FIFO_SIZE
1330x4100 TX_INVALTAGS
1340x4200 GA_POINT_S0
1350x4204 GA_POINT_T0
1360x4208 GA_POINT_S1
1370x420C GA_POINT_T1
1380x4214 GA_TRIANGLE_STIPPLE
1390x421C GA_POINT_SIZE
1400x4230 GA_POINT_MINMAX
1410x4234 GA_LINE_CNTL
1420x4238 GA_LINE_STIPPLE_CONFIG
1430x4260 GA_LINE_STIPPLE_VALUE
1440x4264 GA_LINE_S0
1450x4268 GA_LINE_S1
1460x4278 GA_COLOR_CONTROL
1470x427C GA_SOLID_RG
1480x4280 GA_SOLID_BA
1490x4288 GA_POLY_MODE
1500x428C GA_ROUND_MODE
1510x4290 GA_OFFSET
1520x4294 GA_FOG_SCALE
1530x4298 GA_FOG_OFFSET
1540x42A0 SU_TEX_WRAP
1550x42A4 SU_POLY_OFFSET_FRONT_SCALE
1560x42A8 SU_POLY_OFFSET_FRONT_OFFSET
1570x42AC SU_POLY_OFFSET_BACK_SCALE
1580x42B0 SU_POLY_OFFSET_BACK_OFFSET
1590x42B4 SU_POLY_OFFSET_ENABLE
1600x42B8 SU_CULL_MODE
1610x42C0 SU_DEPTH_SCALE
1620x42C4 SU_DEPTH_OFFSET
1630x42C8 SU_REG_DEST
1640x4300 RS_COUNT
1650x4304 RS_INST_COUNT
1660x4310 RS_IP_0
1670x4314 RS_IP_1
1680x4318 RS_IP_2
1690x431C RS_IP_3
1700x4320 RS_IP_4
1710x4324 RS_IP_5
1720x4328 RS_IP_6
1730x432C RS_IP_7
1740x4330 RS_INST_0
1750x4334 RS_INST_1
1760x4338 RS_INST_2
1770x433C RS_INST_3
1780x4340 RS_INST_4
1790x4344 RS_INST_5
1800x4348 RS_INST_6
1810x434C RS_INST_7
1820x4350 RS_INST_8
1830x4354 RS_INST_9
1840x4358 RS_INST_10
1850x435C RS_INST_11
1860x4360 RS_INST_12
1870x4364 RS_INST_13
1880x4368 RS_INST_14
1890x436C RS_INST_15
1900x43A4 SC_HYPERZ_EN
1910x43A8 SC_EDGERULE
1920x43B0 SC_CLIP_0_A
1930x43B4 SC_CLIP_0_B
1940x43B8 SC_CLIP_1_A
1950x43BC SC_CLIP_1_B
1960x43C0 SC_CLIP_2_A
1970x43C4 SC_CLIP_2_B
1980x43C8 SC_CLIP_3_A
1990x43CC SC_CLIP_3_B
2000x43D0 SC_CLIP_RULE
2010x43E0 SC_SCISSOR0
2020x43E8 SC_SCREENDOOR
2030x4440 TX_FILTER1_0
2040x4444 TX_FILTER1_1
2050x4448 TX_FILTER1_2
2060x444C TX_FILTER1_3
2070x4450 TX_FILTER1_4
2080x4454 TX_FILTER1_5
2090x4458 TX_FILTER1_6
2100x445C TX_FILTER1_7
2110x4460 TX_FILTER1_8
2120x4464 TX_FILTER1_9
2130x4468 TX_FILTER1_10
2140x446C TX_FILTER1_11
2150x4470 TX_FILTER1_12
2160x4474 TX_FILTER1_13
2170x4478 TX_FILTER1_14
2180x447C TX_FILTER1_15
2190x4580 TX_CHROMA_KEY_0
2200x4584 TX_CHROMA_KEY_1
2210x4588 TX_CHROMA_KEY_2
2220x458C TX_CHROMA_KEY_3
2230x4590 TX_CHROMA_KEY_4
2240x4594 TX_CHROMA_KEY_5
2250x4598 TX_CHROMA_KEY_6
2260x459C TX_CHROMA_KEY_7
2270x45A0 TX_CHROMA_KEY_8
2280x45A4 TX_CHROMA_KEY_9
2290x45A8 TX_CHROMA_KEY_10
2300x45AC TX_CHROMA_KEY_11
2310x45B0 TX_CHROMA_KEY_12
2320x45B4 TX_CHROMA_KEY_13
2330x45B8 TX_CHROMA_KEY_14
2340x45BC TX_CHROMA_KEY_15
2350x45C0 TX_BORDER_COLOR_0
2360x45C4 TX_BORDER_COLOR_1
2370x45C8 TX_BORDER_COLOR_2
2380x45CC TX_BORDER_COLOR_3
2390x45D0 TX_BORDER_COLOR_4
2400x45D4 TX_BORDER_COLOR_5
2410x45D8 TX_BORDER_COLOR_6
2420x45DC TX_BORDER_COLOR_7
2430x45E0 TX_BORDER_COLOR_8
2440x45E4 TX_BORDER_COLOR_9
2450x45E8 TX_BORDER_COLOR_10
2460x45EC TX_BORDER_COLOR_11
2470x45F0 TX_BORDER_COLOR_12
2480x45F4 TX_BORDER_COLOR_13
2490x45F8 TX_BORDER_COLOR_14
2500x45FC TX_BORDER_COLOR_15
2510x4600 US_CONFIG
2520x4604 US_PIXSIZE
2530x4608 US_CODE_OFFSET
2540x460C US_RESET
2550x4610 US_CODE_ADDR_0
2560x4614 US_CODE_ADDR_1
2570x4618 US_CODE_ADDR_2
2580x461C US_CODE_ADDR_3
2590x4620 US_TEX_INST_0
2600x4624 US_TEX_INST_1
2610x4628 US_TEX_INST_2
2620x462C US_TEX_INST_3
2630x4630 US_TEX_INST_4
2640x4634 US_TEX_INST_5
2650x4638 US_TEX_INST_6
2660x463C US_TEX_INST_7
2670x4640 US_TEX_INST_8
2680x4644 US_TEX_INST_9
2690x4648 US_TEX_INST_10
2700x464C US_TEX_INST_11
2710x4650 US_TEX_INST_12
2720x4654 US_TEX_INST_13
2730x4658 US_TEX_INST_14
2740x465C US_TEX_INST_15
2750x4660 US_TEX_INST_16
2760x4664 US_TEX_INST_17
2770x4668 US_TEX_INST_18
2780x466C US_TEX_INST_19
2790x4670 US_TEX_INST_20
2800x4674 US_TEX_INST_21
2810x4678 US_TEX_INST_22
2820x467C US_TEX_INST_23
2830x4680 US_TEX_INST_24
2840x4684 US_TEX_INST_25
2850x4688 US_TEX_INST_26
2860x468C US_TEX_INST_27
2870x4690 US_TEX_INST_28
2880x4694 US_TEX_INST_29
2890x4698 US_TEX_INST_30
2900x469C US_TEX_INST_31
2910x46A4 US_OUT_FMT_0
2920x46A8 US_OUT_FMT_1
2930x46AC US_OUT_FMT_2
2940x46B0 US_OUT_FMT_3
2950x46B4 US_W_FMT
2960x46B8 US_CODE_BANK
2970x46BC US_CODE_EXT
2980x46C0 US_ALU_RGB_ADDR_0
2990x46C4 US_ALU_RGB_ADDR_1
3000x46C8 US_ALU_RGB_ADDR_2
3010x46CC US_ALU_RGB_ADDR_3
3020x46D0 US_ALU_RGB_ADDR_4
3030x46D4 US_ALU_RGB_ADDR_5
3040x46D8 US_ALU_RGB_ADDR_6
3050x46DC US_ALU_RGB_ADDR_7
3060x46E0 US_ALU_RGB_ADDR_8
3070x46E4 US_ALU_RGB_ADDR_9
3080x46E8 US_ALU_RGB_ADDR_10
3090x46EC US_ALU_RGB_ADDR_11
3100x46F0 US_ALU_RGB_ADDR_12
3110x46F4 US_ALU_RGB_ADDR_13
3120x46F8 US_ALU_RGB_ADDR_14
3130x46FC US_ALU_RGB_ADDR_15
3140x4700 US_ALU_RGB_ADDR_16
3150x4704 US_ALU_RGB_ADDR_17
3160x4708 US_ALU_RGB_ADDR_18
3170x470C US_ALU_RGB_ADDR_19
3180x4710 US_ALU_RGB_ADDR_20
3190x4714 US_ALU_RGB_ADDR_21
3200x4718 US_ALU_RGB_ADDR_22
3210x471C US_ALU_RGB_ADDR_23
3220x4720 US_ALU_RGB_ADDR_24
3230x4724 US_ALU_RGB_ADDR_25
3240x4728 US_ALU_RGB_ADDR_26
3250x472C US_ALU_RGB_ADDR_27
3260x4730 US_ALU_RGB_ADDR_28
3270x4734 US_ALU_RGB_ADDR_29
3280x4738 US_ALU_RGB_ADDR_30
3290x473C US_ALU_RGB_ADDR_31
3300x4740 US_ALU_RGB_ADDR_32
3310x4744 US_ALU_RGB_ADDR_33
3320x4748 US_ALU_RGB_ADDR_34
3330x474C US_ALU_RGB_ADDR_35
3340x4750 US_ALU_RGB_ADDR_36
3350x4754 US_ALU_RGB_ADDR_37
3360x4758 US_ALU_RGB_ADDR_38
3370x475C US_ALU_RGB_ADDR_39
3380x4760 US_ALU_RGB_ADDR_40
3390x4764 US_ALU_RGB_ADDR_41
3400x4768 US_ALU_RGB_ADDR_42
3410x476C US_ALU_RGB_ADDR_43
3420x4770 US_ALU_RGB_ADDR_44
3430x4774 US_ALU_RGB_ADDR_45
3440x4778 US_ALU_RGB_ADDR_46
3450x477C US_ALU_RGB_ADDR_47
3460x4780 US_ALU_RGB_ADDR_48
3470x4784 US_ALU_RGB_ADDR_49
3480x4788 US_ALU_RGB_ADDR_50
3490x478C US_ALU_RGB_ADDR_51
3500x4790 US_ALU_RGB_ADDR_52
3510x4794 US_ALU_RGB_ADDR_53
3520x4798 US_ALU_RGB_ADDR_54
3530x479C US_ALU_RGB_ADDR_55
3540x47A0 US_ALU_RGB_ADDR_56
3550x47A4 US_ALU_RGB_ADDR_57
3560x47A8 US_ALU_RGB_ADDR_58
3570x47AC US_ALU_RGB_ADDR_59
3580x47B0 US_ALU_RGB_ADDR_60
3590x47B4 US_ALU_RGB_ADDR_61
3600x47B8 US_ALU_RGB_ADDR_62
3610x47BC US_ALU_RGB_ADDR_63
3620x47C0 US_ALU_ALPHA_ADDR_0
3630x47C4 US_ALU_ALPHA_ADDR_1
3640x47C8 US_ALU_ALPHA_ADDR_2
3650x47CC US_ALU_ALPHA_ADDR_3
3660x47D0 US_ALU_ALPHA_ADDR_4
3670x47D4 US_ALU_ALPHA_ADDR_5
3680x47D8 US_ALU_ALPHA_ADDR_6
3690x47DC US_ALU_ALPHA_ADDR_7
3700x47E0 US_ALU_ALPHA_ADDR_8
3710x47E4 US_ALU_ALPHA_ADDR_9
3720x47E8 US_ALU_ALPHA_ADDR_10
3730x47EC US_ALU_ALPHA_ADDR_11
3740x47F0 US_ALU_ALPHA_ADDR_12
3750x47F4 US_ALU_ALPHA_ADDR_13
3760x47F8 US_ALU_ALPHA_ADDR_14
3770x47FC US_ALU_ALPHA_ADDR_15
3780x4800 US_ALU_ALPHA_ADDR_16
3790x4804 US_ALU_ALPHA_ADDR_17
3800x4808 US_ALU_ALPHA_ADDR_18
3810x480C US_ALU_ALPHA_ADDR_19
3820x4810 US_ALU_ALPHA_ADDR_20
3830x4814 US_ALU_ALPHA_ADDR_21
3840x4818 US_ALU_ALPHA_ADDR_22
3850x481C US_ALU_ALPHA_ADDR_23
3860x4820 US_ALU_ALPHA_ADDR_24
3870x4824 US_ALU_ALPHA_ADDR_25
3880x4828 US_ALU_ALPHA_ADDR_26
3890x482C US_ALU_ALPHA_ADDR_27
3900x4830 US_ALU_ALPHA_ADDR_28
3910x4834 US_ALU_ALPHA_ADDR_29
3920x4838 US_ALU_ALPHA_ADDR_30
3930x483C US_ALU_ALPHA_ADDR_31
3940x4840 US_ALU_ALPHA_ADDR_32
3950x4844 US_ALU_ALPHA_ADDR_33
3960x4848 US_ALU_ALPHA_ADDR_34
3970x484C US_ALU_ALPHA_ADDR_35
3980x4850 US_ALU_ALPHA_ADDR_36
3990x4854 US_ALU_ALPHA_ADDR_37
4000x4858 US_ALU_ALPHA_ADDR_38
4010x485C US_ALU_ALPHA_ADDR_39
4020x4860 US_ALU_ALPHA_ADDR_40
4030x4864 US_ALU_ALPHA_ADDR_41
4040x4868 US_ALU_ALPHA_ADDR_42
4050x486C US_ALU_ALPHA_ADDR_43
4060x4870 US_ALU_ALPHA_ADDR_44
4070x4874 US_ALU_ALPHA_ADDR_45
4080x4878 US_ALU_ALPHA_ADDR_46
4090x487C US_ALU_ALPHA_ADDR_47
4100x4880 US_ALU_ALPHA_ADDR_48
4110x4884 US_ALU_ALPHA_ADDR_49
4120x4888 US_ALU_ALPHA_ADDR_50
4130x488C US_ALU_ALPHA_ADDR_51
4140x4890 US_ALU_ALPHA_ADDR_52
4150x4894 US_ALU_ALPHA_ADDR_53
4160x4898 US_ALU_ALPHA_ADDR_54
4170x489C US_ALU_ALPHA_ADDR_55
4180x48A0 US_ALU_ALPHA_ADDR_56
4190x48A4 US_ALU_ALPHA_ADDR_57
4200x48A8 US_ALU_ALPHA_ADDR_58
4210x48AC US_ALU_ALPHA_ADDR_59
4220x48B0 US_ALU_ALPHA_ADDR_60
4230x48B4 US_ALU_ALPHA_ADDR_61
4240x48B8 US_ALU_ALPHA_ADDR_62
4250x48BC US_ALU_ALPHA_ADDR_63
4260x48C0 US_ALU_RGB_INST_0
4270x48C4 US_ALU_RGB_INST_1
4280x48C8 US_ALU_RGB_INST_2
4290x48CC US_ALU_RGB_INST_3
4300x48D0 US_ALU_RGB_INST_4
4310x48D4 US_ALU_RGB_INST_5
4320x48D8 US_ALU_RGB_INST_6
4330x48DC US_ALU_RGB_INST_7
4340x48E0 US_ALU_RGB_INST_8
4350x48E4 US_ALU_RGB_INST_9
4360x48E8 US_ALU_RGB_INST_10
4370x48EC US_ALU_RGB_INST_11
4380x48F0 US_ALU_RGB_INST_12
4390x48F4 US_ALU_RGB_INST_13
4400x48F8 US_ALU_RGB_INST_14
4410x48FC US_ALU_RGB_INST_15
4420x4900 US_ALU_RGB_INST_16
4430x4904 US_ALU_RGB_INST_17
4440x4908 US_ALU_RGB_INST_18
4450x490C US_ALU_RGB_INST_19
4460x4910 US_ALU_RGB_INST_20
4470x4914 US_ALU_RGB_INST_21
4480x4918 US_ALU_RGB_INST_22
4490x491C US_ALU_RGB_INST_23
4500x4920 US_ALU_RGB_INST_24
4510x4924 US_ALU_RGB_INST_25
4520x4928 US_ALU_RGB_INST_26
4530x492C US_ALU_RGB_INST_27
4540x4930 US_ALU_RGB_INST_28
4550x4934 US_ALU_RGB_INST_29
4560x4938 US_ALU_RGB_INST_30
4570x493C US_ALU_RGB_INST_31
4580x4940 US_ALU_RGB_INST_32
4590x4944 US_ALU_RGB_INST_33
4600x4948 US_ALU_RGB_INST_34
4610x494C US_ALU_RGB_INST_35
4620x4950 US_ALU_RGB_INST_36
4630x4954 US_ALU_RGB_INST_37
4640x4958 US_ALU_RGB_INST_38
4650x495C US_ALU_RGB_INST_39
4660x4960 US_ALU_RGB_INST_40
4670x4964 US_ALU_RGB_INST_41
4680x4968 US_ALU_RGB_INST_42
4690x496C US_ALU_RGB_INST_43
4700x4970 US_ALU_RGB_INST_44
4710x4974 US_ALU_RGB_INST_45
4720x4978 US_ALU_RGB_INST_46
4730x497C US_ALU_RGB_INST_47
4740x4980 US_ALU_RGB_INST_48
4750x4984 US_ALU_RGB_INST_49
4760x4988 US_ALU_RGB_INST_50
4770x498C US_ALU_RGB_INST_51
4780x4990 US_ALU_RGB_INST_52
4790x4994 US_ALU_RGB_INST_53
4800x4998 US_ALU_RGB_INST_54
4810x499C US_ALU_RGB_INST_55
4820x49A0 US_ALU_RGB_INST_56
4830x49A4 US_ALU_RGB_INST_57
4840x49A8 US_ALU_RGB_INST_58
4850x49AC US_ALU_RGB_INST_59
4860x49B0 US_ALU_RGB_INST_60
4870x49B4 US_ALU_RGB_INST_61
4880x49B8 US_ALU_RGB_INST_62
4890x49BC US_ALU_RGB_INST_63
4900x49C0 US_ALU_ALPHA_INST_0
4910x49C4 US_ALU_ALPHA_INST_1
4920x49C8 US_ALU_ALPHA_INST_2
4930x49CC US_ALU_ALPHA_INST_3
4940x49D0 US_ALU_ALPHA_INST_4
4950x49D4 US_ALU_ALPHA_INST_5
4960x49D8 US_ALU_ALPHA_INST_6
4970x49DC US_ALU_ALPHA_INST_7
4980x49E0 US_ALU_ALPHA_INST_8
4990x49E4 US_ALU_ALPHA_INST_9
5000x49E8 US_ALU_ALPHA_INST_10
5010x49EC US_ALU_ALPHA_INST_11
5020x49F0 US_ALU_ALPHA_INST_12
5030x49F4 US_ALU_ALPHA_INST_13
5040x49F8 US_ALU_ALPHA_INST_14
5050x49FC US_ALU_ALPHA_INST_15
5060x4A00 US_ALU_ALPHA_INST_16
5070x4A04 US_ALU_ALPHA_INST_17
5080x4A08 US_ALU_ALPHA_INST_18
5090x4A0C US_ALU_ALPHA_INST_19
5100x4A10 US_ALU_ALPHA_INST_20
5110x4A14 US_ALU_ALPHA_INST_21
5120x4A18 US_ALU_ALPHA_INST_22
5130x4A1C US_ALU_ALPHA_INST_23
5140x4A20 US_ALU_ALPHA_INST_24
5150x4A24 US_ALU_ALPHA_INST_25
5160x4A28 US_ALU_ALPHA_INST_26
5170x4A2C US_ALU_ALPHA_INST_27
5180x4A30 US_ALU_ALPHA_INST_28
5190x4A34 US_ALU_ALPHA_INST_29
5200x4A38 US_ALU_ALPHA_INST_30
5210x4A3C US_ALU_ALPHA_INST_31
5220x4A40 US_ALU_ALPHA_INST_32
5230x4A44 US_ALU_ALPHA_INST_33
5240x4A48 US_ALU_ALPHA_INST_34
5250x4A4C US_ALU_ALPHA_INST_35
5260x4A50 US_ALU_ALPHA_INST_36
5270x4A54 US_ALU_ALPHA_INST_37
5280x4A58 US_ALU_ALPHA_INST_38
5290x4A5C US_ALU_ALPHA_INST_39
5300x4A60 US_ALU_ALPHA_INST_40
5310x4A64 US_ALU_ALPHA_INST_41
5320x4A68 US_ALU_ALPHA_INST_42
5330x4A6C US_ALU_ALPHA_INST_43
5340x4A70 US_ALU_ALPHA_INST_44
5350x4A74 US_ALU_ALPHA_INST_45
5360x4A78 US_ALU_ALPHA_INST_46
5370x4A7C US_ALU_ALPHA_INST_47
5380x4A80 US_ALU_ALPHA_INST_48
5390x4A84 US_ALU_ALPHA_INST_49
5400x4A88 US_ALU_ALPHA_INST_50
5410x4A8C US_ALU_ALPHA_INST_51
5420x4A90 US_ALU_ALPHA_INST_52
5430x4A94 US_ALU_ALPHA_INST_53
5440x4A98 US_ALU_ALPHA_INST_54
5450x4A9C US_ALU_ALPHA_INST_55
5460x4AA0 US_ALU_ALPHA_INST_56
5470x4AA4 US_ALU_ALPHA_INST_57
5480x4AA8 US_ALU_ALPHA_INST_58
5490x4AAC US_ALU_ALPHA_INST_59
5500x4AB0 US_ALU_ALPHA_INST_60
5510x4AB4 US_ALU_ALPHA_INST_61
5520x4AB8 US_ALU_ALPHA_INST_62
5530x4ABC US_ALU_ALPHA_INST_63
5540x4AC0 US_ALU_EXT_ADDR_0
5550x4AC4 US_ALU_EXT_ADDR_1
5560x4AC8 US_ALU_EXT_ADDR_2
5570x4ACC US_ALU_EXT_ADDR_3
5580x4AD0 US_ALU_EXT_ADDR_4
5590x4AD4 US_ALU_EXT_ADDR_5
5600x4AD8 US_ALU_EXT_ADDR_6
5610x4ADC US_ALU_EXT_ADDR_7
5620x4AE0 US_ALU_EXT_ADDR_8
5630x4AE4 US_ALU_EXT_ADDR_9
5640x4AE8 US_ALU_EXT_ADDR_10
5650x4AEC US_ALU_EXT_ADDR_11
5660x4AF0 US_ALU_EXT_ADDR_12
5670x4AF4 US_ALU_EXT_ADDR_13
5680x4AF8 US_ALU_EXT_ADDR_14
5690x4AFC US_ALU_EXT_ADDR_15
5700x4B00 US_ALU_EXT_ADDR_16
5710x4B04 US_ALU_EXT_ADDR_17
5720x4B08 US_ALU_EXT_ADDR_18
5730x4B0C US_ALU_EXT_ADDR_19
5740x4B10 US_ALU_EXT_ADDR_20
5750x4B14 US_ALU_EXT_ADDR_21
5760x4B18 US_ALU_EXT_ADDR_22
5770x4B1C US_ALU_EXT_ADDR_23
5780x4B20 US_ALU_EXT_ADDR_24
5790x4B24 US_ALU_EXT_ADDR_25
5800x4B28 US_ALU_EXT_ADDR_26
5810x4B2C US_ALU_EXT_ADDR_27
5820x4B30 US_ALU_EXT_ADDR_28
5830x4B34 US_ALU_EXT_ADDR_29
5840x4B38 US_ALU_EXT_ADDR_30
5850x4B3C US_ALU_EXT_ADDR_31
5860x4B40 US_ALU_EXT_ADDR_32
5870x4B44 US_ALU_EXT_ADDR_33
5880x4B48 US_ALU_EXT_ADDR_34
5890x4B4C US_ALU_EXT_ADDR_35
5900x4B50 US_ALU_EXT_ADDR_36
5910x4B54 US_ALU_EXT_ADDR_37
5920x4B58 US_ALU_EXT_ADDR_38
5930x4B5C US_ALU_EXT_ADDR_39
5940x4B60 US_ALU_EXT_ADDR_40
5950x4B64 US_ALU_EXT_ADDR_41
5960x4B68 US_ALU_EXT_ADDR_42
5970x4B6C US_ALU_EXT_ADDR_43
5980x4B70 US_ALU_EXT_ADDR_44
5990x4B74 US_ALU_EXT_ADDR_45
6000x4B78 US_ALU_EXT_ADDR_46
6010x4B7C US_ALU_EXT_ADDR_47
6020x4B80 US_ALU_EXT_ADDR_48
6030x4B84 US_ALU_EXT_ADDR_49
6040x4B88 US_ALU_EXT_ADDR_50
6050x4B8C US_ALU_EXT_ADDR_51
6060x4B90 US_ALU_EXT_ADDR_52
6070x4B94 US_ALU_EXT_ADDR_53
6080x4B98 US_ALU_EXT_ADDR_54
6090x4B9C US_ALU_EXT_ADDR_55
6100x4BA0 US_ALU_EXT_ADDR_56
6110x4BA4 US_ALU_EXT_ADDR_57
6120x4BA8 US_ALU_EXT_ADDR_58
6130x4BAC US_ALU_EXT_ADDR_59
6140x4BB0 US_ALU_EXT_ADDR_60
6150x4BB4 US_ALU_EXT_ADDR_61
6160x4BB8 US_ALU_EXT_ADDR_62
6170x4BBC US_ALU_EXT_ADDR_63
6180x4BC0 FG_FOG_BLEND
6190x4BC4 FG_FOG_FACTOR
6200x4BC8 FG_FOG_COLOR_R
6210x4BCC FG_FOG_COLOR_G
6220x4BD0 FG_FOG_COLOR_B
6230x4BD4 FG_ALPHA_FUNC
6240x4BD8 FG_DEPTH_SRC
6250x4C00 US_ALU_CONST_R_0
6260x4C04 US_ALU_CONST_G_0
6270x4C08 US_ALU_CONST_B_0
6280x4C0C US_ALU_CONST_A_0
6290x4C10 US_ALU_CONST_R_1
6300x4C14 US_ALU_CONST_G_1
6310x4C18 US_ALU_CONST_B_1
6320x4C1C US_ALU_CONST_A_1
6330x4C20 US_ALU_CONST_R_2
6340x4C24 US_ALU_CONST_G_2
6350x4C28 US_ALU_CONST_B_2
6360x4C2C US_ALU_CONST_A_2
6370x4C30 US_ALU_CONST_R_3
6380x4C34 US_ALU_CONST_G_3
6390x4C38 US_ALU_CONST_B_3
6400x4C3C US_ALU_CONST_A_3
6410x4C40 US_ALU_CONST_R_4
6420x4C44 US_ALU_CONST_G_4
6430x4C48 US_ALU_CONST_B_4
6440x4C4C US_ALU_CONST_A_4
6450x4C50 US_ALU_CONST_R_5
6460x4C54 US_ALU_CONST_G_5
6470x4C58 US_ALU_CONST_B_5
6480x4C5C US_ALU_CONST_A_5
6490x4C60 US_ALU_CONST_R_6
6500x4C64 US_ALU_CONST_G_6
6510x4C68 US_ALU_CONST_B_6
6520x4C6C US_ALU_CONST_A_6
6530x4C70 US_ALU_CONST_R_7
6540x4C74 US_ALU_CONST_G_7
6550x4C78 US_ALU_CONST_B_7
6560x4C7C US_ALU_CONST_A_7
6570x4C80 US_ALU_CONST_R_8
6580x4C84 US_ALU_CONST_G_8
6590x4C88 US_ALU_CONST_B_8
6600x4C8C US_ALU_CONST_A_8
6610x4C90 US_ALU_CONST_R_9
6620x4C94 US_ALU_CONST_G_9
6630x4C98 US_ALU_CONST_B_9
6640x4C9C US_ALU_CONST_A_9
6650x4CA0 US_ALU_CONST_R_10
6660x4CA4 US_ALU_CONST_G_10
6670x4CA8 US_ALU_CONST_B_10
6680x4CAC US_ALU_CONST_A_10
6690x4CB0 US_ALU_CONST_R_11
6700x4CB4 US_ALU_CONST_G_11
6710x4CB8 US_ALU_CONST_B_11
6720x4CBC US_ALU_CONST_A_11
6730x4CC0 US_ALU_CONST_R_12
6740x4CC4 US_ALU_CONST_G_12
6750x4CC8 US_ALU_CONST_B_12
6760x4CCC US_ALU_CONST_A_12
6770x4CD0 US_ALU_CONST_R_13
6780x4CD4 US_ALU_CONST_G_13
6790x4CD8 US_ALU_CONST_B_13
6800x4CDC US_ALU_CONST_A_13
6810x4CE0 US_ALU_CONST_R_14
6820x4CE4 US_ALU_CONST_G_14
6830x4CE8 US_ALU_CONST_B_14
6840x4CEC US_ALU_CONST_A_14
6850x4CF0 US_ALU_CONST_R_15
6860x4CF4 US_ALU_CONST_G_15
6870x4CF8 US_ALU_CONST_B_15
6880x4CFC US_ALU_CONST_A_15
6890x4D00 US_ALU_CONST_R_16
6900x4D04 US_ALU_CONST_G_16
6910x4D08 US_ALU_CONST_B_16
6920x4D0C US_ALU_CONST_A_16
6930x4D10 US_ALU_CONST_R_17
6940x4D14 US_ALU_CONST_G_17
6950x4D18 US_ALU_CONST_B_17
6960x4D1C US_ALU_CONST_A_17
6970x4D20 US_ALU_CONST_R_18
6980x4D24 US_ALU_CONST_G_18
6990x4D28 US_ALU_CONST_B_18
7000x4D2C US_ALU_CONST_A_18
7010x4D30 US_ALU_CONST_R_19
7020x4D34 US_ALU_CONST_G_19
7030x4D38 US_ALU_CONST_B_19
7040x4D3C US_ALU_CONST_A_19
7050x4D40 US_ALU_CONST_R_20
7060x4D44 US_ALU_CONST_G_20
7070x4D48 US_ALU_CONST_B_20
7080x4D4C US_ALU_CONST_A_20
7090x4D50 US_ALU_CONST_R_21
7100x4D54 US_ALU_CONST_G_21
7110x4D58 US_ALU_CONST_B_21
7120x4D5C US_ALU_CONST_A_21
7130x4D60 US_ALU_CONST_R_22
7140x4D64 US_ALU_CONST_G_22
7150x4D68 US_ALU_CONST_B_22
7160x4D6C US_ALU_CONST_A_22
7170x4D70 US_ALU_CONST_R_23
7180x4D74 US_ALU_CONST_G_23
7190x4D78 US_ALU_CONST_B_23
7200x4D7C US_ALU_CONST_A_23
7210x4D80 US_ALU_CONST_R_24
7220x4D84 US_ALU_CONST_G_24
7230x4D88 US_ALU_CONST_B_24
7240x4D8C US_ALU_CONST_A_24
7250x4D90 US_ALU_CONST_R_25
7260x4D94 US_ALU_CONST_G_25
7270x4D98 US_ALU_CONST_B_25
7280x4D9C US_ALU_CONST_A_25
7290x4DA0 US_ALU_CONST_R_26
7300x4DA4 US_ALU_CONST_G_26
7310x4DA8 US_ALU_CONST_B_26
7320x4DAC US_ALU_CONST_A_26
7330x4DB0 US_ALU_CONST_R_27
7340x4DB4 US_ALU_CONST_G_27
7350x4DB8 US_ALU_CONST_B_27
7360x4DBC US_ALU_CONST_A_27
7370x4DC0 US_ALU_CONST_R_28
7380x4DC4 US_ALU_CONST_G_28
7390x4DC8 US_ALU_CONST_B_28
7400x4DCC US_ALU_CONST_A_28
7410x4DD0 US_ALU_CONST_R_29
7420x4DD4 US_ALU_CONST_G_29
7430x4DD8 US_ALU_CONST_B_29
7440x4DDC US_ALU_CONST_A_29
7450x4DE0 US_ALU_CONST_R_30
7460x4DE4 US_ALU_CONST_G_30
7470x4DE8 US_ALU_CONST_B_30
7480x4DEC US_ALU_CONST_A_30
7490x4DF0 US_ALU_CONST_R_31
7500x4DF4 US_ALU_CONST_G_31
7510x4DF8 US_ALU_CONST_B_31
7520x4DFC US_ALU_CONST_A_31
7530x4E04 RB3D_BLENDCNTL_R3
7540x4E08 RB3D_ABLENDCNTL_R3
7550x4E0C RB3D_COLOR_CHANNEL_MASK
7560x4E10 RB3D_CONSTANT_COLOR
7570x4E14 RB3D_COLOR_CLEAR_VALUE
7580x4E18 RB3D_ROPCNTL_R3
7590x4E1C RB3D_CLRCMP_FLIPE_R3
7600x4E20 RB3D_CLRCMP_CLR_R3
7610x4E24 RB3D_CLRCMP_MSK_R3
7620x4E48 RB3D_DEBUG_CTL
7630x4E4C RB3D_DSTCACHE_CTLSTAT_R3
7640x4E50 RB3D_DITHER_CTL
7650x4E54 RB3D_CMASK_OFFSET0
7660x4E58 RB3D_CMASK_OFFSET1
7670x4E5C RB3D_CMASK_OFFSET2
7680x4E60 RB3D_CMASK_OFFSET3
7690x4E64 RB3D_CMASK_PITCH0
7700x4E68 RB3D_CMASK_PITCH1
7710x4E6C RB3D_CMASK_PITCH2
7720x4E70 RB3D_CMASK_PITCH3
7730x4E74 RB3D_CMASK_WRINDEX
7740x4E78 RB3D_CMASK_DWORD
7750x4E7C RB3D_CMASK_RDINDEX
7760x4E80 RB3D_AARESOLVE_OFFSET
7770x4E84 RB3D_AARESOLVE_PITCH
7780x4E88 RB3D_AARESOLVE_CTL
7790x4EA0 RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD
7800x4EA4 RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD
7810x4F04 ZB_ZSTENCILCNTL
7820x4F08 ZB_STENCILREFMASK
7830x4F14 ZB_ZTOP
7840x4F18 ZB_ZCACHE_CTLSTAT
7850x4F1C ZB_BW_CNTL
7860x4F28 ZB_DEPTHCLEARVALUE
7870x4F30 ZB_ZMASK_OFFSET
7880x4F34 ZB_ZMASK_PITCH
7890x4F38 ZB_ZMASK_WRINDEX
7900x4F3C ZB_ZMASK_DWORD
7910x4F40 ZB_ZMASK_RDINDEX
7920x4F44 ZB_HIZ_OFFSET
7930x4F48 ZB_HIZ_WRINDEX
7940x4F4C ZB_HIZ_DWORD
7950x4F50 ZB_HIZ_RDINDEX
7960x4F54 ZB_HIZ_PITCH
7970x4F58 ZB_ZPASS_DATA
diff --git a/drivers/gpu/drm/radeon/reg_srcs/r600 b/drivers/gpu/drm/radeon/reg_srcs/r600
new file mode 100644
index 000000000000..af0da4ae3f55
--- /dev/null
+++ b/drivers/gpu/drm/radeon/reg_srcs/r600
@@ -0,0 +1,762 @@
1r600 0x9400
20x000287A0 R7xx_CB_SHADER_CONTROL
30x00028230 R7xx_PA_SC_EDGERULE
40x000286C8 R7xx_SPI_THREAD_GROUPING
50x00008D8C R7xx_SQ_DYN_GPR_CNTL_PS_FLUSH_REQ
60x000088C4 VGT_CACHE_INVALIDATION
70x00028A50 VGT_ENHANCE
80x000088CC VGT_ES_PER_GS
90x00028A2C VGT_GROUP_DECR
100x00028A28 VGT_GROUP_FIRST_DECR
110x00028A24 VGT_GROUP_PRIM_TYPE
120x00028A30 VGT_GROUP_VECT_0_CNTL
130x00028A38 VGT_GROUP_VECT_0_FMT_CNTL
140x00028A34 VGT_GROUP_VECT_1_CNTL
150x00028A3C VGT_GROUP_VECT_1_FMT_CNTL
160x00028A40 VGT_GS_MODE
170x00028A6C VGT_GS_OUT_PRIM_TYPE
180x000088C8 VGT_GS_PER_ES
190x000088E8 VGT_GS_PER_VS
200x000088D4 VGT_GS_VERTEX_REUSE
210x00028A14 VGT_HOS_CNTL
220x00028A18 VGT_HOS_MAX_TESS_LEVEL
230x00028A1C VGT_HOS_MIN_TESS_LEVEL
240x00028A20 VGT_HOS_REUSE_DEPTH
250x0000895C VGT_INDEX_TYPE
260x00028408 VGT_INDX_OFFSET
270x00028AA0 VGT_INSTANCE_STEP_RATE_0
280x00028AA4 VGT_INSTANCE_STEP_RATE_1
290x00028400 VGT_MAX_VTX_INDX
300x00028404 VGT_MIN_VTX_INDX
310x00028A94 VGT_MULTI_PRIM_IB_RESET_EN
320x0002840C VGT_MULTI_PRIM_IB_RESET_INDX
330x00008970 VGT_NUM_INDICES
340x00008974 VGT_NUM_INSTANCES
350x00028A10 VGT_OUTPUT_PATH_CNTL
360x00028A84 VGT_PRIMITIVEID_EN
370x00008958 VGT_PRIMITIVE_TYPE
380x00028AB4 VGT_REUSE_OFF
390x00028AB8 VGT_VTX_CNT_EN
400x000088B0 VGT_VTX_VECT_EJECT_REG
410x00028810 PA_CL_CLIP_CNTL
420x00008A14 PA_CL_ENHANCE
430x00028C14 PA_CL_GB_HORZ_CLIP_ADJ
440x00028C18 PA_CL_GB_HORZ_DISC_ADJ
450x00028C0C PA_CL_GB_VERT_CLIP_ADJ
460x00028C10 PA_CL_GB_VERT_DISC_ADJ
470x00028820 PA_CL_NANINF_CNTL
480x00028E1C PA_CL_POINT_CULL_RAD
490x00028E18 PA_CL_POINT_SIZE
500x00028E10 PA_CL_POINT_X_RAD
510x00028E14 PA_CL_POINT_Y_RAD
520x00028E2C PA_CL_UCP_0_W
530x00028E3C PA_CL_UCP_1_W
540x00028E4C PA_CL_UCP_2_W
550x00028E5C PA_CL_UCP_3_W
560x00028E6C PA_CL_UCP_4_W
570x00028E7C PA_CL_UCP_5_W
580x00028E20 PA_CL_UCP_0_X
590x00028E30 PA_CL_UCP_1_X
600x00028E40 PA_CL_UCP_2_X
610x00028E50 PA_CL_UCP_3_X
620x00028E60 PA_CL_UCP_4_X
630x00028E70 PA_CL_UCP_5_X
640x00028E24 PA_CL_UCP_0_Y
650x00028E34 PA_CL_UCP_1_Y
660x00028E44 PA_CL_UCP_2_Y
670x00028E54 PA_CL_UCP_3_Y
680x00028E64 PA_CL_UCP_4_Y
690x00028E74 PA_CL_UCP_5_Y
700x00028E28 PA_CL_UCP_0_Z
710x00028E38 PA_CL_UCP_1_Z
720x00028E48 PA_CL_UCP_2_Z
730x00028E58 PA_CL_UCP_3_Z
740x00028E68 PA_CL_UCP_4_Z
750x00028E78 PA_CL_UCP_5_Z
760x00028440 PA_CL_VPORT_XOFFSET_0
770x00028458 PA_CL_VPORT_XOFFSET_1
780x00028470 PA_CL_VPORT_XOFFSET_2
790x00028488 PA_CL_VPORT_XOFFSET_3
800x000284A0 PA_CL_VPORT_XOFFSET_4
810x000284B8 PA_CL_VPORT_XOFFSET_5
820x000284D0 PA_CL_VPORT_XOFFSET_6
830x000284E8 PA_CL_VPORT_XOFFSET_7
840x00028500 PA_CL_VPORT_XOFFSET_8
850x00028518 PA_CL_VPORT_XOFFSET_9
860x00028530 PA_CL_VPORT_XOFFSET_10
870x00028548 PA_CL_VPORT_XOFFSET_11
880x00028560 PA_CL_VPORT_XOFFSET_12
890x00028578 PA_CL_VPORT_XOFFSET_13
900x00028590 PA_CL_VPORT_XOFFSET_14
910x000285A8 PA_CL_VPORT_XOFFSET_15
920x0002843C PA_CL_VPORT_XSCALE_0
930x00028454 PA_CL_VPORT_XSCALE_1
940x0002846C PA_CL_VPORT_XSCALE_2
950x00028484 PA_CL_VPORT_XSCALE_3
960x0002849C PA_CL_VPORT_XSCALE_4
970x000284B4 PA_CL_VPORT_XSCALE_5
980x000284CC PA_CL_VPORT_XSCALE_6
990x000284E4 PA_CL_VPORT_XSCALE_7
1000x000284FC PA_CL_VPORT_XSCALE_8
1010x00028514 PA_CL_VPORT_XSCALE_9
1020x0002852C PA_CL_VPORT_XSCALE_10
1030x00028544 PA_CL_VPORT_XSCALE_11
1040x0002855C PA_CL_VPORT_XSCALE_12
1050x00028574 PA_CL_VPORT_XSCALE_13
1060x0002858C PA_CL_VPORT_XSCALE_14
1070x000285A4 PA_CL_VPORT_XSCALE_15
1080x00028448 PA_CL_VPORT_YOFFSET_0
1090x00028460 PA_CL_VPORT_YOFFSET_1
1100x00028478 PA_CL_VPORT_YOFFSET_2
1110x00028490 PA_CL_VPORT_YOFFSET_3
1120x000284A8 PA_CL_VPORT_YOFFSET_4
1130x000284C0 PA_CL_VPORT_YOFFSET_5
1140x000284D8 PA_CL_VPORT_YOFFSET_6
1150x000284F0 PA_CL_VPORT_YOFFSET_7
1160x00028508 PA_CL_VPORT_YOFFSET_8
1170x00028520 PA_CL_VPORT_YOFFSET_9
1180x00028538 PA_CL_VPORT_YOFFSET_10
1190x00028550 PA_CL_VPORT_YOFFSET_11
1200x00028568 PA_CL_VPORT_YOFFSET_12
1210x00028580 PA_CL_VPORT_YOFFSET_13
1220x00028598 PA_CL_VPORT_YOFFSET_14
1230x000285B0 PA_CL_VPORT_YOFFSET_15
1240x00028444 PA_CL_VPORT_YSCALE_0
1250x0002845C PA_CL_VPORT_YSCALE_1
1260x00028474 PA_CL_VPORT_YSCALE_2
1270x0002848C PA_CL_VPORT_YSCALE_3
1280x000284A4 PA_CL_VPORT_YSCALE_4
1290x000284BC PA_CL_VPORT_YSCALE_5
1300x000284D4 PA_CL_VPORT_YSCALE_6
1310x000284EC PA_CL_VPORT_YSCALE_7
1320x00028504 PA_CL_VPORT_YSCALE_8
1330x0002851C PA_CL_VPORT_YSCALE_9
1340x00028534 PA_CL_VPORT_YSCALE_10
1350x0002854C PA_CL_VPORT_YSCALE_11
1360x00028564 PA_CL_VPORT_YSCALE_12
1370x0002857C PA_CL_VPORT_YSCALE_13
1380x00028594 PA_CL_VPORT_YSCALE_14
1390x000285AC PA_CL_VPORT_YSCALE_15
1400x00028450 PA_CL_VPORT_ZOFFSET_0
1410x00028468 PA_CL_VPORT_ZOFFSET_1
1420x00028480 PA_CL_VPORT_ZOFFSET_2
1430x00028498 PA_CL_VPORT_ZOFFSET_3
1440x000284B0 PA_CL_VPORT_ZOFFSET_4
1450x000284C8 PA_CL_VPORT_ZOFFSET_5
1460x000284E0 PA_CL_VPORT_ZOFFSET_6
1470x000284F8 PA_CL_VPORT_ZOFFSET_7
1480x00028510 PA_CL_VPORT_ZOFFSET_8
1490x00028528 PA_CL_VPORT_ZOFFSET_9
1500x00028540 PA_CL_VPORT_ZOFFSET_10
1510x00028558 PA_CL_VPORT_ZOFFSET_11
1520x00028570 PA_CL_VPORT_ZOFFSET_12
1530x00028588 PA_CL_VPORT_ZOFFSET_13
1540x000285A0 PA_CL_VPORT_ZOFFSET_14
1550x000285B8 PA_CL_VPORT_ZOFFSET_15
1560x0002844C PA_CL_VPORT_ZSCALE_0
1570x00028464 PA_CL_VPORT_ZSCALE_1
1580x0002847C PA_CL_VPORT_ZSCALE_2
1590x00028494 PA_CL_VPORT_ZSCALE_3
1600x000284AC PA_CL_VPORT_ZSCALE_4
1610x000284C4 PA_CL_VPORT_ZSCALE_5
1620x000284DC PA_CL_VPORT_ZSCALE_6
1630x000284F4 PA_CL_VPORT_ZSCALE_7
1640x0002850C PA_CL_VPORT_ZSCALE_8
1650x00028524 PA_CL_VPORT_ZSCALE_9
1660x0002853C PA_CL_VPORT_ZSCALE_10
1670x00028554 PA_CL_VPORT_ZSCALE_11
1680x0002856C PA_CL_VPORT_ZSCALE_12
1690x00028584 PA_CL_VPORT_ZSCALE_13
1700x0002859C PA_CL_VPORT_ZSCALE_14
1710x000285B4 PA_CL_VPORT_ZSCALE_15
1720x0002881C PA_CL_VS_OUT_CNTL
1730x00028818 PA_CL_VTE_CNTL
1740x00028C48 PA_SC_AA_MASK
1750x00008B40 PA_SC_AA_SAMPLE_LOCS_2S
1760x00008B44 PA_SC_AA_SAMPLE_LOCS_4S
1770x00008B48 PA_SC_AA_SAMPLE_LOCS_8S_WD0
1780x00008B4C PA_SC_AA_SAMPLE_LOCS_8S_WD1
1790x00028C20 PA_SC_AA_SAMPLE_LOCS_8S_WD1_MCTX
1800x00028C1C PA_SC_AA_SAMPLE_LOCS_MCTX
1810x00028214 PA_SC_CLIPRECT_0_BR
1820x0002821C PA_SC_CLIPRECT_1_BR
1830x00028224 PA_SC_CLIPRECT_2_BR
1840x0002822C PA_SC_CLIPRECT_3_BR
1850x00028210 PA_SC_CLIPRECT_0_TL
1860x00028218 PA_SC_CLIPRECT_1_TL
1870x00028220 PA_SC_CLIPRECT_2_TL
1880x00028228 PA_SC_CLIPRECT_3_TL
1890x0002820C PA_SC_CLIPRECT_RULE
1900x00008BF0 PA_SC_ENHANCE
1910x00028244 PA_SC_GENERIC_SCISSOR_BR
1920x00028240 PA_SC_GENERIC_SCISSOR_TL
1930x00028C00 PA_SC_LINE_CNTL
1940x00028A0C PA_SC_LINE_STIPPLE
1950x00008B10 PA_SC_LINE_STIPPLE_STATE
1960x00028A4C PA_SC_MODE_CNTL
1970x00028A48 PA_SC_MPASS_PS_CNTL
1980x00008B20 PA_SC_MULTI_CHIP_CNTL
1990x00028034 PA_SC_SCREEN_SCISSOR_BR
2000x00028030 PA_SC_SCREEN_SCISSOR_TL
2010x00028254 PA_SC_VPORT_SCISSOR_0_BR
2020x0002825C PA_SC_VPORT_SCISSOR_1_BR
2030x00028264 PA_SC_VPORT_SCISSOR_2_BR
2040x0002826C PA_SC_VPORT_SCISSOR_3_BR
2050x00028274 PA_SC_VPORT_SCISSOR_4_BR
2060x0002827C PA_SC_VPORT_SCISSOR_5_BR
2070x00028284 PA_SC_VPORT_SCISSOR_6_BR
2080x0002828C PA_SC_VPORT_SCISSOR_7_BR
2090x00028294 PA_SC_VPORT_SCISSOR_8_BR
2100x0002829C PA_SC_VPORT_SCISSOR_9_BR
2110x000282A4 PA_SC_VPORT_SCISSOR_10_BR
2120x000282AC PA_SC_VPORT_SCISSOR_11_BR
2130x000282B4 PA_SC_VPORT_SCISSOR_12_BR
2140x000282BC PA_SC_VPORT_SCISSOR_13_BR
2150x000282C4 PA_SC_VPORT_SCISSOR_14_BR
2160x000282CC PA_SC_VPORT_SCISSOR_15_BR
2170x00028250 PA_SC_VPORT_SCISSOR_0_TL
2180x00028258 PA_SC_VPORT_SCISSOR_1_TL
2190x00028260 PA_SC_VPORT_SCISSOR_2_TL
2200x00028268 PA_SC_VPORT_SCISSOR_3_TL
2210x00028270 PA_SC_VPORT_SCISSOR_4_TL
2220x00028278 PA_SC_VPORT_SCISSOR_5_TL
2230x00028280 PA_SC_VPORT_SCISSOR_6_TL
2240x00028288 PA_SC_VPORT_SCISSOR_7_TL
2250x00028290 PA_SC_VPORT_SCISSOR_8_TL
2260x00028298 PA_SC_VPORT_SCISSOR_9_TL
2270x000282A0 PA_SC_VPORT_SCISSOR_10_TL
2280x000282A8 PA_SC_VPORT_SCISSOR_11_TL
2290x000282B0 PA_SC_VPORT_SCISSOR_12_TL
2300x000282B8 PA_SC_VPORT_SCISSOR_13_TL
2310x000282C0 PA_SC_VPORT_SCISSOR_14_TL
2320x000282C8 PA_SC_VPORT_SCISSOR_15_TL
2330x000282D4 PA_SC_VPORT_ZMAX_0
2340x000282DC PA_SC_VPORT_ZMAX_1
2350x000282E4 PA_SC_VPORT_ZMAX_2
2360x000282EC PA_SC_VPORT_ZMAX_3
2370x000282F4 PA_SC_VPORT_ZMAX_4
2380x000282FC PA_SC_VPORT_ZMAX_5
2390x00028304 PA_SC_VPORT_ZMAX_6
2400x0002830C PA_SC_VPORT_ZMAX_7
2410x00028314 PA_SC_VPORT_ZMAX_8
2420x0002831C PA_SC_VPORT_ZMAX_9
2430x00028324 PA_SC_VPORT_ZMAX_10
2440x0002832C PA_SC_VPORT_ZMAX_11
2450x00028334 PA_SC_VPORT_ZMAX_12
2460x0002833C PA_SC_VPORT_ZMAX_13
2470x00028344 PA_SC_VPORT_ZMAX_14
2480x0002834C PA_SC_VPORT_ZMAX_15
2490x000282D0 PA_SC_VPORT_ZMIN_0
2500x000282D8 PA_SC_VPORT_ZMIN_1
2510x000282E0 PA_SC_VPORT_ZMIN_2
2520x000282E8 PA_SC_VPORT_ZMIN_3
2530x000282F0 PA_SC_VPORT_ZMIN_4
2540x000282F8 PA_SC_VPORT_ZMIN_5
2550x00028300 PA_SC_VPORT_ZMIN_6
2560x00028308 PA_SC_VPORT_ZMIN_7
2570x00028310 PA_SC_VPORT_ZMIN_8
2580x00028318 PA_SC_VPORT_ZMIN_9
2590x00028320 PA_SC_VPORT_ZMIN_10
2600x00028328 PA_SC_VPORT_ZMIN_11
2610x00028330 PA_SC_VPORT_ZMIN_12
2620x00028338 PA_SC_VPORT_ZMIN_13
2630x00028340 PA_SC_VPORT_ZMIN_14
2640x00028348 PA_SC_VPORT_ZMIN_15
2650x00028200 PA_SC_WINDOW_OFFSET
2660x00028208 PA_SC_WINDOW_SCISSOR_BR
2670x00028204 PA_SC_WINDOW_SCISSOR_TL
2680x00028A08 PA_SU_LINE_CNTL
2690x00028A04 PA_SU_POINT_MINMAX
2700x00028A00 PA_SU_POINT_SIZE
2710x00028E0C PA_SU_POLY_OFFSET_BACK_OFFSET
2720x00028E08 PA_SU_POLY_OFFSET_BACK_SCALE
2730x00028DFC PA_SU_POLY_OFFSET_CLAMP
2740x00028DF8 PA_SU_POLY_OFFSET_DB_FMT_CNTL
2750x00028E04 PA_SU_POLY_OFFSET_FRONT_OFFSET
2760x00028E00 PA_SU_POLY_OFFSET_FRONT_SCALE
2770x00028814 PA_SU_SC_MODE_CNTL
2780x00028C08 PA_SU_VTX_CNTL
2790x00008C04 SQ_GPR_RESOURCE_MGMT_1
2800x00008C08 SQ_GPR_RESOURCE_MGMT_2
2810x00008C10 SQ_STACK_RESOURCE_MGMT_1
2820x00008C14 SQ_STACK_RESOURCE_MGMT_2
2830x00008C0C SQ_THREAD_RESOURCE_MGMT
2840x00028380 SQ_VTX_SEMANTIC_0
2850x00028384 SQ_VTX_SEMANTIC_1
2860x00028388 SQ_VTX_SEMANTIC_2
2870x0002838C SQ_VTX_SEMANTIC_3
2880x00028390 SQ_VTX_SEMANTIC_4
2890x00028394 SQ_VTX_SEMANTIC_5
2900x00028398 SQ_VTX_SEMANTIC_6
2910x0002839C SQ_VTX_SEMANTIC_7
2920x000283A0 SQ_VTX_SEMANTIC_8
2930x000283A4 SQ_VTX_SEMANTIC_9
2940x000283A8 SQ_VTX_SEMANTIC_10
2950x000283AC SQ_VTX_SEMANTIC_11
2960x000283B0 SQ_VTX_SEMANTIC_12
2970x000283B4 SQ_VTX_SEMANTIC_13
2980x000283B8 SQ_VTX_SEMANTIC_14
2990x000283BC SQ_VTX_SEMANTIC_15
3000x000283C0 SQ_VTX_SEMANTIC_16
3010x000283C4 SQ_VTX_SEMANTIC_17
3020x000283C8 SQ_VTX_SEMANTIC_18
3030x000283CC SQ_VTX_SEMANTIC_19
3040x000283D0 SQ_VTX_SEMANTIC_20
3050x000283D4 SQ_VTX_SEMANTIC_21
3060x000283D8 SQ_VTX_SEMANTIC_22
3070x000283DC SQ_VTX_SEMANTIC_23
3080x000283E0 SQ_VTX_SEMANTIC_24
3090x000283E4 SQ_VTX_SEMANTIC_25
3100x000283E8 SQ_VTX_SEMANTIC_26
3110x000283EC SQ_VTX_SEMANTIC_27
3120x000283F0 SQ_VTX_SEMANTIC_28
3130x000283F4 SQ_VTX_SEMANTIC_29
3140x000283F8 SQ_VTX_SEMANTIC_30
3150x000283FC SQ_VTX_SEMANTIC_31
3160x000288E0 SQ_VTX_SEMANTIC_CLEAR
3170x0003CFF4 SQ_VTX_START_INST_LOC
3180x000281C0 SQ_ALU_CONST_BUFFER_SIZE_GS_0
3190x000281C4 SQ_ALU_CONST_BUFFER_SIZE_GS_1
3200x000281C8 SQ_ALU_CONST_BUFFER_SIZE_GS_2
3210x000281CC SQ_ALU_CONST_BUFFER_SIZE_GS_3
3220x000281D0 SQ_ALU_CONST_BUFFER_SIZE_GS_4
3230x000281D4 SQ_ALU_CONST_BUFFER_SIZE_GS_5
3240x000281D8 SQ_ALU_CONST_BUFFER_SIZE_GS_6
3250x000281DC SQ_ALU_CONST_BUFFER_SIZE_GS_7
3260x000281E0 SQ_ALU_CONST_BUFFER_SIZE_GS_8
3270x000281E4 SQ_ALU_CONST_BUFFER_SIZE_GS_9
3280x000281E8 SQ_ALU_CONST_BUFFER_SIZE_GS_10
3290x000281EC SQ_ALU_CONST_BUFFER_SIZE_GS_11
3300x000281F0 SQ_ALU_CONST_BUFFER_SIZE_GS_12
3310x000281F4 SQ_ALU_CONST_BUFFER_SIZE_GS_13
3320x000281F8 SQ_ALU_CONST_BUFFER_SIZE_GS_14
3330x000281FC SQ_ALU_CONST_BUFFER_SIZE_GS_15
3340x00028140 SQ_ALU_CONST_BUFFER_SIZE_PS_0
3350x00028144 SQ_ALU_CONST_BUFFER_SIZE_PS_1
3360x00028148 SQ_ALU_CONST_BUFFER_SIZE_PS_2
3370x0002814C SQ_ALU_CONST_BUFFER_SIZE_PS_3
3380x00028150 SQ_ALU_CONST_BUFFER_SIZE_PS_4
3390x00028154 SQ_ALU_CONST_BUFFER_SIZE_PS_5
3400x00028158 SQ_ALU_CONST_BUFFER_SIZE_PS_6
3410x0002815C SQ_ALU_CONST_BUFFER_SIZE_PS_7
3420x00028160 SQ_ALU_CONST_BUFFER_SIZE_PS_8
3430x00028164 SQ_ALU_CONST_BUFFER_SIZE_PS_9
3440x00028168 SQ_ALU_CONST_BUFFER_SIZE_PS_10
3450x0002816C SQ_ALU_CONST_BUFFER_SIZE_PS_11
3460x00028170 SQ_ALU_CONST_BUFFER_SIZE_PS_12
3470x00028174 SQ_ALU_CONST_BUFFER_SIZE_PS_13
3480x00028178 SQ_ALU_CONST_BUFFER_SIZE_PS_14
3490x0002817C SQ_ALU_CONST_BUFFER_SIZE_PS_15
3500x00028180 SQ_ALU_CONST_BUFFER_SIZE_VS_0
3510x00028184 SQ_ALU_CONST_BUFFER_SIZE_VS_1
3520x00028188 SQ_ALU_CONST_BUFFER_SIZE_VS_2
3530x0002818C SQ_ALU_CONST_BUFFER_SIZE_VS_3
3540x00028190 SQ_ALU_CONST_BUFFER_SIZE_VS_4
3550x00028194 SQ_ALU_CONST_BUFFER_SIZE_VS_5
3560x00028198 SQ_ALU_CONST_BUFFER_SIZE_VS_6
3570x0002819C SQ_ALU_CONST_BUFFER_SIZE_VS_7
3580x000281A0 SQ_ALU_CONST_BUFFER_SIZE_VS_8
3590x000281A4 SQ_ALU_CONST_BUFFER_SIZE_VS_9
3600x000281A8 SQ_ALU_CONST_BUFFER_SIZE_VS_10
3610x000281AC SQ_ALU_CONST_BUFFER_SIZE_VS_11
3620x000281B0 SQ_ALU_CONST_BUFFER_SIZE_VS_12
3630x000281B4 SQ_ALU_CONST_BUFFER_SIZE_VS_13
3640x000281B8 SQ_ALU_CONST_BUFFER_SIZE_VS_14
3650x000281BC SQ_ALU_CONST_BUFFER_SIZE_VS_15
3660x000288D8 SQ_PGM_CF_OFFSET_ES
3670x000288DC SQ_PGM_CF_OFFSET_FS
3680x000288D4 SQ_PGM_CF_OFFSET_GS
3690x000288CC SQ_PGM_CF_OFFSET_PS
3700x000288D0 SQ_PGM_CF_OFFSET_VS
3710x00028854 SQ_PGM_EXPORTS_PS
3720x00028890 SQ_PGM_RESOURCES_ES
3730x000288A4 SQ_PGM_RESOURCES_FS
3740x0002887C SQ_PGM_RESOURCES_GS
3750x00028850 SQ_PGM_RESOURCES_PS
3760x00028868 SQ_PGM_RESOURCES_VS
3770x00009100 SPI_CONFIG_CNTL
3780x0000913C SPI_CONFIG_CNTL_1
3790x000286DC SPI_FOG_CNTL
3800x000286E4 SPI_FOG_FUNC_BIAS
3810x000286E0 SPI_FOG_FUNC_SCALE
3820x000286D8 SPI_INPUT_Z
3830x000286D4 SPI_INTERP_CONTROL_0
3840x00028644 SPI_PS_INPUT_CNTL_0
3850x00028648 SPI_PS_INPUT_CNTL_1
3860x0002864C SPI_PS_INPUT_CNTL_2
3870x00028650 SPI_PS_INPUT_CNTL_3
3880x00028654 SPI_PS_INPUT_CNTL_4
3890x00028658 SPI_PS_INPUT_CNTL_5
3900x0002865C SPI_PS_INPUT_CNTL_6
3910x00028660 SPI_PS_INPUT_CNTL_7
3920x00028664 SPI_PS_INPUT_CNTL_8
3930x00028668 SPI_PS_INPUT_CNTL_9
3940x0002866C SPI_PS_INPUT_CNTL_10
3950x00028670 SPI_PS_INPUT_CNTL_11
3960x00028674 SPI_PS_INPUT_CNTL_12
3970x00028678 SPI_PS_INPUT_CNTL_13
3980x0002867C SPI_PS_INPUT_CNTL_14
3990x00028680 SPI_PS_INPUT_CNTL_15
4000x00028684 SPI_PS_INPUT_CNTL_16
4010x00028688 SPI_PS_INPUT_CNTL_17
4020x0002868C SPI_PS_INPUT_CNTL_18
4030x00028690 SPI_PS_INPUT_CNTL_19
4040x00028694 SPI_PS_INPUT_CNTL_20
4050x00028698 SPI_PS_INPUT_CNTL_21
4060x0002869C SPI_PS_INPUT_CNTL_22
4070x000286A0 SPI_PS_INPUT_CNTL_23
4080x000286A4 SPI_PS_INPUT_CNTL_24
4090x000286A8 SPI_PS_INPUT_CNTL_25
4100x000286AC SPI_PS_INPUT_CNTL_26
4110x000286B0 SPI_PS_INPUT_CNTL_27
4120x000286B4 SPI_PS_INPUT_CNTL_28
4130x000286B8 SPI_PS_INPUT_CNTL_29
4140x000286BC SPI_PS_INPUT_CNTL_30
4150x000286C0 SPI_PS_INPUT_CNTL_31
4160x000286CC SPI_PS_IN_CONTROL_0
4170x000286D0 SPI_PS_IN_CONTROL_1
4180x000286C4 SPI_VS_OUT_CONFIG
4190x00028614 SPI_VS_OUT_ID_0
4200x00028618 SPI_VS_OUT_ID_1
4210x0002861C SPI_VS_OUT_ID_2
4220x00028620 SPI_VS_OUT_ID_3
4230x00028624 SPI_VS_OUT_ID_4
4240x00028628 SPI_VS_OUT_ID_5
4250x0002862C SPI_VS_OUT_ID_6
4260x00028630 SPI_VS_OUT_ID_7
4270x00028634 SPI_VS_OUT_ID_8
4280x00028638 SPI_VS_OUT_ID_9
4290x00028438 SX_ALPHA_REF
4300x00028410 SX_ALPHA_TEST_CONTROL
4310x00028350 SX_MISC
4320x00009604 TC_INVALIDATE
4330x00009400 TD_FILTER4
4340x00009404 TD_FILTER4_1
4350x00009408 TD_FILTER4_2
4360x0000940C TD_FILTER4_3
4370x00009410 TD_FILTER4_4
4380x00009414 TD_FILTER4_5
4390x00009418 TD_FILTER4_6
4400x0000941C TD_FILTER4_7
4410x00009420 TD_FILTER4_8
4420x00009424 TD_FILTER4_9
4430x00009428 TD_FILTER4_10
4440x0000942C TD_FILTER4_11
4450x00009430 TD_FILTER4_12
4460x00009434 TD_FILTER4_13
4470x00009438 TD_FILTER4_14
4480x0000943C TD_FILTER4_15
4490x00009440 TD_FILTER4_16
4500x00009444 TD_FILTER4_17
4510x00009448 TD_FILTER4_18
4520x0000944C TD_FILTER4_19
4530x00009450 TD_FILTER4_20
4540x00009454 TD_FILTER4_21
4550x00009458 TD_FILTER4_22
4560x0000945C TD_FILTER4_23
4570x00009460 TD_FILTER4_24
4580x00009464 TD_FILTER4_25
4590x00009468 TD_FILTER4_26
4600x0000946C TD_FILTER4_27
4610x00009470 TD_FILTER4_28
4620x00009474 TD_FILTER4_29
4630x00009478 TD_FILTER4_30
4640x0000947C TD_FILTER4_31
4650x00009480 TD_FILTER4_32
4660x00009484 TD_FILTER4_33
4670x00009488 TD_FILTER4_34
4680x0000948C TD_FILTER4_35
4690x0000A80C TD_GS_SAMPLER0_BORDER_ALPHA
4700x0000A81C TD_GS_SAMPLER1_BORDER_ALPHA
4710x0000A82C TD_GS_SAMPLER2_BORDER_ALPHA
4720x0000A83C TD_GS_SAMPLER3_BORDER_ALPHA
4730x0000A84C TD_GS_SAMPLER4_BORDER_ALPHA
4740x0000A85C TD_GS_SAMPLER5_BORDER_ALPHA
4750x0000A86C TD_GS_SAMPLER6_BORDER_ALPHA
4760x0000A87C TD_GS_SAMPLER7_BORDER_ALPHA
4770x0000A88C TD_GS_SAMPLER8_BORDER_ALPHA
4780x0000A89C TD_GS_SAMPLER9_BORDER_ALPHA
4790x0000A8AC TD_GS_SAMPLER10_BORDER_ALPHA
4800x0000A8BC TD_GS_SAMPLER11_BORDER_ALPHA
4810x0000A8CC TD_GS_SAMPLER12_BORDER_ALPHA
4820x0000A8DC TD_GS_SAMPLER13_BORDER_ALPHA
4830x0000A8EC TD_GS_SAMPLER14_BORDER_ALPHA
4840x0000A8FC TD_GS_SAMPLER15_BORDER_ALPHA
4850x0000A90C TD_GS_SAMPLER16_BORDER_ALPHA
4860x0000A91C TD_GS_SAMPLER17_BORDER_ALPHA
4870x0000A808 TD_GS_SAMPLER0_BORDER_BLUE
4880x0000A818 TD_GS_SAMPLER1_BORDER_BLUE
4890x0000A828 TD_GS_SAMPLER2_BORDER_BLUE
4900x0000A838 TD_GS_SAMPLER3_BORDER_BLUE
4910x0000A848 TD_GS_SAMPLER4_BORDER_BLUE
4920x0000A858 TD_GS_SAMPLER5_BORDER_BLUE
4930x0000A868 TD_GS_SAMPLER6_BORDER_BLUE
4940x0000A878 TD_GS_SAMPLER7_BORDER_BLUE
4950x0000A888 TD_GS_SAMPLER8_BORDER_BLUE
4960x0000A898 TD_GS_SAMPLER9_BORDER_BLUE
4970x0000A8A8 TD_GS_SAMPLER10_BORDER_BLUE
4980x0000A8B8 TD_GS_SAMPLER11_BORDER_BLUE
4990x0000A8C8 TD_GS_SAMPLER12_BORDER_BLUE
5000x0000A8D8 TD_GS_SAMPLER13_BORDER_BLUE
5010x0000A8E8 TD_GS_SAMPLER14_BORDER_BLUE
5020x0000A8F8 TD_GS_SAMPLER15_BORDER_BLUE
5030x0000A908 TD_GS_SAMPLER16_BORDER_BLUE
5040x0000A918 TD_GS_SAMPLER17_BORDER_BLUE
5050x0000A804 TD_GS_SAMPLER0_BORDER_GREEN
5060x0000A814 TD_GS_SAMPLER1_BORDER_GREEN
5070x0000A824 TD_GS_SAMPLER2_BORDER_GREEN
5080x0000A834 TD_GS_SAMPLER3_BORDER_GREEN
5090x0000A844 TD_GS_SAMPLER4_BORDER_GREEN
5100x0000A854 TD_GS_SAMPLER5_BORDER_GREEN
5110x0000A864 TD_GS_SAMPLER6_BORDER_GREEN
5120x0000A874 TD_GS_SAMPLER7_BORDER_GREEN
5130x0000A884 TD_GS_SAMPLER8_BORDER_GREEN
5140x0000A894 TD_GS_SAMPLER9_BORDER_GREEN
5150x0000A8A4 TD_GS_SAMPLER10_BORDER_GREEN
5160x0000A8B4 TD_GS_SAMPLER11_BORDER_GREEN
5170x0000A8C4 TD_GS_SAMPLER12_BORDER_GREEN
5180x0000A8D4 TD_GS_SAMPLER13_BORDER_GREEN
5190x0000A8E4 TD_GS_SAMPLER14_BORDER_GREEN
5200x0000A8F4 TD_GS_SAMPLER15_BORDER_GREEN
5210x0000A904 TD_GS_SAMPLER16_BORDER_GREEN
5220x0000A914 TD_GS_SAMPLER17_BORDER_GREEN
5230x0000A800 TD_GS_SAMPLER0_BORDER_RED
5240x0000A810 TD_GS_SAMPLER1_BORDER_RED
5250x0000A820 TD_GS_SAMPLER2_BORDER_RED
5260x0000A830 TD_GS_SAMPLER3_BORDER_RED
5270x0000A840 TD_GS_SAMPLER4_BORDER_RED
5280x0000A850 TD_GS_SAMPLER5_BORDER_RED
5290x0000A860 TD_GS_SAMPLER6_BORDER_RED
5300x0000A870 TD_GS_SAMPLER7_BORDER_RED
5310x0000A880 TD_GS_SAMPLER8_BORDER_RED
5320x0000A890 TD_GS_SAMPLER9_BORDER_RED
5330x0000A8A0 TD_GS_SAMPLER10_BORDER_RED
5340x0000A8B0 TD_GS_SAMPLER11_BORDER_RED
5350x0000A8C0 TD_GS_SAMPLER12_BORDER_RED
5360x0000A8D0 TD_GS_SAMPLER13_BORDER_RED
5370x0000A8E0 TD_GS_SAMPLER14_BORDER_RED
5380x0000A8F0 TD_GS_SAMPLER15_BORDER_RED
5390x0000A900 TD_GS_SAMPLER16_BORDER_RED
5400x0000A910 TD_GS_SAMPLER17_BORDER_RED
5410x0000A40C TD_PS_SAMPLER0_BORDER_ALPHA
5420x0000A41C TD_PS_SAMPLER1_BORDER_ALPHA
5430x0000A42C TD_PS_SAMPLER2_BORDER_ALPHA
5440x0000A43C TD_PS_SAMPLER3_BORDER_ALPHA
5450x0000A44C TD_PS_SAMPLER4_BORDER_ALPHA
5460x0000A45C TD_PS_SAMPLER5_BORDER_ALPHA
5470x0000A46C TD_PS_SAMPLER6_BORDER_ALPHA
5480x0000A47C TD_PS_SAMPLER7_BORDER_ALPHA
5490x0000A48C TD_PS_SAMPLER8_BORDER_ALPHA
5500x0000A49C TD_PS_SAMPLER9_BORDER_ALPHA
5510x0000A4AC TD_PS_SAMPLER10_BORDER_ALPHA
5520x0000A4BC TD_PS_SAMPLER11_BORDER_ALPHA
5530x0000A4CC TD_PS_SAMPLER12_BORDER_ALPHA
5540x0000A4DC TD_PS_SAMPLER13_BORDER_ALPHA
5550x0000A4EC TD_PS_SAMPLER14_BORDER_ALPHA
5560x0000A4FC TD_PS_SAMPLER15_BORDER_ALPHA
5570x0000A50C TD_PS_SAMPLER16_BORDER_ALPHA
5580x0000A51C TD_PS_SAMPLER17_BORDER_ALPHA
5590x0000A408 TD_PS_SAMPLER0_BORDER_BLUE
5600x0000A418 TD_PS_SAMPLER1_BORDER_BLUE
5610x0000A428 TD_PS_SAMPLER2_BORDER_BLUE
5620x0000A438 TD_PS_SAMPLER3_BORDER_BLUE
5630x0000A448 TD_PS_SAMPLER4_BORDER_BLUE
5640x0000A458 TD_PS_SAMPLER5_BORDER_BLUE
5650x0000A468 TD_PS_SAMPLER6_BORDER_BLUE
5660x0000A478 TD_PS_SAMPLER7_BORDER_BLUE
5670x0000A488 TD_PS_SAMPLER8_BORDER_BLUE
5680x0000A498 TD_PS_SAMPLER9_BORDER_BLUE
5690x0000A4A8 TD_PS_SAMPLER10_BORDER_BLUE
5700x0000A4B8 TD_PS_SAMPLER11_BORDER_BLUE
5710x0000A4C8 TD_PS_SAMPLER12_BORDER_BLUE
5720x0000A4D8 TD_PS_SAMPLER13_BORDER_BLUE
5730x0000A4E8 TD_PS_SAMPLER14_BORDER_BLUE
5740x0000A4F8 TD_PS_SAMPLER15_BORDER_BLUE
5750x0000A508 TD_PS_SAMPLER16_BORDER_BLUE
5760x0000A518 TD_PS_SAMPLER17_BORDER_BLUE
5770x0000A404 TD_PS_SAMPLER0_BORDER_GREEN
5780x0000A414 TD_PS_SAMPLER1_BORDER_GREEN
5790x0000A424 TD_PS_SAMPLER2_BORDER_GREEN
5800x0000A434 TD_PS_SAMPLER3_BORDER_GREEN
5810x0000A444 TD_PS_SAMPLER4_BORDER_GREEN
5820x0000A454 TD_PS_SAMPLER5_BORDER_GREEN
5830x0000A464 TD_PS_SAMPLER6_BORDER_GREEN
5840x0000A474 TD_PS_SAMPLER7_BORDER_GREEN
5850x0000A484 TD_PS_SAMPLER8_BORDER_GREEN
5860x0000A494 TD_PS_SAMPLER9_BORDER_GREEN
5870x0000A4A4 TD_PS_SAMPLER10_BORDER_GREEN
5880x0000A4B4 TD_PS_SAMPLER11_BORDER_GREEN
5890x0000A4C4 TD_PS_SAMPLER12_BORDER_GREEN
5900x0000A4D4 TD_PS_SAMPLER13_BORDER_GREEN
5910x0000A4E4 TD_PS_SAMPLER14_BORDER_GREEN
5920x0000A4F4 TD_PS_SAMPLER15_BORDER_GREEN
5930x0000A504 TD_PS_SAMPLER16_BORDER_GREEN
5940x0000A514 TD_PS_SAMPLER17_BORDER_GREEN
5950x0000A400 TD_PS_SAMPLER0_BORDER_RED
5960x0000A410 TD_PS_SAMPLER1_BORDER_RED
5970x0000A420 TD_PS_SAMPLER2_BORDER_RED
5980x0000A430 TD_PS_SAMPLER3_BORDER_RED
5990x0000A440 TD_PS_SAMPLER4_BORDER_RED
6000x0000A450 TD_PS_SAMPLER5_BORDER_RED
6010x0000A460 TD_PS_SAMPLER6_BORDER_RED
6020x0000A470 TD_PS_SAMPLER7_BORDER_RED
6030x0000A480 TD_PS_SAMPLER8_BORDER_RED
6040x0000A490 TD_PS_SAMPLER9_BORDER_RED
6050x0000A4A0 TD_PS_SAMPLER10_BORDER_RED
6060x0000A4B0 TD_PS_SAMPLER11_BORDER_RED
6070x0000A4C0 TD_PS_SAMPLER12_BORDER_RED
6080x0000A4D0 TD_PS_SAMPLER13_BORDER_RED
6090x0000A4E0 TD_PS_SAMPLER14_BORDER_RED
6100x0000A4F0 TD_PS_SAMPLER15_BORDER_RED
6110x0000A500 TD_PS_SAMPLER16_BORDER_RED
6120x0000A510 TD_PS_SAMPLER17_BORDER_RED
6130x0000AA00 TD_PS_SAMPLER0_CLEARTYPE_KERNEL
6140x0000AA04 TD_PS_SAMPLER1_CLEARTYPE_KERNEL
6150x0000AA08 TD_PS_SAMPLER2_CLEARTYPE_KERNEL
6160x0000AA0C TD_PS_SAMPLER3_CLEARTYPE_KERNEL
6170x0000AA10 TD_PS_SAMPLER4_CLEARTYPE_KERNEL
6180x0000AA14 TD_PS_SAMPLER5_CLEARTYPE_KERNEL
6190x0000AA18 TD_PS_SAMPLER6_CLEARTYPE_KERNEL
6200x0000AA1C TD_PS_SAMPLER7_CLEARTYPE_KERNEL
6210x0000AA20 TD_PS_SAMPLER8_CLEARTYPE_KERNEL
6220x0000AA24 TD_PS_SAMPLER9_CLEARTYPE_KERNEL
6230x0000AA28 TD_PS_SAMPLER10_CLEARTYPE_KERNEL
6240x0000AA2C TD_PS_SAMPLER11_CLEARTYPE_KERNEL
6250x0000AA30 TD_PS_SAMPLER12_CLEARTYPE_KERNEL
6260x0000AA34 TD_PS_SAMPLER13_CLEARTYPE_KERNEL
6270x0000AA38 TD_PS_SAMPLER14_CLEARTYPE_KERNEL
6280x0000AA3C TD_PS_SAMPLER15_CLEARTYPE_KERNEL
6290x0000AA40 TD_PS_SAMPLER16_CLEARTYPE_KERNEL
6300x0000AA44 TD_PS_SAMPLER17_CLEARTYPE_KERNEL
6310x0000A60C TD_VS_SAMPLER0_BORDER_ALPHA
6320x0000A61C TD_VS_SAMPLER1_BORDER_ALPHA
6330x0000A62C TD_VS_SAMPLER2_BORDER_ALPHA
6340x0000A63C TD_VS_SAMPLER3_BORDER_ALPHA
6350x0000A64C TD_VS_SAMPLER4_BORDER_ALPHA
6360x0000A65C TD_VS_SAMPLER5_BORDER_ALPHA
6370x0000A66C TD_VS_SAMPLER6_BORDER_ALPHA
6380x0000A67C TD_VS_SAMPLER7_BORDER_ALPHA
6390x0000A68C TD_VS_SAMPLER8_BORDER_ALPHA
6400x0000A69C TD_VS_SAMPLER9_BORDER_ALPHA
6410x0000A6AC TD_VS_SAMPLER10_BORDER_ALPHA
6420x0000A6BC TD_VS_SAMPLER11_BORDER_ALPHA
6430x0000A6CC TD_VS_SAMPLER12_BORDER_ALPHA
6440x0000A6DC TD_VS_SAMPLER13_BORDER_ALPHA
6450x0000A6EC TD_VS_SAMPLER14_BORDER_ALPHA
6460x0000A6FC TD_VS_SAMPLER15_BORDER_ALPHA
6470x0000A70C TD_VS_SAMPLER16_BORDER_ALPHA
6480x0000A71C TD_VS_SAMPLER17_BORDER_ALPHA
6490x0000A608 TD_VS_SAMPLER0_BORDER_BLUE
6500x0000A618 TD_VS_SAMPLER1_BORDER_BLUE
6510x0000A628 TD_VS_SAMPLER2_BORDER_BLUE
6520x0000A638 TD_VS_SAMPLER3_BORDER_BLUE
6530x0000A648 TD_VS_SAMPLER4_BORDER_BLUE
6540x0000A658 TD_VS_SAMPLER5_BORDER_BLUE
6550x0000A668 TD_VS_SAMPLER6_BORDER_BLUE
6560x0000A678 TD_VS_SAMPLER7_BORDER_BLUE
6570x0000A688 TD_VS_SAMPLER8_BORDER_BLUE
6580x0000A698 TD_VS_SAMPLER9_BORDER_BLUE
6590x0000A6A8 TD_VS_SAMPLER10_BORDER_BLUE
6600x0000A6B8 TD_VS_SAMPLER11_BORDER_BLUE
6610x0000A6C8 TD_VS_SAMPLER12_BORDER_BLUE
6620x0000A6D8 TD_VS_SAMPLER13_BORDER_BLUE
6630x0000A6E8 TD_VS_SAMPLER14_BORDER_BLUE
6640x0000A6F8 TD_VS_SAMPLER15_BORDER_BLUE
6650x0000A708 TD_VS_SAMPLER16_BORDER_BLUE
6660x0000A718 TD_VS_SAMPLER17_BORDER_BLUE
6670x0000A604 TD_VS_SAMPLER0_BORDER_GREEN
6680x0000A614 TD_VS_SAMPLER1_BORDER_GREEN
6690x0000A624 TD_VS_SAMPLER2_BORDER_GREEN
6700x0000A634 TD_VS_SAMPLER3_BORDER_GREEN
6710x0000A644 TD_VS_SAMPLER4_BORDER_GREEN
6720x0000A654 TD_VS_SAMPLER5_BORDER_GREEN
6730x0000A664 TD_VS_SAMPLER6_BORDER_GREEN
6740x0000A674 TD_VS_SAMPLER7_BORDER_GREEN
6750x0000A684 TD_VS_SAMPLER8_BORDER_GREEN
6760x0000A694 TD_VS_SAMPLER9_BORDER_GREEN
6770x0000A6A4 TD_VS_SAMPLER10_BORDER_GREEN
6780x0000A6B4 TD_VS_SAMPLER11_BORDER_GREEN
6790x0000A6C4 TD_VS_SAMPLER12_BORDER_GREEN
6800x0000A6D4 TD_VS_SAMPLER13_BORDER_GREEN
6810x0000A6E4 TD_VS_SAMPLER14_BORDER_GREEN
6820x0000A6F4 TD_VS_SAMPLER15_BORDER_GREEN
6830x0000A704 TD_VS_SAMPLER16_BORDER_GREEN
6840x0000A714 TD_VS_SAMPLER17_BORDER_GREEN
6850x0000A600 TD_VS_SAMPLER0_BORDER_RED
6860x0000A610 TD_VS_SAMPLER1_BORDER_RED
6870x0000A620 TD_VS_SAMPLER2_BORDER_RED
6880x0000A630 TD_VS_SAMPLER3_BORDER_RED
6890x0000A640 TD_VS_SAMPLER4_BORDER_RED
6900x0000A650 TD_VS_SAMPLER5_BORDER_RED
6910x0000A660 TD_VS_SAMPLER6_BORDER_RED
6920x0000A670 TD_VS_SAMPLER7_BORDER_RED
6930x0000A680 TD_VS_SAMPLER8_BORDER_RED
6940x0000A690 TD_VS_SAMPLER9_BORDER_RED
6950x0000A6A0 TD_VS_SAMPLER10_BORDER_RED
6960x0000A6B0 TD_VS_SAMPLER11_BORDER_RED
6970x0000A6C0 TD_VS_SAMPLER12_BORDER_RED
6980x0000A6D0 TD_VS_SAMPLER13_BORDER_RED
6990x0000A6E0 TD_VS_SAMPLER14_BORDER_RED
7000x0000A6F0 TD_VS_SAMPLER15_BORDER_RED
7010x0000A700 TD_VS_SAMPLER16_BORDER_RED
7020x0000A710 TD_VS_SAMPLER17_BORDER_RED
7030x00009508 TA_CNTL_AUX
7040x0002802C DB_DEPTH_CLEAR
7050x00028D24 DB_HTILE_SURFACE
7060x00028D34 DB_PREFETCH_LIMIT
7070x00028D30 DB_PRELOAD_CONTROL
7080x00028D0C DB_RENDER_CONTROL
7090x00028D10 DB_RENDER_OVERRIDE
7100x0002880C DB_SHADER_CONTROL
7110x00028D2C DB_SRESULTS_COMPARE_STATE1
7120x00028430 DB_STENCILREFMASK
7130x00028434 DB_STENCILREFMASK_BF
7140x00028028 DB_STENCIL_CLEAR
7150x00028780 CB_BLEND0_CONTROL
7160x00028784 CB_BLEND1_CONTROL
7170x00028788 CB_BLEND2_CONTROL
7180x0002878C CB_BLEND3_CONTROL
7190x00028790 CB_BLEND4_CONTROL
7200x00028794 CB_BLEND5_CONTROL
7210x00028798 CB_BLEND6_CONTROL
7220x0002879C CB_BLEND7_CONTROL
7230x00028804 CB_BLEND_CONTROL
7240x00028420 CB_BLEND_ALPHA
7250x0002841C CB_BLEND_BLUE
7260x00028418 CB_BLEND_GREEN
7270x00028414 CB_BLEND_RED
7280x0002812C CB_CLEAR_ALPHA
7290x00028128 CB_CLEAR_BLUE
7300x00028124 CB_CLEAR_GREEN
7310x00028120 CB_CLEAR_RED
7320x00028C30 CB_CLRCMP_CONTROL
7330x00028C38 CB_CLRCMP_DST
7340x00028C3C CB_CLRCMP_MSK
7350x00028C34 CB_CLRCMP_SRC
7360x00028100 CB_COLOR0_MASK
7370x00028104 CB_COLOR1_MASK
7380x00028108 CB_COLOR2_MASK
7390x0002810C CB_COLOR3_MASK
7400x00028110 CB_COLOR4_MASK
7410x00028114 CB_COLOR5_MASK
7420x00028118 CB_COLOR6_MASK
7430x0002811C CB_COLOR7_MASK
7440x00028080 CB_COLOR0_VIEW
7450x00028084 CB_COLOR1_VIEW
7460x00028088 CB_COLOR2_VIEW
7470x0002808C CB_COLOR3_VIEW
7480x00028090 CB_COLOR4_VIEW
7490x00028094 CB_COLOR5_VIEW
7500x00028098 CB_COLOR6_VIEW
7510x0002809C CB_COLOR7_VIEW
7520x00028808 CB_COLOR_CONTROL
7530x0002842C CB_FOG_BLUE
7540x00028428 CB_FOG_GREEN
7550x00028424 CB_FOG_RED
7560x00008040 WAIT_UNTIL
7570x00009714 VC_ENHANCE
7580x00009830 DB_DEBUG
7590x00009838 DB_WATERMARKS
7600x00028D28 DB_SRESULTS_COMPARE_STATE0
7610x00028D44 DB_ALPHA_TO_MASK
7620x00009700 VC_CNTL
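
The new reg_srcs/r600 file above is a plain-text whitelist of registers (offset plus name) that the build turns into a lookup table, via mkregtable, for the command-stream checker to consult before accepting a register write from userspace. As a rough stand-alone illustration of that lookup idea (not the driver's generated code; the table subset, helper name and linear scan here are all hypothetical):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Tiny illustrative subset of the whitelist above. */
static const uint32_t safe_regs[] = {
	0x000287A0,	/* R7xx_CB_SHADER_CONTROL */
	0x00028230,	/* R7xx_PA_SC_EDGERULE */
	0x00008040,	/* WAIT_UNTIL */
	0x00009700,	/* VC_CNTL */
};

/*
 * Linear scan for clarity; the generated tables are bitmaps indexed by
 * the dword offset, so the real check is effectively a constant-time
 * bit test.
 */
static bool reg_is_safe(uint32_t offset)
{
	size_t i;

	for (i = 0; i < sizeof(safe_regs) / sizeof(safe_regs[0]); i++)
		if (safe_regs[i] == offset)
			return true;
	return false;
}

int main(void)
{
	printf("0x000287A0 allowed: %d\n", reg_is_safe(0x000287A0)); /* 1 */
	printf("0x00028000 allowed: %d\n", reg_is_safe(0x00028000)); /* 0 */
	return 0;
}
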
diff --git a/drivers/gpu/drm/radeon/reg_srcs/rs600 b/drivers/gpu/drm/radeon/reg_srcs/rs600
index 8e3c0b807add..83e8bc0c2bb2 100644
--- a/drivers/gpu/drm/radeon/reg_srcs/rs600
+++ b/drivers/gpu/drm/radeon/reg_srcs/rs600
@@ -125,6 +125,8 @@ rs600 0x6d40
 0x4000 GB_VAP_RASTER_VTX_FMT_0
 0x4004 GB_VAP_RASTER_VTX_FMT_1
 0x4008 GB_ENABLE
+0x4010 GB_MSPOS0
+0x4014 GB_MSPOS1
 0x401C GB_SELECT
 0x4020 GB_AA_CONFIG
 0x4024 GB_FIFO_SIZE
@@ -153,7 +155,7 @@ rs600 0x6d40
 0x42A4 SU_POLY_OFFSET_FRONT_SCALE
 0x42A8 SU_POLY_OFFSET_FRONT_OFFSET
 0x42AC SU_POLY_OFFSET_BACK_SCALE
 0x42B0 SU_POLY_OFFSET_BACK_OFFSET
 0x42B4 SU_POLY_OFFSET_ENABLE
 0x42B8 SU_CULL_MODE
 0x42C0 SU_DEPTH_SCALE
@@ -291,6 +293,8 @@ rs600 0x6d40
 0x46AC US_OUT_FMT_2
 0x46B0 US_OUT_FMT_3
 0x46B4 US_W_FMT
+0x46B8 US_CODE_BANK
+0x46BC US_CODE_EXT
 0x46C0 US_ALU_RGB_ADDR_0
 0x46C4 US_ALU_RGB_ADDR_1
 0x46C8 US_ALU_RGB_ADDR_2
@@ -547,6 +551,70 @@ rs600 0x6d40
 0x4AB4 US_ALU_ALPHA_INST_61
 0x4AB8 US_ALU_ALPHA_INST_62
 0x4ABC US_ALU_ALPHA_INST_63
+0x4AC0 US_ALU_EXT_ADDR_0
+0x4AC4 US_ALU_EXT_ADDR_1
+0x4AC8 US_ALU_EXT_ADDR_2
+0x4ACC US_ALU_EXT_ADDR_3
+0x4AD0 US_ALU_EXT_ADDR_4
+0x4AD4 US_ALU_EXT_ADDR_5
+0x4AD8 US_ALU_EXT_ADDR_6
+0x4ADC US_ALU_EXT_ADDR_7
+0x4AE0 US_ALU_EXT_ADDR_8
+0x4AE4 US_ALU_EXT_ADDR_9
+0x4AE8 US_ALU_EXT_ADDR_10
+0x4AEC US_ALU_EXT_ADDR_11
+0x4AF0 US_ALU_EXT_ADDR_12
+0x4AF4 US_ALU_EXT_ADDR_13
+0x4AF8 US_ALU_EXT_ADDR_14
+0x4AFC US_ALU_EXT_ADDR_15
+0x4B00 US_ALU_EXT_ADDR_16
+0x4B04 US_ALU_EXT_ADDR_17
+0x4B08 US_ALU_EXT_ADDR_18
+0x4B0C US_ALU_EXT_ADDR_19
+0x4B10 US_ALU_EXT_ADDR_20
+0x4B14 US_ALU_EXT_ADDR_21
+0x4B18 US_ALU_EXT_ADDR_22
+0x4B1C US_ALU_EXT_ADDR_23
+0x4B20 US_ALU_EXT_ADDR_24
+0x4B24 US_ALU_EXT_ADDR_25
+0x4B28 US_ALU_EXT_ADDR_26
+0x4B2C US_ALU_EXT_ADDR_27
+0x4B30 US_ALU_EXT_ADDR_28
+0x4B34 US_ALU_EXT_ADDR_29
+0x4B38 US_ALU_EXT_ADDR_30
+0x4B3C US_ALU_EXT_ADDR_31
+0x4B40 US_ALU_EXT_ADDR_32
+0x4B44 US_ALU_EXT_ADDR_33
+0x4B48 US_ALU_EXT_ADDR_34
+0x4B4C US_ALU_EXT_ADDR_35
+0x4B50 US_ALU_EXT_ADDR_36
+0x4B54 US_ALU_EXT_ADDR_37
+0x4B58 US_ALU_EXT_ADDR_38
+0x4B5C US_ALU_EXT_ADDR_39
+0x4B60 US_ALU_EXT_ADDR_40
+0x4B64 US_ALU_EXT_ADDR_41
+0x4B68 US_ALU_EXT_ADDR_42
+0x4B6C US_ALU_EXT_ADDR_43
+0x4B70 US_ALU_EXT_ADDR_44
+0x4B74 US_ALU_EXT_ADDR_45
+0x4B78 US_ALU_EXT_ADDR_46
+0x4B7C US_ALU_EXT_ADDR_47
+0x4B80 US_ALU_EXT_ADDR_48
+0x4B84 US_ALU_EXT_ADDR_49
+0x4B88 US_ALU_EXT_ADDR_50
+0x4B8C US_ALU_EXT_ADDR_51
+0x4B90 US_ALU_EXT_ADDR_52
+0x4B94 US_ALU_EXT_ADDR_53
+0x4B98 US_ALU_EXT_ADDR_54
+0x4B9C US_ALU_EXT_ADDR_55
+0x4BA0 US_ALU_EXT_ADDR_56
+0x4BA4 US_ALU_EXT_ADDR_57
+0x4BA8 US_ALU_EXT_ADDR_58
+0x4BAC US_ALU_EXT_ADDR_59
+0x4BB0 US_ALU_EXT_ADDR_60
+0x4BB4 US_ALU_EXT_ADDR_61
+0x4BB8 US_ALU_EXT_ADDR_62
+0x4BBC US_ALU_EXT_ADDR_63
 0x4BC0 FG_FOG_BLEND
 0x4BC4 FG_FOG_FACTOR
 0x4BC8 FG_FOG_COLOR_R
diff --git a/drivers/gpu/drm/radeon/reg_srcs/rv515 b/drivers/gpu/drm/radeon/reg_srcs/rv515
index 0102a0d5735c..1e46233985eb 100644
--- a/drivers/gpu/drm/radeon/reg_srcs/rv515
+++ b/drivers/gpu/drm/radeon/reg_srcs/rv515
@@ -35,6 +35,7 @@ rv515 0x6d40
 0x1DA8 VAP_VPORT_ZSCALE
 0x1DAC VAP_VPORT_ZOFFSET
 0x2080 VAP_CNTL
+0x208C VAP_INDEX_OFFSET
 0x2090 VAP_OUT_VTX_FMT_0
 0x2094 VAP_OUT_VTX_FMT_1
 0x20B0 VAP_VTE_CNTL
@@ -158,10 +159,17 @@ rv515 0x6d40
 0x4000 GB_VAP_RASTER_VTX_FMT_0
 0x4004 GB_VAP_RASTER_VTX_FMT_1
 0x4008 GB_ENABLE
+0x4010 GB_MSPOS0
+0x4014 GB_MSPOS1
 0x401C GB_SELECT
 0x4020 GB_AA_CONFIG
 0x4024 GB_FIFO_SIZE
+0x4028 GB_Z_PEQ_CONFIG
 0x4100 TX_INVALTAGS
+0x4114 SU_TEX_WRAP_PS3
+0x4118 PS3_ENABLE
+0x411c PS3_VTX_FMT
+0x4120 PS3_TEX_SOURCE
 0x4200 GA_POINT_S0
 0x4204 GA_POINT_T0
 0x4208 GA_POINT_S1
@@ -171,6 +179,7 @@ rv515 0x6d40
 0x4230 GA_POINT_MINMAX
 0x4234 GA_LINE_CNTL
 0x4238 GA_LINE_STIPPLE_CONFIG
+0x4258 GA_COLOR_CONTROL_PS3
 0x4260 GA_LINE_STIPPLE_VALUE
 0x4264 GA_LINE_S0
 0x4268 GA_LINE_S1
diff --git a/drivers/gpu/drm/radeon/rs400.c b/drivers/gpu/drm/radeon/rs400.c
index ca037160a582..1a41cb268b72 100644
--- a/drivers/gpu/drm/radeon/rs400.c
+++ b/drivers/gpu/drm/radeon/rs400.c
@@ -26,8 +26,10 @@
26 * Jerome Glisse 26 * Jerome Glisse
27 */ 27 */
28#include <linux/seq_file.h> 28#include <linux/seq_file.h>
29#include <linux/slab.h>
29#include <drm/drmP.h> 30#include <drm/drmP.h>
30#include "radeon.h" 31#include "radeon.h"
32#include "radeon_asic.h"
31#include "rs400d.h" 33#include "rs400d.h"
32 34
33/* This files gather functions specifics to : rs400,rs480 */ 35/* This files gather functions specifics to : rs400,rs480 */
@@ -113,6 +115,7 @@ int rs400_gart_enable(struct radeon_device *rdev)
113 uint32_t size_reg; 115 uint32_t size_reg;
114 uint32_t tmp; 116 uint32_t tmp;
115 117
118 radeon_gart_restore(rdev);
116 tmp = RREG32_MC(RS690_AIC_CTRL_SCRATCH); 119 tmp = RREG32_MC(RS690_AIC_CTRL_SCRATCH);
117 tmp |= RS690_DIS_OUT_OF_PCI_GART_ACCESS; 120 tmp |= RS690_DIS_OUT_OF_PCI_GART_ACCESS;
118 WREG32_MC(RS690_AIC_CTRL_SCRATCH, tmp); 121 WREG32_MC(RS690_AIC_CTRL_SCRATCH, tmp);
@@ -150,9 +153,8 @@ int rs400_gart_enable(struct radeon_device *rdev)
150 WREG32(RADEON_AGP_BASE, 0xFFFFFFFF); 153 WREG32(RADEON_AGP_BASE, 0xFFFFFFFF);
151 WREG32(RS480_AGP_BASE_2, 0); 154 WREG32(RS480_AGP_BASE_2, 0);
152 } 155 }
153 tmp = rdev->mc.gtt_location + rdev->mc.gtt_size - 1; 156 tmp = REG_SET(RS690_MC_AGP_TOP, rdev->mc.gtt_end >> 16);
154 tmp = REG_SET(RS690_MC_AGP_TOP, tmp >> 16); 157 tmp |= REG_SET(RS690_MC_AGP_START, rdev->mc.gtt_start >> 16);
155 tmp |= REG_SET(RS690_MC_AGP_START, rdev->mc.gtt_location >> 16);
156 if ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740)) { 158 if ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740)) {
157 WREG32_MC(RS690_MCCFG_AGP_LOCATION, tmp); 159 WREG32_MC(RS690_MCCFG_AGP_LOCATION, tmp);
158 tmp = RREG32(RADEON_BUS_CNTL) & ~RS600_BUS_MASTER_DIS; 160 tmp = RREG32(RADEON_BUS_CNTL) & ~RS600_BUS_MASTER_DIS;
@@ -202,9 +204,9 @@ void rs400_gart_disable(struct radeon_device *rdev)
202 204
203void rs400_gart_fini(struct radeon_device *rdev) 205void rs400_gart_fini(struct radeon_device *rdev)
204{ 206{
207 radeon_gart_fini(rdev);
205 rs400_gart_disable(rdev); 208 rs400_gart_disable(rdev);
206 radeon_gart_table_ram_free(rdev); 209 radeon_gart_table_ram_free(rdev);
207 radeon_gart_fini(rdev);
208} 210}
209 211
210int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr) 212int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr)
@@ -223,26 +225,48 @@ int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr)
223 return 0; 225 return 0;
224} 226}
225 227
228int rs400_mc_wait_for_idle(struct radeon_device *rdev)
229{
230 unsigned i;
231 uint32_t tmp;
232
233 for (i = 0; i < rdev->usec_timeout; i++) {
234 /* read MC_STATUS */
235 tmp = RREG32(0x0150);
236 if (tmp & (1 << 2)) {
237 return 0;
238 }
239 DRM_UDELAY(1);
240 }
241 return -1;
242}
243
226void rs400_gpu_init(struct radeon_device *rdev) 244void rs400_gpu_init(struct radeon_device *rdev)
227{ 245{
228 /* FIXME: HDP same place on rs400 ? */ 246 /* FIXME: HDP same place on rs400 ? */
229 r100_hdp_reset(rdev); 247 r100_hdp_reset(rdev);
230 /* FIXME: is this correct ? */ 248 /* FIXME: is this correct ? */
231 r420_pipes_init(rdev); 249 r420_pipes_init(rdev);
232 if (r300_mc_wait_for_idle(rdev)) { 250 if (rs400_mc_wait_for_idle(rdev)) {
233 printk(KERN_WARNING "Failed to wait MC idle while " 251 printk(KERN_WARNING "rs400: Failed to wait MC idle while "
234 "programming pipes. Bad things might happen.\n"); 252 "programming pipes. Bad things might happen. %08x\n", RREG32(0x150));
235 } 253 }
236} 254}
237 255
238void rs400_vram_info(struct radeon_device *rdev) 256void rs400_mc_init(struct radeon_device *rdev)
239{ 257{
258 u64 base;
259
240 rs400_gart_adjust_size(rdev); 260 rs400_gart_adjust_size(rdev);
261 rdev->mc.igp_sideport_enabled = radeon_combios_sideport_present(rdev);
241 /* DDR for all card after R300 & IGP */ 262 /* DDR for all card after R300 & IGP */
242 rdev->mc.vram_is_ddr = true; 263 rdev->mc.vram_is_ddr = true;
243 rdev->mc.vram_width = 128; 264 rdev->mc.vram_width = 128;
244
245 r100_vram_init_sizes(rdev); 265 r100_vram_init_sizes(rdev);
266 base = (RREG32(RADEON_NB_TOM) & 0xffff) << 16;
267 radeon_vram_location(rdev, &rdev->mc, base);
268 radeon_gtt_location(rdev, &rdev->mc);
269 radeon_update_bandwidth_info(rdev);
246} 270}
247 271
248uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg) 272uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg)
@@ -346,21 +370,6 @@ static int rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev)
346#endif 370#endif
347} 371}
348 372
349static int rs400_mc_init(struct radeon_device *rdev)
350{
351 int r;
352 u32 tmp;
353
354 /* Setup GPU memory space */
355 tmp = G_00015C_MC_FB_START(RREG32(R_00015C_NB_TOM));
356 rdev->mc.vram_location = G_00015C_MC_FB_START(tmp) << 16;
357 rdev->mc.gtt_location = 0xFFFFFFFFUL;
358 r = radeon_mc_setup(rdev);
359 if (r)
360 return r;
361 return 0;
362}
363
364void rs400_mc_program(struct radeon_device *rdev) 373void rs400_mc_program(struct radeon_device *rdev)
365{ 374{
366 struct r100_mc_save save; 375 struct r100_mc_save save;
@@ -369,8 +378,8 @@ void rs400_mc_program(struct radeon_device *rdev)
369 r100_mc_stop(rdev, &save); 378 r100_mc_stop(rdev, &save);
370 379
371 /* Wait for mc idle */ 380 /* Wait for mc idle */
372 if (r300_mc_wait_for_idle(rdev)) 381 if (rs400_mc_wait_for_idle(rdev))
373 dev_warn(rdev->dev, "Wait MC idle timeout before updating MC.\n"); 382 dev_warn(rdev->dev, "rs400: Wait MC idle timeout before updating MC.\n");
374 WREG32(R_000148_MC_FB_LOCATION, 383 WREG32(R_000148_MC_FB_LOCATION,
375 S_000148_MC_FB_START(rdev->mc.vram_start >> 16) | 384 S_000148_MC_FB_START(rdev->mc.vram_start >> 16) |
376 S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16)); 385 S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16));
@@ -382,19 +391,22 @@ static int rs400_startup(struct radeon_device *rdev)
382{ 391{
383 int r; 392 int r;
384 393
394 r100_set_common_regs(rdev);
395
385 rs400_mc_program(rdev); 396 rs400_mc_program(rdev);
386 /* Resume clock */ 397 /* Resume clock */
387 r300_clock_startup(rdev); 398 r300_clock_startup(rdev);
388 /* Initialize GPU configuration (# pipes, ...) */ 399 /* Initialize GPU configuration (# pipes, ...) */
389 rs400_gpu_init(rdev); 400 rs400_gpu_init(rdev);
401 r100_enable_bm(rdev);
390 /* Initialize GART (initialize after TTM so we can allocate 402 /* Initialize GART (initialize after TTM so we can allocate
391 * memory through TTM but finalize after TTM) */ 403 * memory through TTM but finalize after TTM) */
392 r = rs400_gart_enable(rdev); 404 r = rs400_gart_enable(rdev);
393 if (r) 405 if (r)
394 return r; 406 return r;
395 /* Enable IRQ */ 407 /* Enable IRQ */
396 rdev->irq.sw_int = true;
397 r100_irq_set(rdev); 408 r100_irq_set(rdev);
409 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
398 /* 1M ring buffer */ 410 /* 1M ring buffer */
399 r = r100_cp_init(rdev, 1024 * 1024); 411 r = r100_cp_init(rdev, 1024 * 1024);
400 if (r) { 412 if (r) {
@@ -430,6 +442,8 @@ int rs400_resume(struct radeon_device *rdev)
430 radeon_combios_asic_init(rdev->ddev); 442 radeon_combios_asic_init(rdev->ddev);
431 /* Resume clock after posting */ 443 /* Resume clock after posting */
432 r300_clock_startup(rdev); 444 r300_clock_startup(rdev);
445 /* Initialize surface registers */
446 radeon_surface_init(rdev);
433 return rs400_startup(rdev); 447 return rs400_startup(rdev);
434} 448}
435 449
@@ -444,7 +458,7 @@ int rs400_suspend(struct radeon_device *rdev)
444 458
445void rs400_fini(struct radeon_device *rdev) 459void rs400_fini(struct radeon_device *rdev)
446{ 460{
447 rs400_suspend(rdev); 461 radeon_pm_fini(rdev);
448 r100_cp_fini(rdev); 462 r100_cp_fini(rdev);
449 r100_wb_fini(rdev); 463 r100_wb_fini(rdev);
450 r100_ib_fini(rdev); 464 r100_ib_fini(rdev);
@@ -452,7 +466,7 @@ void rs400_fini(struct radeon_device *rdev)
452 rs400_gart_fini(rdev); 466 rs400_gart_fini(rdev);
453 radeon_irq_kms_fini(rdev); 467 radeon_irq_kms_fini(rdev);
454 radeon_fence_driver_fini(rdev); 468 radeon_fence_driver_fini(rdev);
455 radeon_object_fini(rdev); 469 radeon_bo_fini(rdev);
456 radeon_atombios_fini(rdev); 470 radeon_atombios_fini(rdev);
457 kfree(rdev->bios); 471 kfree(rdev->bios);
458 rdev->bios = NULL; 472 rdev->bios = NULL;
@@ -490,18 +504,15 @@ int rs400_init(struct radeon_device *rdev)
490 RREG32(R_0007C0_CP_STAT)); 504 RREG32(R_0007C0_CP_STAT));
491 } 505 }
492 /* check if cards are posted or not */ 506 /* check if cards are posted or not */
493 if (!radeon_card_posted(rdev) && rdev->bios) { 507 if (radeon_boot_test_post_card(rdev) == false)
494 DRM_INFO("GPU not posted. posting now...\n"); 508 return -EINVAL;
495 radeon_combios_asic_init(rdev->ddev); 509
496 }
497 /* Initialize clocks */ 510 /* Initialize clocks */
498 radeon_get_clock_info(rdev->ddev); 511 radeon_get_clock_info(rdev->ddev);
499 /* Get vram informations */ 512 /* Initialize power management */
500 rs400_vram_info(rdev); 513 radeon_pm_init(rdev);
501 /* Initialize memory controller (also test AGP) */ 514 /* initialize memory controller */
502 r = rs400_mc_init(rdev); 515 rs400_mc_init(rdev);
503 if (r)
504 return r;
505 /* Fence driver */ 516 /* Fence driver */
506 r = radeon_fence_driver_init(rdev); 517 r = radeon_fence_driver_init(rdev);
507 if (r) 518 if (r)
@@ -510,7 +521,7 @@ int rs400_init(struct radeon_device *rdev)
510 if (r) 521 if (r)
511 return r; 522 return r;
512 /* Memory manager */ 523 /* Memory manager */
513 r = radeon_object_init(rdev); 524 r = radeon_bo_init(rdev);
514 if (r) 525 if (r)
515 return r; 526 return r;
516 r = rs400_gart_init(rdev); 527 r = rs400_gart_init(rdev);
@@ -522,7 +533,6 @@ int rs400_init(struct radeon_device *rdev)
522 if (r) { 533 if (r) {
523 /* Somethings want wront with the accel init stop accel */ 534 /* Somethings want wront with the accel init stop accel */
524 dev_err(rdev->dev, "Disabling GPU acceleration\n"); 535 dev_err(rdev->dev, "Disabling GPU acceleration\n");
525 rs400_suspend(rdev);
526 r100_cp_fini(rdev); 536 r100_cp_fini(rdev);
527 r100_wb_fini(rdev); 537 r100_wb_fini(rdev);
528 r100_ib_fini(rdev); 538 r100_ib_fini(rdev);
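
The rs400.c hunks above replace the shared r300_mc_wait_for_idle() call with a chip-specific rs400_mc_wait_for_idle() that polls MC_STATUS (offset 0x0150) for bit 2 under a bounded number of 1 us delays. A minimal stand-alone sketch of that poll-with-timeout pattern, assuming a hypothetical read32 accessor in place of the driver's RREG32():

#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define MC_STATUS_REG  0x0150        /* offset polled in the hunk above */
#define MC_IDLE_BIT    (1u << 2)

/*
 * Read the status register once per microsecond until the idle bit is
 * set or the budget runs out.  'read32' stands in for the driver's
 * RREG32() MMIO accessor.
 */
static int wait_for_mc_idle(uint32_t (*read32)(uint32_t off),
			    unsigned int timeout_us)
{
	unsigned int i;

	for (i = 0; i < timeout_us; i++) {
		if (read32(MC_STATUS_REG) & MC_IDLE_BIT)
			return 0;               /* MC is idle */
		usleep(1);                      /* driver uses DRM_UDELAY(1) */
	}
	return -1;                              /* timed out */
}

/* Fake register file for demonstration: reports idle on the third read. */
static uint32_t fake_read32(uint32_t off)
{
	static int reads;

	(void)off;
	return ++reads >= 3 ? MC_IDLE_BIT : 0;
}

int main(void)
{
	printf("wait_for_mc_idle: %d\n", wait_for_mc_idle(fake_read32, 100));
	return 0;
}
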
diff --git a/drivers/gpu/drm/radeon/rs600.c b/drivers/gpu/drm/radeon/rs600.c
index 5f117cd8736a..a81bc7a21e14 100644
--- a/drivers/gpu/drm/radeon/rs600.c
+++ b/drivers/gpu/drm/radeon/rs600.c
@@ -37,6 +37,7 @@
37 */ 37 */
38#include "drmP.h" 38#include "drmP.h"
39#include "radeon.h" 39#include "radeon.h"
40#include "radeon_asic.h"
40#include "atom.h" 41#include "atom.h"
41#include "rs600d.h" 42#include "rs600d.h"
42 43
@@ -45,6 +46,107 @@
45void rs600_gpu_init(struct radeon_device *rdev); 46void rs600_gpu_init(struct radeon_device *rdev);
46int rs600_mc_wait_for_idle(struct radeon_device *rdev); 47int rs600_mc_wait_for_idle(struct radeon_device *rdev);
47 48
49/* hpd for digital panel detect/disconnect */
50bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
51{
52 u32 tmp;
53 bool connected = false;
54
55 switch (hpd) {
56 case RADEON_HPD_1:
57 tmp = RREG32(R_007D04_DC_HOT_PLUG_DETECT1_INT_STATUS);
58 if (G_007D04_DC_HOT_PLUG_DETECT1_SENSE(tmp))
59 connected = true;
60 break;
61 case RADEON_HPD_2:
62 tmp = RREG32(R_007D14_DC_HOT_PLUG_DETECT2_INT_STATUS);
63 if (G_007D14_DC_HOT_PLUG_DETECT2_SENSE(tmp))
64 connected = true;
65 break;
66 default:
67 break;
68 }
69 return connected;
70}
71
72void rs600_hpd_set_polarity(struct radeon_device *rdev,
73 enum radeon_hpd_id hpd)
74{
75 u32 tmp;
76 bool connected = rs600_hpd_sense(rdev, hpd);
77
78 switch (hpd) {
79 case RADEON_HPD_1:
80 tmp = RREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL);
81 if (connected)
82 tmp &= ~S_007D08_DC_HOT_PLUG_DETECT1_INT_POLARITY(1);
83 else
84 tmp |= S_007D08_DC_HOT_PLUG_DETECT1_INT_POLARITY(1);
85 WREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
86 break;
87 case RADEON_HPD_2:
88 tmp = RREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL);
89 if (connected)
90 tmp &= ~S_007D18_DC_HOT_PLUG_DETECT2_INT_POLARITY(1);
91 else
92 tmp |= S_007D18_DC_HOT_PLUG_DETECT2_INT_POLARITY(1);
93 WREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
94 break;
95 default:
96 break;
97 }
98}
99
100void rs600_hpd_init(struct radeon_device *rdev)
101{
102 struct drm_device *dev = rdev->ddev;
103 struct drm_connector *connector;
104
105 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
106 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
107 switch (radeon_connector->hpd.hpd) {
108 case RADEON_HPD_1:
109 WREG32(R_007D00_DC_HOT_PLUG_DETECT1_CONTROL,
110 S_007D00_DC_HOT_PLUG_DETECT1_EN(1));
111 rdev->irq.hpd[0] = true;
112 break;
113 case RADEON_HPD_2:
114 WREG32(R_007D10_DC_HOT_PLUG_DETECT2_CONTROL,
115 S_007D10_DC_HOT_PLUG_DETECT2_EN(1));
116 rdev->irq.hpd[1] = true;
117 break;
118 default:
119 break;
120 }
121 }
122 if (rdev->irq.installed)
123 rs600_irq_set(rdev);
124}
125
126void rs600_hpd_fini(struct radeon_device *rdev)
127{
128 struct drm_device *dev = rdev->ddev;
129 struct drm_connector *connector;
130
131 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
132 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
133 switch (radeon_connector->hpd.hpd) {
134 case RADEON_HPD_1:
135 WREG32(R_007D00_DC_HOT_PLUG_DETECT1_CONTROL,
136 S_007D00_DC_HOT_PLUG_DETECT1_EN(0));
137 rdev->irq.hpd[0] = false;
138 break;
139 case RADEON_HPD_2:
140 WREG32(R_007D10_DC_HOT_PLUG_DETECT2_CONTROL,
141 S_007D10_DC_HOT_PLUG_DETECT2_EN(0));
142 rdev->irq.hpd[1] = false;
143 break;
144 default:
145 break;
146 }
147 }
148}
149
48/* 150/*
49 * GART. 151 * GART.
50 */ 152 */
@@ -57,7 +159,7 @@ void rs600_gart_tlb_flush(struct radeon_device *rdev)
57 WREG32_MC(R_000100_MC_PT0_CNTL, tmp); 159 WREG32_MC(R_000100_MC_PT0_CNTL, tmp);
58 160
59 tmp = RREG32_MC(R_000100_MC_PT0_CNTL); 161 tmp = RREG32_MC(R_000100_MC_PT0_CNTL);
60 tmp |= S_000100_INVALIDATE_ALL_L1_TLBS(1) & S_000100_INVALIDATE_L2_CACHE(1); 162 tmp |= S_000100_INVALIDATE_ALL_L1_TLBS(1) | S_000100_INVALIDATE_L2_CACHE(1);
61 WREG32_MC(R_000100_MC_PT0_CNTL, tmp); 163 WREG32_MC(R_000100_MC_PT0_CNTL, tmp);
62 164
63 tmp = RREG32_MC(R_000100_MC_PT0_CNTL); 165 tmp = RREG32_MC(R_000100_MC_PT0_CNTL);
@@ -95,45 +197,46 @@ int rs600_gart_enable(struct radeon_device *rdev)
95 r = radeon_gart_table_vram_pin(rdev); 197 r = radeon_gart_table_vram_pin(rdev);
96 if (r) 198 if (r)
97 return r; 199 return r;
200 radeon_gart_restore(rdev);
98 /* Enable bus master */ 201 /* Enable bus master */
99 tmp = RREG32(R_00004C_BUS_CNTL) & C_00004C_BUS_MASTER_DIS; 202 tmp = RREG32(R_00004C_BUS_CNTL) & C_00004C_BUS_MASTER_DIS;
100 WREG32(R_00004C_BUS_CNTL, tmp); 203 WREG32(R_00004C_BUS_CNTL, tmp);
101 /* FIXME: setup default page */ 204 /* FIXME: setup default page */
102 WREG32_MC(R_000100_MC_PT0_CNTL, 205 WREG32_MC(R_000100_MC_PT0_CNTL,
103 (S_000100_EFFECTIVE_L2_CACHE_SIZE(6) | 206 (S_000100_EFFECTIVE_L2_CACHE_SIZE(6) |
104 S_000100_EFFECTIVE_L2_QUEUE_SIZE(6))); 207 S_000100_EFFECTIVE_L2_QUEUE_SIZE(6)));
208
105 for (i = 0; i < 19; i++) { 209 for (i = 0; i < 19; i++) {
106 WREG32_MC(R_00016C_MC_PT0_CLIENT0_CNTL + i, 210 WREG32_MC(R_00016C_MC_PT0_CLIENT0_CNTL + i,
107 S_00016C_ENABLE_TRANSLATION_MODE_OVERRIDE(1) | 211 S_00016C_ENABLE_TRANSLATION_MODE_OVERRIDE(1) |
108 S_00016C_SYSTEM_ACCESS_MODE_MASK( 212 S_00016C_SYSTEM_ACCESS_MODE_MASK(
109 V_00016C_SYSTEM_ACCESS_MODE_IN_SYS) | 213 V_00016C_SYSTEM_ACCESS_MODE_NOT_IN_SYS) |
110 S_00016C_SYSTEM_APERTURE_UNMAPPED_ACCESS( 214 S_00016C_SYSTEM_APERTURE_UNMAPPED_ACCESS(
111 V_00016C_SYSTEM_APERTURE_UNMAPPED_DEFAULT_PAGE) | 215 V_00016C_SYSTEM_APERTURE_UNMAPPED_PASSTHROUGH) |
112 S_00016C_EFFECTIVE_L1_CACHE_SIZE(1) | 216 S_00016C_EFFECTIVE_L1_CACHE_SIZE(3) |
113 S_00016C_ENABLE_FRAGMENT_PROCESSING(1) | 217 S_00016C_ENABLE_FRAGMENT_PROCESSING(1) |
114 S_00016C_EFFECTIVE_L1_QUEUE_SIZE(1)); 218 S_00016C_EFFECTIVE_L1_QUEUE_SIZE(3));
115 } 219 }
116
117 /* System context map to GART space */
118 WREG32_MC(R_000112_MC_PT0_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.gtt_start);
119 WREG32_MC(R_000114_MC_PT0_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.gtt_end);
120
121 /* enable first context */ 220 /* enable first context */
122 WREG32_MC(R_00013C_MC_PT0_CONTEXT0_FLAT_START_ADDR, rdev->mc.gtt_start);
123 WREG32_MC(R_00014C_MC_PT0_CONTEXT0_FLAT_END_ADDR, rdev->mc.gtt_end);
124 WREG32_MC(R_000102_MC_PT0_CONTEXT0_CNTL, 221 WREG32_MC(R_000102_MC_PT0_CONTEXT0_CNTL,
125 S_000102_ENABLE_PAGE_TABLE(1) | 222 S_000102_ENABLE_PAGE_TABLE(1) |
126 S_000102_PAGE_TABLE_DEPTH(V_000102_PAGE_TABLE_FLAT)); 223 S_000102_PAGE_TABLE_DEPTH(V_000102_PAGE_TABLE_FLAT));
224
127 /* disable all other contexts */ 225 /* disable all other contexts */
128 for (i = 1; i < 8; i++) { 226 for (i = 1; i < 8; i++)
129 WREG32_MC(R_000102_MC_PT0_CONTEXT0_CNTL + i, 0); 227 WREG32_MC(R_000102_MC_PT0_CONTEXT0_CNTL + i, 0);
130 }
131 228
132 /* setup the page table */ 229 /* setup the page table */
133 WREG32_MC(R_00012C_MC_PT0_CONTEXT0_FLAT_BASE_ADDR, 230 WREG32_MC(R_00012C_MC_PT0_CONTEXT0_FLAT_BASE_ADDR,
134 rdev->gart.table_addr); 231 rdev->gart.table_addr);
232 WREG32_MC(R_00013C_MC_PT0_CONTEXT0_FLAT_START_ADDR, rdev->mc.gtt_start);
233 WREG32_MC(R_00014C_MC_PT0_CONTEXT0_FLAT_END_ADDR, rdev->mc.gtt_end);
135 WREG32_MC(R_00011C_MC_PT0_CONTEXT0_DEFAULT_READ_ADDR, 0); 234 WREG32_MC(R_00011C_MC_PT0_CONTEXT0_DEFAULT_READ_ADDR, 0);
136 235
236 /* System context maps to VRAM space */
237 WREG32_MC(R_000112_MC_PT0_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start);
238 WREG32_MC(R_000114_MC_PT0_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end);
239
137 /* enable page tables */ 240 /* enable page tables */
138 tmp = RREG32_MC(R_000100_MC_PT0_CNTL); 241 tmp = RREG32_MC(R_000100_MC_PT0_CNTL);
139 WREG32_MC(R_000100_MC_PT0_CNTL, (tmp | S_000100_ENABLE_PT(1))); 242 WREG32_MC(R_000100_MC_PT0_CNTL, (tmp | S_000100_ENABLE_PT(1)));
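With the flat context programmed above, the window between CONTEXT0_FLAT_START_ADDR and CONTEXT0_FLAT_END_ADDR is backed by one table entry per GPU page starting at CONTEXT0_FLAT_BASE_ADDR. A standalone sketch of the index arithmetic; the 4 KiB page granularity, the 64-bit entry size and all values below are assumptions for illustration:

#include <stdio.h>
#include <stdint.h>

#define GPU_PAGE_SIZE 4096u     /* assumed GART page granularity     */
#define PTE_SIZE      8u        /* assumed 64-bit page table entries */

int main(void)
{
        uint64_t gtt_start  = 0x20000000;       /* stand-in for FLAT_START_ADDR */
        uint64_t table_base = 0x00040000;       /* stand-in for FLAT_BASE_ADDR  */
        uint64_t gpu_addr   = 0x20003000;       /* an address inside the window */

        uint64_t index    = (gpu_addr - gtt_start) / GPU_PAGE_SIZE;
        uint64_t pte_addr = table_base + index * PTE_SIZE;

        printf("GPU address 0x%llx -> entry %llu at 0x%llx\n",
               (unsigned long long)gpu_addr,
               (unsigned long long)index,
               (unsigned long long)pte_addr);
        return 0;
}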
@@ -146,23 +249,28 @@ int rs600_gart_enable(struct radeon_device *rdev)
146 249
147void rs600_gart_disable(struct radeon_device *rdev) 250void rs600_gart_disable(struct radeon_device *rdev)
148{ 251{
149 uint32_t tmp; 252 u32 tmp;
253 int r;
150 254
151 /* FIXME: disable out of gart access */ 255 /* FIXME: disable out of gart access */
152 WREG32_MC(R_000100_MC_PT0_CNTL, 0); 256 WREG32_MC(R_000100_MC_PT0_CNTL, 0);
153 tmp = RREG32_MC(R_000009_MC_CNTL1); 257 tmp = RREG32_MC(R_000009_MC_CNTL1);
154 WREG32_MC(R_000009_MC_CNTL1, tmp & C_000009_ENABLE_PAGE_TABLES); 258 WREG32_MC(R_000009_MC_CNTL1, tmp & C_000009_ENABLE_PAGE_TABLES);
155 if (rdev->gart.table.vram.robj) { 259 if (rdev->gart.table.vram.robj) {
156 radeon_object_kunmap(rdev->gart.table.vram.robj); 260 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
157 radeon_object_unpin(rdev->gart.table.vram.robj); 261 if (r == 0) {
262 radeon_bo_kunmap(rdev->gart.table.vram.robj);
263 radeon_bo_unpin(rdev->gart.table.vram.robj);
264 radeon_bo_unreserve(rdev->gart.table.vram.robj);
265 }
158 } 266 }
159} 267}
160 268
161void rs600_gart_fini(struct radeon_device *rdev) 269void rs600_gart_fini(struct radeon_device *rdev)
162{ 270{
271 radeon_gart_fini(rdev);
163 rs600_gart_disable(rdev); 272 rs600_gart_disable(rdev);
164 radeon_gart_table_vram_free(rdev); 273 radeon_gart_table_vram_free(rdev);
165 radeon_gart_fini(rdev);
166} 274}
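The disable path above now follows the radeon_bo reserve/act/unreserve discipline instead of the old radeon_object calls, and it checks the reserve result because reservation can fail (for instance when interrupted by a signal). A hedged sketch of the same sequence packaged as a helper; it assumes exactly the radeon_bo_* calls used in the hunk above and only makes sense inside the driver:

/* Sketch only: release a pinned, kmapped table object. */
static void example_release_table(struct radeon_bo *robj)
{
        int r;

        if (robj == NULL)
                return;
        r = radeon_bo_reserve(robj, false);     /* can fail, e.g. on a signal */
        if (r == 0) {
                radeon_bo_kunmap(robj);         /* drop the CPU mapping       */
                radeon_bo_unpin(robj);          /* make it evictable again    */
                radeon_bo_unreserve(robj);      /* always pair with reserve   */
        }
}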
167 275
168#define R600_PTE_VALID (1 << 0) 276#define R600_PTE_VALID (1 << 0)
@@ -189,7 +297,16 @@ int rs600_irq_set(struct radeon_device *rdev)
189{ 297{
190 uint32_t tmp = 0; 298 uint32_t tmp = 0;
191 uint32_t mode_int = 0; 299 uint32_t mode_int = 0;
192 300 u32 hpd1 = RREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL) &
301 ~S_007D08_DC_HOT_PLUG_DETECT1_INT_EN(1);
302 u32 hpd2 = RREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL) &
303 ~S_007D18_DC_HOT_PLUG_DETECT2_INT_EN(1);
304
305 if (!rdev->irq.installed) {
306 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
307 WREG32(R_000040_GEN_INT_CNTL, 0);
308 return -EINVAL;
309 }
193 if (rdev->irq.sw_int) { 310 if (rdev->irq.sw_int) {
194 tmp |= S_000040_SW_INT_EN(1); 311 tmp |= S_000040_SW_INT_EN(1);
195 } 312 }
@@ -199,8 +316,16 @@ int rs600_irq_set(struct radeon_device *rdev)
199 if (rdev->irq.crtc_vblank_int[1]) { 316 if (rdev->irq.crtc_vblank_int[1]) {
200 mode_int |= S_006540_D2MODE_VBLANK_INT_MASK(1); 317 mode_int |= S_006540_D2MODE_VBLANK_INT_MASK(1);
201 } 318 }
319 if (rdev->irq.hpd[0]) {
320 hpd1 |= S_007D08_DC_HOT_PLUG_DETECT1_INT_EN(1);
321 }
322 if (rdev->irq.hpd[1]) {
323 hpd2 |= S_007D18_DC_HOT_PLUG_DETECT2_INT_EN(1);
324 }
202 WREG32(R_000040_GEN_INT_CNTL, tmp); 325 WREG32(R_000040_GEN_INT_CNTL, tmp);
203 WREG32(R_006540_DxMODE_INT_MASK, mode_int); 326 WREG32(R_006540_DxMODE_INT_MASK, mode_int);
327 WREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
328 WREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
204 return 0; 329 return 0;
205} 330}
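After this change rs600_irq_set() is the single place where interrupt sources are armed: callers flip the bookkeeping flags in rdev->irq and re-run it, exactly as rs600_hpd_init() earlier in this diff does. A brief sketch of that calling convention (the helper name is invented):

/* Sketch only: arm the hotplug interrupt for connector pin 0. */
static void example_arm_hpd0(struct radeon_device *rdev)
{
        rdev->irq.hpd[0] = true;        /* request the source...             */
        if (rdev->irq.installed)
                rs600_irq_set(rdev);    /* ...and let rs600_irq_set() arm it */
}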
206 331
@@ -208,6 +333,7 @@ static inline uint32_t rs600_irq_ack(struct radeon_device *rdev, u32 *r500_disp_
208{ 333{
209 uint32_t irqs = RREG32(R_000044_GEN_INT_STATUS); 334 uint32_t irqs = RREG32(R_000044_GEN_INT_STATUS);
210 uint32_t irq_mask = ~C_000044_SW_INT; 335 uint32_t irq_mask = ~C_000044_SW_INT;
336 u32 tmp;
211 337
212 if (G_000044_DISPLAY_INT_STAT(irqs)) { 338 if (G_000044_DISPLAY_INT_STAT(irqs)) {
213 *r500_disp_int = RREG32(R_007EDC_DISP_INTERRUPT_STATUS); 339 *r500_disp_int = RREG32(R_007EDC_DISP_INTERRUPT_STATUS);
@@ -219,6 +345,16 @@ static inline uint32_t rs600_irq_ack(struct radeon_device *rdev, u32 *r500_disp_
219 WREG32(R_006D34_D2MODE_VBLANK_STATUS, 345 WREG32(R_006D34_D2MODE_VBLANK_STATUS,
220 S_006D34_D2MODE_VBLANK_ACK(1)); 346 S_006D34_D2MODE_VBLANK_ACK(1));
221 } 347 }
348 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(*r500_disp_int)) {
349 tmp = RREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL);
350 tmp |= S_007D08_DC_HOT_PLUG_DETECT1_INT_ACK(1);
351 WREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
352 }
353 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(*r500_disp_int)) {
354 tmp = RREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL);
355 tmp |= S_007D18_DC_HOT_PLUG_DETECT2_INT_ACK(1);
356 WREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
357 }
222 } else { 358 } else {
223 *r500_disp_int = 0; 359 *r500_disp_int = 0;
224 } 360 }
@@ -244,6 +380,7 @@ int rs600_irq_process(struct radeon_device *rdev)
244{ 380{
245 uint32_t status, msi_rearm; 381 uint32_t status, msi_rearm;
246 uint32_t r500_disp_int; 382 uint32_t r500_disp_int;
383 bool queue_hotplug = false;
247 384
248 status = rs600_irq_ack(rdev, &r500_disp_int); 385 status = rs600_irq_ack(rdev, &r500_disp_int);
249 if (!status && !r500_disp_int) { 386 if (!status && !r500_disp_int) {
@@ -251,15 +388,31 @@ int rs600_irq_process(struct radeon_device *rdev)
251 } 388 }
252 while (status || r500_disp_int) { 389 while (status || r500_disp_int) {
253 /* SW interrupt */ 390 /* SW interrupt */
254 if (G_000040_SW_INT_EN(status)) 391 if (G_000044_SW_INT(status))
255 radeon_fence_process(rdev); 392 radeon_fence_process(rdev);
256 /* Vertical blank interrupts */ 393 /* Vertical blank interrupts */
257 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(r500_disp_int)) 394 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(r500_disp_int)) {
258 drm_handle_vblank(rdev->ddev, 0); 395 drm_handle_vblank(rdev->ddev, 0);
259 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(r500_disp_int)) 396 rdev->pm.vblank_sync = true;
397 wake_up(&rdev->irq.vblank_queue);
398 }
399 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(r500_disp_int)) {
260 drm_handle_vblank(rdev->ddev, 1); 400 drm_handle_vblank(rdev->ddev, 1);
401 rdev->pm.vblank_sync = true;
402 wake_up(&rdev->irq.vblank_queue);
403 }
404 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(r500_disp_int)) {
405 queue_hotplug = true;
406 DRM_DEBUG("HPD1\n");
407 }
408 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(r500_disp_int)) {
409 queue_hotplug = true;
410 DRM_DEBUG("HPD2\n");
411 }
261 status = rs600_irq_ack(rdev, &r500_disp_int); 412 status = rs600_irq_ack(rdev, &r500_disp_int);
262 } 413 }
414 if (queue_hotplug)
415 queue_work(rdev->wq, &rdev->hotplug_work);
263 if (rdev->msi_enabled) { 416 if (rdev->msi_enabled) {
264 switch (rdev->family) { 417 switch (rdev->family) {
265 case CHIP_RS600: 418 case CHIP_RS600:
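The switch from G_000040_SW_INT_EN(status) to G_000044_SW_INT(status) above is about decoding a value with the accessor that belongs to the register it was read from: status comes from GEN_INT_STATUS (0x44), while the _EN accessor describes GEN_INT_CNTL (0x40) and would only work by accident if the two bits happened to line up. A contrived standalone illustration with deliberately different, made-up bit positions:

#include <stdio.h>

/* Made-up layouts: enable bit and status bit at different positions. */
#define G_CNTL_SW_INT_EN(x)     (((x) >> 25) & 0x1)     /* GEN_INT_CNTL style   */
#define G_STATUS_SW_INT(x)      (((x) >> 24) & 0x1)     /* GEN_INT_STATUS style */

int main(void)
{
        unsigned int status = 1u << 24; /* SW interrupt pending in the STATUS word */

        printf("wrong accessor sees: %u\n", G_CNTL_SW_INT_EN(status));  /* 0 */
        printf("right accessor sees: %u\n", G_STATUS_SW_INT(status));   /* 1 */
        return 0;
}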
@@ -301,25 +454,59 @@ int rs600_mc_wait_for_idle(struct radeon_device *rdev)
301 454
302void rs600_gpu_init(struct radeon_device *rdev) 455void rs600_gpu_init(struct radeon_device *rdev)
303{ 456{
304 /* FIXME: HDP same place on rs600 ? */
305 r100_hdp_reset(rdev); 457 r100_hdp_reset(rdev);
306 /* FIXME: is this correct ? */
307 r420_pipes_init(rdev); 458 r420_pipes_init(rdev);
308 /* Wait for mc idle */ 459 /* Wait for mc idle */
309 if (rs600_mc_wait_for_idle(rdev)) 460 if (rs600_mc_wait_for_idle(rdev))
310 dev_warn(rdev->dev, "Wait MC idle timeout before updating MC.\n"); 461 dev_warn(rdev->dev, "Wait MC idle timeout before updating MC.\n");
311} 462}
312 463
313void rs600_vram_info(struct radeon_device *rdev) 464void rs600_mc_init(struct radeon_device *rdev)
314{ 465{
315 /* FIXME: to do or is these values sane ? */ 466 u64 base;
467
468 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
469 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
316 rdev->mc.vram_is_ddr = true; 470 rdev->mc.vram_is_ddr = true;
317 rdev->mc.vram_width = 128; 471 rdev->mc.vram_width = 128;
472 rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE);
473 rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
474 rdev->mc.visible_vram_size = rdev->mc.aper_size;
475 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
476 base = RREG32_MC(R_000004_MC_FB_LOCATION);
477 base = G_000004_MC_FB_START(base) << 16;
479 radeon_vram_location(rdev, &rdev->mc, base);
480 radeon_gtt_location(rdev, &rdev->mc);
481 radeon_update_bandwidth_info(rdev);
318} 482}
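rs600_mc_init() above derives the VRAM base from MC_FB_LOCATION: the start field is stored in 64 KiB units, which is why it is shifted left by 16 to become a byte address. A standalone sketch of the decode; the 16-bit field layout and the sample register value are assumptions for illustration only:

#include <stdio.h>
#include <stdint.h>

/* Assumed layout: MC_FB_START in bits 15:0, MC_FB_TOP in bits 31:16. */
#define G_000004_MC_FB_START(x) (((x) >> 0) & 0xFFFF)
#define G_000004_MC_FB_TOP(x)   (((x) >> 16) & 0xFFFF)

int main(void)
{
        uint32_t mc_fb_location = 0x00FF00C0;   /* made-up register value */
        uint64_t base = (uint64_t)G_000004_MC_FB_START(mc_fb_location) << 16;
        uint64_t top  = (uint64_t)G_000004_MC_FB_TOP(mc_fb_location) << 16;

        printf("VRAM window starts at 0x%llx, top field decodes to 0x%llx\n",
               (unsigned long long)base, (unsigned long long)top);
        return 0;
}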
319 483
320void rs600_bandwidth_update(struct radeon_device *rdev) 484void rs600_bandwidth_update(struct radeon_device *rdev)
321{ 485{
322 /* FIXME: implement, should this be like rs690 ? */ 486 struct drm_display_mode *mode0 = NULL;
487 struct drm_display_mode *mode1 = NULL;
488 u32 d1mode_priority_a_cnt, d2mode_priority_a_cnt;
489 /* FIXME: implement full support */
490
491 radeon_update_display_priority(rdev);
492
493 if (rdev->mode_info.crtcs[0]->base.enabled)
494 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
495 if (rdev->mode_info.crtcs[1]->base.enabled)
496 mode1 = &rdev->mode_info.crtcs[1]->base.mode;
497
498 rs690_line_buffer_adjust(rdev, mode0, mode1);
499
500 if (rdev->disp_priority == 2) {
501 d1mode_priority_a_cnt = RREG32(R_006548_D1MODE_PRIORITY_A_CNT);
502 d2mode_priority_a_cnt = RREG32(R_006D48_D2MODE_PRIORITY_A_CNT);
503 d1mode_priority_a_cnt |= S_006548_D1MODE_PRIORITY_A_ALWAYS_ON(1);
504 d2mode_priority_a_cnt |= S_006D48_D2MODE_PRIORITY_A_ALWAYS_ON(1);
505 WREG32(R_006548_D1MODE_PRIORITY_A_CNT, d1mode_priority_a_cnt);
506 WREG32(R_00654C_D1MODE_PRIORITY_B_CNT, d1mode_priority_a_cnt);
507 WREG32(R_006D48_D2MODE_PRIORITY_A_CNT, d2mode_priority_a_cnt);
508 WREG32(R_006D4C_D2MODE_PRIORITY_B_CNT, d2mode_priority_a_cnt);
509 }
323} 510}
324 511
325uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg) 512uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg)
@@ -388,8 +575,8 @@ static int rs600_startup(struct radeon_device *rdev)
388 if (r) 575 if (r)
389 return r; 576 return r;
390 /* Enable IRQ */ 577 /* Enable IRQ */
391 rdev->irq.sw_int = true;
392 rs600_irq_set(rdev); 578 rs600_irq_set(rdev);
579 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
393 /* 1M ring buffer */ 580 /* 1M ring buffer */
394 r = r100_cp_init(rdev, 1024 * 1024); 581 r = r100_cp_init(rdev, 1024 * 1024);
395 if (r) { 582 if (r) {
@@ -423,6 +610,8 @@ int rs600_resume(struct radeon_device *rdev)
423 atom_asic_init(rdev->mode_info.atom_context); 610 atom_asic_init(rdev->mode_info.atom_context);
424 /* Resume clock after posting */ 611 /* Resume clock after posting */
425 rv515_clock_startup(rdev); 612 rv515_clock_startup(rdev);
613 /* Initialize surface registers */
614 radeon_surface_init(rdev);
426 return rs600_startup(rdev); 615 return rs600_startup(rdev);
427} 616}
428 617
@@ -437,7 +626,7 @@ int rs600_suspend(struct radeon_device *rdev)
437 626
438void rs600_fini(struct radeon_device *rdev) 627void rs600_fini(struct radeon_device *rdev)
439{ 628{
440 rs600_suspend(rdev); 629 radeon_pm_fini(rdev);
441 r100_cp_fini(rdev); 630 r100_cp_fini(rdev);
442 r100_wb_fini(rdev); 631 r100_wb_fini(rdev);
443 r100_ib_fini(rdev); 632 r100_ib_fini(rdev);
@@ -445,7 +634,7 @@ void rs600_fini(struct radeon_device *rdev)
445 rs600_gart_fini(rdev); 634 rs600_gart_fini(rdev);
446 radeon_irq_kms_fini(rdev); 635 radeon_irq_kms_fini(rdev);
447 radeon_fence_driver_fini(rdev); 636 radeon_fence_driver_fini(rdev);
448 radeon_object_fini(rdev); 637 radeon_bo_fini(rdev);
449 radeon_atombios_fini(rdev); 638 radeon_atombios_fini(rdev);
450 kfree(rdev->bios); 639 kfree(rdev->bios);
451 rdev->bios = NULL; 640 rdev->bios = NULL;
@@ -482,20 +671,15 @@ int rs600_init(struct radeon_device *rdev)
482 RREG32(R_0007C0_CP_STAT)); 671 RREG32(R_0007C0_CP_STAT));
483 } 672 }
484 /* check if cards are posted or not */ 673 /* check if cards are posted or not */
485 if (!radeon_card_posted(rdev) && rdev->bios) { 674 if (radeon_boot_test_post_card(rdev) == false)
486 DRM_INFO("GPU not posted. posting now...\n"); 675 return -EINVAL;
487 atom_asic_init(rdev->mode_info.atom_context); 676
488 }
489 /* Initialize clocks */ 677 /* Initialize clocks */
490 radeon_get_clock_info(rdev->ddev); 678 radeon_get_clock_info(rdev->ddev);
491 /* Initialize power management */ 679 /* Initialize power management */
492 radeon_pm_init(rdev); 680 radeon_pm_init(rdev);
493 /* Get vram informations */ 681 /* initialize memory controller */
494 rs600_vram_info(rdev); 682 rs600_mc_init(rdev);
495 /* Initialize memory controller (also test AGP) */
496 r = r420_mc_init(rdev);
497 if (r)
498 return r;
499 rs600_debugfs(rdev); 683 rs600_debugfs(rdev);
500 /* Fence driver */ 684 /* Fence driver */
501 r = radeon_fence_driver_init(rdev); 685 r = radeon_fence_driver_init(rdev);
@@ -505,7 +689,7 @@ int rs600_init(struct radeon_device *rdev)
505 if (r) 689 if (r)
506 return r; 690 return r;
507 /* Memory manager */ 691 /* Memory manager */
508 r = radeon_object_init(rdev); 692 r = radeon_bo_init(rdev);
509 if (r) 693 if (r)
510 return r; 694 return r;
511 r = rs600_gart_init(rdev); 695 r = rs600_gart_init(rdev);
@@ -517,7 +701,6 @@ int rs600_init(struct radeon_device *rdev)
517 if (r) { 701 if (r) {
518 /* Something went wrong with the accel init, stop accel */ 702 /* Something went wrong with the accel init, stop accel */
519 dev_err(rdev->dev, "Disabling GPU acceleration\n"); 703 dev_err(rdev->dev, "Disabling GPU acceleration\n");
520 rs600_suspend(rdev);
521 r100_cp_fini(rdev); 704 r100_cp_fini(rdev);
522 r100_wb_fini(rdev); 705 r100_wb_fini(rdev);
523 r100_ib_fini(rdev); 706 r100_ib_fini(rdev);
diff --git a/drivers/gpu/drm/radeon/rs600d.h b/drivers/gpu/drm/radeon/rs600d.h
index 81308924859a..e52d2695510b 100644
--- a/drivers/gpu/drm/radeon/rs600d.h
+++ b/drivers/gpu/drm/radeon/rs600d.h
@@ -30,27 +30,12 @@
30 30
31/* Registers */ 31/* Registers */
32#define R_000040_GEN_INT_CNTL 0x000040 32#define R_000040_GEN_INT_CNTL 0x000040
33#define S_000040_DISPLAY_INT_STATUS(x) (((x) & 0x1) << 0) 33#define S_000040_SCRATCH_INT_MASK(x) (((x) & 0x1) << 18)
34#define G_000040_DISPLAY_INT_STATUS(x) (((x) >> 0) & 0x1) 34#define G_000040_SCRATCH_INT_MASK(x) (((x) >> 18) & 0x1)
35#define C_000040_DISPLAY_INT_STATUS 0xFFFFFFFE 35#define C_000040_SCRATCH_INT_MASK 0xFFFBFFFF
36#define S_000040_DMA_VIPH0_INT_EN(x) (((x) & 0x1) << 12) 36#define S_000040_GUI_IDLE_MASK(x) (((x) & 0x1) << 19)
37#define G_000040_DMA_VIPH0_INT_EN(x) (((x) >> 12) & 0x1) 37#define G_000040_GUI_IDLE_MASK(x) (((x) >> 19) & 0x1)
38#define C_000040_DMA_VIPH0_INT_EN 0xFFFFEFFF 38#define C_000040_GUI_IDLE_MASK 0xFFF7FFFF
39#define S_000040_CRTC2_VSYNC(x) (((x) & 0x1) << 6)
40#define G_000040_CRTC2_VSYNC(x) (((x) >> 6) & 0x1)
41#define C_000040_CRTC2_VSYNC 0xFFFFFFBF
42#define S_000040_SNAPSHOT2(x) (((x) & 0x1) << 7)
43#define G_000040_SNAPSHOT2(x) (((x) >> 7) & 0x1)
44#define C_000040_SNAPSHOT2 0xFFFFFF7F
45#define S_000040_CRTC2_VBLANK(x) (((x) & 0x1) << 9)
46#define G_000040_CRTC2_VBLANK(x) (((x) >> 9) & 0x1)
47#define C_000040_CRTC2_VBLANK 0xFFFFFDFF
48#define S_000040_FP2_DETECT(x) (((x) & 0x1) << 10)
49#define G_000040_FP2_DETECT(x) (((x) >> 10) & 0x1)
50#define C_000040_FP2_DETECT 0xFFFFFBFF
51#define S_000040_VSYNC_DIFF_OVER_LIMIT(x) (((x) & 0x1) << 11)
52#define G_000040_VSYNC_DIFF_OVER_LIMIT(x) (((x) >> 11) & 0x1)
53#define C_000040_VSYNC_DIFF_OVER_LIMIT 0xFFFFF7FF
54#define S_000040_DMA_VIPH1_INT_EN(x) (((x) & 0x1) << 13) 39#define S_000040_DMA_VIPH1_INT_EN(x) (((x) & 0x1) << 13)
55#define G_000040_DMA_VIPH1_INT_EN(x) (((x) >> 13) & 0x1) 40#define G_000040_DMA_VIPH1_INT_EN(x) (((x) >> 13) & 0x1)
56#define C_000040_DMA_VIPH1_INT_EN 0xFFFFDFFF 41#define C_000040_DMA_VIPH1_INT_EN 0xFFFFDFFF
@@ -370,7 +355,90 @@
370#define S_007EDC_LB_D2_VBLANK_INTERRUPT(x) (((x) & 0x1) << 5) 355#define S_007EDC_LB_D2_VBLANK_INTERRUPT(x) (((x) & 0x1) << 5)
371#define G_007EDC_LB_D2_VBLANK_INTERRUPT(x) (((x) >> 5) & 0x1) 356#define G_007EDC_LB_D2_VBLANK_INTERRUPT(x) (((x) >> 5) & 0x1)
372#define C_007EDC_LB_D2_VBLANK_INTERRUPT 0xFFFFFFDF 357#define C_007EDC_LB_D2_VBLANK_INTERRUPT 0xFFFFFFDF
373 358#define S_007EDC_DACA_AUTODETECT_INTERRUPT(x) (((x) & 0x1) << 16)
359#define G_007EDC_DACA_AUTODETECT_INTERRUPT(x) (((x) >> 16) & 0x1)
360#define C_007EDC_DACA_AUTODETECT_INTERRUPT 0xFFFEFFFF
361#define S_007EDC_DACB_AUTODETECT_INTERRUPT(x) (((x) & 0x1) << 17)
362#define G_007EDC_DACB_AUTODETECT_INTERRUPT(x) (((x) >> 17) & 0x1)
363#define C_007EDC_DACB_AUTODETECT_INTERRUPT 0xFFFDFFFF
364#define S_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(x) (((x) & 0x1) << 18)
365#define G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(x) (((x) >> 18) & 0x1)
366#define C_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT 0xFFFBFFFF
367#define S_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(x) (((x) & 0x1) << 19)
368#define G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(x) (((x) >> 19) & 0x1)
369#define C_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT 0xFFF7FFFF
370#define R_007828_DACA_AUTODETECT_CONTROL 0x007828
371#define S_007828_DACA_AUTODETECT_MODE(x) (((x) & 0x3) << 0)
372#define G_007828_DACA_AUTODETECT_MODE(x) (((x) >> 0) & 0x3)
373#define C_007828_DACA_AUTODETECT_MODE 0xFFFFFFFC
374#define S_007828_DACA_AUTODETECT_FRAME_TIME_COUNTER(x) (((x) & 0xff) << 8)
375#define G_007828_DACA_AUTODETECT_FRAME_TIME_COUNTER(x) (((x) >> 8) & 0xff)
376#define C_007828_DACA_AUTODETECT_FRAME_TIME_COUNTER 0xFFFF00FF
377#define S_007828_DACA_AUTODETECT_CHECK_MASK(x) (((x) & 0x3) << 16)
378#define G_007828_DACA_AUTODETECT_CHECK_MASK(x) (((x) >> 16) & 0x3)
379#define C_007828_DACA_AUTODETECT_CHECK_MASK 0xFFFCFFFF
380#define R_007838_DACA_AUTODETECT_INT_CONTROL 0x007838
381#define S_007838_DACA_AUTODETECT_ACK(x) (((x) & 0x1) << 0)
382#define C_007838_DACA_DACA_AUTODETECT_ACK 0xFFFFFFFE
383#define S_007838_DACA_AUTODETECT_INT_ENABLE(x) (((x) & 0x1) << 16)
384#define G_007838_DACA_AUTODETECT_INT_ENABLE(x) (((x) >> 16) & 0x1)
385#define C_007838_DACA_AUTODETECT_INT_ENABLE 0xFFFCFFFF
386#define R_007A28_DACB_AUTODETECT_CONTROL 0x007A28
387#define S_007A28_DACB_AUTODETECT_MODE(x) (((x) & 0x3) << 0)
388#define G_007A28_DACB_AUTODETECT_MODE(x) (((x) >> 0) & 0x3)
389#define C_007A28_DACB_AUTODETECT_MODE 0xFFFFFFFC
390#define S_007A28_DACB_AUTODETECT_FRAME_TIME_COUNTER(x) (((x) & 0xff) << 8)
391#define G_007A28_DACB_AUTODETECT_FRAME_TIME_COUNTER(x) (((x) >> 8) & 0xff)
392#define C_007A28_DACB_AUTODETECT_FRAME_TIME_COUNTER 0xFFFF00FF
393#define S_007A28_DACB_AUTODETECT_CHECK_MASK(x) (((x) & 0x3) << 16)
394#define G_007A28_DACB_AUTODETECT_CHECK_MASK(x) (((x) >> 16) & 0x3)
395#define C_007A28_DACB_AUTODETECT_CHECK_MASK 0xFFFCFFFF
396#define R_007A38_DACB_AUTODETECT_INT_CONTROL 0x007A38
397#define S_007A38_DACB_AUTODETECT_ACK(x) (((x) & 0x1) << 0)
398#define C_007A38_DACB_DACA_AUTODETECT_ACK 0xFFFFFFFE
399#define S_007A38_DACB_AUTODETECT_INT_ENABLE(x) (((x) & 0x1) << 16)
400#define G_007A38_DACB_AUTODETECT_INT_ENABLE(x) (((x) >> 16) & 0x1)
401#define C_007A38_DACB_AUTODETECT_INT_ENABLE 0xFFFCFFFF
402#define R_007D00_DC_HOT_PLUG_DETECT1_CONTROL 0x007D00
403#define S_007D00_DC_HOT_PLUG_DETECT1_EN(x) (((x) & 0x1) << 0)
404#define G_007D00_DC_HOT_PLUG_DETECT1_EN(x) (((x) >> 0) & 0x1)
405#define C_007D00_DC_HOT_PLUG_DETECT1_EN 0xFFFFFFFE
406#define R_007D04_DC_HOT_PLUG_DETECT1_INT_STATUS 0x007D04
407#define S_007D04_DC_HOT_PLUG_DETECT1_INT_STATUS(x) (((x) & 0x1) << 0)
408#define G_007D04_DC_HOT_PLUG_DETECT1_INT_STATUS(x) (((x) >> 0) & 0x1)
409#define C_007D04_DC_HOT_PLUG_DETECT1_INT_STATUS 0xFFFFFFFE
410#define S_007D04_DC_HOT_PLUG_DETECT1_SENSE(x) (((x) & 0x1) << 1)
411#define G_007D04_DC_HOT_PLUG_DETECT1_SENSE(x) (((x) >> 1) & 0x1)
412#define C_007D04_DC_HOT_PLUG_DETECT1_SENSE 0xFFFFFFFD
413#define R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL 0x007D08
414#define S_007D08_DC_HOT_PLUG_DETECT1_INT_ACK(x) (((x) & 0x1) << 0)
415#define C_007D08_DC_HOT_PLUG_DETECT1_INT_ACK 0xFFFFFFFE
416#define S_007D08_DC_HOT_PLUG_DETECT1_INT_POLARITY(x) (((x) & 0x1) << 8)
417#define G_007D08_DC_HOT_PLUG_DETECT1_INT_POLARITY(x) (((x) >> 8) & 0x1)
418#define C_007D08_DC_HOT_PLUG_DETECT1_INT_POLARITY 0xFFFFFEFF
419#define S_007D08_DC_HOT_PLUG_DETECT1_INT_EN(x) (((x) & 0x1) << 16)
420#define G_007D08_DC_HOT_PLUG_DETECT1_INT_EN(x) (((x) >> 16) & 0x1)
421#define C_007D08_DC_HOT_PLUG_DETECT1_INT_EN 0xFFFEFFFF
422#define R_007D10_DC_HOT_PLUG_DETECT2_CONTROL 0x007D10
423#define S_007D10_DC_HOT_PLUG_DETECT2_EN(x) (((x) & 0x1) << 0)
424#define G_007D10_DC_HOT_PLUG_DETECT2_EN(x) (((x) >> 0) & 0x1)
425#define C_007D10_DC_HOT_PLUG_DETECT2_EN 0xFFFFFFFE
426#define R_007D14_DC_HOT_PLUG_DETECT2_INT_STATUS 0x007D14
427#define S_007D14_DC_HOT_PLUG_DETECT2_INT_STATUS(x) (((x) & 0x1) << 0)
428#define G_007D14_DC_HOT_PLUG_DETECT2_INT_STATUS(x) (((x) >> 0) & 0x1)
429#define C_007D14_DC_HOT_PLUG_DETECT2_INT_STATUS 0xFFFFFFFE
430#define S_007D14_DC_HOT_PLUG_DETECT2_SENSE(x) (((x) & 0x1) << 1)
431#define G_007D14_DC_HOT_PLUG_DETECT2_SENSE(x) (((x) >> 1) & 0x1)
432#define C_007D14_DC_HOT_PLUG_DETECT2_SENSE 0xFFFFFFFD
433#define R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL 0x007D18
434#define S_007D18_DC_HOT_PLUG_DETECT2_INT_ACK(x) (((x) & 0x1) << 0)
435#define C_007D18_DC_HOT_PLUG_DETECT2_INT_ACK 0xFFFFFFFE
436#define S_007D18_DC_HOT_PLUG_DETECT2_INT_POLARITY(x) (((x) & 0x1) << 8)
437#define G_007D18_DC_HOT_PLUG_DETECT2_INT_POLARITY(x) (((x) >> 8) & 0x1)
438#define C_007D18_DC_HOT_PLUG_DETECT2_INT_POLARITY 0xFFFFFEFF
439#define S_007D18_DC_HOT_PLUG_DETECT2_INT_EN(x) (((x) & 0x1) << 16)
440#define G_007D18_DC_HOT_PLUG_DETECT2_INT_EN(x) (((x) >> 16) & 0x1)
441#define C_007D18_DC_HOT_PLUG_DETECT2_INT_EN 0xFFFEFFFF
374 442
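These DC_HOT_PLUG_DETECT* definitions are what the rs600 HPD code earlier in the diff is built on. As one more illustration, the SENSE bit of the status register is how a driver would poll whether anything is attached; the sketch below assumes the driver's RREG32 helper and is not meant as the actual rs600 implementation:

/* Sketch only: poll whether something is attached to hotplug pin 1. */
static bool example_hpd1_sense(struct radeon_device *rdev)
{
        u32 tmp = RREG32(R_007D04_DC_HOT_PLUG_DETECT1_INT_STATUS);

        return G_007D04_DC_HOT_PLUG_DETECT1_SENSE(tmp) ? true : false;
}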
375/* MC registers */ 443/* MC registers */
376#define R_000000_MC_STATUS 0x000000 444#define R_000000_MC_STATUS 0x000000
@@ -467,4 +535,57 @@
467#define G_00016C_INVALIDATE_L1_TLB(x) (((x) >> 20) & 0x1) 535#define G_00016C_INVALIDATE_L1_TLB(x) (((x) >> 20) & 0x1)
468#define C_00016C_INVALIDATE_L1_TLB 0xFFEFFFFF 536#define C_00016C_INVALIDATE_L1_TLB 0xFFEFFFFF
469 537
538#define R_006548_D1MODE_PRIORITY_A_CNT 0x006548
539#define S_006548_D1MODE_PRIORITY_MARK_A(x) (((x) & 0x7FFF) << 0)
540#define G_006548_D1MODE_PRIORITY_MARK_A(x) (((x) >> 0) & 0x7FFF)
541#define C_006548_D1MODE_PRIORITY_MARK_A 0xFFFF8000
542#define S_006548_D1MODE_PRIORITY_A_OFF(x) (((x) & 0x1) << 16)
543#define G_006548_D1MODE_PRIORITY_A_OFF(x) (((x) >> 16) & 0x1)
544#define C_006548_D1MODE_PRIORITY_A_OFF 0xFFFEFFFF
545#define S_006548_D1MODE_PRIORITY_A_ALWAYS_ON(x) (((x) & 0x1) << 20)
546#define G_006548_D1MODE_PRIORITY_A_ALWAYS_ON(x) (((x) >> 20) & 0x1)
547#define C_006548_D1MODE_PRIORITY_A_ALWAYS_ON 0xFFEFFFFF
548#define S_006548_D1MODE_PRIORITY_A_FORCE_MASK(x) (((x) & 0x1) << 24)
549#define G_006548_D1MODE_PRIORITY_A_FORCE_MASK(x) (((x) >> 24) & 0x1)
550#define C_006548_D1MODE_PRIORITY_A_FORCE_MASK 0xFEFFFFFF
551#define R_00654C_D1MODE_PRIORITY_B_CNT 0x00654C
552#define S_00654C_D1MODE_PRIORITY_MARK_B(x) (((x) & 0x7FFF) << 0)
553#define G_00654C_D1MODE_PRIORITY_MARK_B(x) (((x) >> 0) & 0x7FFF)
554#define C_00654C_D1MODE_PRIORITY_MARK_B 0xFFFF8000
555#define S_00654C_D1MODE_PRIORITY_B_OFF(x) (((x) & 0x1) << 16)
556#define G_00654C_D1MODE_PRIORITY_B_OFF(x) (((x) >> 16) & 0x1)
557#define C_00654C_D1MODE_PRIORITY_B_OFF 0xFFFEFFFF
558#define S_00654C_D1MODE_PRIORITY_B_ALWAYS_ON(x) (((x) & 0x1) << 20)
559#define G_00654C_D1MODE_PRIORITY_B_ALWAYS_ON(x) (((x) >> 20) & 0x1)
560#define C_00654C_D1MODE_PRIORITY_B_ALWAYS_ON 0xFFEFFFFF
561#define S_00654C_D1MODE_PRIORITY_B_FORCE_MASK(x) (((x) & 0x1) << 24)
562#define G_00654C_D1MODE_PRIORITY_B_FORCE_MASK(x) (((x) >> 24) & 0x1)
563#define C_00654C_D1MODE_PRIORITY_B_FORCE_MASK 0xFEFFFFFF
564#define R_006D48_D2MODE_PRIORITY_A_CNT 0x006D48
565#define S_006D48_D2MODE_PRIORITY_MARK_A(x) (((x) & 0x7FFF) << 0)
566#define G_006D48_D2MODE_PRIORITY_MARK_A(x) (((x) >> 0) & 0x7FFF)
567#define C_006D48_D2MODE_PRIORITY_MARK_A 0xFFFF8000
568#define S_006D48_D2MODE_PRIORITY_A_OFF(x) (((x) & 0x1) << 16)
569#define G_006D48_D2MODE_PRIORITY_A_OFF(x) (((x) >> 16) & 0x1)
570#define C_006D48_D2MODE_PRIORITY_A_OFF 0xFFFEFFFF
571#define S_006D48_D2MODE_PRIORITY_A_ALWAYS_ON(x) (((x) & 0x1) << 20)
572#define G_006D48_D2MODE_PRIORITY_A_ALWAYS_ON(x) (((x) >> 20) & 0x1)
573#define C_006D48_D2MODE_PRIORITY_A_ALWAYS_ON 0xFFEFFFFF
574#define S_006D48_D2MODE_PRIORITY_A_FORCE_MASK(x) (((x) & 0x1) << 24)
575#define G_006D48_D2MODE_PRIORITY_A_FORCE_MASK(x) (((x) >> 24) & 0x1)
576#define C_006D48_D2MODE_PRIORITY_A_FORCE_MASK 0xFEFFFFFF
577#define R_006D4C_D2MODE_PRIORITY_B_CNT 0x006D4C
578#define S_006D4C_D2MODE_PRIORITY_MARK_B(x) (((x) & 0x7FFF) << 0)
579#define G_006D4C_D2MODE_PRIORITY_MARK_B(x) (((x) >> 0) & 0x7FFF)
580#define C_006D4C_D2MODE_PRIORITY_MARK_B 0xFFFF8000
581#define S_006D4C_D2MODE_PRIORITY_B_OFF(x) (((x) & 0x1) << 16)
582#define G_006D4C_D2MODE_PRIORITY_B_OFF(x) (((x) >> 16) & 0x1)
583#define C_006D4C_D2MODE_PRIORITY_B_OFF 0xFFFEFFFF
584#define S_006D4C_D2MODE_PRIORITY_B_ALWAYS_ON(x) (((x) & 0x1) << 20)
585#define G_006D4C_D2MODE_PRIORITY_B_ALWAYS_ON(x) (((x) >> 20) & 0x1)
586#define C_006D4C_D2MODE_PRIORITY_B_ALWAYS_ON 0xFFEFFFFF
587#define S_006D4C_D2MODE_PRIORITY_B_FORCE_MASK(x) (((x) & 0x1) << 24)
588#define G_006D4C_D2MODE_PRIORITY_B_FORCE_MASK(x) (((x) >> 24) & 0x1)
589#define C_006D4C_D2MODE_PRIORITY_B_FORCE_MASK 0xFEFFFFFF
590
470#endif 591#endif
diff --git a/drivers/gpu/drm/radeon/rs690.c b/drivers/gpu/drm/radeon/rs690.c
index 27547175cf93..bbf3da790fd5 100644
--- a/drivers/gpu/drm/radeon/rs690.c
+++ b/drivers/gpu/drm/radeon/rs690.c
@@ -27,6 +27,7 @@
27 */ 27 */
28#include "drmP.h" 28#include "drmP.h"
29#include "radeon.h" 29#include "radeon.h"
30#include "radeon_asic.h"
30#include "atom.h" 31#include "atom.h"
31#include "rs690d.h" 32#include "rs690d.h"
32 33
@@ -57,42 +58,57 @@ static void rs690_gpu_init(struct radeon_device *rdev)
57 } 58 }
58} 59}
59 60
61union igp_info {
62 struct _ATOM_INTEGRATED_SYSTEM_INFO info;
63 struct _ATOM_INTEGRATED_SYSTEM_INFO_V2 info_v2;
64};
65
60void rs690_pm_info(struct radeon_device *rdev) 66void rs690_pm_info(struct radeon_device *rdev)
61{ 67{
62 int index = GetIndexIntoMasterTable(DATA, IntegratedSystemInfo); 68 int index = GetIndexIntoMasterTable(DATA, IntegratedSystemInfo);
63 struct _ATOM_INTEGRATED_SYSTEM_INFO *info; 69 union igp_info *info;
64 struct _ATOM_INTEGRATED_SYSTEM_INFO_V2 *info_v2;
65 void *ptr;
66 uint16_t data_offset; 70 uint16_t data_offset;
67 uint8_t frev, crev; 71 uint8_t frev, crev;
68 fixed20_12 tmp; 72 fixed20_12 tmp;
69 73
70 atom_parse_data_header(rdev->mode_info.atom_context, index, NULL, 74 if (atom_parse_data_header(rdev->mode_info.atom_context, index, NULL,
71 &frev, &crev, &data_offset); 75 &frev, &crev, &data_offset)) {
72 ptr = rdev->mode_info.atom_context->bios + data_offset; 76 info = (union igp_info *)(rdev->mode_info.atom_context->bios + data_offset);
73 info = (struct _ATOM_INTEGRATED_SYSTEM_INFO *)ptr; 77
74 info_v2 = (struct _ATOM_INTEGRATED_SYSTEM_INFO_V2 *)ptr; 78 /* Get various system informations from bios */
75 /* Get various system informations from bios */ 79 switch (crev) {
76 switch (crev) { 80 case 1:
77 case 1: 81 tmp.full = rfixed_const(100);
78 tmp.full = rfixed_const(100); 82 rdev->pm.igp_sideport_mclk.full = rfixed_const(info->info.ulBootUpMemoryClock);
79 rdev->pm.igp_sideport_mclk.full = rfixed_const(info->ulBootUpMemoryClock); 83 rdev->pm.igp_sideport_mclk.full = rfixed_div(rdev->pm.igp_sideport_mclk, tmp);
80 rdev->pm.igp_sideport_mclk.full = rfixed_div(rdev->pm.igp_sideport_mclk, tmp); 84 rdev->pm.igp_system_mclk.full = rfixed_const(le16_to_cpu(info->info.usK8MemoryClock));
81 rdev->pm.igp_system_mclk.full = rfixed_const(le16_to_cpu(info->usK8MemoryClock)); 85 rdev->pm.igp_ht_link_clk.full = rfixed_const(le16_to_cpu(info->info.usFSBClock));
82 rdev->pm.igp_ht_link_clk.full = rfixed_const(le16_to_cpu(info->usFSBClock)); 86 rdev->pm.igp_ht_link_width.full = rfixed_const(info->info.ucHTLinkWidth);
83 rdev->pm.igp_ht_link_width.full = rfixed_const(info->ucHTLinkWidth); 87 break;
84 break; 88 case 2:
85 case 2: 89 tmp.full = rfixed_const(100);
86 tmp.full = rfixed_const(100); 90 rdev->pm.igp_sideport_mclk.full = rfixed_const(info->info_v2.ulBootUpSidePortClock);
87 rdev->pm.igp_sideport_mclk.full = rfixed_const(info_v2->ulBootUpSidePortClock); 91 rdev->pm.igp_sideport_mclk.full = rfixed_div(rdev->pm.igp_sideport_mclk, tmp);
88 rdev->pm.igp_sideport_mclk.full = rfixed_div(rdev->pm.igp_sideport_mclk, tmp); 92 rdev->pm.igp_system_mclk.full = rfixed_const(info->info_v2.ulBootUpUMAClock);
89 rdev->pm.igp_system_mclk.full = rfixed_const(info_v2->ulBootUpUMAClock); 93 rdev->pm.igp_system_mclk.full = rfixed_div(rdev->pm.igp_system_mclk, tmp);
90 rdev->pm.igp_system_mclk.full = rfixed_div(rdev->pm.igp_system_mclk, tmp); 94 rdev->pm.igp_ht_link_clk.full = rfixed_const(info->info_v2.ulHTLinkFreq);
91 rdev->pm.igp_ht_link_clk.full = rfixed_const(info_v2->ulHTLinkFreq); 95 rdev->pm.igp_ht_link_clk.full = rfixed_div(rdev->pm.igp_ht_link_clk, tmp);
92 rdev->pm.igp_ht_link_clk.full = rfixed_div(rdev->pm.igp_ht_link_clk, tmp); 96 rdev->pm.igp_ht_link_width.full = rfixed_const(le16_to_cpu(info->info_v2.usMinHTLinkWidth));
93 rdev->pm.igp_ht_link_width.full = rfixed_const(le16_to_cpu(info_v2->usMinHTLinkWidth)); 97 break;
94 break; 98 default:
95 default: 99 tmp.full = rfixed_const(100);
100 /* We assume the slowest possible clock, i.e. worst case */
101 /* DDR 333 MHz */
102 rdev->pm.igp_sideport_mclk.full = rfixed_const(333);
103 /* FIXME: system clock ? */
104 rdev->pm.igp_system_mclk.full = rfixed_const(100);
105 rdev->pm.igp_system_mclk.full = rfixed_div(rdev->pm.igp_system_mclk, tmp);
106 rdev->pm.igp_ht_link_clk.full = rfixed_const(200);
107 rdev->pm.igp_ht_link_width.full = rfixed_const(8);
108 DRM_ERROR("No integrated system info for your GPU, using safe default\n");
109 break;
110 }
111 } else {
96 tmp.full = rfixed_const(100); 112 tmp.full = rfixed_const(100);
97 /* We assume the slowest possible clock, i.e. worst case */ 113 /* We assume the slowest possible clock, i.e. worst case */
98 /* DDR 333 MHz */ 114 /* DDR 333 MHz */
@@ -103,7 +119,6 @@ void rs690_pm_info(struct radeon_device *rdev)
103 rdev->pm.igp_ht_link_clk.full = rfixed_const(200); 119 rdev->pm.igp_ht_link_clk.full = rfixed_const(200);
104 rdev->pm.igp_ht_link_width.full = rfixed_const(8); 120 rdev->pm.igp_ht_link_width.full = rfixed_const(8);
105 DRM_ERROR("No integrated system info for your GPU, using safe default\n"); 121 DRM_ERROR("No integrated system info for your GPU, using safe default\n");
106 break;
107 } 122 }
108 /* Compute various bandwidth */ 123 /* Compute various bandwidth */
109 /* k8_bandwidth = (memory_clk / 2) * 2 * 8 * 0.5 = memory_clk * 4 */ 124 /* k8_bandwidth = (memory_clk / 2) * 2 * 8 * 0.5 = memory_clk * 4 */
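All of the clock and bandwidth math in this function runs on the driver's fixed20_12 type through the rfixed_* helpers (rfixed_const, rfixed_mul, rfixed_div, rfixed_trunc). The standalone re-implementation below only demonstrates the 20.12 semantics those helpers provide; it is not the driver's radeon_fixed.h, rounding details are simplified, and the 10 kHz ATOM clock unit (hence the divide by 100 to reach MHz) is stated as an assumption:

#include <stdio.h>
#include <stdint.h>

/* Minimal 20.12 fixed point mirroring the rfixed_* semantics (simplified). */
typedef struct { uint32_t full; } fixed20_12;

static fixed20_12 fx_const(uint32_t a) { return (fixed20_12){ a << 12 }; }
static fixed20_12 fx_mul(fixed20_12 a, fixed20_12 b)
{
        return (fixed20_12){ (uint32_t)(((uint64_t)a.full * b.full) >> 12) };
}
static fixed20_12 fx_div(fixed20_12 a, fixed20_12 b)
{
        return (fixed20_12){ (uint32_t)(((uint64_t)a.full << 12) / b.full) };
}
static uint32_t fx_trunc(fixed20_12 a) { return a.full >> 12; }

int main(void)
{
        /* An ATOM memory clock of 40000 (assumed 10 kHz units) is 400 MHz. */
        fixed20_12 mclk    = fx_const(40000);
        fixed20_12 hundred = fx_const(100);
        fixed20_12 mhz     = fx_div(mclk, hundred);

        /* k8_bandwidth = memory_clk * 4, as the comment above states. */
        fixed20_12 k8_bw = fx_mul(mhz, fx_const(4));

        printf("mclk = %u MHz, k8_bandwidth = %u\n", fx_trunc(mhz), fx_trunc(k8_bw));
        return 0;
}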
@@ -129,36 +144,25 @@ void rs690_pm_info(struct radeon_device *rdev)
129 rdev->pm.sideport_bandwidth.full = rfixed_div(rdev->pm.sideport_bandwidth, tmp); 144 rdev->pm.sideport_bandwidth.full = rfixed_div(rdev->pm.sideport_bandwidth, tmp);
130} 145}
131 146
132void rs690_vram_info(struct radeon_device *rdev) 147void rs690_mc_init(struct radeon_device *rdev)
133{ 148{
134 uint32_t tmp; 149 u64 base;
135 fixed20_12 a;
136 150
137 rs400_gart_adjust_size(rdev); 151 rs400_gart_adjust_size(rdev);
138 /* DDR for all card after R300 & IGP */
139 rdev->mc.vram_is_ddr = true; 152 rdev->mc.vram_is_ddr = true;
140 /* FIXME: is this correct for RS690/RS740 ? */ 153 rdev->mc.vram_width = 128;
141 tmp = RREG32(RADEON_MEM_CNTL);
142 if (tmp & R300_MEM_NUM_CHANNELS_MASK) {
143 rdev->mc.vram_width = 128;
144 } else {
145 rdev->mc.vram_width = 64;
146 }
147 rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE); 154 rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE);
148 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; 155 rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
149
150 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0); 156 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
151 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0); 157 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
158 rdev->mc.visible_vram_size = rdev->mc.aper_size;
159 base = RREG32_MC(R_000100_MCCFG_FB_LOCATION);
160 base = G_000100_MC_FB_START(base) << 16;
152 rs690_pm_info(rdev); 161 rs690_pm_info(rdev);
153 /* FIXME: we should enforce default clock in case GPU is not in 162 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
154 * default setup 163 radeon_vram_location(rdev, &rdev->mc, base);
155 */ 164 radeon_gtt_location(rdev, &rdev->mc);
156 a.full = rfixed_const(100); 165 radeon_update_bandwidth_info(rdev);
157 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
158 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
159 a.full = rfixed_const(16);
160 /* core_bandwidth = sclk(Mhz) * 16 */
161 rdev->pm.core_bandwidth.full = rfixed_div(rdev->pm.sclk, a);
162} 166}
163 167
164void rs690_line_buffer_adjust(struct radeon_device *rdev, 168void rs690_line_buffer_adjust(struct radeon_device *rdev,
@@ -244,8 +248,9 @@ void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
244 248
245 b.full = rfixed_const(mode->crtc_hdisplay); 249 b.full = rfixed_const(mode->crtc_hdisplay);
246 c.full = rfixed_const(256); 250 c.full = rfixed_const(256);
247 a.full = rfixed_mul(wm->num_line_pair, b); 251 a.full = rfixed_div(b, c);
248 request_fifo_depth.full = rfixed_div(a, c); 252 request_fifo_depth.full = rfixed_mul(a, wm->num_line_pair);
253 request_fifo_depth.full = rfixed_ceil(request_fifo_depth);
249 if (a.full < rfixed_const(4)) { 254 if (a.full < rfixed_const(4)) {
250 wm->lb_request_fifo_depth = 4; 255 wm->lb_request_fifo_depth = 4;
251 } else { 256 } else {
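Besides adding rfixed_ceil(), the reordering above changes what 'a' holds when the threshold is tested: previously a was num_line_pair * crtc_hdisplay, which is never anywhere near 4 for a real mode, whereas now it is crtc_hdisplay / 256, so the minimum-depth clamp can actually trigger for narrow modes. A worked example with assumed numbers, assuming the else branch truncates the fixed-point result the way the rest of this file does:

    crtc_hdisplay = 1680, num_line_pair = 2
    a                  = 1680 / 256    = 6.5625            (not below 4, so no clamp)
    request_fifo_depth = 2 * 6.5625    = 13.125, ceil -> 14
    without the ceil   : trunc(13.125) = 13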
@@ -374,6 +379,7 @@ void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
374 a.full = rfixed_const(16); 379 a.full = rfixed_const(16);
375 wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay); 380 wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
376 wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a); 381 wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a);
382 wm->priority_mark_max.full = rfixed_ceil(wm->priority_mark_max);
377 383
378 /* Determine estimated width */ 384 /* Determine estimated width */
379 estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full; 385 estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full;
@@ -383,6 +389,7 @@ void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
383 } else { 389 } else {
384 a.full = rfixed_const(16); 390 a.full = rfixed_const(16);
385 wm->priority_mark.full = rfixed_div(estimated_width, a); 391 wm->priority_mark.full = rfixed_div(estimated_width, a);
392 wm->priority_mark.full = rfixed_ceil(wm->priority_mark);
386 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full; 393 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full;
387 } 394 }
388} 395}
@@ -393,10 +400,12 @@ void rs690_bandwidth_update(struct radeon_device *rdev)
393 struct drm_display_mode *mode1 = NULL; 400 struct drm_display_mode *mode1 = NULL;
394 struct rs690_watermark wm0; 401 struct rs690_watermark wm0;
395 struct rs690_watermark wm1; 402 struct rs690_watermark wm1;
396 u32 tmp; 403 u32 tmp, d1mode_priority_a_cnt, d2mode_priority_a_cnt;
397 fixed20_12 priority_mark02, priority_mark12, fill_rate; 404 fixed20_12 priority_mark02, priority_mark12, fill_rate;
398 fixed20_12 a, b; 405 fixed20_12 a, b;
399 406
407 radeon_update_display_priority(rdev);
408
400 if (rdev->mode_info.crtcs[0]->base.enabled) 409 if (rdev->mode_info.crtcs[0]->base.enabled)
401 mode0 = &rdev->mode_info.crtcs[0]->base.mode; 410 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
402 if (rdev->mode_info.crtcs[1]->base.enabled) 411 if (rdev->mode_info.crtcs[1]->base.enabled)
@@ -406,7 +415,8 @@ void rs690_bandwidth_update(struct radeon_device *rdev)
406 * modes if the user specifies HIGH for displaypriority 415 * modes if the user specifies HIGH for displaypriority
407 * option. 416 * option.
408 */ 417 */
409 if (rdev->disp_priority == 2) { 418 if ((rdev->disp_priority == 2) &&
419 ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740))) {
410 tmp = RREG32_MC(R_000104_MC_INIT_MISC_LAT_TIMER); 420 tmp = RREG32_MC(R_000104_MC_INIT_MISC_LAT_TIMER);
411 tmp &= C_000104_MC_DISP0R_INIT_LAT; 421 tmp &= C_000104_MC_DISP0R_INIT_LAT;
412 tmp &= C_000104_MC_DISP1R_INIT_LAT; 422 tmp &= C_000104_MC_DISP1R_INIT_LAT;
@@ -481,10 +491,16 @@ void rs690_bandwidth_update(struct radeon_device *rdev)
481 priority_mark12.full = 0; 491 priority_mark12.full = 0;
482 if (wm1.priority_mark_max.full > priority_mark12.full) 492 if (wm1.priority_mark_max.full > priority_mark12.full)
483 priority_mark12.full = wm1.priority_mark_max.full; 493 priority_mark12.full = wm1.priority_mark_max.full;
484 WREG32(R_006548_D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02)); 494 d1mode_priority_a_cnt = rfixed_trunc(priority_mark02);
485 WREG32(R_00654C_D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02)); 495 d2mode_priority_a_cnt = rfixed_trunc(priority_mark12);
486 WREG32(R_006D48_D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12)); 496 if (rdev->disp_priority == 2) {
487 WREG32(R_006D4C_D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12)); 497 d1mode_priority_a_cnt |= S_006548_D1MODE_PRIORITY_A_ALWAYS_ON(1);
498 d2mode_priority_a_cnt |= S_006D48_D2MODE_PRIORITY_A_ALWAYS_ON(1);
499 }
500 WREG32(R_006548_D1MODE_PRIORITY_A_CNT, d1mode_priority_a_cnt);
501 WREG32(R_00654C_D1MODE_PRIORITY_B_CNT, d1mode_priority_a_cnt);
502 WREG32(R_006D48_D2MODE_PRIORITY_A_CNT, d2mode_priority_a_cnt);
503 WREG32(R_006D4C_D2MODE_PRIORITY_B_CNT, d2mode_priority_a_cnt);
488 } else if (mode0) { 504 } else if (mode0) {
489 if (rfixed_trunc(wm0.dbpp) > 64) 505 if (rfixed_trunc(wm0.dbpp) > 64)
490 a.full = rfixed_mul(wm0.dbpp, wm0.num_line_pair); 506 a.full = rfixed_mul(wm0.dbpp, wm0.num_line_pair);
@@ -511,8 +527,11 @@ void rs690_bandwidth_update(struct radeon_device *rdev)
511 priority_mark02.full = 0; 527 priority_mark02.full = 0;
512 if (wm0.priority_mark_max.full > priority_mark02.full) 528 if (wm0.priority_mark_max.full > priority_mark02.full)
513 priority_mark02.full = wm0.priority_mark_max.full; 529 priority_mark02.full = wm0.priority_mark_max.full;
514 WREG32(R_006548_D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02)); 530 d1mode_priority_a_cnt = rfixed_trunc(priority_mark02);
515 WREG32(R_00654C_D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02)); 531 if (rdev->disp_priority == 2)
532 d1mode_priority_a_cnt |= S_006548_D1MODE_PRIORITY_A_ALWAYS_ON(1);
533 WREG32(R_006548_D1MODE_PRIORITY_A_CNT, d1mode_priority_a_cnt);
534 WREG32(R_00654C_D1MODE_PRIORITY_B_CNT, d1mode_priority_a_cnt);
516 WREG32(R_006D48_D2MODE_PRIORITY_A_CNT, 535 WREG32(R_006D48_D2MODE_PRIORITY_A_CNT,
517 S_006D48_D2MODE_PRIORITY_A_OFF(1)); 536 S_006D48_D2MODE_PRIORITY_A_OFF(1));
518 WREG32(R_006D4C_D2MODE_PRIORITY_B_CNT, 537 WREG32(R_006D4C_D2MODE_PRIORITY_B_CNT,
@@ -543,12 +562,15 @@ void rs690_bandwidth_update(struct radeon_device *rdev)
543 priority_mark12.full = 0; 562 priority_mark12.full = 0;
544 if (wm1.priority_mark_max.full > priority_mark12.full) 563 if (wm1.priority_mark_max.full > priority_mark12.full)
545 priority_mark12.full = wm1.priority_mark_max.full; 564 priority_mark12.full = wm1.priority_mark_max.full;
565 d2mode_priority_a_cnt = rfixed_trunc(priority_mark12);
566 if (rdev->disp_priority == 2)
567 d2mode_priority_a_cnt |= S_006D48_D2MODE_PRIORITY_A_ALWAYS_ON(1);
546 WREG32(R_006548_D1MODE_PRIORITY_A_CNT, 568 WREG32(R_006548_D1MODE_PRIORITY_A_CNT,
547 S_006548_D1MODE_PRIORITY_A_OFF(1)); 569 S_006548_D1MODE_PRIORITY_A_OFF(1));
548 WREG32(R_00654C_D1MODE_PRIORITY_B_CNT, 570 WREG32(R_00654C_D1MODE_PRIORITY_B_CNT,
549 S_00654C_D1MODE_PRIORITY_B_OFF(1)); 571 S_00654C_D1MODE_PRIORITY_B_OFF(1));
550 WREG32(R_006D48_D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12)); 572 WREG32(R_006D48_D2MODE_PRIORITY_A_CNT, d2mode_priority_a_cnt);
551 WREG32(R_006D4C_D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12)); 573 WREG32(R_006D4C_D2MODE_PRIORITY_B_CNT, d2mode_priority_a_cnt);
552 } 574 }
553} 575}
554 576
@@ -605,8 +627,8 @@ static int rs690_startup(struct radeon_device *rdev)
605 if (r) 627 if (r)
606 return r; 628 return r;
607 /* Enable IRQ */ 629 /* Enable IRQ */
608 rdev->irq.sw_int = true;
609 rs600_irq_set(rdev); 630 rs600_irq_set(rdev);
631 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
610 /* 1M ring buffer */ 632 /* 1M ring buffer */
611 r = r100_cp_init(rdev, 1024 * 1024); 633 r = r100_cp_init(rdev, 1024 * 1024);
612 if (r) { 634 if (r) {
@@ -640,6 +662,8 @@ int rs690_resume(struct radeon_device *rdev)
640 atom_asic_init(rdev->mode_info.atom_context); 662 atom_asic_init(rdev->mode_info.atom_context);
641 /* Resume clock after posting */ 663 /* Resume clock after posting */
642 rv515_clock_startup(rdev); 664 rv515_clock_startup(rdev);
665 /* Initialize surface registers */
666 radeon_surface_init(rdev);
643 return rs690_startup(rdev); 667 return rs690_startup(rdev);
644} 668}
645 669
@@ -654,7 +678,7 @@ int rs690_suspend(struct radeon_device *rdev)
654 678
655void rs690_fini(struct radeon_device *rdev) 679void rs690_fini(struct radeon_device *rdev)
656{ 680{
657 rs690_suspend(rdev); 681 radeon_pm_fini(rdev);
658 r100_cp_fini(rdev); 682 r100_cp_fini(rdev);
659 r100_wb_fini(rdev); 683 r100_wb_fini(rdev);
660 r100_ib_fini(rdev); 684 r100_ib_fini(rdev);
@@ -662,7 +686,7 @@ void rs690_fini(struct radeon_device *rdev)
662 rs400_gart_fini(rdev); 686 rs400_gart_fini(rdev);
663 radeon_irq_kms_fini(rdev); 687 radeon_irq_kms_fini(rdev);
664 radeon_fence_driver_fini(rdev); 688 radeon_fence_driver_fini(rdev);
665 radeon_object_fini(rdev); 689 radeon_bo_fini(rdev);
666 radeon_atombios_fini(rdev); 690 radeon_atombios_fini(rdev);
667 kfree(rdev->bios); 691 kfree(rdev->bios);
668 rdev->bios = NULL; 692 rdev->bios = NULL;
@@ -700,20 +724,15 @@ int rs690_init(struct radeon_device *rdev)
700 RREG32(R_0007C0_CP_STAT)); 724 RREG32(R_0007C0_CP_STAT));
701 } 725 }
702 /* check if cards are posted or not */ 726 /* check if cards are posted or not */
703 if (!radeon_card_posted(rdev) && rdev->bios) { 727 if (radeon_boot_test_post_card(rdev) == false)
704 DRM_INFO("GPU not posted. posting now...\n"); 728 return -EINVAL;
705 atom_asic_init(rdev->mode_info.atom_context); 729
706 }
707 /* Initialize clocks */ 730 /* Initialize clocks */
708 radeon_get_clock_info(rdev->ddev); 731 radeon_get_clock_info(rdev->ddev);
709 /* Initialize power management */ 732 /* Initialize power management */
710 radeon_pm_init(rdev); 733 radeon_pm_init(rdev);
711 /* Get vram informations */ 734 /* initialize memory controller */
712 rs690_vram_info(rdev); 735 rs690_mc_init(rdev);
713 /* Initialize memory controller (also test AGP) */
714 r = r420_mc_init(rdev);
715 if (r)
716 return r;
717 rv515_debugfs(rdev); 736 rv515_debugfs(rdev);
718 /* Fence driver */ 737 /* Fence driver */
719 r = radeon_fence_driver_init(rdev); 738 r = radeon_fence_driver_init(rdev);
@@ -723,7 +742,7 @@ int rs690_init(struct radeon_device *rdev)
723 if (r) 742 if (r)
724 return r; 743 return r;
725 /* Memory manager */ 744 /* Memory manager */
726 r = radeon_object_init(rdev); 745 r = radeon_bo_init(rdev);
727 if (r) 746 if (r)
728 return r; 747 return r;
729 r = rs400_gart_init(rdev); 748 r = rs400_gart_init(rdev);
@@ -735,7 +754,6 @@ int rs690_init(struct radeon_device *rdev)
735 if (r) { 754 if (r) {
736 /* Something went wrong with the accel init, stop accel */ 755 /* Something went wrong with the accel init, stop accel */
737 dev_err(rdev->dev, "Disabling GPU acceleration\n"); 756 dev_err(rdev->dev, "Disabling GPU acceleration\n");
738 rs690_suspend(rdev);
739 r100_cp_fini(rdev); 757 r100_cp_fini(rdev);
740 r100_wb_fini(rdev); 758 r100_wb_fini(rdev);
741 r100_ib_fini(rdev); 759 r100_ib_fini(rdev);
diff --git a/drivers/gpu/drm/radeon/rs690d.h b/drivers/gpu/drm/radeon/rs690d.h
index 62d31e7a897f..36e6398a98ae 100644
--- a/drivers/gpu/drm/radeon/rs690d.h
+++ b/drivers/gpu/drm/radeon/rs690d.h
@@ -182,6 +182,9 @@
182#define S_006548_D1MODE_PRIORITY_A_OFF(x) (((x) & 0x1) << 16) 182#define S_006548_D1MODE_PRIORITY_A_OFF(x) (((x) & 0x1) << 16)
183#define G_006548_D1MODE_PRIORITY_A_OFF(x) (((x) >> 16) & 0x1) 183#define G_006548_D1MODE_PRIORITY_A_OFF(x) (((x) >> 16) & 0x1)
184#define C_006548_D1MODE_PRIORITY_A_OFF 0xFFFEFFFF 184#define C_006548_D1MODE_PRIORITY_A_OFF 0xFFFEFFFF
185#define S_006548_D1MODE_PRIORITY_A_ALWAYS_ON(x) (((x) & 0x1) << 20)
186#define G_006548_D1MODE_PRIORITY_A_ALWAYS_ON(x) (((x) >> 20) & 0x1)
187#define C_006548_D1MODE_PRIORITY_A_ALWAYS_ON 0xFFEFFFFF
185#define S_006548_D1MODE_PRIORITY_A_FORCE_MASK(x) (((x) & 0x1) << 24) 188#define S_006548_D1MODE_PRIORITY_A_FORCE_MASK(x) (((x) & 0x1) << 24)
186#define G_006548_D1MODE_PRIORITY_A_FORCE_MASK(x) (((x) >> 24) & 0x1) 189#define G_006548_D1MODE_PRIORITY_A_FORCE_MASK(x) (((x) >> 24) & 0x1)
187#define C_006548_D1MODE_PRIORITY_A_FORCE_MASK 0xFEFFFFFF 190#define C_006548_D1MODE_PRIORITY_A_FORCE_MASK 0xFEFFFFFF
diff --git a/drivers/gpu/drm/radeon/rv515.c b/drivers/gpu/drm/radeon/rv515.c
index ba68c9fe90a1..9035121f4b58 100644
--- a/drivers/gpu/drm/radeon/rv515.c
+++ b/drivers/gpu/drm/radeon/rv515.c
@@ -26,9 +26,11 @@
26 * Jerome Glisse 26 * Jerome Glisse
27 */ 27 */
28#include <linux/seq_file.h> 28#include <linux/seq_file.h>
29#include <linux/slab.h>
29#include "drmP.h" 30#include "drmP.h"
30#include "rv515d.h" 31#include "rv515d.h"
31#include "radeon.h" 32#include "radeon.h"
33#include "radeon_asic.h"
32#include "atom.h" 34#include "atom.h"
33#include "rv515_reg_safe.h" 35#include "rv515_reg_safe.h"
34 36
@@ -277,19 +279,15 @@ static void rv515_vram_get_type(struct radeon_device *rdev)
277 } 279 }
278} 280}
279 281
280void rv515_vram_info(struct radeon_device *rdev) 282void rv515_mc_init(struct radeon_device *rdev)
281{ 283{
282 fixed20_12 a;
283 284
284 rv515_vram_get_type(rdev); 285 rv515_vram_get_type(rdev);
285
286 r100_vram_init_sizes(rdev); 286 r100_vram_init_sizes(rdev);
287 /* FIXME: we should enforce default clock in case GPU is not in 287 radeon_vram_location(rdev, &rdev->mc, 0);
288 * default setup 288 if (!(rdev->flags & RADEON_IS_AGP))
289 */ 289 radeon_gtt_location(rdev, &rdev->mc);
290 a.full = rfixed_const(100); 290 radeon_update_bandwidth_info(rdev);
291 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
292 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
293} 291}
294 292
295uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg) 293uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg)
@@ -478,8 +476,8 @@ static int rv515_startup(struct radeon_device *rdev)
478 return r; 476 return r;
479 } 477 }
480 /* Enable IRQ */ 478 /* Enable IRQ */
481 rdev->irq.sw_int = true;
482 rs600_irq_set(rdev); 479 rs600_irq_set(rdev);
480 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
483 /* 1M ring buffer */ 481 /* 1M ring buffer */
484 r = r100_cp_init(rdev, 1024 * 1024); 482 r = r100_cp_init(rdev, 1024 * 1024);
485 if (r) { 483 if (r) {
@@ -514,6 +512,8 @@ int rv515_resume(struct radeon_device *rdev)
514 atom_asic_init(rdev->mode_info.atom_context); 512 atom_asic_init(rdev->mode_info.atom_context);
515 /* Resume clock after posting */ 513 /* Resume clock after posting */
516 rv515_clock_startup(rdev); 514 rv515_clock_startup(rdev);
515 /* Initialize surface registers */
516 radeon_surface_init(rdev);
517 return rv515_startup(rdev); 517 return rv515_startup(rdev);
518} 518}
519 519
@@ -535,16 +535,16 @@ void rv515_set_safe_registers(struct radeon_device *rdev)
535 535
536void rv515_fini(struct radeon_device *rdev) 536void rv515_fini(struct radeon_device *rdev)
537{ 537{
538 rv515_suspend(rdev); 538 radeon_pm_fini(rdev);
539 r100_cp_fini(rdev); 539 r100_cp_fini(rdev);
540 r100_wb_fini(rdev); 540 r100_wb_fini(rdev);
541 r100_ib_fini(rdev); 541 r100_ib_fini(rdev);
542 radeon_gem_fini(rdev); 542 radeon_gem_fini(rdev);
543 rv370_pcie_gart_fini(rdev); 543 rv370_pcie_gart_fini(rdev);
544 radeon_agp_fini(rdev); 544 radeon_agp_fini(rdev);
545 radeon_irq_kms_fini(rdev); 545 radeon_irq_kms_fini(rdev);
546 radeon_fence_driver_fini(rdev); 546 radeon_fence_driver_fini(rdev);
547 radeon_object_fini(rdev); 547 radeon_bo_fini(rdev);
548 radeon_atombios_fini(rdev); 548 radeon_atombios_fini(rdev);
549 kfree(rdev->bios); 549 kfree(rdev->bios);
550 rdev->bios = NULL; 550 rdev->bios = NULL;
@@ -580,20 +580,21 @@ int rv515_init(struct radeon_device *rdev)
580 RREG32(R_0007C0_CP_STAT)); 580 RREG32(R_0007C0_CP_STAT));
581 } 581 }
582 /* check if cards are posted or not */ 582 /* check if cards are posted or not */
583 if (!radeon_card_posted(rdev) && rdev->bios) { 583 if (radeon_boot_test_post_card(rdev) == false)
584 DRM_INFO("GPU not posted. posting now...\n"); 584 return -EINVAL;
585 atom_asic_init(rdev->mode_info.atom_context);
586 }
587 /* Initialize clocks */ 585 /* Initialize clocks */
588 radeon_get_clock_info(rdev->ddev); 586 radeon_get_clock_info(rdev->ddev);
589 /* Initialize power management */ 587 /* Initialize power management */
590 radeon_pm_init(rdev); 588 radeon_pm_init(rdev);
591 /* Get vram informations */ 589 /* initialize AGP */
592 rv515_vram_info(rdev); 590 if (rdev->flags & RADEON_IS_AGP) {
593 /* Initialize memory controller (also test AGP) */ 591 r = radeon_agp_init(rdev);
594 r = r420_mc_init(rdev); 592 if (r) {
595 if (r) 593 radeon_agp_disable(rdev);
596 return r; 594 }
595 }
596 /* initialize memory controller */
597 rv515_mc_init(rdev);
597 rv515_debugfs(rdev); 598 rv515_debugfs(rdev);
598 /* Fence driver */ 599 /* Fence driver */
599 r = radeon_fence_driver_init(rdev); 600 r = radeon_fence_driver_init(rdev);
@@ -603,7 +604,7 @@ int rv515_init(struct radeon_device *rdev)
603 if (r) 604 if (r)
604 return r; 605 return r;
605 /* Memory manager */ 606 /* Memory manager */
606 r = radeon_object_init(rdev); 607 r = radeon_bo_init(rdev);
607 if (r) 608 if (r)
608 return r; 609 return r;
609 r = rv370_pcie_gart_init(rdev); 610 r = rv370_pcie_gart_init(rdev);
@@ -615,13 +616,12 @@ int rv515_init(struct radeon_device *rdev)
615 if (r) { 616 if (r) {
616 /* Something went wrong with the accel init, stop accel */ 617 /* Something went wrong with the accel init, stop accel */
617 dev_err(rdev->dev, "Disabling GPU acceleration\n"); 618 dev_err(rdev->dev, "Disabling GPU acceleration\n");
618 rv515_suspend(rdev);
619 r100_cp_fini(rdev); 619 r100_cp_fini(rdev);
620 r100_wb_fini(rdev); 620 r100_wb_fini(rdev);
621 r100_ib_fini(rdev); 621 r100_ib_fini(rdev);
622 radeon_irq_kms_fini(rdev);
622 rv370_pcie_gart_fini(rdev); 623 rv370_pcie_gart_fini(rdev);
623 radeon_agp_fini(rdev); 624 radeon_agp_fini(rdev);
624 radeon_irq_kms_fini(rdev);
625 rdev->accel_working = false; 625 rdev->accel_working = false;
626 } 626 }
627 return 0; 627 return 0;
@@ -892,8 +892,9 @@ void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
892 892
893 b.full = rfixed_const(mode->crtc_hdisplay); 893 b.full = rfixed_const(mode->crtc_hdisplay);
894 c.full = rfixed_const(256); 894 c.full = rfixed_const(256);
895 a.full = rfixed_mul(wm->num_line_pair, b); 895 a.full = rfixed_div(b, c);
896 request_fifo_depth.full = rfixed_div(a, c); 896 request_fifo_depth.full = rfixed_mul(a, wm->num_line_pair);
897 request_fifo_depth.full = rfixed_ceil(request_fifo_depth);
897 if (a.full < rfixed_const(4)) { 898 if (a.full < rfixed_const(4)) {
898 wm->lb_request_fifo_depth = 4; 899 wm->lb_request_fifo_depth = 4;
899 } else { 900 } else {
@@ -995,15 +996,17 @@ void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
995 a.full = rfixed_const(16); 996 a.full = rfixed_const(16);
996 wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay); 997 wm->priority_mark_max.full = rfixed_const(crtc->base.mode.crtc_hdisplay);
997 wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a); 998 wm->priority_mark_max.full = rfixed_div(wm->priority_mark_max, a);
999 wm->priority_mark_max.full = rfixed_ceil(wm->priority_mark_max);
998 1000
999 /* Determine estimated width */ 1001 /* Determine estimated width */
1000 estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full; 1002 estimated_width.full = tolerable_latency.full - wm->worst_case_latency.full;
1001 estimated_width.full = rfixed_div(estimated_width, consumption_time); 1003 estimated_width.full = rfixed_div(estimated_width, consumption_time);
1002 if (rfixed_trunc(estimated_width) > crtc->base.mode.crtc_hdisplay) { 1004 if (rfixed_trunc(estimated_width) > crtc->base.mode.crtc_hdisplay) {
1003 wm->priority_mark.full = rfixed_const(10); 1005 wm->priority_mark.full = wm->priority_mark_max.full;
1004 } else { 1006 } else {
1005 a.full = rfixed_const(16); 1007 a.full = rfixed_const(16);
1006 wm->priority_mark.full = rfixed_div(estimated_width, a); 1008 wm->priority_mark.full = rfixed_div(estimated_width, a);
1009 wm->priority_mark.full = rfixed_ceil(wm->priority_mark);
1007 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full; 1010 wm->priority_mark.full = wm->priority_mark_max.full - wm->priority_mark.full;
1008 } 1011 }
1009} 1012}
@@ -1014,7 +1017,7 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
1014 struct drm_display_mode *mode1 = NULL; 1017 struct drm_display_mode *mode1 = NULL;
1015 struct rv515_watermark wm0; 1018 struct rv515_watermark wm0;
1016 struct rv515_watermark wm1; 1019 struct rv515_watermark wm1;
1017 u32 tmp; 1020 u32 tmp, d1mode_priority_a_cnt, d2mode_priority_a_cnt;
1018 fixed20_12 priority_mark02, priority_mark12, fill_rate; 1021 fixed20_12 priority_mark02, priority_mark12, fill_rate;
1019 fixed20_12 a, b; 1022 fixed20_12 a, b;
1020 1023
@@ -1082,10 +1085,16 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
1082 priority_mark12.full = 0; 1085 priority_mark12.full = 0;
1083 if (wm1.priority_mark_max.full > priority_mark12.full) 1086 if (wm1.priority_mark_max.full > priority_mark12.full)
1084 priority_mark12.full = wm1.priority_mark_max.full; 1087 priority_mark12.full = wm1.priority_mark_max.full;
1085 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02)); 1088 d1mode_priority_a_cnt = rfixed_trunc(priority_mark02);
1086 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02)); 1089 d2mode_priority_a_cnt = rfixed_trunc(priority_mark12);
1087 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12)); 1090 if (rdev->disp_priority == 2) {
1088 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12)); 1091 d1mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
1092 d2mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
1093 }
1094 WREG32(D1MODE_PRIORITY_A_CNT, d1mode_priority_a_cnt);
1095 WREG32(D1MODE_PRIORITY_B_CNT, d1mode_priority_a_cnt);
1096 WREG32(D2MODE_PRIORITY_A_CNT, d2mode_priority_a_cnt);
1097 WREG32(D2MODE_PRIORITY_B_CNT, d2mode_priority_a_cnt);
1089 } else if (mode0) { 1098 } else if (mode0) {
1090 if (rfixed_trunc(wm0.dbpp) > 64) 1099 if (rfixed_trunc(wm0.dbpp) > 64)
1091 a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair); 1100 a.full = rfixed_div(wm0.dbpp, wm0.num_line_pair);
@@ -1112,8 +1121,11 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
1112 priority_mark02.full = 0; 1121 priority_mark02.full = 0;
1113 if (wm0.priority_mark_max.full > priority_mark02.full) 1122 if (wm0.priority_mark_max.full > priority_mark02.full)
1114 priority_mark02.full = wm0.priority_mark_max.full; 1123 priority_mark02.full = wm0.priority_mark_max.full;
1115 WREG32(D1MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark02)); 1124 d1mode_priority_a_cnt = rfixed_trunc(priority_mark02);
1116 WREG32(D1MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark02)); 1125 if (rdev->disp_priority == 2)
1126 d1mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
1127 WREG32(D1MODE_PRIORITY_A_CNT, d1mode_priority_a_cnt);
1128 WREG32(D1MODE_PRIORITY_B_CNT, d1mode_priority_a_cnt);
1117 WREG32(D2MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF); 1129 WREG32(D2MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
1118 WREG32(D2MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF); 1130 WREG32(D2MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
1119 } else { 1131 } else {
@@ -1142,10 +1154,13 @@ void rv515_bandwidth_avivo_update(struct radeon_device *rdev)
1142 priority_mark12.full = 0; 1154 priority_mark12.full = 0;
1143 if (wm1.priority_mark_max.full > priority_mark12.full) 1155 if (wm1.priority_mark_max.full > priority_mark12.full)
1144 priority_mark12.full = wm1.priority_mark_max.full; 1156 priority_mark12.full = wm1.priority_mark_max.full;
1157 d2mode_priority_a_cnt = rfixed_trunc(priority_mark12);
1158 if (rdev->disp_priority == 2)
1159 d2mode_priority_a_cnt |= MODE_PRIORITY_ALWAYS_ON;
1145 WREG32(D1MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF); 1160 WREG32(D1MODE_PRIORITY_A_CNT, MODE_PRIORITY_OFF);
1146 WREG32(D1MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF); 1161 WREG32(D1MODE_PRIORITY_B_CNT, MODE_PRIORITY_OFF);
1147 WREG32(D2MODE_PRIORITY_A_CNT, rfixed_trunc(priority_mark12)); 1162 WREG32(D2MODE_PRIORITY_A_CNT, d2mode_priority_a_cnt);
1148 WREG32(D2MODE_PRIORITY_B_CNT, rfixed_trunc(priority_mark12)); 1163 WREG32(D2MODE_PRIORITY_B_CNT, d2mode_priority_a_cnt);
1149 } 1164 }
1150} 1165}
1151 1166
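
The three hunks above stop writing the truncated watermark straight to the registers and instead build the value first so an "always on" flag can be OR'd in when the user forces high display priority. A short sketch of that assembly follows; the bit position used for the flag is a placeholder, not the real MODE_PRIORITY_ALWAYS_ON definition.

/* Sketch of how D1MODE_PRIORITY_A_CNT is assembled in the hunks above. */
#include <stdint.h>

#define MODE_PRIORITY_ALWAYS_ON_BIT  (1u << 20)   /* assumption: illustrative bit only */

uint32_t build_priority_cnt(uint32_t priority_mark, int disp_priority)
{
	uint32_t cnt = priority_mark;             /* rfixed_trunc(priority_markXX) in the driver */

	if (disp_priority == 2)
		cnt |= MODE_PRIORITY_ALWAYS_ON_BIT; /* keep display requests at high priority */
	return cnt;                               /* written to both the A and B registers */
}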
@@ -1155,6 +1170,8 @@ void rv515_bandwidth_update(struct radeon_device *rdev)
1155 struct drm_display_mode *mode0 = NULL; 1170 struct drm_display_mode *mode0 = NULL;
1156 struct drm_display_mode *mode1 = NULL; 1171 struct drm_display_mode *mode1 = NULL;
1157 1172
1173 radeon_update_display_priority(rdev);
1174
1158 if (rdev->mode_info.crtcs[0]->base.enabled) 1175 if (rdev->mode_info.crtcs[0]->base.enabled)
1159 mode0 = &rdev->mode_info.crtcs[0]->base.mode; 1176 mode0 = &rdev->mode_info.crtcs[0]->base.mode;
1160 if (rdev->mode_info.crtcs[1]->base.enabled) 1177 if (rdev->mode_info.crtcs[1]->base.enabled)
@@ -1164,7 +1181,8 @@ void rv515_bandwidth_update(struct radeon_device *rdev)
1164 * modes if the user specifies HIGH for displaypriority 1181 * modes if the user specifies HIGH for displaypriority
1165 * option. 1182 * option.
1166 */ 1183 */
1167 if (rdev->disp_priority == 2) { 1184 if ((rdev->disp_priority == 2) &&
1185 (rdev->family == CHIP_RV515)) {
1168 tmp = RREG32_MC(MC_MISC_LAT_TIMER); 1186 tmp = RREG32_MC(MC_MISC_LAT_TIMER);
1169 tmp &= ~MC_DISP1R_INIT_LAT_MASK; 1187 tmp &= ~MC_DISP1R_INIT_LAT_MASK;
1170 tmp &= ~MC_DISP0R_INIT_LAT_MASK; 1188 tmp &= ~MC_DISP0R_INIT_LAT_MASK;
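
The last rv515.c hunk also narrows the MC latency override: it now requires both the user-requested high priority and an RV515 family chip. A tiny model of that gate, with a stand-in enum rather than the driver's radeon_family.h:

/* Model of the new gate in rv515_bandwidth_update(); enum is a stand-in. */
enum chip { CHIP_RV515, CHIP_R520, CHIP_RV530 };

int wants_mc_latency_override(int disp_priority, enum chip family)
{
	return disp_priority == 2 && family == CHIP_RV515;
}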
diff --git a/drivers/gpu/drm/radeon/rv770.c b/drivers/gpu/drm/radeon/rv770.c
index b0efd0ddae7a..97958a64df1a 100644
--- a/drivers/gpu/drm/radeon/rv770.c
+++ b/drivers/gpu/drm/radeon/rv770.c
@@ -27,8 +27,10 @@
27 */ 27 */
28#include <linux/firmware.h> 28#include <linux/firmware.h>
29#include <linux/platform_device.h> 29#include <linux/platform_device.h>
30#include <linux/slab.h>
30#include "drmP.h" 31#include "drmP.h"
31#include "radeon.h" 32#include "radeon.h"
33#include "radeon_asic.h"
32#include "radeon_drm.h" 34#include "radeon_drm.h"
33#include "rv770d.h" 35#include "rv770d.h"
34#include "atom.h" 36#include "atom.h"
@@ -56,6 +58,7 @@ int rv770_pcie_gart_enable(struct radeon_device *rdev)
56 r = radeon_gart_table_vram_pin(rdev); 58 r = radeon_gart_table_vram_pin(rdev);
57 if (r) 59 if (r)
58 return r; 60 return r;
61 radeon_gart_restore(rdev);
59 /* Setup L2 cache */ 62 /* Setup L2 cache */
60 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING | 63 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
61 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE | 64 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
@@ -92,7 +95,7 @@ int rv770_pcie_gart_enable(struct radeon_device *rdev)
92void rv770_pcie_gart_disable(struct radeon_device *rdev) 95void rv770_pcie_gart_disable(struct radeon_device *rdev)
93{ 96{
94 u32 tmp; 97 u32 tmp;
95 int i; 98 int i, r;
96 99
97 /* Disable all tables */ 100 /* Disable all tables */
98 for (i = 0; i < 7; i++) 101 for (i = 0; i < 7; i++)
@@ -113,16 +116,20 @@ void rv770_pcie_gart_disable(struct radeon_device *rdev)
113 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp); 116 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
114 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp); 117 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
115 if (rdev->gart.table.vram.robj) { 118 if (rdev->gart.table.vram.robj) {
116 radeon_object_kunmap(rdev->gart.table.vram.robj); 119 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
117 radeon_object_unpin(rdev->gart.table.vram.robj); 120 if (likely(r == 0)) {
121 radeon_bo_kunmap(rdev->gart.table.vram.robj);
122 radeon_bo_unpin(rdev->gart.table.vram.robj);
123 radeon_bo_unreserve(rdev->gart.table.vram.robj);
124 }
118 } 125 }
119} 126}
120 127
121void rv770_pcie_gart_fini(struct radeon_device *rdev) 128void rv770_pcie_gart_fini(struct radeon_device *rdev)
122{ 129{
130 radeon_gart_fini(rdev);
123 rv770_pcie_gart_disable(rdev); 131 rv770_pcie_gart_disable(rdev);
124 radeon_gart_table_vram_free(rdev); 132 radeon_gart_table_vram_free(rdev);
125 radeon_gart_fini(rdev);
126} 133}
127 134
128 135
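
The gart_disable hunk above replaces the old radeon_object calls with the reserve/unmap/unpin/unreserve sequence of the radeon_bo API. The sketch below mirrors those calls for the GART table object; it uses kernel-internal helpers exactly as they appear in the hunk, so it is illustrative rather than standalone-buildable.

/* Sketch of the BO teardown pattern used above (kernel-internal API). */
static void teardown_pinned_gart_table(struct radeon_device *rdev)
{
	struct radeon_bo *robj = rdev->gart.table.vram.robj;
	int r;

	if (!robj)
		return;
	r = radeon_bo_reserve(robj, false);       /* take the reservation before touching the BO */
	if (likely(r == 0)) {
		radeon_bo_kunmap(robj);           /* drop the CPU mapping */
		radeon_bo_unpin(robj);            /* let TTM move or evict it again */
		radeon_bo_unreserve(robj);
	}
}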
@@ -269,9 +276,10 @@ static int rv770_cp_load_microcode(struct radeon_device *rdev)
269/* 276/*
270 * Core functions 277 * Core functions
271 */ 278 */
272static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes, 279static u32 r700_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
273 u32 num_backends, 280 u32 num_tile_pipes,
274 u32 backend_disable_mask) 281 u32 num_backends,
282 u32 backend_disable_mask)
275{ 283{
276 u32 backend_map = 0; 284 u32 backend_map = 0;
277 u32 enabled_backends_mask; 285 u32 enabled_backends_mask;
@@ -280,6 +288,7 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
280 u32 swizzle_pipe[R7XX_MAX_PIPES]; 288 u32 swizzle_pipe[R7XX_MAX_PIPES];
281 u32 cur_backend; 289 u32 cur_backend;
282 u32 i; 290 u32 i;
291 bool force_no_swizzle;
283 292
284 if (num_tile_pipes > R7XX_MAX_PIPES) 293 if (num_tile_pipes > R7XX_MAX_PIPES)
285 num_tile_pipes = R7XX_MAX_PIPES; 294 num_tile_pipes = R7XX_MAX_PIPES;
@@ -309,6 +318,18 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
309 if (enabled_backends_count != num_backends) 318 if (enabled_backends_count != num_backends)
310 num_backends = enabled_backends_count; 319 num_backends = enabled_backends_count;
311 320
321 switch (rdev->family) {
322 case CHIP_RV770:
323 case CHIP_RV730:
324 force_no_swizzle = false;
325 break;
326 case CHIP_RV710:
327 case CHIP_RV740:
328 default:
329 force_no_swizzle = true;
330 break;
331 }
332
312 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * R7XX_MAX_PIPES); 333 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * R7XX_MAX_PIPES);
313 switch (num_tile_pipes) { 334 switch (num_tile_pipes) {
314 case 1: 335 case 1:
@@ -319,49 +340,100 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
319 swizzle_pipe[1] = 1; 340 swizzle_pipe[1] = 1;
320 break; 341 break;
321 case 3: 342 case 3:
322 swizzle_pipe[0] = 0; 343 if (force_no_swizzle) {
323 swizzle_pipe[1] = 2; 344 swizzle_pipe[0] = 0;
324 swizzle_pipe[2] = 1; 345 swizzle_pipe[1] = 1;
346 swizzle_pipe[2] = 2;
347 } else {
348 swizzle_pipe[0] = 0;
349 swizzle_pipe[1] = 2;
350 swizzle_pipe[2] = 1;
351 }
325 break; 352 break;
326 case 4: 353 case 4:
327 swizzle_pipe[0] = 0; 354 if (force_no_swizzle) {
328 swizzle_pipe[1] = 2; 355 swizzle_pipe[0] = 0;
329 swizzle_pipe[2] = 3; 356 swizzle_pipe[1] = 1;
330 swizzle_pipe[3] = 1; 357 swizzle_pipe[2] = 2;
358 swizzle_pipe[3] = 3;
359 } else {
360 swizzle_pipe[0] = 0;
361 swizzle_pipe[1] = 2;
362 swizzle_pipe[2] = 3;
363 swizzle_pipe[3] = 1;
364 }
331 break; 365 break;
332 case 5: 366 case 5:
333 swizzle_pipe[0] = 0; 367 if (force_no_swizzle) {
334 swizzle_pipe[1] = 2; 368 swizzle_pipe[0] = 0;
335 swizzle_pipe[2] = 4; 369 swizzle_pipe[1] = 1;
336 swizzle_pipe[3] = 1; 370 swizzle_pipe[2] = 2;
337 swizzle_pipe[4] = 3; 371 swizzle_pipe[3] = 3;
372 swizzle_pipe[4] = 4;
373 } else {
374 swizzle_pipe[0] = 0;
375 swizzle_pipe[1] = 2;
376 swizzle_pipe[2] = 4;
377 swizzle_pipe[3] = 1;
378 swizzle_pipe[4] = 3;
379 }
338 break; 380 break;
339 case 6: 381 case 6:
340 swizzle_pipe[0] = 0; 382 if (force_no_swizzle) {
341 swizzle_pipe[1] = 2; 383 swizzle_pipe[0] = 0;
342 swizzle_pipe[2] = 4; 384 swizzle_pipe[1] = 1;
343 swizzle_pipe[3] = 5; 385 swizzle_pipe[2] = 2;
344 swizzle_pipe[4] = 3; 386 swizzle_pipe[3] = 3;
345 swizzle_pipe[5] = 1; 387 swizzle_pipe[4] = 4;
388 swizzle_pipe[5] = 5;
389 } else {
390 swizzle_pipe[0] = 0;
391 swizzle_pipe[1] = 2;
392 swizzle_pipe[2] = 4;
393 swizzle_pipe[3] = 5;
394 swizzle_pipe[4] = 3;
395 swizzle_pipe[5] = 1;
396 }
346 break; 397 break;
347 case 7: 398 case 7:
348 swizzle_pipe[0] = 0; 399 if (force_no_swizzle) {
349 swizzle_pipe[1] = 2; 400 swizzle_pipe[0] = 0;
350 swizzle_pipe[2] = 4; 401 swizzle_pipe[1] = 1;
351 swizzle_pipe[3] = 6; 402 swizzle_pipe[2] = 2;
352 swizzle_pipe[4] = 3; 403 swizzle_pipe[3] = 3;
353 swizzle_pipe[5] = 1; 404 swizzle_pipe[4] = 4;
354 swizzle_pipe[6] = 5; 405 swizzle_pipe[5] = 5;
406 swizzle_pipe[6] = 6;
407 } else {
408 swizzle_pipe[0] = 0;
409 swizzle_pipe[1] = 2;
410 swizzle_pipe[2] = 4;
411 swizzle_pipe[3] = 6;
412 swizzle_pipe[4] = 3;
413 swizzle_pipe[5] = 1;
414 swizzle_pipe[6] = 5;
415 }
355 break; 416 break;
356 case 8: 417 case 8:
357 swizzle_pipe[0] = 0; 418 if (force_no_swizzle) {
358 swizzle_pipe[1] = 2; 419 swizzle_pipe[0] = 0;
359 swizzle_pipe[2] = 4; 420 swizzle_pipe[1] = 1;
360 swizzle_pipe[3] = 6; 421 swizzle_pipe[2] = 2;
361 swizzle_pipe[4] = 3; 422 swizzle_pipe[3] = 3;
362 swizzle_pipe[5] = 1; 423 swizzle_pipe[4] = 4;
363 swizzle_pipe[6] = 7; 424 swizzle_pipe[5] = 5;
364 swizzle_pipe[7] = 5; 425 swizzle_pipe[6] = 6;
426 swizzle_pipe[7] = 7;
427 } else {
428 swizzle_pipe[0] = 0;
429 swizzle_pipe[1] = 2;
430 swizzle_pipe[2] = 4;
431 swizzle_pipe[3] = 6;
432 swizzle_pipe[4] = 3;
433 swizzle_pipe[5] = 1;
434 swizzle_pipe[6] = 7;
435 swizzle_pipe[7] = 5;
436 }
365 break; 437 break;
366 } 438 }
367 439
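
The large hunk above makes the pipe swizzle table depend on the ASIC: RV710/RV740 (force_no_swizzle) get an identity mapping, while RV770/RV730 keep the interleaved order. A self-contained model of that selection for the 4-pipe case; the driver handles 1 through 8 pipes the same way.

/* Model of the swizzle table selection added above (4-pipe case only). */
#include <stdio.h>

void build_swizzle(unsigned num_pipes, int force_no_swizzle, unsigned swizzle[])
{
	/* interleaved order used when swizzling is allowed, from the hunk above */
	static const unsigned swizzled4[4] = { 0, 2, 3, 1 };

	for (unsigned i = 0; i < num_pipes; i++)
		swizzle[i] = force_no_swizzle ? i : swizzled4[i];
}

int main(void)
{
	unsigned map[4];

	build_swizzle(4, 1, map);                 /* e.g. RV740: identity */
	printf("%u %u %u %u\n", map[0], map[1], map[2], map[3]);
	build_swizzle(4, 0, map);                 /* e.g. RV770: interleaved */
	printf("%u %u %u %u\n", map[0], map[1], map[2], map[3]);
	return 0;
}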
@@ -381,8 +453,10 @@ static u32 r700_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
381static void rv770_gpu_init(struct radeon_device *rdev) 453static void rv770_gpu_init(struct radeon_device *rdev)
382{ 454{
383 int i, j, num_qd_pipes; 455 int i, j, num_qd_pipes;
456 u32 ta_aux_cntl;
384 u32 sx_debug_1; 457 u32 sx_debug_1;
385 u32 smx_dc_ctl0; 458 u32 smx_dc_ctl0;
459 u32 db_debug3;
386 u32 num_gs_verts_per_thread; 460 u32 num_gs_verts_per_thread;
387 u32 vgt_gs_per_es; 461 u32 vgt_gs_per_es;
388 u32 gs_prim_buffer_depth = 0; 462 u32 gs_prim_buffer_depth = 0;
@@ -511,6 +585,7 @@ static void rv770_gpu_init(struct radeon_device *rdev)
511 585
512 switch (rdev->config.rv770.max_tile_pipes) { 586 switch (rdev->config.rv770.max_tile_pipes) {
513 case 1: 587 case 1:
588 default:
514 gb_tiling_config |= PIPE_TILING(0); 589 gb_tiling_config |= PIPE_TILING(0);
515 break; 590 break;
516 case 2: 591 case 2:
@@ -522,16 +597,17 @@ static void rv770_gpu_init(struct radeon_device *rdev)
522 case 8: 597 case 8:
523 gb_tiling_config |= PIPE_TILING(3); 598 gb_tiling_config |= PIPE_TILING(3);
524 break; 599 break;
525 default:
526 break;
527 } 600 }
601 rdev->config.rv770.tiling_npipes = rdev->config.rv770.max_tile_pipes;
528 602
529 if (rdev->family == CHIP_RV770) 603 if (rdev->family == CHIP_RV770)
530 gb_tiling_config |= BANK_TILING(1); 604 gb_tiling_config |= BANK_TILING(1);
531 else 605 else
532 gb_tiling_config |= BANK_TILING((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT); 606 gb_tiling_config |= BANK_TILING((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
607 rdev->config.rv770.tiling_nbanks = 4 << ((gb_tiling_config >> 4) & 0x3);
533 608
534 gb_tiling_config |= GROUP_SIZE(0); 609 gb_tiling_config |= GROUP_SIZE(0);
610 rdev->config.rv770.tiling_group_size = 256;
535 611
536 if (((mc_arb_ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT) > 3) { 612 if (((mc_arb_ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT) > 3) {
537 gb_tiling_config |= ROW_TILING(3); 613 gb_tiling_config |= ROW_TILING(3);
@@ -545,18 +621,27 @@ static void rv770_gpu_init(struct radeon_device *rdev)
545 621
546 gb_tiling_config |= BANK_SWAPS(1); 622 gb_tiling_config |= BANK_SWAPS(1);
547 623
548 backend_map = r700_get_tile_pipe_to_backend_map(rdev->config.rv770.max_tile_pipes, 624 cc_rb_backend_disable = RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000;
549 rdev->config.rv770.max_backends, 625 cc_rb_backend_disable |=
550 (0xff << rdev->config.rv770.max_backends) & 0xff); 626 BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << rdev->config.rv770.max_backends) & R7XX_MAX_BACKENDS_MASK);
551 gb_tiling_config |= BACKEND_MAP(backend_map);
552 627
553 cc_gc_shader_pipe_config = 628 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffffff00;
629 cc_gc_shader_pipe_config |=
554 INACTIVE_QD_PIPES((R7XX_MAX_PIPES_MASK << rdev->config.rv770.max_pipes) & R7XX_MAX_PIPES_MASK); 630 INACTIVE_QD_PIPES((R7XX_MAX_PIPES_MASK << rdev->config.rv770.max_pipes) & R7XX_MAX_PIPES_MASK);
555 cc_gc_shader_pipe_config |= 631 cc_gc_shader_pipe_config |=
556 INACTIVE_SIMDS((R7XX_MAX_SIMDS_MASK << rdev->config.rv770.max_simds) & R7XX_MAX_SIMDS_MASK); 632 INACTIVE_SIMDS((R7XX_MAX_SIMDS_MASK << rdev->config.rv770.max_simds) & R7XX_MAX_SIMDS_MASK);
557 633
558 cc_rb_backend_disable = 634 if (rdev->family == CHIP_RV740)
559 BACKEND_DISABLE((R7XX_MAX_BACKENDS_MASK << rdev->config.rv770.max_backends) & R7XX_MAX_BACKENDS_MASK); 635 backend_map = 0x28;
636 else
637 backend_map = r700_get_tile_pipe_to_backend_map(rdev,
638 rdev->config.rv770.max_tile_pipes,
639 (R7XX_MAX_BACKENDS -
640 r600_count_pipe_bits((cc_rb_backend_disable &
641 R7XX_MAX_BACKENDS_MASK) >> 16)),
642 (cc_rb_backend_disable >> 16));
643 gb_tiling_config |= BACKEND_MAP(backend_map);
644
560 645
561 WREG32(GB_TILING_CONFIG, gb_tiling_config); 646 WREG32(GB_TILING_CONFIG, gb_tiling_config);
562 WREG32(DCP_TILING_CONFIG, (gb_tiling_config & 0xffff)); 647 WREG32(DCP_TILING_CONFIG, (gb_tiling_config & 0xffff));
@@ -565,15 +650,15 @@ static void rv770_gpu_init(struct radeon_device *rdev)
565 WREG32(CC_RB_BACKEND_DISABLE, cc_rb_backend_disable); 650 WREG32(CC_RB_BACKEND_DISABLE, cc_rb_backend_disable);
566 WREG32(CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config); 651 WREG32(CC_GC_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
567 WREG32(GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config); 652 WREG32(GC_USER_SHADER_PIPE_CONFIG, cc_gc_shader_pipe_config);
653 WREG32(CC_SYS_RB_BACKEND_DISABLE, cc_rb_backend_disable);
568 654
569 WREG32(CC_SYS_RB_BACKEND_DISABLE, cc_rb_backend_disable);
570 WREG32(CGTS_SYS_TCC_DISABLE, 0); 655 WREG32(CGTS_SYS_TCC_DISABLE, 0);
571 WREG32(CGTS_TCC_DISABLE, 0); 656 WREG32(CGTS_TCC_DISABLE, 0);
572 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0); 657 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
573 WREG32(CGTS_USER_TCC_DISABLE, 0); 658 WREG32(CGTS_USER_TCC_DISABLE, 0);
574 659
575 num_qd_pipes = 660 num_qd_pipes =
576 R7XX_MAX_BACKENDS - r600_count_pipe_bits(cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK); 661 R7XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
577 WREG32(VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & DEALLOC_DIST_MASK); 662 WREG32(VGT_OUT_DEALLOC_CNTL, (num_qd_pipes * 4) & DEALLOC_DIST_MASK);
578 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & VTX_REUSE_DEPTH_MASK); 663 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((num_qd_pipes * 4) - 2) & VTX_REUSE_DEPTH_MASK);
579 664
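
The corrected num_qd_pipes computation above shifts the inactive-pipes field down before counting its bits and subtracts from the pipe maximum rather than the backend maximum. A small self-contained model; count_bits() stands in for r600_count_pipe_bits() and the field offset is taken from the >> 8 in the hunk.

/* Model of the num_qd_pipes computation used above. */
#include <stdint.h>

#define MAX_PIPES            8u
#define INACTIVE_FIELD_SHIFT 8                    /* assumption: matches the >> 8 above */

unsigned count_bits(uint32_t v)
{
	unsigned n = 0;

	for (; v; v &= v - 1)                     /* clear the lowest set bit each pass */
		n++;
	return n;
}

unsigned num_qd_pipes(uint32_t cc_gc_shader_pipe_config, uint32_t inactive_mask)
{
	uint32_t inactive = (cc_gc_shader_pipe_config & inactive_mask) >> INACTIVE_FIELD_SHIFT;

	return MAX_PIPES - count_bits(inactive);
}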
@@ -583,10 +668,8 @@ static void rv770_gpu_init(struct radeon_device *rdev)
583 668
584 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30)); 669 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
585 670
586 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO | 671 ta_aux_cntl = RREG32(TA_CNTL_AUX);
587 SYNC_GRADIENT | 672 WREG32(TA_CNTL_AUX, ta_aux_cntl | DISABLE_CUBE_ANISO);
588 SYNC_WALKER |
589 SYNC_ALIGNER));
590 673
591 sx_debug_1 = RREG32(SX_DEBUG_1); 674 sx_debug_1 = RREG32(SX_DEBUG_1);
592 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS; 675 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
@@ -597,14 +680,28 @@ static void rv770_gpu_init(struct radeon_device *rdev)
597 smx_dc_ctl0 |= CACHE_DEPTH((rdev->config.rv770.sx_num_of_sets * 64) - 1); 680 smx_dc_ctl0 |= CACHE_DEPTH((rdev->config.rv770.sx_num_of_sets * 64) - 1);
598 WREG32(SMX_DC_CTL0, smx_dc_ctl0); 681 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
599 682
600 WREG32(SMX_EVENT_CTL, (ES_FLUSH_CTL(4) | 683 if (rdev->family != CHIP_RV740)
601 GS_FLUSH_CTL(4) | 684 WREG32(SMX_EVENT_CTL, (ES_FLUSH_CTL(4) |
602 ACK_FLUSH_CTL(3) | 685 GS_FLUSH_CTL(4) |
603 SYNC_FLUSH_CTL)); 686 ACK_FLUSH_CTL(3) |
687 SYNC_FLUSH_CTL));
604 688
605 if (rdev->family == CHIP_RV770) 689 db_debug3 = RREG32(DB_DEBUG3);
606 WREG32(DB_DEBUG3, DB_CLK_OFF_DELAY(0x1f)); 690 db_debug3 &= ~DB_CLK_OFF_DELAY(0x1f);
607 else { 691 switch (rdev->family) {
692 case CHIP_RV770:
693 case CHIP_RV740:
694 db_debug3 |= DB_CLK_OFF_DELAY(0x1f);
695 break;
696 case CHIP_RV710:
697 case CHIP_RV730:
698 default:
699 db_debug3 |= DB_CLK_OFF_DELAY(2);
700 break;
701 }
702 WREG32(DB_DEBUG3, db_debug3);
703
704 if (rdev->family != CHIP_RV770) {
608 db_debug4 = RREG32(DB_DEBUG4); 705 db_debug4 = RREG32(DB_DEBUG4);
609 db_debug4 |= DISABLE_TILE_COVERED_FOR_PS_ITER; 706 db_debug4 |= DISABLE_TILE_COVERED_FOR_PS_ITER;
610 WREG32(DB_DEBUG4, db_debug4); 707 WREG32(DB_DEBUG4, db_debug4);
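
The DB_DEBUG3 hunk above switches to a read-modify-write: the whole clock-off-delay field is cleared first, then the per-family value is OR'd in. A generic model of that field update follows; the shift is a placeholder, not the real DB_DEBUG3 layout.

/* Model of the clear-then-set field update used for DB_CLK_OFF_DELAY above. */
#include <stdint.h>

#define CLK_OFF_DELAY_SHIFT 11                    /* assumption: illustrative position only */
#define CLK_OFF_DELAY(x)    (((uint32_t)(x) & 0x1f) << CLK_OFF_DELAY_SHIFT)

uint32_t set_clk_off_delay(uint32_t reg, uint32_t delay)
{
	reg &= ~CLK_OFF_DELAY(0x1f);              /* wipe the old 5-bit field */
	reg |= CLK_OFF_DELAY(delay);              /* 0x1f on RV770/RV740, 2 on RV710/RV730 */
	return reg;
}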
@@ -633,10 +730,10 @@ static void rv770_gpu_init(struct radeon_device *rdev)
633 ALU_UPDATE_FIFO_HIWATER(0x8)); 730 ALU_UPDATE_FIFO_HIWATER(0x8));
634 switch (rdev->family) { 731 switch (rdev->family) {
635 case CHIP_RV770: 732 case CHIP_RV770:
636 sq_ms_fifo_sizes |= FETCH_FIFO_HIWATER(0x1);
637 break;
638 case CHIP_RV730: 733 case CHIP_RV730:
639 case CHIP_RV710: 734 case CHIP_RV710:
735 sq_ms_fifo_sizes |= FETCH_FIFO_HIWATER(0x1);
736 break;
640 case CHIP_RV740: 737 case CHIP_RV740:
641 default: 738 default:
642 sq_ms_fifo_sizes |= FETCH_FIFO_HIWATER(0x4); 739 sq_ms_fifo_sizes |= FETCH_FIFO_HIWATER(0x4);
@@ -772,10 +869,8 @@ static void rv770_gpu_init(struct radeon_device *rdev)
772 869
773int rv770_mc_init(struct radeon_device *rdev) 870int rv770_mc_init(struct radeon_device *rdev)
774{ 871{
775 fixed20_12 a;
776 u32 tmp; 872 u32 tmp;
777 int chansize, numchan; 873 int chansize, numchan;
778 int r;
779 874
780 /* Get VRAM informations */ 875 /* Get VRAM informations */
781 rdev->mc.vram_is_ddr = true; 876 rdev->mc.vram_is_ddr = true;
@@ -810,56 +905,18 @@ int rv770_mc_init(struct radeon_device *rdev)
810 /* Setup GPU memory space */ 905 /* Setup GPU memory space */
811 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE); 906 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
812 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE); 907 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
813 908 rdev->mc.visible_vram_size = rdev->mc.aper_size;
814 if (rdev->mc.mc_vram_size > rdev->mc.aper_size) 909 /* FIXME remove this once we support unmappable VRAM */
910 if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
815 rdev->mc.mc_vram_size = rdev->mc.aper_size; 911 rdev->mc.mc_vram_size = rdev->mc.aper_size;
816
817 if (rdev->mc.real_vram_size > rdev->mc.aper_size)
818 rdev->mc.real_vram_size = rdev->mc.aper_size; 912 rdev->mc.real_vram_size = rdev->mc.aper_size;
819
820 if (rdev->flags & RADEON_IS_AGP) {
821 r = radeon_agp_init(rdev);
822 if (r)
823 return r;
824 /* gtt_size is setup by radeon_agp_init */
825 rdev->mc.gtt_location = rdev->mc.agp_base;
826 tmp = 0xFFFFFFFFUL - rdev->mc.agp_base - rdev->mc.gtt_size;
827 /* Try to put vram before or after AGP because we
828 * we want SYSTEM_APERTURE to cover both VRAM and
829 * AGP so that GPU can catch out of VRAM/AGP access
830 */
831 if (rdev->mc.gtt_location > rdev->mc.mc_vram_size) {
832 /* Enought place before */
833 rdev->mc.vram_location = rdev->mc.gtt_location -
834 rdev->mc.mc_vram_size;
835 } else if (tmp > rdev->mc.mc_vram_size) {
836 /* Enought place after */
837 rdev->mc.vram_location = rdev->mc.gtt_location +
838 rdev->mc.gtt_size;
839 } else {
840 /* Try to setup VRAM then AGP might not
841 * not work on some card
842 */
843 rdev->mc.vram_location = 0x00000000UL;
844 rdev->mc.gtt_location = rdev->mc.mc_vram_size;
845 }
846 } else {
847 rdev->mc.vram_location = 0x00000000UL;
848 rdev->mc.gtt_location = rdev->mc.mc_vram_size;
849 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
850 } 913 }
851 rdev->mc.vram_start = rdev->mc.vram_location; 914 r600_vram_gtt_location(rdev, &rdev->mc);
852 rdev->mc.vram_end = rdev->mc.vram_location + rdev->mc.mc_vram_size - 1; 915 radeon_update_bandwidth_info(rdev);
853 rdev->mc.gtt_start = rdev->mc.gtt_location; 916
854 rdev->mc.gtt_end = rdev->mc.gtt_location + rdev->mc.gtt_size - 1;
855 /* FIXME: we should enforce default clock in case GPU is not in
856 * default setup
857 */
858 a.full = rfixed_const(100);
859 rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
860 rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
861 return 0; 917 return 0;
862} 918}
919
863int rv770_gpu_reset(struct radeon_device *rdev) 920int rv770_gpu_reset(struct radeon_device *rdev)
864{ 921{
865 /* FIXME: implement any rv770 specific bits */ 922 /* FIXME: implement any rv770 specific bits */
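
The rv770_mc_init() rework a few lines above drops the hand-rolled VRAM/GTT placement in favour of r600_vram_gtt_location(), and records the CPU-visible VRAM as the PCI aperture, clamping the reported sizes to it until unmappable VRAM is supported. A small model of that clamp, using plain integers rather than the driver's struct radeon_mc:

/* Model of the aperture clamp added in rv770_mc_init() above. */
#include <stdint.h>

struct mc_sizes {
	uint64_t mc_vram_size, real_vram_size, visible_vram_size, aper_size;
};

void clamp_vram_to_aperture(struct mc_sizes *mc)
{
	mc->visible_vram_size = mc->aper_size;
	if (mc->mc_vram_size > mc->aper_size) {   /* driver FIXME: drop once unmappable VRAM works */
		mc->mc_vram_size = mc->aper_size;
		mc->real_vram_size = mc->aper_size;
	}
}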
@@ -870,6 +927,14 @@ static int rv770_startup(struct radeon_device *rdev)
870{ 927{
871 int r; 928 int r;
872 929
930 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
931 r = r600_init_microcode(rdev);
932 if (r) {
933 DRM_ERROR("Failed to load firmware!\n");
934 return r;
935 }
936 }
937
873 rv770_mc_program(rdev); 938 rv770_mc_program(rdev);
874 if (rdev->flags & RADEON_IS_AGP) { 939 if (rdev->flags & RADEON_IS_AGP) {
875 rv770_agp_enable(rdev); 940 rv770_agp_enable(rdev);
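
The new check at the top of rv770_startup() above loads the microcode only when the firmware blobs are not already cached on the device, so the resume path does not re-request them. The sketch below mirrors the calls in the hunk; it relies on kernel-internal API and is shown for illustration only.

/* Sketch of the load-microcode-once check added above. */
static int ensure_microcode(struct radeon_device *rdev)
{
	int r;

	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);    /* requests the PFP/ME/RLC firmware images */
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}
	return 0;                                 /* already cached: nothing to do on resume */
}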
@@ -879,13 +944,33 @@ static int rv770_startup(struct radeon_device *rdev)
879 return r; 944 return r;
880 } 945 }
881 rv770_gpu_init(rdev); 946 rv770_gpu_init(rdev);
882 947 r = r600_blit_init(rdev);
883 r = radeon_object_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
884 &rdev->r600_blit.shader_gpu_addr);
885 if (r) { 948 if (r) {
886 DRM_ERROR("failed to pin blit object %d\n", r); 949 r600_blit_fini(rdev);
950 rdev->asic->copy = NULL;
951 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
952 }
953 /* pin copy shader into vram */
954 if (rdev->r600_blit.shader_obj) {
955 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
956 if (unlikely(r != 0))
957 return r;
958 r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
959 &rdev->r600_blit.shader_gpu_addr);
960 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
961 if (r) {
962 DRM_ERROR("failed to pin blit object %d\n", r);
963 return r;
964 }
965 }
966 /* Enable IRQ */
967 r = r600_irq_init(rdev);
968 if (r) {
969 DRM_ERROR("radeon: IH init failed (%d).\n", r);
970 radeon_irq_kms_fini(rdev);
887 return r; 971 return r;
888 } 972 }
973 r600_irq_set(rdev);
889 974
890 r = radeon_ring_init(rdev, rdev->cp.ring_size); 975 r = radeon_ring_init(rdev, rdev->cp.ring_size);
891 if (r) 976 if (r)
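
The startup hunk above also makes blitter initialisation non-fatal: if r600_blit_init() fails, the blitter is torn down and the ASIC copy hook is cleared so buffer moves fall back to CPU memcpy instead of aborting bring-up. A sketch mirroring that fallback; kernel-internal API, not standalone-buildable.

/* Sketch of the blitter-or-memcpy fallback added above. */
static void init_blitter_or_fallback(struct radeon_device *rdev)
{
	int r = r600_blit_init(rdev);

	if (r) {
		r600_blit_fini(rdev);             /* free whatever was partially set up */
		rdev->asic->copy = NULL;          /* copy offload disabled; moves use memcpy */
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
	}
}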
@@ -928,19 +1013,36 @@ int rv770_resume(struct radeon_device *rdev)
928 DRM_ERROR("radeon: failled testing IB (%d).\n", r); 1013 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
929 return r; 1014 return r;
930 } 1015 }
1016
1017 r = r600_audio_init(rdev);
1018 if (r) {
1019 dev_err(rdev->dev, "radeon: audio init failed\n");
1020 return r;
1021 }
1022
931 return r; 1023 return r;
932 1024
933} 1025}
934 1026
935int rv770_suspend(struct radeon_device *rdev) 1027int rv770_suspend(struct radeon_device *rdev)
936{ 1028{
1029 int r;
1030
1031 r600_audio_fini(rdev);
937 /* FIXME: we should wait for ring to be empty */ 1032 /* FIXME: we should wait for ring to be empty */
938 r700_cp_stop(rdev); 1033 r700_cp_stop(rdev);
939 rdev->cp.ready = false; 1034 rdev->cp.ready = false;
1035 r600_irq_suspend(rdev);
940 r600_wb_disable(rdev); 1036 r600_wb_disable(rdev);
941 rv770_pcie_gart_disable(rdev); 1037 rv770_pcie_gart_disable(rdev);
942 /* unpin shaders bo */ 1038 /* unpin shaders bo */
943 radeon_object_unpin(rdev->r600_blit.shader_obj); 1039 if (rdev->r600_blit.shader_obj) {
1040 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1041 if (likely(r == 0)) {
1042 radeon_bo_unpin(rdev->r600_blit.shader_obj);
1043 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1044 }
1045 }
944 return 0; 1046 return 0;
945} 1047}
946 1048
@@ -975,7 +1077,11 @@ int rv770_init(struct radeon_device *rdev)
975 if (r) 1077 if (r)
976 return r; 1078 return r;
977 /* Post card if necessary */ 1079 /* Post card if necessary */
978 if (!r600_card_posted(rdev) && rdev->bios) { 1080 if (!r600_card_posted(rdev)) {
1081 if (!rdev->bios) {
1082 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
1083 return -EINVAL;
1084 }
979 DRM_INFO("GPU not posted. posting now...\n"); 1085 DRM_INFO("GPU not posted. posting now...\n");
980 atom_asic_init(rdev->mode_info.atom_context); 1086 atom_asic_init(rdev->mode_info.atom_context);
981 } 1087 }
@@ -994,72 +1100,82 @@ int rv770_init(struct radeon_device *rdev)
994 r = radeon_fence_driver_init(rdev); 1100 r = radeon_fence_driver_init(rdev);
995 if (r) 1101 if (r)
996 return r; 1102 return r;
1103 /* initialize AGP */
1104 if (rdev->flags & RADEON_IS_AGP) {
1105 r = radeon_agp_init(rdev);
1106 if (r)
1107 radeon_agp_disable(rdev);
1108 }
997 r = rv770_mc_init(rdev); 1109 r = rv770_mc_init(rdev);
998 if (r) 1110 if (r)
999 return r; 1111 return r;
1000 /* Memory manager */ 1112 /* Memory manager */
1001 r = radeon_object_init(rdev); 1113 r = radeon_bo_init(rdev);
1114 if (r)
1115 return r;
1116
1117 r = radeon_irq_kms_init(rdev);
1002 if (r) 1118 if (r)
1003 return r; 1119 return r;
1120
1004 rdev->cp.ring_obj = NULL; 1121 rdev->cp.ring_obj = NULL;
1005 r600_ring_init(rdev, 1024 * 1024); 1122 r600_ring_init(rdev, 1024 * 1024);
1006 1123
1007 if (!rdev->me_fw || !rdev->pfp_fw) { 1124 rdev->ih.ring_obj = NULL;
1008 r = r600_cp_init_microcode(rdev); 1125 r600_ih_ring_init(rdev, 64 * 1024);
1009 if (r) {
1010 DRM_ERROR("Failed to load firmware!\n");
1011 return r;
1012 }
1013 }
1014 1126
1015 r = r600_pcie_gart_init(rdev); 1127 r = r600_pcie_gart_init(rdev);
1016 if (r) 1128 if (r)
1017 return r; 1129 return r;
1018 1130
1019 rdev->accel_working = true; 1131 rdev->accel_working = true;
1020 r = r600_blit_init(rdev);
1021 if (r) {
1022 DRM_ERROR("radeon: failled blitter (%d).\n", r);
1023 rdev->accel_working = false;
1024 }
1025
1026 r = rv770_startup(rdev); 1132 r = rv770_startup(rdev);
1027 if (r) { 1133 if (r) {
1028 rv770_suspend(rdev); 1134 dev_err(rdev->dev, "disabling GPU acceleration\n");
1135 r600_cp_fini(rdev);
1029 r600_wb_fini(rdev); 1136 r600_wb_fini(rdev);
1030 radeon_ring_fini(rdev); 1137 r600_irq_fini(rdev);
1138 radeon_irq_kms_fini(rdev);
1031 rv770_pcie_gart_fini(rdev); 1139 rv770_pcie_gart_fini(rdev);
1032 rdev->accel_working = false; 1140 rdev->accel_working = false;
1033 } 1141 }
1034 if (rdev->accel_working) { 1142 if (rdev->accel_working) {
1035 r = radeon_ib_pool_init(rdev); 1143 r = radeon_ib_pool_init(rdev);
1036 if (r) { 1144 if (r) {
1037 DRM_ERROR("radeon: failled initializing IB pool (%d).\n", r); 1145 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
1038 rdev->accel_working = false;
1039 }
1040 r = r600_ib_test(rdev);
1041 if (r) {
1042 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
1043 rdev->accel_working = false; 1146 rdev->accel_working = false;
1147 } else {
1148 r = r600_ib_test(rdev);
1149 if (r) {
1150 dev_err(rdev->dev, "IB test failed (%d).\n", r);
1151 rdev->accel_working = false;
1152 }
1044 } 1153 }
1045 } 1154 }
1155
1156 r = r600_audio_init(rdev);
1157 if (r) {
1158 dev_err(rdev->dev, "radeon: audio init failed\n");
1159 return r;
1160 }
1161
1046 return 0; 1162 return 0;
1047} 1163}
1048 1164
1049void rv770_fini(struct radeon_device *rdev) 1165void rv770_fini(struct radeon_device *rdev)
1050{ 1166{
1051 rv770_suspend(rdev); 1167 radeon_pm_fini(rdev);
1052
1053 r600_blit_fini(rdev); 1168 r600_blit_fini(rdev);
1054 radeon_ring_fini(rdev); 1169 r600_cp_fini(rdev);
1055 r600_wb_fini(rdev); 1170 r600_wb_fini(rdev);
1171 r600_irq_fini(rdev);
1172 radeon_irq_kms_fini(rdev);
1056 rv770_pcie_gart_fini(rdev); 1173 rv770_pcie_gart_fini(rdev);
1057 radeon_gem_fini(rdev); 1174 radeon_gem_fini(rdev);
1058 radeon_fence_driver_fini(rdev); 1175 radeon_fence_driver_fini(rdev);
1059 radeon_clocks_fini(rdev); 1176 radeon_clocks_fini(rdev);
1060 if (rdev->flags & RADEON_IS_AGP) 1177 radeon_agp_fini(rdev);
1061 radeon_agp_fini(rdev); 1178 radeon_bo_fini(rdev);
1062 radeon_object_fini(rdev);
1063 radeon_atombios_fini(rdev); 1179 radeon_atombios_fini(rdev);
1064 kfree(rdev->bios); 1180 kfree(rdev->bios);
1065 rdev->bios = NULL; 1181 rdev->bios = NULL;
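
The reworked rv770_init() error path and the new rv770_fini() above both tear subsystems down roughly in the reverse of the order rv770_startup() brings them up: CP, then writeback, then IRQ, then GART. A generic userspace sketch of that unwind-in-reverse pattern follows, with dummy hooks standing in for the driver's fini functions.

/* Generic model of the reverse-order teardown used above (not driver code). */
#include <stdio.h>

typedef void (*cleanup_fn)(void);

static void cp_fini(void)   { puts("cp_fini"); }
static void wb_fini(void)   { puts("wb_fini"); }
static void irq_fini(void)  { puts("irq_fini"); }
static void gart_fini(void) { puts("gart_fini"); }

int main(void)
{
	/* listed in bring-up order: GART, IRQ, writeback, CP */
	cleanup_fn teardown[] = { gart_fini, irq_fini, wb_fini, cp_fini };
	int n = (int)(sizeof(teardown) / sizeof(teardown[0]));

	for (int i = n - 1; i >= 0; i--)          /* tear down the newest subsystem first */
		teardown[i]();
	return 0;
}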
diff --git a/drivers/gpu/drm/radeon/rv770d.h b/drivers/gpu/drm/radeon/rv770d.h
index a1367ab6f261..9506f8cb99e0 100644
--- a/drivers/gpu/drm/radeon/rv770d.h
+++ b/drivers/gpu/drm/radeon/rv770d.h
@@ -343,4 +343,6 @@
343 343
344#define WAIT_UNTIL 0x8040 344#define WAIT_UNTIL 0x8040
345 345
346#define SRBM_STATUS 0x0E50
347
346#endif 348#endif