author     Paul Mundt <lethal@linux-sh.org>    2010-07-05 02:46:08 -0400
committer  Paul Mundt <lethal@linux-sh.org>    2010-07-05 02:46:08 -0400
commit     285eba57db7bd7d7c3c5929fb8621fdcaaea1b00 (patch)
tree       a9e7f0563cef296b24c53b20dbb388ec5c210172 /drivers/gpu/drm/radeon
parent     1c14e6cecb1811543b1016f27e5d308fbea8c08a (diff)
parent     815c4163b6c8ebf8152f42b0a5fd015cfdcedc78 (diff)
Merge branch 'master' of git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux-2.6
Conflicts:
	include/linux/serial_sci.h

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
Diffstat (limited to 'drivers/gpu/drm/radeon')
-rw-r--r--  drivers/gpu/drm/radeon/Makefile                  |    7
-rw-r--r--  drivers/gpu/drm/radeon/atombios_crtc.c           |    2
-rw-r--r--  drivers/gpu/drm/radeon/evergreen.c               |   50
-rw-r--r--  drivers/gpu/drm/radeon/evergreen_cs.c            | 1356
-rw-r--r--  drivers/gpu/drm/radeon/evergreen_reg.h           |    3
-rw-r--r--  drivers/gpu/drm/radeon/evergreend.h              |  467
-rw-r--r--  drivers/gpu/drm/radeon/r100.c                    |   91
-rw-r--r--  drivers/gpu/drm/radeon/r200.c                    |    5
-rw-r--r--  drivers/gpu/drm/radeon/r300.c                    |    5
-rw-r--r--  drivers/gpu/drm/radeon/r420.c                    |   12
-rw-r--r--  drivers/gpu/drm/radeon/r600.c                    |  122
-rw-r--r--  drivers/gpu/drm/radeon/radeon.h                  |   17
-rw-r--r--  drivers/gpu/drm/radeon/radeon_asic.c             |   11
-rw-r--r--  drivers/gpu/drm/radeon/radeon_asic.h             |    1
-rw-r--r--  drivers/gpu/drm/radeon/radeon_atombios.c         |   76
-rw-r--r--  drivers/gpu/drm/radeon/radeon_bios.c             |    4
-rw-r--r--  drivers/gpu/drm/radeon/radeon_combios.c          |   58
-rw-r--r--  drivers/gpu/drm/radeon/radeon_cursor.c           |    2
-rw-r--r--  drivers/gpu/drm/radeon/radeon_device.c           |   16
-rw-r--r--  drivers/gpu/drm/radeon/radeon_display.c          |    3
-rw-r--r--  drivers/gpu/drm/radeon/radeon_drv.c              |    3
-rw-r--r--  drivers/gpu/drm/radeon/radeon_encoders.c         |    4
-rw-r--r--  drivers/gpu/drm/radeon/radeon_fb.c               |   12
-rw-r--r--  drivers/gpu/drm/radeon/radeon_kms.c              |    9
-rw-r--r--  drivers/gpu/drm/radeon/radeon_legacy_encoders.c  |   33
-rw-r--r--  drivers/gpu/drm/radeon/radeon_mode.h             |    1
-rw-r--r--  drivers/gpu/drm/radeon/radeon_pm.c               |  211
-rw-r--r--  drivers/gpu/drm/radeon/reg_srcs/evergreen        |  611
-rw-r--r--  drivers/gpu/drm/radeon/rs600.c                   |    3
-rw-r--r--  drivers/gpu/drm/radeon/rs690.c                   |   41
-rw-r--r--  drivers/gpu/drm/radeon/rv770.c                   |   15
31 files changed, 3049 insertions(+), 202 deletions(-)
diff --git a/drivers/gpu/drm/radeon/Makefile b/drivers/gpu/drm/radeon/Makefile
index 3c91312dea9a..84b1f2729d43 100644
--- a/drivers/gpu/drm/radeon/Makefile
+++ b/drivers/gpu/drm/radeon/Makefile
@@ -33,6 +33,9 @@ $(obj)/rs600_reg_safe.h: $(src)/reg_srcs/rs600 $(obj)/mkregtable
 $(obj)/r600_reg_safe.h: $(src)/reg_srcs/r600 $(obj)/mkregtable
 	$(call if_changed,mkregtable)
 
+$(obj)/evergreen_reg_safe.h: $(src)/reg_srcs/evergreen $(obj)/mkregtable
+	$(call if_changed,mkregtable)
+
 $(obj)/r100.o: $(obj)/r100_reg_safe.h $(obj)/rn50_reg_safe.h
 
 $(obj)/r200.o: $(obj)/r200_reg_safe.h
@@ -47,6 +50,8 @@ $(obj)/rs600.o: $(obj)/rs600_reg_safe.h
 
 $(obj)/r600_cs.o: $(obj)/r600_reg_safe.h
 
+$(obj)/evergreen_cs.o: $(obj)/evergreen_reg_safe.h
+
 radeon-y := radeon_drv.o radeon_cp.o radeon_state.o radeon_mem.o \
 	radeon_irq.o r300_cmdbuf.o r600_cp.o
 # add KMS driver
@@ -60,7 +65,7 @@ radeon-y += radeon_device.o radeon_asic.o radeon_kms.o \
 	rs400.o rs600.o rs690.o rv515.o r520.o r600.o rv770.o radeon_test.o \
 	r200.o radeon_legacy_tv.o r600_cs.o r600_blit.o r600_blit_shaders.o \
 	r600_blit_kms.o radeon_pm.o atombios_dp.o r600_audio.o r600_hdmi.o \
-	evergreen.o
+	evergreen.o evergreen_cs.o
 
 radeon-$(CONFIG_COMPAT) += radeon_ioc32.o
 radeon-$(CONFIG_VGA_SWITCHEROO) += radeon_atpx_handler.o
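Editorial note, not part of the patch: the evergreen_reg_safe.h table that the new mkregtable rule generates from reg_srcs/evergreen is what the command-stream checker in evergreen_cs.c (added further down) consults before accepting a register write. A minimal sketch of that lookup, assuming only what the checker code itself shows (one 32-bit bitmap word per 128 bytes of register space, a set bit meaning the register needs the special-case handling in the big switch):

#include <stddef.h>
#include <stdint.h>

/* Sketch of the evergreen_cs_check_reg() bitmap lookup.  "safe_bm" stands in
 * for the evergreen_reg_safe_bm array emitted into evergreen_reg_safe.h. */
static int reg_needs_special_check(const uint32_t *safe_bm, size_t nwords, uint32_t reg)
{
	uint32_t word = reg >> 7;          /* 32 dword registers (128 bytes) per bitmap word */
	uint32_t bit  = (reg >> 2) & 31;   /* which dword register within that word */

	if (word >= nwords)
		return -1;                 /* outside the table: the write is rejected */
	return (safe_bm[word] >> bit) & 1; /* 0: accepted as-is, 1: handled in the switch */
}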
diff --git a/drivers/gpu/drm/radeon/atombios_crtc.c b/drivers/gpu/drm/radeon/atombios_crtc.c
index f3f2827017ef..8c2d6478a221 100644
--- a/drivers/gpu/drm/radeon/atombios_crtc.c
+++ b/drivers/gpu/drm/radeon/atombios_crtc.c
@@ -498,7 +498,7 @@ static u32 atombios_adjust_pll(struct drm_crtc *crtc,
 	if ((rdev->family == CHIP_RS600) ||
 	    (rdev->family == CHIP_RS690) ||
 	    (rdev->family == CHIP_RS740))
-		pll->flags |= (RADEON_PLL_USE_FRAC_FB_DIV |
+		pll->flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
 			       RADEON_PLL_PREFER_CLOSEST_LOWER);
 
 	if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */
diff --git a/drivers/gpu/drm/radeon/evergreen.c b/drivers/gpu/drm/radeon/evergreen.c
index 8c8e4d3cbaa3..1caf625e472b 100644
--- a/drivers/gpu/drm/radeon/evergreen.c
+++ b/drivers/gpu/drm/radeon/evergreen.c
@@ -41,7 +41,18 @@ void evergreen_fini(struct radeon_device *rdev);
 
 void evergreen_pm_misc(struct radeon_device *rdev)
 {
-
+	int req_ps_idx = rdev->pm.requested_power_state_index;
+	int req_cm_idx = rdev->pm.requested_clock_mode_index;
+	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
+	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
+
+	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
+		if (voltage->voltage != rdev->pm.current_vddc) {
+			radeon_atom_set_voltage(rdev, voltage->voltage);
+			rdev->pm.current_vddc = voltage->voltage;
+			DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
+		}
+	}
 }
 
 void evergreen_pm_prepare(struct radeon_device *rdev)
@@ -596,7 +607,7 @@ static void evergreen_mc_program(struct radeon_device *rdev)
 	WREG32(MC_VM_FB_LOCATION, tmp);
 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
 	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
-	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
+	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
 	if (rdev->flags & RADEON_IS_AGP) {
 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
@@ -1211,11 +1222,11 @@ static void evergreen_gpu_init(struct radeon_device *rdev)
 		ps_thread_count = 128;
 
 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
-	sq_thread_resource_mgmt |= NUM_VS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
-	sq_thread_resource_mgmt |= NUM_GS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
-	sq_thread_resource_mgmt |= NUM_ES_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
-	sq_thread_resource_mgmt_2 = NUM_HS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
-	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
+	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
+	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
+	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
+	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
+	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
 
 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
@@ -1249,6 +1260,9 @@ static void evergreen_gpu_init(struct radeon_device *rdev)
 	WREG32(VGT_GS_VERTEX_REUSE, 16);
 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
 
+	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
+	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
+
 	WREG32(CB_PERF_CTR0_SEL_0, 0);
 	WREG32(CB_PERF_CTR0_SEL_1, 0);
 	WREG32(CB_PERF_CTR1_SEL_0, 0);
@@ -1258,6 +1272,26 @@ static void evergreen_gpu_init(struct radeon_device *rdev)
 	WREG32(CB_PERF_CTR3_SEL_0, 0);
 	WREG32(CB_PERF_CTR3_SEL_1, 0);
 
+	/* clear render buffer base addresses */
+	WREG32(CB_COLOR0_BASE, 0);
+	WREG32(CB_COLOR1_BASE, 0);
+	WREG32(CB_COLOR2_BASE, 0);
+	WREG32(CB_COLOR3_BASE, 0);
+	WREG32(CB_COLOR4_BASE, 0);
+	WREG32(CB_COLOR5_BASE, 0);
+	WREG32(CB_COLOR6_BASE, 0);
+	WREG32(CB_COLOR7_BASE, 0);
+	WREG32(CB_COLOR8_BASE, 0);
+	WREG32(CB_COLOR9_BASE, 0);
+	WREG32(CB_COLOR10_BASE, 0);
+	WREG32(CB_COLOR11_BASE, 0);
+
+	/* set the shader const cache sizes to 0 */
+	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
+		WREG32(i, 0);
+	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
+		WREG32(i, 0);
+
 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
 
@@ -2148,7 +2182,7 @@ int evergreen_init(struct radeon_device *rdev)
 	if (r)
 		return r;
 
-	rdev->accel_working = false;
+	rdev->accel_working = true;
 	r = evergreen_startup(rdev);
 	if (r) {
 		dev_err(rdev->dev, "disabling GPU acceleration\n");
diff --git a/drivers/gpu/drm/radeon/evergreen_cs.c b/drivers/gpu/drm/radeon/evergreen_cs.c
new file mode 100644
index 000000000000..010963d4570f
--- /dev/null
+++ b/drivers/gpu/drm/radeon/evergreen_cs.c
@@ -0,0 +1,1356 @@
1/*
2 * Copyright 2010 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Jerome Glisse.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 * Authors: Dave Airlie
25 * Alex Deucher
26 * Jerome Glisse
27 */
28#include "drmP.h"
29#include "radeon.h"
30#include "evergreend.h"
31#include "evergreen_reg_safe.h"
32
33static int evergreen_cs_packet_next_reloc(struct radeon_cs_parser *p,
34 struct radeon_cs_reloc **cs_reloc);
35
36struct evergreen_cs_track {
37 u32 group_size;
38 u32 nbanks;
39 u32 npipes;
40 /* value we track */
41 u32 nsamples;
42 u32 cb_color_base_last[12];
43 struct radeon_bo *cb_color_bo[12];
44 u32 cb_color_bo_offset[12];
45 struct radeon_bo *cb_color_fmask_bo[8];
46 struct radeon_bo *cb_color_cmask_bo[8];
47 u32 cb_color_info[12];
48 u32 cb_color_view[12];
49 u32 cb_color_pitch_idx[12];
50 u32 cb_color_slice_idx[12];
51 u32 cb_color_dim_idx[12];
52 u32 cb_color_dim[12];
53 u32 cb_color_pitch[12];
54 u32 cb_color_slice[12];
55 u32 cb_color_cmask_slice[8];
56 u32 cb_color_fmask_slice[8];
57 u32 cb_target_mask;
58 u32 cb_shader_mask;
59 u32 vgt_strmout_config;
60 u32 vgt_strmout_buffer_config;
61 u32 db_depth_control;
62 u32 db_depth_view;
63 u32 db_depth_size;
64 u32 db_depth_size_idx;
65 u32 db_z_info;
66 u32 db_z_idx;
67 u32 db_z_read_offset;
68 u32 db_z_write_offset;
69 struct radeon_bo *db_z_read_bo;
70 struct radeon_bo *db_z_write_bo;
71 u32 db_s_info;
72 u32 db_s_idx;
73 u32 db_s_read_offset;
74 u32 db_s_write_offset;
75 struct radeon_bo *db_s_read_bo;
76 struct radeon_bo *db_s_write_bo;
77};
78
79static void evergreen_cs_track_init(struct evergreen_cs_track *track)
80{
81 int i;
82
83 for (i = 0; i < 8; i++) {
84 track->cb_color_fmask_bo[i] = NULL;
85 track->cb_color_cmask_bo[i] = NULL;
86 track->cb_color_cmask_slice[i] = 0;
87 track->cb_color_fmask_slice[i] = 0;
88 }
89
90 for (i = 0; i < 12; i++) {
91 track->cb_color_base_last[i] = 0;
92 track->cb_color_bo[i] = NULL;
93 track->cb_color_bo_offset[i] = 0xFFFFFFFF;
94 track->cb_color_info[i] = 0;
95 track->cb_color_view[i] = 0;
96 track->cb_color_pitch_idx[i] = 0;
97 track->cb_color_slice_idx[i] = 0;
98 track->cb_color_dim[i] = 0;
99 track->cb_color_pitch[i] = 0;
100 track->cb_color_slice[i] = 0;
101 track->cb_color_dim[i] = 0;
102 }
103 track->cb_target_mask = 0xFFFFFFFF;
104 track->cb_shader_mask = 0xFFFFFFFF;
105
106 track->db_depth_view = 0xFFFFC000;
107 track->db_depth_size = 0xFFFFFFFF;
108 track->db_depth_size_idx = 0;
109 track->db_depth_control = 0xFFFFFFFF;
110 track->db_z_info = 0xFFFFFFFF;
111 track->db_z_idx = 0xFFFFFFFF;
112 track->db_z_read_offset = 0xFFFFFFFF;
113 track->db_z_write_offset = 0xFFFFFFFF;
114 track->db_z_read_bo = NULL;
115 track->db_z_write_bo = NULL;
116 track->db_s_info = 0xFFFFFFFF;
117 track->db_s_idx = 0xFFFFFFFF;
118 track->db_s_read_offset = 0xFFFFFFFF;
119 track->db_s_write_offset = 0xFFFFFFFF;
120 track->db_s_read_bo = NULL;
121 track->db_s_write_bo = NULL;
122}
123
124static inline int evergreen_cs_track_validate_cb(struct radeon_cs_parser *p, int i)
125{
126 /* XXX fill in */
127 return 0;
128}
129
130static int evergreen_cs_track_check(struct radeon_cs_parser *p)
131{
132 struct evergreen_cs_track *track = p->track;
133
134 /* we don't support stream out buffer yet */
135 if (track->vgt_strmout_config || track->vgt_strmout_buffer_config) {
136 dev_warn(p->dev, "this kernel doesn't support SMX output buffer\n");
137 return -EINVAL;
138 }
139
140 /* XXX fill in */
141 return 0;
142}
143
144/**
145 * evergreen_cs_packet_parse() - parse cp packet and point ib index to next packet
146 * @parser: parser structure holding parsing context.
147 * @pkt: where to store packet information
148 *
149 * Assumes that chunk_ib_index is properly set. Returns -EINVAL
150 * if the packet is bigger than the remaining ib size or if the packet type is unknown.
151 **/
152int evergreen_cs_packet_parse(struct radeon_cs_parser *p,
153 struct radeon_cs_packet *pkt,
154 unsigned idx)
155{
156 struct radeon_cs_chunk *ib_chunk = &p->chunks[p->chunk_ib_idx];
157 uint32_t header;
158
159 if (idx >= ib_chunk->length_dw) {
160 DRM_ERROR("Can not parse packet at %d after CS end %d !\n",
161 idx, ib_chunk->length_dw);
162 return -EINVAL;
163 }
164 header = radeon_get_ib_value(p, idx);
165 pkt->idx = idx;
166 pkt->type = CP_PACKET_GET_TYPE(header);
167 pkt->count = CP_PACKET_GET_COUNT(header);
168 pkt->one_reg_wr = 0;
169 switch (pkt->type) {
170 case PACKET_TYPE0:
171 pkt->reg = CP_PACKET0_GET_REG(header);
172 break;
173 case PACKET_TYPE3:
174 pkt->opcode = CP_PACKET3_GET_OPCODE(header);
175 break;
176 case PACKET_TYPE2:
177 pkt->count = -1;
178 break;
179 default:
180 DRM_ERROR("Unknown packet type %d at %d !\n", pkt->type, idx);
181 return -EINVAL;
182 }
183 if ((pkt->count + 1 + pkt->idx) >= ib_chunk->length_dw) {
184 DRM_ERROR("Packet (%d:%d:%d) end after CS buffer (%d) !\n",
185 pkt->idx, pkt->type, pkt->count, ib_chunk->length_dw);
186 return -EINVAL;
187 }
188 return 0;
189}
190
191/**
192 * evergreen_cs_packet_next_reloc() - parse next packet which should be reloc packet3
193 * @parser: parser structure holding parsing context.
194 * @data: pointer to relocation data
195 * @offset_start: starting offset
196 * @offset_mask: offset mask (to align start offset on)
197 * @reloc: reloc information
198 *
199 * Checks that the next packet is a relocation packet3, does bo validation and
200 * computes the GPU offset using the provided start.
201 **/
202static int evergreen_cs_packet_next_reloc(struct radeon_cs_parser *p,
203 struct radeon_cs_reloc **cs_reloc)
204{
205 struct radeon_cs_chunk *relocs_chunk;
206 struct radeon_cs_packet p3reloc;
207 unsigned idx;
208 int r;
209
210 if (p->chunk_relocs_idx == -1) {
211 DRM_ERROR("No relocation chunk !\n");
212 return -EINVAL;
213 }
214 *cs_reloc = NULL;
215 relocs_chunk = &p->chunks[p->chunk_relocs_idx];
216 r = evergreen_cs_packet_parse(p, &p3reloc, p->idx);
217 if (r) {
218 return r;
219 }
220 p->idx += p3reloc.count + 2;
221 if (p3reloc.type != PACKET_TYPE3 || p3reloc.opcode != PACKET3_NOP) {
222 DRM_ERROR("No packet3 for relocation for packet at %d.\n",
223 p3reloc.idx);
224 return -EINVAL;
225 }
226 idx = radeon_get_ib_value(p, p3reloc.idx + 1);
227 if (idx >= relocs_chunk->length_dw) {
228 DRM_ERROR("Relocs at %d after relocations chunk end %d !\n",
229 idx, relocs_chunk->length_dw);
230 return -EINVAL;
231 }
232 /* FIXME: we assume reloc size is 4 dwords */
233 *cs_reloc = p->relocs_ptr[(idx / 4)];
234 return 0;
235}
236
237/**
238 * evergreen_cs_packet_next_is_pkt3_nop() - test if next packet is packet3 nop for reloc
239 * @parser: parser structure holding parsing context.
240 *
241 * Returns 1 if the next packet is a relocation packet3 (a NOP carrying the
242 * relocation index), 0 otherwise.
243 **/
244static inline int evergreen_cs_packet_next_is_pkt3_nop(struct radeon_cs_parser *p)
245{
246 struct radeon_cs_packet p3reloc;
247 int r;
248
249 r = evergreen_cs_packet_parse(p, &p3reloc, p->idx);
250 if (r) {
251 return 0;
252 }
253 if (p3reloc.type != PACKET_TYPE3 || p3reloc.opcode != PACKET3_NOP) {
254 return 0;
255 }
256 return 1;
257}
258
259/**
260 * evergreen_cs_packet_next_vline() - parse userspace VLINE packet
261 * @parser: parser structure holding parsing context.
262 *
263 * Userspace sends a special sequence for VLINE waits.
264 * PACKET0 - VLINE_START_END + value
265 * PACKET3 - WAIT_REG_MEM poll vline status reg
266 * RELOC (P3) - crtc_id in reloc.
267 *
268 * This function parses this and relocates the VLINE START END
269 * and WAIT_REG_MEM packets to the correct crtc.
270 * It also detects a switched off crtc and nulls out the
271 * wait in that case.
272 */
273static int evergreen_cs_packet_parse_vline(struct radeon_cs_parser *p)
274{
275 struct drm_mode_object *obj;
276 struct drm_crtc *crtc;
277 struct radeon_crtc *radeon_crtc;
278 struct radeon_cs_packet p3reloc, wait_reg_mem;
279 int crtc_id;
280 int r;
281 uint32_t header, h_idx, reg, wait_reg_mem_info;
282 volatile uint32_t *ib;
283
284 ib = p->ib->ptr;
285
286 /* parse the WAIT_REG_MEM */
287 r = evergreen_cs_packet_parse(p, &wait_reg_mem, p->idx);
288 if (r)
289 return r;
290
291 /* check its a WAIT_REG_MEM */
292 if (wait_reg_mem.type != PACKET_TYPE3 ||
293 wait_reg_mem.opcode != PACKET3_WAIT_REG_MEM) {
294 DRM_ERROR("vline wait missing WAIT_REG_MEM segment\n");
295 r = -EINVAL;
296 return r;
297 }
298
299 wait_reg_mem_info = radeon_get_ib_value(p, wait_reg_mem.idx + 1);
300 /* bit 4 is reg (0) or mem (1) */
301 if (wait_reg_mem_info & 0x10) {
302 DRM_ERROR("vline WAIT_REG_MEM waiting on MEM rather than REG\n");
303 r = -EINVAL;
304 return r;
305 }
306 /* waiting for value to be equal */
307 if ((wait_reg_mem_info & 0x7) != 0x3) {
308 DRM_ERROR("vline WAIT_REG_MEM function not equal\n");
309 r = -EINVAL;
310 return r;
311 }
312 if ((radeon_get_ib_value(p, wait_reg_mem.idx + 2) << 2) != EVERGREEN_VLINE_STATUS) {
313 DRM_ERROR("vline WAIT_REG_MEM bad reg\n");
314 r = -EINVAL;
315 return r;
316 }
317
318 if (radeon_get_ib_value(p, wait_reg_mem.idx + 5) != EVERGREEN_VLINE_STAT) {
319 DRM_ERROR("vline WAIT_REG_MEM bad bit mask\n");
320 r = -EINVAL;
321 return r;
322 }
323
324 /* jump over the NOP */
325 r = evergreen_cs_packet_parse(p, &p3reloc, p->idx + wait_reg_mem.count + 2);
326 if (r)
327 return r;
328
329 h_idx = p->idx - 2;
330 p->idx += wait_reg_mem.count + 2;
331 p->idx += p3reloc.count + 2;
332
333 header = radeon_get_ib_value(p, h_idx);
334 crtc_id = radeon_get_ib_value(p, h_idx + 2 + 7 + 1);
335 reg = CP_PACKET0_GET_REG(header);
336 mutex_lock(&p->rdev->ddev->mode_config.mutex);
337 obj = drm_mode_object_find(p->rdev->ddev, crtc_id, DRM_MODE_OBJECT_CRTC);
338 if (!obj) {
339 DRM_ERROR("cannot find crtc %d\n", crtc_id);
340 r = -EINVAL;
341 goto out;
342 }
343 crtc = obj_to_crtc(obj);
344 radeon_crtc = to_radeon_crtc(crtc);
345 crtc_id = radeon_crtc->crtc_id;
346
347 if (!crtc->enabled) {
348 /* if the CRTC isn't enabled - we need to nop out the WAIT_REG_MEM */
349 ib[h_idx + 2] = PACKET2(0);
350 ib[h_idx + 3] = PACKET2(0);
351 ib[h_idx + 4] = PACKET2(0);
352 ib[h_idx + 5] = PACKET2(0);
353 ib[h_idx + 6] = PACKET2(0);
354 ib[h_idx + 7] = PACKET2(0);
355 ib[h_idx + 8] = PACKET2(0);
356 } else {
357 switch (reg) {
358 case EVERGREEN_VLINE_START_END:
359 header &= ~R600_CP_PACKET0_REG_MASK;
360 header |= (EVERGREEN_VLINE_START_END + radeon_crtc->crtc_offset) >> 2;
361 ib[h_idx] = header;
362 ib[h_idx + 4] = (EVERGREEN_VLINE_STATUS + radeon_crtc->crtc_offset) >> 2;
363 break;
364 default:
365 DRM_ERROR("unknown crtc reloc\n");
366 r = -EINVAL;
367 goto out;
368 }
369 }
370out:
371 mutex_unlock(&p->rdev->ddev->mode_config.mutex);
372 return r;
373}
374
375static int evergreen_packet0_check(struct radeon_cs_parser *p,
376 struct radeon_cs_packet *pkt,
377 unsigned idx, unsigned reg)
378{
379 int r;
380
381 switch (reg) {
382 case EVERGREEN_VLINE_START_END:
383 r = evergreen_cs_packet_parse_vline(p);
384 if (r) {
385 DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
386 idx, reg);
387 return r;
388 }
389 break;
390 default:
391 printk(KERN_ERR "Forbidden register 0x%04X in cs at %d\n",
392 reg, idx);
393 return -EINVAL;
394 }
395 return 0;
396}
397
398static int evergreen_cs_parse_packet0(struct radeon_cs_parser *p,
399 struct radeon_cs_packet *pkt)
400{
401 unsigned reg, i;
402 unsigned idx;
403 int r;
404
405 idx = pkt->idx + 1;
406 reg = pkt->reg;
407 for (i = 0; i <= pkt->count; i++, idx++, reg += 4) {
408 r = evergreen_packet0_check(p, pkt, idx, reg);
409 if (r) {
410 return r;
411 }
412 }
413 return 0;
414}
415
416/**
417 * evergreen_cs_check_reg() - check if register is authorized or not
418 * @parser: parser structure holding parsing context
419 * @reg: register we are testing
420 * @idx: index into the cs buffer
421 *
422 * This function will test against evergreen_reg_safe_bm and return 0
423 * if the register is safe. If the register is not flagged as safe this function
424 * will test it against a list of registers needing special handling.
425 */
426static inline int evergreen_cs_check_reg(struct radeon_cs_parser *p, u32 reg, u32 idx)
427{
428 struct evergreen_cs_track *track = (struct evergreen_cs_track *)p->track;
429 struct radeon_cs_reloc *reloc;
430 u32 last_reg = ARRAY_SIZE(evergreen_reg_safe_bm);
431 u32 m, i, tmp, *ib;
432 int r;
433
434 i = (reg >> 7);
435 if (i > last_reg) {
436 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx);
437 return -EINVAL;
438 }
439 m = 1 << ((reg >> 2) & 31);
440 if (!(evergreen_reg_safe_bm[i] & m))
441 return 0;
442 ib = p->ib->ptr;
443 switch (reg) {
444 /* force the following regs to 0 in an attempt to disable the out buffer,
445 * which we need to understand better before we can perform a proper
446 * security check on it (Jerome)
447 */
448 case SQ_ESGS_RING_SIZE:
449 case SQ_GSVS_RING_SIZE:
450 case SQ_ESTMP_RING_SIZE:
451 case SQ_GSTMP_RING_SIZE:
452 case SQ_HSTMP_RING_SIZE:
453 case SQ_LSTMP_RING_SIZE:
454 case SQ_PSTMP_RING_SIZE:
455 case SQ_VSTMP_RING_SIZE:
456 case SQ_ESGS_RING_ITEMSIZE:
457 case SQ_ESTMP_RING_ITEMSIZE:
458 case SQ_GSTMP_RING_ITEMSIZE:
459 case SQ_GSVS_RING_ITEMSIZE:
460 case SQ_GS_VERT_ITEMSIZE:
461 case SQ_GS_VERT_ITEMSIZE_1:
462 case SQ_GS_VERT_ITEMSIZE_2:
463 case SQ_GS_VERT_ITEMSIZE_3:
464 case SQ_GSVS_RING_OFFSET_1:
465 case SQ_GSVS_RING_OFFSET_2:
466 case SQ_GSVS_RING_OFFSET_3:
467 case SQ_HSTMP_RING_ITEMSIZE:
468 case SQ_LSTMP_RING_ITEMSIZE:
469 case SQ_PSTMP_RING_ITEMSIZE:
470 case SQ_VSTMP_RING_ITEMSIZE:
471 case VGT_TF_RING_SIZE:
472 /* get value to populate the IB don't remove */
473 tmp = radeon_get_ib_value(p, idx);
474 ib[idx] = 0;
475 break;
476 case DB_DEPTH_CONTROL:
477 track->db_depth_control = radeon_get_ib_value(p, idx);
478 break;
479 case DB_Z_INFO:
480 r = evergreen_cs_packet_next_reloc(p, &reloc);
481 if (r) {
482 dev_warn(p->dev, "bad SET_CONTEXT_REG "
483 "0x%04X\n", reg);
484 return -EINVAL;
485 }
486 track->db_z_info = radeon_get_ib_value(p, idx);
487 ib[idx] &= ~Z_ARRAY_MODE(0xf);
488 track->db_z_info &= ~Z_ARRAY_MODE(0xf);
489 if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO) {
490 ib[idx] |= Z_ARRAY_MODE(ARRAY_2D_TILED_THIN1);
491 track->db_z_info |= Z_ARRAY_MODE(ARRAY_2D_TILED_THIN1);
492 } else {
493 ib[idx] |= Z_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
494 track->db_z_info |= Z_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
495 }
496 break;
497 case DB_STENCIL_INFO:
498 track->db_s_info = radeon_get_ib_value(p, idx);
499 break;
500 case DB_DEPTH_VIEW:
501 track->db_depth_view = radeon_get_ib_value(p, idx);
502 break;
503 case DB_DEPTH_SIZE:
504 track->db_depth_size = radeon_get_ib_value(p, idx);
505 track->db_depth_size_idx = idx;
506 break;
507 case DB_Z_READ_BASE:
508 r = evergreen_cs_packet_next_reloc(p, &reloc);
509 if (r) {
510 dev_warn(p->dev, "bad SET_CONTEXT_REG "
511 "0x%04X\n", reg);
512 return -EINVAL;
513 }
514 track->db_z_read_offset = radeon_get_ib_value(p, idx);
515 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
516 track->db_z_read_bo = reloc->robj;
517 break;
518 case DB_Z_WRITE_BASE:
519 r = evergreen_cs_packet_next_reloc(p, &reloc);
520 if (r) {
521 dev_warn(p->dev, "bad SET_CONTEXT_REG "
522 "0x%04X\n", reg);
523 return -EINVAL;
524 }
525 track->db_z_write_offset = radeon_get_ib_value(p, idx);
526 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
527 track->db_z_write_bo = reloc->robj;
528 break;
529 case DB_STENCIL_READ_BASE:
530 r = evergreen_cs_packet_next_reloc(p, &reloc);
531 if (r) {
532 dev_warn(p->dev, "bad SET_CONTEXT_REG "
533 "0x%04X\n", reg);
534 return -EINVAL;
535 }
536 track->db_s_read_offset = radeon_get_ib_value(p, idx);
537 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
538 track->db_s_read_bo = reloc->robj;
539 break;
540 case DB_STENCIL_WRITE_BASE:
541 r = evergreen_cs_packet_next_reloc(p, &reloc);
542 if (r) {
543 dev_warn(p->dev, "bad SET_CONTEXT_REG "
544 "0x%04X\n", reg);
545 return -EINVAL;
546 }
547 track->db_s_write_offset = radeon_get_ib_value(p, idx);
548 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
549 track->db_s_write_bo = reloc->robj;
550 break;
551 case VGT_STRMOUT_CONFIG:
552 track->vgt_strmout_config = radeon_get_ib_value(p, idx);
553 break;
554 case VGT_STRMOUT_BUFFER_CONFIG:
555 track->vgt_strmout_buffer_config = radeon_get_ib_value(p, idx);
556 break;
557 case CB_TARGET_MASK:
558 track->cb_target_mask = radeon_get_ib_value(p, idx);
559 break;
560 case CB_SHADER_MASK:
561 track->cb_shader_mask = radeon_get_ib_value(p, idx);
562 break;
563 case PA_SC_AA_CONFIG:
564 tmp = radeon_get_ib_value(p, idx) & MSAA_NUM_SAMPLES_MASK;
565 track->nsamples = 1 << tmp;
566 break;
567 case CB_COLOR0_VIEW:
568 case CB_COLOR1_VIEW:
569 case CB_COLOR2_VIEW:
570 case CB_COLOR3_VIEW:
571 case CB_COLOR4_VIEW:
572 case CB_COLOR5_VIEW:
573 case CB_COLOR6_VIEW:
574 case CB_COLOR7_VIEW:
575 tmp = (reg - CB_COLOR0_VIEW) / 0x3c;
576 track->cb_color_view[tmp] = radeon_get_ib_value(p, idx);
577 break;
578 case CB_COLOR8_VIEW:
579 case CB_COLOR9_VIEW:
580 case CB_COLOR10_VIEW:
581 case CB_COLOR11_VIEW:
582 tmp = ((reg - CB_COLOR8_VIEW) / 0x1c) + 8;
583 track->cb_color_view[tmp] = radeon_get_ib_value(p, idx);
584 break;
585 case CB_COLOR0_INFO:
586 case CB_COLOR1_INFO:
587 case CB_COLOR2_INFO:
588 case CB_COLOR3_INFO:
589 case CB_COLOR4_INFO:
590 case CB_COLOR5_INFO:
591 case CB_COLOR6_INFO:
592 case CB_COLOR7_INFO:
593 r = evergreen_cs_packet_next_reloc(p, &reloc);
594 if (r) {
595 dev_warn(p->dev, "bad SET_CONTEXT_REG "
596 "0x%04X\n", reg);
597 return -EINVAL;
598 }
599 tmp = (reg - CB_COLOR0_INFO) / 0x3c;
600 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx);
601 if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO) {
602 ib[idx] |= CB_ARRAY_MODE(ARRAY_2D_TILED_THIN1);
603 track->cb_color_info[tmp] |= CB_ARRAY_MODE(ARRAY_2D_TILED_THIN1);
604 } else if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO) {
605 ib[idx] |= CB_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
606 track->cb_color_info[tmp] |= CB_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
607 }
608 break;
609 case CB_COLOR8_INFO:
610 case CB_COLOR9_INFO:
611 case CB_COLOR10_INFO:
612 case CB_COLOR11_INFO:
613 r = evergreen_cs_packet_next_reloc(p, &reloc);
614 if (r) {
615 dev_warn(p->dev, "bad SET_CONTEXT_REG "
616 "0x%04X\n", reg);
617 return -EINVAL;
618 }
619 tmp = ((reg - CB_COLOR8_INFO) / 0x1c) + 8;
620 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx);
621 if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO) {
622 ib[idx] |= CB_ARRAY_MODE(ARRAY_2D_TILED_THIN1);
623 track->cb_color_info[tmp] |= CB_ARRAY_MODE(ARRAY_2D_TILED_THIN1);
624 } else if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO) {
625 ib[idx] |= CB_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
626 track->cb_color_info[tmp] |= CB_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
627 }
628 break;
629 case CB_COLOR0_PITCH:
630 case CB_COLOR1_PITCH:
631 case CB_COLOR2_PITCH:
632 case CB_COLOR3_PITCH:
633 case CB_COLOR4_PITCH:
634 case CB_COLOR5_PITCH:
635 case CB_COLOR6_PITCH:
636 case CB_COLOR7_PITCH:
637 tmp = (reg - CB_COLOR0_PITCH) / 0x3c;
638 track->cb_color_pitch[tmp] = radeon_get_ib_value(p, idx);
639 track->cb_color_pitch_idx[tmp] = idx;
640 break;
641 case CB_COLOR8_PITCH:
642 case CB_COLOR9_PITCH:
643 case CB_COLOR10_PITCH:
644 case CB_COLOR11_PITCH:
645 tmp = ((reg - CB_COLOR8_PITCH) / 0x1c) + 8;
646 track->cb_color_pitch[tmp] = radeon_get_ib_value(p, idx);
647 track->cb_color_pitch_idx[tmp] = idx;
648 break;
649 case CB_COLOR0_SLICE:
650 case CB_COLOR1_SLICE:
651 case CB_COLOR2_SLICE:
652 case CB_COLOR3_SLICE:
653 case CB_COLOR4_SLICE:
654 case CB_COLOR5_SLICE:
655 case CB_COLOR6_SLICE:
656 case CB_COLOR7_SLICE:
657 tmp = (reg - CB_COLOR0_SLICE) / 0x3c;
658 track->cb_color_slice[tmp] = radeon_get_ib_value(p, idx);
659 track->cb_color_slice_idx[tmp] = idx;
660 break;
661 case CB_COLOR8_SLICE:
662 case CB_COLOR9_SLICE:
663 case CB_COLOR10_SLICE:
664 case CB_COLOR11_SLICE:
665 tmp = ((reg - CB_COLOR8_SLICE) / 0x1c) + 8;
666 track->cb_color_slice[tmp] = radeon_get_ib_value(p, idx);
667 track->cb_color_slice_idx[tmp] = idx;
668 break;
669 case CB_COLOR0_ATTRIB:
670 case CB_COLOR1_ATTRIB:
671 case CB_COLOR2_ATTRIB:
672 case CB_COLOR3_ATTRIB:
673 case CB_COLOR4_ATTRIB:
674 case CB_COLOR5_ATTRIB:
675 case CB_COLOR6_ATTRIB:
676 case CB_COLOR7_ATTRIB:
677 case CB_COLOR8_ATTRIB:
678 case CB_COLOR9_ATTRIB:
679 case CB_COLOR10_ATTRIB:
680 case CB_COLOR11_ATTRIB:
681 break;
682 case CB_COLOR0_DIM:
683 case CB_COLOR1_DIM:
684 case CB_COLOR2_DIM:
685 case CB_COLOR3_DIM:
686 case CB_COLOR4_DIM:
687 case CB_COLOR5_DIM:
688 case CB_COLOR6_DIM:
689 case CB_COLOR7_DIM:
690 tmp = (reg - CB_COLOR0_DIM) / 0x3c;
691 track->cb_color_dim[tmp] = radeon_get_ib_value(p, idx);
692 track->cb_color_dim_idx[tmp] = idx;
693 break;
694 case CB_COLOR8_DIM:
695 case CB_COLOR9_DIM:
696 case CB_COLOR10_DIM:
697 case CB_COLOR11_DIM:
698 tmp = ((reg - CB_COLOR8_DIM) / 0x1c) + 8;
699 track->cb_color_dim[tmp] = radeon_get_ib_value(p, idx);
700 track->cb_color_dim_idx[tmp] = idx;
701 break;
702 case CB_COLOR0_FMASK:
703 case CB_COLOR1_FMASK:
704 case CB_COLOR2_FMASK:
705 case CB_COLOR3_FMASK:
706 case CB_COLOR4_FMASK:
707 case CB_COLOR5_FMASK:
708 case CB_COLOR6_FMASK:
709 case CB_COLOR7_FMASK:
710 tmp = (reg - CB_COLOR0_FMASK) / 0x3c;
711 r = evergreen_cs_packet_next_reloc(p, &reloc);
712 if (r) {
713 dev_err(p->dev, "bad SET_CONTEXT_REG 0x%04X\n", reg);
714 return -EINVAL;
715 }
716 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
717 track->cb_color_fmask_bo[tmp] = reloc->robj;
718 break;
719 case CB_COLOR0_CMASK:
720 case CB_COLOR1_CMASK:
721 case CB_COLOR2_CMASK:
722 case CB_COLOR3_CMASK:
723 case CB_COLOR4_CMASK:
724 case CB_COLOR5_CMASK:
725 case CB_COLOR6_CMASK:
726 case CB_COLOR7_CMASK:
727 tmp = (reg - CB_COLOR0_CMASK) / 0x3c;
728 r = evergreen_cs_packet_next_reloc(p, &reloc);
729 if (r) {
730 dev_err(p->dev, "bad SET_CONTEXT_REG 0x%04X\n", reg);
731 return -EINVAL;
732 }
733 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
734 track->cb_color_cmask_bo[tmp] = reloc->robj;
735 break;
736 case CB_COLOR0_FMASK_SLICE:
737 case CB_COLOR1_FMASK_SLICE:
738 case CB_COLOR2_FMASK_SLICE:
739 case CB_COLOR3_FMASK_SLICE:
740 case CB_COLOR4_FMASK_SLICE:
741 case CB_COLOR5_FMASK_SLICE:
742 case CB_COLOR6_FMASK_SLICE:
743 case CB_COLOR7_FMASK_SLICE:
744 tmp = (reg - CB_COLOR0_FMASK_SLICE) / 0x3c;
745 track->cb_color_fmask_slice[tmp] = radeon_get_ib_value(p, idx);
746 break;
747 case CB_COLOR0_CMASK_SLICE:
748 case CB_COLOR1_CMASK_SLICE:
749 case CB_COLOR2_CMASK_SLICE:
750 case CB_COLOR3_CMASK_SLICE:
751 case CB_COLOR4_CMASK_SLICE:
752 case CB_COLOR5_CMASK_SLICE:
753 case CB_COLOR6_CMASK_SLICE:
754 case CB_COLOR7_CMASK_SLICE:
755 tmp = (reg - CB_COLOR0_CMASK_SLICE) / 0x3c;
756 track->cb_color_cmask_slice[tmp] = radeon_get_ib_value(p, idx);
757 break;
758 case CB_COLOR0_BASE:
759 case CB_COLOR1_BASE:
760 case CB_COLOR2_BASE:
761 case CB_COLOR3_BASE:
762 case CB_COLOR4_BASE:
763 case CB_COLOR5_BASE:
764 case CB_COLOR6_BASE:
765 case CB_COLOR7_BASE:
766 r = evergreen_cs_packet_next_reloc(p, &reloc);
767 if (r) {
768 dev_warn(p->dev, "bad SET_CONTEXT_REG "
769 "0x%04X\n", reg);
770 return -EINVAL;
771 }
772 tmp = (reg - CB_COLOR0_BASE) / 0x3c;
773 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx);
774 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
775 track->cb_color_base_last[tmp] = ib[idx];
776 track->cb_color_bo[tmp] = reloc->robj;
777 break;
778 case CB_COLOR8_BASE:
779 case CB_COLOR9_BASE:
780 case CB_COLOR10_BASE:
781 case CB_COLOR11_BASE:
782 r = evergreen_cs_packet_next_reloc(p, &reloc);
783 if (r) {
784 dev_warn(p->dev, "bad SET_CONTEXT_REG "
785 "0x%04X\n", reg);
786 return -EINVAL;
787 }
788 tmp = ((reg - CB_COLOR8_BASE) / 0x1c) + 8;
789 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx);
790 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
791 track->cb_color_base_last[tmp] = ib[idx];
792 track->cb_color_bo[tmp] = reloc->robj;
793 break;
794 case CB_IMMED0_BASE:
795 case CB_IMMED1_BASE:
796 case CB_IMMED2_BASE:
797 case CB_IMMED3_BASE:
798 case CB_IMMED4_BASE:
799 case CB_IMMED5_BASE:
800 case CB_IMMED6_BASE:
801 case CB_IMMED7_BASE:
802 case CB_IMMED8_BASE:
803 case CB_IMMED9_BASE:
804 case CB_IMMED10_BASE:
805 case CB_IMMED11_BASE:
806 case DB_HTILE_DATA_BASE:
807 case SQ_PGM_START_FS:
808 case SQ_PGM_START_ES:
809 case SQ_PGM_START_VS:
810 case SQ_PGM_START_GS:
811 case SQ_PGM_START_PS:
812 case SQ_PGM_START_HS:
813 case SQ_PGM_START_LS:
814 case GDS_ADDR_BASE:
815 case SQ_CONST_MEM_BASE:
816 case SQ_ALU_CONST_CACHE_GS_0:
817 case SQ_ALU_CONST_CACHE_GS_1:
818 case SQ_ALU_CONST_CACHE_GS_2:
819 case SQ_ALU_CONST_CACHE_GS_3:
820 case SQ_ALU_CONST_CACHE_GS_4:
821 case SQ_ALU_CONST_CACHE_GS_5:
822 case SQ_ALU_CONST_CACHE_GS_6:
823 case SQ_ALU_CONST_CACHE_GS_7:
824 case SQ_ALU_CONST_CACHE_GS_8:
825 case SQ_ALU_CONST_CACHE_GS_9:
826 case SQ_ALU_CONST_CACHE_GS_10:
827 case SQ_ALU_CONST_CACHE_GS_11:
828 case SQ_ALU_CONST_CACHE_GS_12:
829 case SQ_ALU_CONST_CACHE_GS_13:
830 case SQ_ALU_CONST_CACHE_GS_14:
831 case SQ_ALU_CONST_CACHE_GS_15:
832 case SQ_ALU_CONST_CACHE_PS_0:
833 case SQ_ALU_CONST_CACHE_PS_1:
834 case SQ_ALU_CONST_CACHE_PS_2:
835 case SQ_ALU_CONST_CACHE_PS_3:
836 case SQ_ALU_CONST_CACHE_PS_4:
837 case SQ_ALU_CONST_CACHE_PS_5:
838 case SQ_ALU_CONST_CACHE_PS_6:
839 case SQ_ALU_CONST_CACHE_PS_7:
840 case SQ_ALU_CONST_CACHE_PS_8:
841 case SQ_ALU_CONST_CACHE_PS_9:
842 case SQ_ALU_CONST_CACHE_PS_10:
843 case SQ_ALU_CONST_CACHE_PS_11:
844 case SQ_ALU_CONST_CACHE_PS_12:
845 case SQ_ALU_CONST_CACHE_PS_13:
846 case SQ_ALU_CONST_CACHE_PS_14:
847 case SQ_ALU_CONST_CACHE_PS_15:
848 case SQ_ALU_CONST_CACHE_VS_0:
849 case SQ_ALU_CONST_CACHE_VS_1:
850 case SQ_ALU_CONST_CACHE_VS_2:
851 case SQ_ALU_CONST_CACHE_VS_3:
852 case SQ_ALU_CONST_CACHE_VS_4:
853 case SQ_ALU_CONST_CACHE_VS_5:
854 case SQ_ALU_CONST_CACHE_VS_6:
855 case SQ_ALU_CONST_CACHE_VS_7:
856 case SQ_ALU_CONST_CACHE_VS_8:
857 case SQ_ALU_CONST_CACHE_VS_9:
858 case SQ_ALU_CONST_CACHE_VS_10:
859 case SQ_ALU_CONST_CACHE_VS_11:
860 case SQ_ALU_CONST_CACHE_VS_12:
861 case SQ_ALU_CONST_CACHE_VS_13:
862 case SQ_ALU_CONST_CACHE_VS_14:
863 case SQ_ALU_CONST_CACHE_VS_15:
864 case SQ_ALU_CONST_CACHE_HS_0:
865 case SQ_ALU_CONST_CACHE_HS_1:
866 case SQ_ALU_CONST_CACHE_HS_2:
867 case SQ_ALU_CONST_CACHE_HS_3:
868 case SQ_ALU_CONST_CACHE_HS_4:
869 case SQ_ALU_CONST_CACHE_HS_5:
870 case SQ_ALU_CONST_CACHE_HS_6:
871 case SQ_ALU_CONST_CACHE_HS_7:
872 case SQ_ALU_CONST_CACHE_HS_8:
873 case SQ_ALU_CONST_CACHE_HS_9:
874 case SQ_ALU_CONST_CACHE_HS_10:
875 case SQ_ALU_CONST_CACHE_HS_11:
876 case SQ_ALU_CONST_CACHE_HS_12:
877 case SQ_ALU_CONST_CACHE_HS_13:
878 case SQ_ALU_CONST_CACHE_HS_14:
879 case SQ_ALU_CONST_CACHE_HS_15:
880 case SQ_ALU_CONST_CACHE_LS_0:
881 case SQ_ALU_CONST_CACHE_LS_1:
882 case SQ_ALU_CONST_CACHE_LS_2:
883 case SQ_ALU_CONST_CACHE_LS_3:
884 case SQ_ALU_CONST_CACHE_LS_4:
885 case SQ_ALU_CONST_CACHE_LS_5:
886 case SQ_ALU_CONST_CACHE_LS_6:
887 case SQ_ALU_CONST_CACHE_LS_7:
888 case SQ_ALU_CONST_CACHE_LS_8:
889 case SQ_ALU_CONST_CACHE_LS_9:
890 case SQ_ALU_CONST_CACHE_LS_10:
891 case SQ_ALU_CONST_CACHE_LS_11:
892 case SQ_ALU_CONST_CACHE_LS_12:
893 case SQ_ALU_CONST_CACHE_LS_13:
894 case SQ_ALU_CONST_CACHE_LS_14:
895 case SQ_ALU_CONST_CACHE_LS_15:
896 r = evergreen_cs_packet_next_reloc(p, &reloc);
897 if (r) {
898 dev_warn(p->dev, "bad SET_CONTEXT_REG "
899 "0x%04X\n", reg);
900 return -EINVAL;
901 }
902 ib[idx] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
903 break;
904 default:
905 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx);
906 return -EINVAL;
907 }
908 return 0;
909}
910
911/**
912 * evergreen_check_texture_resource() - check if a texture resource is valid
913 * @p: parser structure holding parsing context
914 * @idx: index into the cs buffer
915 * @texture: texture's bo structure
916 * @mipmap: mipmap's bo structure
917 *
918 * This function will check that the resource has valid fields and that
919 * the texture and mipmap bo objects are big enough to cover this resource.
920 */
921static inline int evergreen_check_texture_resource(struct radeon_cs_parser *p, u32 idx,
922 struct radeon_bo *texture,
923 struct radeon_bo *mipmap)
924{
925 /* XXX fill in */
926 return 0;
927}
928
929static int evergreen_packet3_check(struct radeon_cs_parser *p,
930 struct radeon_cs_packet *pkt)
931{
932 struct radeon_cs_reloc *reloc;
933 struct evergreen_cs_track *track;
934 volatile u32 *ib;
935 unsigned idx;
936 unsigned i;
937 unsigned start_reg, end_reg, reg;
938 int r;
939 u32 idx_value;
940
941 track = (struct evergreen_cs_track *)p->track;
942 ib = p->ib->ptr;
943 idx = pkt->idx + 1;
944 idx_value = radeon_get_ib_value(p, idx);
945
946 switch (pkt->opcode) {
947 case PACKET3_CONTEXT_CONTROL:
948 if (pkt->count != 1) {
949 DRM_ERROR("bad CONTEXT_CONTROL\n");
950 return -EINVAL;
951 }
952 break;
953 case PACKET3_INDEX_TYPE:
954 case PACKET3_NUM_INSTANCES:
955 case PACKET3_CLEAR_STATE:
956 if (pkt->count) {
957 DRM_ERROR("bad INDEX_TYPE/NUM_INSTANCES/CLEAR_STATE\n");
958 return -EINVAL;
959 }
960 break;
961 case PACKET3_INDEX_BASE:
962 if (pkt->count != 1) {
963 DRM_ERROR("bad INDEX_BASE\n");
964 return -EINVAL;
965 }
966 r = evergreen_cs_packet_next_reloc(p, &reloc);
967 if (r) {
968 DRM_ERROR("bad INDEX_BASE\n");
969 return -EINVAL;
970 }
971 ib[idx+0] = idx_value + (u32)(reloc->lobj.gpu_offset & 0xffffffff);
972 ib[idx+1] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
973 r = evergreen_cs_track_check(p);
974 if (r) {
975 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
976 return r;
977 }
978 break;
979 case PACKET3_DRAW_INDEX:
980 if (pkt->count != 3) {
981 DRM_ERROR("bad DRAW_INDEX\n");
982 return -EINVAL;
983 }
984 r = evergreen_cs_packet_next_reloc(p, &reloc);
985 if (r) {
986 DRM_ERROR("bad DRAW_INDEX\n");
987 return -EINVAL;
988 }
989 ib[idx+0] = idx_value + (u32)(reloc->lobj.gpu_offset & 0xffffffff);
990 ib[idx+1] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
991 r = evergreen_cs_track_check(p);
992 if (r) {
993 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
994 return r;
995 }
996 break;
997 case PACKET3_DRAW_INDEX_2:
998 if (pkt->count != 4) {
999 DRM_ERROR("bad DRAW_INDEX_2\n");
1000 return -EINVAL;
1001 }
1002 r = evergreen_cs_packet_next_reloc(p, &reloc);
1003 if (r) {
1004 DRM_ERROR("bad DRAW_INDEX_2\n");
1005 return -EINVAL;
1006 }
1007 ib[idx+1] = idx_value + (u32)(reloc->lobj.gpu_offset & 0xffffffff);
1008 ib[idx+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
1009 r = evergreen_cs_track_check(p);
1010 if (r) {
1011 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
1012 return r;
1013 }
1014 break;
1015 case PACKET3_DRAW_INDEX_AUTO:
1016 if (pkt->count != 1) {
1017 DRM_ERROR("bad DRAW_INDEX_AUTO\n");
1018 return -EINVAL;
1019 }
1020 r = evergreen_cs_track_check(p);
1021 if (r) {
1022 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx);
1023 return r;
1024 }
1025 break;
1026 case PACKET3_DRAW_INDEX_MULTI_AUTO:
1027 if (pkt->count != 2) {
1028 DRM_ERROR("bad DRAW_INDEX_MULTI_AUTO\n");
1029 return -EINVAL;
1030 }
1031 r = evergreen_cs_track_check(p);
1032 if (r) {
1033 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx);
1034 return r;
1035 }
1036 break;
1037 case PACKET3_DRAW_INDEX_IMMD:
1038 if (pkt->count < 2) {
1039 DRM_ERROR("bad DRAW_INDEX_IMMD\n");
1040 return -EINVAL;
1041 }
1042 r = evergreen_cs_track_check(p);
1043 if (r) {
1044 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
1045 return r;
1046 }
1047 break;
1048 case PACKET3_DRAW_INDEX_OFFSET:
1049 if (pkt->count != 2) {
1050 DRM_ERROR("bad DRAW_INDEX_OFFSET\n");
1051 return -EINVAL;
1052 }
1053 r = evergreen_cs_track_check(p);
1054 if (r) {
1055 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
1056 return r;
1057 }
1058 break;
1059 case PACKET3_DRAW_INDEX_OFFSET_2:
1060 if (pkt->count != 3) {
1061 DRM_ERROR("bad DRAW_INDEX_OFFSET_2\n");
1062 return -EINVAL;
1063 }
1064 r = evergreen_cs_track_check(p);
1065 if (r) {
1066 dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
1067 return r;
1068 }
1069 break;
1070 case PACKET3_WAIT_REG_MEM:
1071 if (pkt->count != 5) {
1072 DRM_ERROR("bad WAIT_REG_MEM\n");
1073 return -EINVAL;
1074 }
1075 /* bit 4 is reg (0) or mem (1) */
1076 if (idx_value & 0x10) {
1077 r = evergreen_cs_packet_next_reloc(p, &reloc);
1078 if (r) {
1079 DRM_ERROR("bad WAIT_REG_MEM\n");
1080 return -EINVAL;
1081 }
1082 ib[idx+1] += (u32)(reloc->lobj.gpu_offset & 0xffffffff);
1083 ib[idx+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
1084 }
1085 break;
1086 case PACKET3_SURFACE_SYNC:
1087 if (pkt->count != 3) {
1088 DRM_ERROR("bad SURFACE_SYNC\n");
1089 return -EINVAL;
1090 }
1091 /* 0xffffffff/0x0 is flush all cache flag */
1092 if (radeon_get_ib_value(p, idx + 1) != 0xffffffff ||
1093 radeon_get_ib_value(p, idx + 2) != 0) {
1094 r = evergreen_cs_packet_next_reloc(p, &reloc);
1095 if (r) {
1096 DRM_ERROR("bad SURFACE_SYNC\n");
1097 return -EINVAL;
1098 }
1099 ib[idx+2] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
1100 }
1101 break;
1102 case PACKET3_EVENT_WRITE:
1103 if (pkt->count != 2 && pkt->count != 0) {
1104 DRM_ERROR("bad EVENT_WRITE\n");
1105 return -EINVAL;
1106 }
1107 if (pkt->count) {
1108 r = evergreen_cs_packet_next_reloc(p, &reloc);
1109 if (r) {
1110 DRM_ERROR("bad EVENT_WRITE\n");
1111 return -EINVAL;
1112 }
1113 ib[idx+1] += (u32)(reloc->lobj.gpu_offset & 0xffffffff);
1114 ib[idx+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
1115 }
1116 break;
1117 case PACKET3_EVENT_WRITE_EOP:
1118 if (pkt->count != 4) {
1119 DRM_ERROR("bad EVENT_WRITE_EOP\n");
1120 return -EINVAL;
1121 }
1122 r = evergreen_cs_packet_next_reloc(p, &reloc);
1123 if (r) {
1124 DRM_ERROR("bad EVENT_WRITE_EOP\n");
1125 return -EINVAL;
1126 }
1127 ib[idx+1] += (u32)(reloc->lobj.gpu_offset & 0xffffffff);
1128 ib[idx+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
1129 break;
1130 case PACKET3_EVENT_WRITE_EOS:
1131 if (pkt->count != 3) {
1132 DRM_ERROR("bad EVENT_WRITE_EOS\n");
1133 return -EINVAL;
1134 }
1135 r = evergreen_cs_packet_next_reloc(p, &reloc);
1136 if (r) {
1137 DRM_ERROR("bad EVENT_WRITE_EOS\n");
1138 return -EINVAL;
1139 }
1140 ib[idx+1] += (u32)(reloc->lobj.gpu_offset & 0xffffffff);
1141 ib[idx+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
1142 break;
1143 case PACKET3_SET_CONFIG_REG:
1144 start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
1145 end_reg = 4 * pkt->count + start_reg - 4;
1146 if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
1147 (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
1148 (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
1149 DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
1150 return -EINVAL;
1151 }
1152 for (i = 0; i < pkt->count; i++) {
1153 reg = start_reg + (4 * i);
1154 r = evergreen_cs_check_reg(p, reg, idx+1+i);
1155 if (r)
1156 return r;
1157 }
1158 break;
1159 case PACKET3_SET_CONTEXT_REG:
1160 start_reg = (idx_value << 2) + PACKET3_SET_CONTEXT_REG_START;
1161 end_reg = 4 * pkt->count + start_reg - 4;
1162 if ((start_reg < PACKET3_SET_CONTEXT_REG_START) ||
1163 (start_reg >= PACKET3_SET_CONTEXT_REG_END) ||
1164 (end_reg >= PACKET3_SET_CONTEXT_REG_END)) {
1165 DRM_ERROR("bad PACKET3_SET_CONTEXT_REG\n");
1166 return -EINVAL;
1167 }
1168 for (i = 0; i < pkt->count; i++) {
1169 reg = start_reg + (4 * i);
1170 r = evergreen_cs_check_reg(p, reg, idx+1+i);
1171 if (r)
1172 return r;
1173 }
1174 break;
1175 case PACKET3_SET_RESOURCE:
1176 if (pkt->count % 8) {
1177 DRM_ERROR("bad SET_RESOURCE\n");
1178 return -EINVAL;
1179 }
1180 start_reg = (idx_value << 2) + PACKET3_SET_RESOURCE_START;
1181 end_reg = 4 * pkt->count + start_reg - 4;
1182 if ((start_reg < PACKET3_SET_RESOURCE_START) ||
1183 (start_reg >= PACKET3_SET_RESOURCE_END) ||
1184 (end_reg >= PACKET3_SET_RESOURCE_END)) {
1185 DRM_ERROR("bad SET_RESOURCE\n");
1186 return -EINVAL;
1187 }
1188 for (i = 0; i < (pkt->count / 8); i++) {
1189 struct radeon_bo *texture, *mipmap;
1190 u32 size, offset;
1191
1192 switch (G__SQ_CONSTANT_TYPE(radeon_get_ib_value(p, idx+1+(i*8)+7))) {
1193 case SQ_TEX_VTX_VALID_TEXTURE:
1194 /* tex base */
1195 r = evergreen_cs_packet_next_reloc(p, &reloc);
1196 if (r) {
1197 DRM_ERROR("bad SET_RESOURCE (tex)\n");
1198 return -EINVAL;
1199 }
1200 ib[idx+1+(i*8)+2] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
1201 if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO)
1202 ib[idx+1+(i*8)+1] |= TEX_ARRAY_MODE(ARRAY_2D_TILED_THIN1);
1203 else if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO)
1204 ib[idx+1+(i*8)+1] |= TEX_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
1205 texture = reloc->robj;
1206 /* tex mip base */
1207 r = evergreen_cs_packet_next_reloc(p, &reloc);
1208 if (r) {
1209 DRM_ERROR("bad SET_RESOURCE (tex)\n");
1210 return -EINVAL;
1211 }
1212 ib[idx+1+(i*8)+3] += (u32)((reloc->lobj.gpu_offset >> 8) & 0xffffffff);
1213 mipmap = reloc->robj;
1214 r = evergreen_check_texture_resource(p, idx+1+(i*8),
1215 texture, mipmap);
1216 if (r)
1217 return r;
1218 break;
1219 case SQ_TEX_VTX_VALID_BUFFER:
1220 /* vtx base */
1221 r = evergreen_cs_packet_next_reloc(p, &reloc);
1222 if (r) {
1223 DRM_ERROR("bad SET_RESOURCE (vtx)\n");
1224 return -EINVAL;
1225 }
1226 offset = radeon_get_ib_value(p, idx+1+(i*8)+0);
1227 size = radeon_get_ib_value(p, idx+1+(i*8)+1);
1228 if (p->rdev && (size + offset) > radeon_bo_size(reloc->robj)) {
1229 /* force size to size of the buffer */
1230 dev_warn(p->dev, "vbo resource seems too big for the bo\n");
1231 ib[idx+1+(i*8)+1] = radeon_bo_size(reloc->robj);
1232 }
1233 ib[idx+1+(i*8)+0] += (u32)((reloc->lobj.gpu_offset) & 0xffffffff);
1234 ib[idx+1+(i*8)+2] += upper_32_bits(reloc->lobj.gpu_offset) & 0xff;
1235 break;
1236 case SQ_TEX_VTX_INVALID_TEXTURE:
1237 case SQ_TEX_VTX_INVALID_BUFFER:
1238 default:
1239 DRM_ERROR("bad SET_RESOURCE\n");
1240 return -EINVAL;
1241 }
1242 }
1243 break;
1244 case PACKET3_SET_ALU_CONST:
1245 /* XXX fix me ALU const buffers only */
1246 break;
1247 case PACKET3_SET_BOOL_CONST:
1248 start_reg = (idx_value << 2) + PACKET3_SET_BOOL_CONST_START;
1249 end_reg = 4 * pkt->count + start_reg - 4;
1250 if ((start_reg < PACKET3_SET_BOOL_CONST_START) ||
1251 (start_reg >= PACKET3_SET_BOOL_CONST_END) ||
1252 (end_reg >= PACKET3_SET_BOOL_CONST_END)) {
1253 DRM_ERROR("bad SET_BOOL_CONST\n");
1254 return -EINVAL;
1255 }
1256 break;
1257 case PACKET3_SET_LOOP_CONST:
1258 start_reg = (idx_value << 2) + PACKET3_SET_LOOP_CONST_START;
1259 end_reg = 4 * pkt->count + start_reg - 4;
1260 if ((start_reg < PACKET3_SET_LOOP_CONST_START) ||
1261 (start_reg >= PACKET3_SET_LOOP_CONST_END) ||
1262 (end_reg >= PACKET3_SET_LOOP_CONST_END)) {
1263 DRM_ERROR("bad SET_LOOP_CONST\n");
1264 return -EINVAL;
1265 }
1266 break;
1267 case PACKET3_SET_CTL_CONST:
1268 start_reg = (idx_value << 2) + PACKET3_SET_CTL_CONST_START;
1269 end_reg = 4 * pkt->count + start_reg - 4;
1270 if ((start_reg < PACKET3_SET_CTL_CONST_START) ||
1271 (start_reg >= PACKET3_SET_CTL_CONST_END) ||
1272 (end_reg >= PACKET3_SET_CTL_CONST_END)) {
1273 DRM_ERROR("bad SET_CTL_CONST\n");
1274 return -EINVAL;
1275 }
1276 break;
1277 case PACKET3_SET_SAMPLER:
1278 if (pkt->count % 3) {
1279 DRM_ERROR("bad SET_SAMPLER\n");
1280 return -EINVAL;
1281 }
1282 start_reg = (idx_value << 2) + PACKET3_SET_SAMPLER_START;
1283 end_reg = 4 * pkt->count + start_reg - 4;
1284 if ((start_reg < PACKET3_SET_SAMPLER_START) ||
1285 (start_reg >= PACKET3_SET_SAMPLER_END) ||
1286 (end_reg >= PACKET3_SET_SAMPLER_END)) {
1287 DRM_ERROR("bad SET_SAMPLER\n");
1288 return -EINVAL;
1289 }
1290 break;
1291 case PACKET3_NOP:
1292 break;
1293 default:
1294 DRM_ERROR("Packet3 opcode %x not supported\n", pkt->opcode);
1295 return -EINVAL;
1296 }
1297 return 0;
1298}
1299
1300int evergreen_cs_parse(struct radeon_cs_parser *p)
1301{
1302 struct radeon_cs_packet pkt;
1303 struct evergreen_cs_track *track;
1304 int r;
1305
1306 if (p->track == NULL) {
1307 /* initialize tracker, we are in kms */
1308 track = kzalloc(sizeof(*track), GFP_KERNEL);
1309 if (track == NULL)
1310 return -ENOMEM;
1311 evergreen_cs_track_init(track);
1312 track->npipes = p->rdev->config.evergreen.tiling_npipes;
1313 track->nbanks = p->rdev->config.evergreen.tiling_nbanks;
1314 track->group_size = p->rdev->config.evergreen.tiling_group_size;
1315 p->track = track;
1316 }
1317 do {
1318 r = evergreen_cs_packet_parse(p, &pkt, p->idx);
1319 if (r) {
1320 kfree(p->track);
1321 p->track = NULL;
1322 return r;
1323 }
1324 p->idx += pkt.count + 2;
1325 switch (pkt.type) {
1326 case PACKET_TYPE0:
1327 r = evergreen_cs_parse_packet0(p, &pkt);
1328 break;
1329 case PACKET_TYPE2:
1330 break;
1331 case PACKET_TYPE3:
1332 r = evergreen_packet3_check(p, &pkt);
1333 break;
1334 default:
1335 DRM_ERROR("Unknown packet type %d !\n", pkt.type);
1336 kfree(p->track);
1337 p->track = NULL;
1338 return -EINVAL;
1339 }
1340 if (r) {
1341 kfree(p->track);
1342 p->track = NULL;
1343 return r;
1344 }
1345 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);
1346#if 0
1347 for (r = 0; r < p->ib->length_dw; r++) {
1348 printk(KERN_INFO "%05d 0x%08X\n", r, p->ib->ptr[r]);
1349 mdelay(1);
1350 }
1351#endif
1352 kfree(p->track);
1353 p->track = NULL;
1354 return 0;
1355}
1356
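Editorial note, not part of the patch: the relocation convention the checker above enforces is easiest to see from the submitting side. A minimal, hypothetical userspace-side sketch follows (the emit helper and the reloc_slot parameter are made up; the PM4 macros are the ones added to evergreend.h below): every dword carrying a buffer address is followed by a PACKET3_NOP whose single payload dword indexes the relocation chunk, and evergreen_cs_packet_next_reloc() uses that index to patch the real GPU offset into the preceding dword.

#include <stdint.h>

/* Hypothetical IB fragment accepted by evergreen_cs_parse().  Relies on
 * PACKET3(), PACKET3_SET_CONTEXT_REG*, PACKET3_NOP and CB_COLOR0_BASE
 * from evergreend.h. */
static unsigned emit_cb0_base(uint32_t *ib, unsigned ndw, uint32_t reloc_slot)
{
	/* SET_CONTEXT_REG: first payload dword is (reg - SET_CONTEXT_REG_START) >> 2 */
	ib[ndw++] = PACKET3(PACKET3_SET_CONTEXT_REG, 1);
	ib[ndw++] = (CB_COLOR0_BASE - PACKET3_SET_CONTEXT_REG_START) >> 2;
	ib[ndw++] = 0;	/* surface offset >> 8; the checker adds the bo's GPU address here */

	/* relocation NOP consumed by evergreen_cs_packet_next_reloc(); the checker
	 * assumes 4-dword relocs, so the payload is the slot index times 4 */
	ib[ndw++] = PACKET3(PACKET3_NOP, 0);
	ib[ndw++] = reloc_slot * 4;
	return ndw;
}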
diff --git a/drivers/gpu/drm/radeon/evergreen_reg.h b/drivers/gpu/drm/radeon/evergreen_reg.h
index af86af836f13..e028c1cd9d9b 100644
--- a/drivers/gpu/drm/radeon/evergreen_reg.h
+++ b/drivers/gpu/drm/radeon/evergreen_reg.h
@@ -151,6 +151,9 @@
 #define EVERGREEN_DATA_FORMAT                           0x6b00
 # define EVERGREEN_INTERLEAVE_EN                        (1 << 0)
 #define EVERGREEN_DESKTOP_HEIGHT                        0x6b04
+#define EVERGREEN_VLINE_START_END                       0x6b08
+#define EVERGREEN_VLINE_STATUS                          0x6bb8
+# define EVERGREEN_VLINE_STAT                           (1 << 12)
 
 #define EVERGREEN_VIEWPORT_START                        0x6d70
 #define EVERGREEN_VIEWPORT_SIZE                         0x6d74
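Editorial note, not part of the patch: the three VLINE registers added here are what evergreen_cs_packet_parse_vline() in evergreen_cs.c rewrites per crtc. A hypothetical sketch of the dword sequence userspace emits (only the fields the checker actually verifies are meaningful, the rest are illustrative), using the PACKET0/PACKET3 macros from evergreend.h as diffed below:

#include <stdint.h>

/* VLINE wait sequence the checker expects: PACKET0(VLINE_START_END) + value,
 * a WAIT_REG_MEM polling EVERGREEN_VLINE_STATUS for EVERGREEN_VLINE_STAT,
 * then a NOP reloc carrying the crtc id. */
static unsigned emit_vline_wait(uint32_t *ib, unsigned ndw,
				uint32_t start_end, uint32_t crtc_id)
{
	ib[ndw++] = PACKET0(EVERGREEN_VLINE_START_END, 0);
	ib[ndw++] = start_end;

	ib[ndw++] = PACKET3(PACKET3_WAIT_REG_MEM, 5);
	ib[ndw++] = 0x3;				/* function: equal, poll a register */
	ib[ndw++] = EVERGREEN_VLINE_STATUS >> 2;	/* poll address (dwords), rewritten per crtc */
	ib[ndw++] = 0;					/* poll address high */
	ib[ndw++] = 0;					/* reference value */
	ib[ndw++] = EVERGREEN_VLINE_STAT;		/* mask */
	ib[ndw++] = 0;					/* poll interval */

	ib[ndw++] = PACKET3(PACKET3_NOP, 0);
	ib[ndw++] = crtc_id;				/* which crtc's registers to use */
	return ndw;
}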
diff --git a/drivers/gpu/drm/radeon/evergreend.h b/drivers/gpu/drm/radeon/evergreend.h
index 93e9e17ad54a..a1cd621780e2 100644
--- a/drivers/gpu/drm/radeon/evergreend.h
+++ b/drivers/gpu/drm/radeon/evergreend.h
@@ -218,6 +218,8 @@
 #define CLIP_VTX_REORDER_ENA                            (1 << 0)
 #define NUM_CLIP_SEQ(x)                                 ((x) << 1)
 #define PA_SC_AA_CONFIG                                 0x28C04
+#define MSAA_NUM_SAMPLES_SHIFT                          0
+#define MSAA_NUM_SAMPLES_MASK                           0x3
 #define PA_SC_CLIPRECT_RULE                             0x2820C
 #define PA_SC_EDGERULE                                  0x28230
 #define PA_SC_FIFO_SIZE                                 0x8BCC
@@ -553,4 +555,469 @@
555# define DC_HPDx_RX_INT_TIMER(x)                        ((x) << 16)
556# define DC_HPDx_EN                                     (1 << 28)
557
558/*
559 * PM4
560 */
561#define PACKET_TYPE0 0
562#define PACKET_TYPE1 1
563#define PACKET_TYPE2 2
564#define PACKET_TYPE3 3
565
566#define CP_PACKET_GET_TYPE(h) (((h) >> 30) & 3)
567#define CP_PACKET_GET_COUNT(h) (((h) >> 16) & 0x3FFF)
568#define CP_PACKET0_GET_REG(h) (((h) & 0xFFFF) << 2)
569#define CP_PACKET3_GET_OPCODE(h) (((h) >> 8) & 0xFF)
570#define PACKET0(reg, n) ((PACKET_TYPE0 << 30) | \
571 (((reg) >> 2) & 0xFFFF) | \
572 ((n) & 0x3FFF) << 16)
573#define CP_PACKET2 0x80000000
574#define PACKET2_PAD_SHIFT 0
575#define PACKET2_PAD_MASK (0x3fffffff << 0)
576
577#define PACKET2(v) (CP_PACKET2 | REG_SET(PACKET2_PAD, (v)))
578
579#define PACKET3(op, n) ((PACKET_TYPE3 << 30) | \
580 (((op) & 0xFF) << 8) | \
581 ((n) & 0x3FFF) << 16)
582
583/* Packet 3 types */
584#define PACKET3_NOP 0x10
585#define PACKET3_SET_BASE 0x11
586#define PACKET3_CLEAR_STATE 0x12
587#define PACKET3_INDIRECT_BUFFER_SIZE 0x13
588#define PACKET3_DISPATCH_DIRECT 0x15
589#define PACKET3_DISPATCH_INDIRECT 0x16
590#define PACKET3_INDIRECT_BUFFER_END 0x17
591#define PACKET3_SET_PREDICATION 0x20
592#define PACKET3_REG_RMW 0x21
593#define PACKET3_COND_EXEC 0x22
594#define PACKET3_PRED_EXEC 0x23
595#define PACKET3_DRAW_INDIRECT 0x24
596#define PACKET3_DRAW_INDEX_INDIRECT 0x25
597#define PACKET3_INDEX_BASE 0x26
598#define PACKET3_DRAW_INDEX_2 0x27
599#define PACKET3_CONTEXT_CONTROL 0x28
600#define PACKET3_DRAW_INDEX_OFFSET 0x29
601#define PACKET3_INDEX_TYPE 0x2A
602#define PACKET3_DRAW_INDEX 0x2B
603#define PACKET3_DRAW_INDEX_AUTO 0x2D
604#define PACKET3_DRAW_INDEX_IMMD 0x2E
605#define PACKET3_NUM_INSTANCES 0x2F
606#define PACKET3_DRAW_INDEX_MULTI_AUTO 0x30
607#define PACKET3_STRMOUT_BUFFER_UPDATE 0x34
608#define PACKET3_DRAW_INDEX_OFFSET_2 0x35
609#define PACKET3_DRAW_INDEX_MULTI_ELEMENT 0x36
610#define PACKET3_MEM_SEMAPHORE 0x39
611#define PACKET3_MPEG_INDEX 0x3A
612#define PACKET3_WAIT_REG_MEM 0x3C
613#define PACKET3_MEM_WRITE 0x3D
614#define PACKET3_INDIRECT_BUFFER 0x32
615#define PACKET3_SURFACE_SYNC 0x43
616# define PACKET3_CB0_DEST_BASE_ENA (1 << 6)
617# define PACKET3_CB1_DEST_BASE_ENA (1 << 7)
618# define PACKET3_CB2_DEST_BASE_ENA (1 << 8)
619# define PACKET3_CB3_DEST_BASE_ENA (1 << 9)
620# define PACKET3_CB4_DEST_BASE_ENA (1 << 10)
621# define PACKET3_CB5_DEST_BASE_ENA (1 << 11)
622# define PACKET3_CB6_DEST_BASE_ENA (1 << 12)
623# define PACKET3_CB7_DEST_BASE_ENA (1 << 13)
624# define PACKET3_DB_DEST_BASE_ENA (1 << 14)
625# define PACKET3_CB8_DEST_BASE_ENA (1 << 15)
626# define PACKET3_CB9_DEST_BASE_ENA (1 << 16)
627# define PACKET3_CB10_DEST_BASE_ENA (1 << 17)
 628# define PACKET3_CB11_DEST_BASE_ENA (1 << 18)
629# define PACKET3_FULL_CACHE_ENA (1 << 20)
630# define PACKET3_TC_ACTION_ENA (1 << 23)
631# define PACKET3_VC_ACTION_ENA (1 << 24)
632# define PACKET3_CB_ACTION_ENA (1 << 25)
633# define PACKET3_DB_ACTION_ENA (1 << 26)
634# define PACKET3_SH_ACTION_ENA (1 << 27)
635# define PACKET3_SMX_ACTION_ENA (1 << 28)
636#define PACKET3_ME_INITIALIZE 0x44
637#define PACKET3_ME_INITIALIZE_DEVICE_ID(x) ((x) << 16)
638#define PACKET3_COND_WRITE 0x45
639#define PACKET3_EVENT_WRITE 0x46
640#define PACKET3_EVENT_WRITE_EOP 0x47
641#define PACKET3_EVENT_WRITE_EOS 0x48
642#define PACKET3_PREAMBLE_CNTL 0x4A
643#define PACKET3_RB_OFFSET 0x4B
644#define PACKET3_ALU_PS_CONST_BUFFER_COPY 0x4C
645#define PACKET3_ALU_VS_CONST_BUFFER_COPY 0x4D
646#define PACKET3_ALU_PS_CONST_UPDATE 0x4E
647#define PACKET3_ALU_VS_CONST_UPDATE 0x4F
648#define PACKET3_ONE_REG_WRITE 0x57
649#define PACKET3_SET_CONFIG_REG 0x68
650#define PACKET3_SET_CONFIG_REG_START 0x00008000
651#define PACKET3_SET_CONFIG_REG_END 0x0000ac00
652#define PACKET3_SET_CONTEXT_REG 0x69
653#define PACKET3_SET_CONTEXT_REG_START 0x00028000
654#define PACKET3_SET_CONTEXT_REG_END 0x00029000
655#define PACKET3_SET_ALU_CONST 0x6A
656/* alu const buffers only; no reg file */
657#define PACKET3_SET_BOOL_CONST 0x6B
658#define PACKET3_SET_BOOL_CONST_START 0x0003a500
659#define PACKET3_SET_BOOL_CONST_END 0x0003a518
660#define PACKET3_SET_LOOP_CONST 0x6C
661#define PACKET3_SET_LOOP_CONST_START 0x0003a200
662#define PACKET3_SET_LOOP_CONST_END 0x0003a500
663#define PACKET3_SET_RESOURCE 0x6D
664#define PACKET3_SET_RESOURCE_START 0x00030000
665#define PACKET3_SET_RESOURCE_END 0x00038000
666#define PACKET3_SET_SAMPLER 0x6E
667#define PACKET3_SET_SAMPLER_START 0x0003c000
668#define PACKET3_SET_SAMPLER_END 0x0003c600
669#define PACKET3_SET_CTL_CONST 0x6F
670#define PACKET3_SET_CTL_CONST_START 0x0003cff0
671#define PACKET3_SET_CTL_CONST_END 0x0003ff0c
672#define PACKET3_SET_RESOURCE_OFFSET 0x70
673#define PACKET3_SET_ALU_CONST_VS 0x71
674#define PACKET3_SET_ALU_CONST_DI 0x72
675#define PACKET3_SET_CONTEXT_REG_INDIRECT 0x73
676#define PACKET3_SET_RESOURCE_INDIRECT 0x74
677#define PACKET3_SET_APPEND_CNT 0x75
678
679#define SQ_RESOURCE_CONSTANT_WORD7_0 0x3001c
680#define S__SQ_CONSTANT_TYPE(x) (((x) & 3) << 30)
681#define G__SQ_CONSTANT_TYPE(x) (((x) >> 30) & 3)
682#define SQ_TEX_VTX_INVALID_TEXTURE 0x0
683#define SQ_TEX_VTX_INVALID_BUFFER 0x1
684#define SQ_TEX_VTX_VALID_TEXTURE 0x2
685#define SQ_TEX_VTX_VALID_BUFFER 0x3
686
687#define SQ_CONST_MEM_BASE 0x8df8
688
689#define SQ_ESGS_RING_SIZE 0x8c44
690#define SQ_GSVS_RING_SIZE 0x8c4c
691#define SQ_ESTMP_RING_SIZE 0x8c54
692#define SQ_GSTMP_RING_SIZE 0x8c5c
693#define SQ_VSTMP_RING_SIZE 0x8c64
694#define SQ_PSTMP_RING_SIZE 0x8c6c
695#define SQ_LSTMP_RING_SIZE 0x8e14
696#define SQ_HSTMP_RING_SIZE 0x8e1c
697#define VGT_TF_RING_SIZE 0x8988
698
699#define SQ_ESGS_RING_ITEMSIZE 0x28900
700#define SQ_GSVS_RING_ITEMSIZE 0x28904
701#define SQ_ESTMP_RING_ITEMSIZE 0x28908
702#define SQ_GSTMP_RING_ITEMSIZE 0x2890c
703#define SQ_VSTMP_RING_ITEMSIZE 0x28910
704#define SQ_PSTMP_RING_ITEMSIZE 0x28914
705#define SQ_LSTMP_RING_ITEMSIZE 0x28830
706#define SQ_HSTMP_RING_ITEMSIZE 0x28834
707
708#define SQ_GS_VERT_ITEMSIZE 0x2891c
709#define SQ_GS_VERT_ITEMSIZE_1 0x28920
710#define SQ_GS_VERT_ITEMSIZE_2 0x28924
711#define SQ_GS_VERT_ITEMSIZE_3 0x28928
712#define SQ_GSVS_RING_OFFSET_1 0x2892c
713#define SQ_GSVS_RING_OFFSET_2 0x28930
714#define SQ_GSVS_RING_OFFSET_3 0x28934
715
716#define SQ_ALU_CONST_BUFFER_SIZE_PS_0 0x28140
717#define SQ_ALU_CONST_BUFFER_SIZE_HS_0 0x28f80
718
719#define SQ_ALU_CONST_CACHE_PS_0 0x28940
720#define SQ_ALU_CONST_CACHE_PS_1 0x28944
721#define SQ_ALU_CONST_CACHE_PS_2 0x28948
722#define SQ_ALU_CONST_CACHE_PS_3 0x2894c
723#define SQ_ALU_CONST_CACHE_PS_4 0x28950
724#define SQ_ALU_CONST_CACHE_PS_5 0x28954
725#define SQ_ALU_CONST_CACHE_PS_6 0x28958
726#define SQ_ALU_CONST_CACHE_PS_7 0x2895c
727#define SQ_ALU_CONST_CACHE_PS_8 0x28960
728#define SQ_ALU_CONST_CACHE_PS_9 0x28964
729#define SQ_ALU_CONST_CACHE_PS_10 0x28968
730#define SQ_ALU_CONST_CACHE_PS_11 0x2896c
731#define SQ_ALU_CONST_CACHE_PS_12 0x28970
732#define SQ_ALU_CONST_CACHE_PS_13 0x28974
733#define SQ_ALU_CONST_CACHE_PS_14 0x28978
734#define SQ_ALU_CONST_CACHE_PS_15 0x2897c
735#define SQ_ALU_CONST_CACHE_VS_0 0x28980
736#define SQ_ALU_CONST_CACHE_VS_1 0x28984
737#define SQ_ALU_CONST_CACHE_VS_2 0x28988
738#define SQ_ALU_CONST_CACHE_VS_3 0x2898c
739#define SQ_ALU_CONST_CACHE_VS_4 0x28990
740#define SQ_ALU_CONST_CACHE_VS_5 0x28994
741#define SQ_ALU_CONST_CACHE_VS_6 0x28998
742#define SQ_ALU_CONST_CACHE_VS_7 0x2899c
743#define SQ_ALU_CONST_CACHE_VS_8 0x289a0
744#define SQ_ALU_CONST_CACHE_VS_9 0x289a4
745#define SQ_ALU_CONST_CACHE_VS_10 0x289a8
746#define SQ_ALU_CONST_CACHE_VS_11 0x289ac
747#define SQ_ALU_CONST_CACHE_VS_12 0x289b0
748#define SQ_ALU_CONST_CACHE_VS_13 0x289b4
749#define SQ_ALU_CONST_CACHE_VS_14 0x289b8
750#define SQ_ALU_CONST_CACHE_VS_15 0x289bc
751#define SQ_ALU_CONST_CACHE_GS_0 0x289c0
752#define SQ_ALU_CONST_CACHE_GS_1 0x289c4
753#define SQ_ALU_CONST_CACHE_GS_2 0x289c8
754#define SQ_ALU_CONST_CACHE_GS_3 0x289cc
755#define SQ_ALU_CONST_CACHE_GS_4 0x289d0
756#define SQ_ALU_CONST_CACHE_GS_5 0x289d4
757#define SQ_ALU_CONST_CACHE_GS_6 0x289d8
758#define SQ_ALU_CONST_CACHE_GS_7 0x289dc
759#define SQ_ALU_CONST_CACHE_GS_8 0x289e0
760#define SQ_ALU_CONST_CACHE_GS_9 0x289e4
761#define SQ_ALU_CONST_CACHE_GS_10 0x289e8
762#define SQ_ALU_CONST_CACHE_GS_11 0x289ec
763#define SQ_ALU_CONST_CACHE_GS_12 0x289f0
764#define SQ_ALU_CONST_CACHE_GS_13 0x289f4
765#define SQ_ALU_CONST_CACHE_GS_14 0x289f8
766#define SQ_ALU_CONST_CACHE_GS_15 0x289fc
767#define SQ_ALU_CONST_CACHE_HS_0 0x28f00
768#define SQ_ALU_CONST_CACHE_HS_1 0x28f04
769#define SQ_ALU_CONST_CACHE_HS_2 0x28f08
770#define SQ_ALU_CONST_CACHE_HS_3 0x28f0c
771#define SQ_ALU_CONST_CACHE_HS_4 0x28f10
772#define SQ_ALU_CONST_CACHE_HS_5 0x28f14
773#define SQ_ALU_CONST_CACHE_HS_6 0x28f18
774#define SQ_ALU_CONST_CACHE_HS_7 0x28f1c
775#define SQ_ALU_CONST_CACHE_HS_8 0x28f20
776#define SQ_ALU_CONST_CACHE_HS_9 0x28f24
777#define SQ_ALU_CONST_CACHE_HS_10 0x28f28
778#define SQ_ALU_CONST_CACHE_HS_11 0x28f2c
779#define SQ_ALU_CONST_CACHE_HS_12 0x28f30
780#define SQ_ALU_CONST_CACHE_HS_13 0x28f34
781#define SQ_ALU_CONST_CACHE_HS_14 0x28f38
782#define SQ_ALU_CONST_CACHE_HS_15 0x28f3c
783#define SQ_ALU_CONST_CACHE_LS_0 0x28f40
784#define SQ_ALU_CONST_CACHE_LS_1 0x28f44
785#define SQ_ALU_CONST_CACHE_LS_2 0x28f48
786#define SQ_ALU_CONST_CACHE_LS_3 0x28f4c
787#define SQ_ALU_CONST_CACHE_LS_4 0x28f50
788#define SQ_ALU_CONST_CACHE_LS_5 0x28f54
789#define SQ_ALU_CONST_CACHE_LS_6 0x28f58
790#define SQ_ALU_CONST_CACHE_LS_7 0x28f5c
791#define SQ_ALU_CONST_CACHE_LS_8 0x28f60
792#define SQ_ALU_CONST_CACHE_LS_9 0x28f64
793#define SQ_ALU_CONST_CACHE_LS_10 0x28f68
794#define SQ_ALU_CONST_CACHE_LS_11 0x28f6c
795#define SQ_ALU_CONST_CACHE_LS_12 0x28f70
796#define SQ_ALU_CONST_CACHE_LS_13 0x28f74
797#define SQ_ALU_CONST_CACHE_LS_14 0x28f78
798#define SQ_ALU_CONST_CACHE_LS_15 0x28f7c
799
800#define DB_DEPTH_CONTROL 0x28800
801#define DB_DEPTH_VIEW 0x28008
802#define DB_HTILE_DATA_BASE 0x28014
803#define DB_Z_INFO 0x28040
804# define Z_ARRAY_MODE(x) ((x) << 4)
805#define DB_STENCIL_INFO 0x28044
806#define DB_Z_READ_BASE 0x28048
807#define DB_STENCIL_READ_BASE 0x2804c
808#define DB_Z_WRITE_BASE 0x28050
809#define DB_STENCIL_WRITE_BASE 0x28054
810#define DB_DEPTH_SIZE 0x28058
811
812#define SQ_PGM_START_PS 0x28840
813#define SQ_PGM_START_VS 0x2885c
814#define SQ_PGM_START_GS 0x28874
815#define SQ_PGM_START_ES 0x2888c
816#define SQ_PGM_START_FS 0x288a4
817#define SQ_PGM_START_HS 0x288b8
818#define SQ_PGM_START_LS 0x288d0
819
820#define VGT_STRMOUT_CONFIG 0x28b94
821#define VGT_STRMOUT_BUFFER_CONFIG 0x28b98
822
823#define CB_TARGET_MASK 0x28238
824#define CB_SHADER_MASK 0x2823c
825
826#define GDS_ADDR_BASE 0x28720
827
828#define CB_IMMED0_BASE 0x28b9c
829#define CB_IMMED1_BASE 0x28ba0
830#define CB_IMMED2_BASE 0x28ba4
831#define CB_IMMED3_BASE 0x28ba8
832#define CB_IMMED4_BASE 0x28bac
833#define CB_IMMED5_BASE 0x28bb0
834#define CB_IMMED6_BASE 0x28bb4
835#define CB_IMMED7_BASE 0x28bb8
836#define CB_IMMED8_BASE 0x28bbc
837#define CB_IMMED9_BASE 0x28bc0
838#define CB_IMMED10_BASE 0x28bc4
839#define CB_IMMED11_BASE 0x28bc8
840
841/* all 12 CB blocks have these regs */
842#define CB_COLOR0_BASE 0x28c60
843#define CB_COLOR0_PITCH 0x28c64
844#define CB_COLOR0_SLICE 0x28c68
845#define CB_COLOR0_VIEW 0x28c6c
846#define CB_COLOR0_INFO 0x28c70
847# define CB_ARRAY_MODE(x) ((x) << 8)
848# define ARRAY_LINEAR_GENERAL 0
849# define ARRAY_LINEAR_ALIGNED 1
850# define ARRAY_1D_TILED_THIN1 2
851# define ARRAY_2D_TILED_THIN1 4
852#define CB_COLOR0_ATTRIB 0x28c74
853#define CB_COLOR0_DIM 0x28c78
854/* only CB0-7 blocks have these regs */
855#define CB_COLOR0_CMASK 0x28c7c
856#define CB_COLOR0_CMASK_SLICE 0x28c80
857#define CB_COLOR0_FMASK 0x28c84
858#define CB_COLOR0_FMASK_SLICE 0x28c88
859#define CB_COLOR0_CLEAR_WORD0 0x28c8c
860#define CB_COLOR0_CLEAR_WORD1 0x28c90
861#define CB_COLOR0_CLEAR_WORD2 0x28c94
862#define CB_COLOR0_CLEAR_WORD3 0x28c98
863
864#define CB_COLOR1_BASE 0x28c9c
865#define CB_COLOR2_BASE 0x28cd8
866#define CB_COLOR3_BASE 0x28d14
867#define CB_COLOR4_BASE 0x28d50
868#define CB_COLOR5_BASE 0x28d8c
869#define CB_COLOR6_BASE 0x28dc8
870#define CB_COLOR7_BASE 0x28e04
871#define CB_COLOR8_BASE 0x28e40
872#define CB_COLOR9_BASE 0x28e5c
873#define CB_COLOR10_BASE 0x28e78
874#define CB_COLOR11_BASE 0x28e94
875
876#define CB_COLOR1_PITCH 0x28ca0
877#define CB_COLOR2_PITCH 0x28cdc
878#define CB_COLOR3_PITCH 0x28d18
879#define CB_COLOR4_PITCH 0x28d54
880#define CB_COLOR5_PITCH 0x28d90
881#define CB_COLOR6_PITCH 0x28dcc
882#define CB_COLOR7_PITCH 0x28e08
883#define CB_COLOR8_PITCH 0x28e44
884#define CB_COLOR9_PITCH 0x28e60
885#define CB_COLOR10_PITCH 0x28e7c
886#define CB_COLOR11_PITCH 0x28e98
887
888#define CB_COLOR1_SLICE 0x28ca4
889#define CB_COLOR2_SLICE 0x28ce0
890#define CB_COLOR3_SLICE 0x28d1c
891#define CB_COLOR4_SLICE 0x28d58
892#define CB_COLOR5_SLICE 0x28d94
893#define CB_COLOR6_SLICE 0x28dd0
894#define CB_COLOR7_SLICE 0x28e0c
895#define CB_COLOR8_SLICE 0x28e48
896#define CB_COLOR9_SLICE 0x28e64
897#define CB_COLOR10_SLICE 0x28e80
898#define CB_COLOR11_SLICE 0x28e9c
899
900#define CB_COLOR1_VIEW 0x28ca8
901#define CB_COLOR2_VIEW 0x28ce4
902#define CB_COLOR3_VIEW 0x28d20
903#define CB_COLOR4_VIEW 0x28d5c
904#define CB_COLOR5_VIEW 0x28d98
905#define CB_COLOR6_VIEW 0x28dd4
906#define CB_COLOR7_VIEW 0x28e10
907#define CB_COLOR8_VIEW 0x28e4c
908#define CB_COLOR9_VIEW 0x28e68
909#define CB_COLOR10_VIEW 0x28e84
910#define CB_COLOR11_VIEW 0x28ea0
911
912#define CB_COLOR1_INFO 0x28cac
913#define CB_COLOR2_INFO 0x28ce8
914#define CB_COLOR3_INFO 0x28d24
915#define CB_COLOR4_INFO 0x28d60
916#define CB_COLOR5_INFO 0x28d9c
917#define CB_COLOR6_INFO 0x28dd8
918#define CB_COLOR7_INFO 0x28e14
919#define CB_COLOR8_INFO 0x28e50
920#define CB_COLOR9_INFO 0x28e6c
921#define CB_COLOR10_INFO 0x28e88
922#define CB_COLOR11_INFO 0x28ea4
923
924#define CB_COLOR1_ATTRIB 0x28cb0
925#define CB_COLOR2_ATTRIB 0x28cec
926#define CB_COLOR3_ATTRIB 0x28d28
927#define CB_COLOR4_ATTRIB 0x28d64
928#define CB_COLOR5_ATTRIB 0x28da0
929#define CB_COLOR6_ATTRIB 0x28ddc
930#define CB_COLOR7_ATTRIB 0x28e18
931#define CB_COLOR8_ATTRIB 0x28e54
932#define CB_COLOR9_ATTRIB 0x28e70
933#define CB_COLOR10_ATTRIB 0x28e8c
934#define CB_COLOR11_ATTRIB 0x28ea8
935
936#define CB_COLOR1_DIM 0x28cb4
937#define CB_COLOR2_DIM 0x28cf0
938#define CB_COLOR3_DIM 0x28d2c
939#define CB_COLOR4_DIM 0x28d68
940#define CB_COLOR5_DIM 0x28da4
941#define CB_COLOR6_DIM 0x28de0
942#define CB_COLOR7_DIM 0x28e1c
943#define CB_COLOR8_DIM 0x28e58
944#define CB_COLOR9_DIM 0x28e74
945#define CB_COLOR10_DIM 0x28e90
946#define CB_COLOR11_DIM 0x28eac
947
948#define CB_COLOR1_CMASK 0x28cb8
949#define CB_COLOR2_CMASK 0x28cf4
950#define CB_COLOR3_CMASK 0x28d30
951#define CB_COLOR4_CMASK 0x28d6c
952#define CB_COLOR5_CMASK 0x28da8
953#define CB_COLOR6_CMASK 0x28de4
954#define CB_COLOR7_CMASK 0x28e20
955
956#define CB_COLOR1_CMASK_SLICE 0x28cbc
957#define CB_COLOR2_CMASK_SLICE 0x28cf8
958#define CB_COLOR3_CMASK_SLICE 0x28d34
959#define CB_COLOR4_CMASK_SLICE 0x28d70
960#define CB_COLOR5_CMASK_SLICE 0x28dac
961#define CB_COLOR6_CMASK_SLICE 0x28de8
962#define CB_COLOR7_CMASK_SLICE 0x28e24
963
964#define CB_COLOR1_FMASK 0x28cc0
965#define CB_COLOR2_FMASK 0x28cfc
966#define CB_COLOR3_FMASK 0x28d38
967#define CB_COLOR4_FMASK 0x28d74
968#define CB_COLOR5_FMASK 0x28db0
969#define CB_COLOR6_FMASK 0x28dec
970#define CB_COLOR7_FMASK 0x28e28
971
972#define CB_COLOR1_FMASK_SLICE 0x28cc4
973#define CB_COLOR2_FMASK_SLICE 0x28d00
974#define CB_COLOR3_FMASK_SLICE 0x28d3c
975#define CB_COLOR4_FMASK_SLICE 0x28d78
976#define CB_COLOR5_FMASK_SLICE 0x28db4
977#define CB_COLOR6_FMASK_SLICE 0x28df0
978#define CB_COLOR7_FMASK_SLICE 0x28e2c
979
980#define CB_COLOR1_CLEAR_WORD0 0x28cc8
981#define CB_COLOR2_CLEAR_WORD0 0x28d04
982#define CB_COLOR3_CLEAR_WORD0 0x28d40
983#define CB_COLOR4_CLEAR_WORD0 0x28d7c
984#define CB_COLOR5_CLEAR_WORD0 0x28db8
985#define CB_COLOR6_CLEAR_WORD0 0x28df4
986#define CB_COLOR7_CLEAR_WORD0 0x28e30
987
988#define CB_COLOR1_CLEAR_WORD1 0x28ccc
989#define CB_COLOR2_CLEAR_WORD1 0x28d08
990#define CB_COLOR3_CLEAR_WORD1 0x28d44
991#define CB_COLOR4_CLEAR_WORD1 0x28d80
992#define CB_COLOR5_CLEAR_WORD1 0x28dbc
993#define CB_COLOR6_CLEAR_WORD1 0x28df8
994#define CB_COLOR7_CLEAR_WORD1 0x28e34
995
996#define CB_COLOR1_CLEAR_WORD2 0x28cd0
997#define CB_COLOR2_CLEAR_WORD2 0x28d0c
998#define CB_COLOR3_CLEAR_WORD2 0x28d48
999#define CB_COLOR4_CLEAR_WORD2 0x28d84
1000#define CB_COLOR5_CLEAR_WORD2 0x28dc0
1001#define CB_COLOR6_CLEAR_WORD2 0x28dfc
1002#define CB_COLOR7_CLEAR_WORD2 0x28e38
1003
1004#define CB_COLOR1_CLEAR_WORD3 0x28cd4
1005#define CB_COLOR2_CLEAR_WORD3 0x28d10
1006#define CB_COLOR3_CLEAR_WORD3 0x28d4c
1007#define CB_COLOR4_CLEAR_WORD3 0x28d88
1008#define CB_COLOR5_CLEAR_WORD3 0x28dc4
1009#define CB_COLOR6_CLEAR_WORD3 0x28e00
1010#define CB_COLOR7_CLEAR_WORD3 0x28e3c
1011
1012#define SQ_TEX_RESOURCE_WORD0_0 0x30000
1013#define SQ_TEX_RESOURCE_WORD1_0 0x30004
1014# define TEX_ARRAY_MODE(x) ((x) << 28)
1015#define SQ_TEX_RESOURCE_WORD2_0 0x30008
1016#define SQ_TEX_RESOURCE_WORD3_0 0x3000C
1017#define SQ_TEX_RESOURCE_WORD4_0 0x30010
1018#define SQ_TEX_RESOURCE_WORD5_0 0x30014
1019#define SQ_TEX_RESOURCE_WORD6_0 0x30018
1020#define SQ_TEX_RESOURCE_WORD7_0 0x3001c
1021
1022
556#endif 1023#endif
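
Most of what the evergreend.h hunk adds is the PM4 packet vocabulary consumed by the new command-stream checker. As a quick orientation (not part of the commit), the header macros pack and unpack a 32-bit packet header as in the standalone sketch below; PACKET_TYPE3 is written as an unsigned constant here only to keep the example portable.

#include <stdio.h>
#include <stdint.h>

/* Local copies of the header macros added above. */
#define PACKET_TYPE3			3u
#define PACKET3(op, n)			((PACKET_TYPE3 << 30) |		\
					 (((op) & 0xFF) << 8) |		\
					 ((n) & 0x3FFF) << 16)
#define CP_PACKET_GET_TYPE(h)		(((h) >> 30) & 3)
#define CP_PACKET_GET_COUNT(h)		(((h) >> 16) & 0x3FFF)
#define CP_PACKET3_GET_OPCODE(h)	(((h) >> 8) & 0xFF)
#define PACKET3_SET_CONTEXT_REG		0x69

int main(void)
{
	/* Header for a SET_CONTEXT_REG packet carrying 2 data dwords. */
	uint32_t hdr = PACKET3(PACKET3_SET_CONTEXT_REG, 2);

	printf("type=%u opcode=0x%02x count=%u\n",
	       CP_PACKET_GET_TYPE(hdr),
	       CP_PACKET3_GET_OPCODE(hdr),
	       CP_PACKET_GET_COUNT(hdr));	/* type=3 opcode=0x69 count=2 */
	return 0;
}
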
diff --git a/drivers/gpu/drm/radeon/r100.c b/drivers/gpu/drm/radeon/r100.c
index cc004b05d63e..3970e62eaab8 100644
--- a/drivers/gpu/drm/radeon/r100.c
+++ b/drivers/gpu/drm/radeon/r100.c
@@ -162,6 +162,11 @@ void r100_pm_init_profile(struct radeon_device *rdev)
162 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; 162 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
163 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 163 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
164 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; 164 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
165 /* mid sh */
166 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
167 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
168 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
169 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
165 /* high sh */ 170 /* high sh */
166 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0; 171 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
167 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 172 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
@@ -172,6 +177,11 @@ void r100_pm_init_profile(struct radeon_device *rdev)
172 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 177 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
173 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 178 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
174 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; 179 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
180 /* mid mh */
181 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
182 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
183 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
184 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
175 /* high mh */ 185 /* high mh */
176 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0; 186 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
177 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 187 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
@@ -1618,6 +1628,7 @@ static int r100_packet0_check(struct radeon_cs_parser *p,
1618 case RADEON_TXFORMAT_RGB332: 1628 case RADEON_TXFORMAT_RGB332:
1619 case RADEON_TXFORMAT_Y8: 1629 case RADEON_TXFORMAT_Y8:
1620 track->textures[i].cpp = 1; 1630 track->textures[i].cpp = 1;
1631 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
1621 break; 1632 break;
1622 case RADEON_TXFORMAT_AI88: 1633 case RADEON_TXFORMAT_AI88:
1623 case RADEON_TXFORMAT_ARGB1555: 1634 case RADEON_TXFORMAT_ARGB1555:
@@ -1629,12 +1640,14 @@ static int r100_packet0_check(struct radeon_cs_parser *p,
1629 case RADEON_TXFORMAT_LDUDV655: 1640 case RADEON_TXFORMAT_LDUDV655:
1630 case RADEON_TXFORMAT_DUDV88: 1641 case RADEON_TXFORMAT_DUDV88:
1631 track->textures[i].cpp = 2; 1642 track->textures[i].cpp = 2;
1643 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
1632 break; 1644 break;
1633 case RADEON_TXFORMAT_ARGB8888: 1645 case RADEON_TXFORMAT_ARGB8888:
1634 case RADEON_TXFORMAT_RGBA8888: 1646 case RADEON_TXFORMAT_RGBA8888:
1635 case RADEON_TXFORMAT_SHADOW32: 1647 case RADEON_TXFORMAT_SHADOW32:
1636 case RADEON_TXFORMAT_LDUDUV8888: 1648 case RADEON_TXFORMAT_LDUDUV8888:
1637 track->textures[i].cpp = 4; 1649 track->textures[i].cpp = 4;
1650 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
1638 break; 1651 break;
1639 case RADEON_TXFORMAT_DXT1: 1652 case RADEON_TXFORMAT_DXT1:
1640 track->textures[i].cpp = 1; 1653 track->textures[i].cpp = 1;
@@ -2594,12 +2607,6 @@ int r100_set_surface_reg(struct radeon_device *rdev, int reg,
2594 int surf_index = reg * 16; 2607 int surf_index = reg * 16;
2595 int flags = 0; 2608 int flags = 0;
2596 2609
2597 /* r100/r200 divide by 16 */
2598 if (rdev->family < CHIP_R300)
2599 flags = pitch / 16;
2600 else
2601 flags = pitch / 8;
2602
2603 if (rdev->family <= CHIP_RS200) { 2610 if (rdev->family <= CHIP_RS200) {
2604 if ((tiling_flags & (RADEON_TILING_MACRO|RADEON_TILING_MICRO)) 2611 if ((tiling_flags & (RADEON_TILING_MACRO|RADEON_TILING_MICRO))
2605 == (RADEON_TILING_MACRO|RADEON_TILING_MICRO)) 2612 == (RADEON_TILING_MACRO|RADEON_TILING_MICRO))
@@ -2623,6 +2630,20 @@ int r100_set_surface_reg(struct radeon_device *rdev, int reg,
2623 if (tiling_flags & RADEON_TILING_SWAP_32BIT) 2630 if (tiling_flags & RADEON_TILING_SWAP_32BIT)
2624 flags |= RADEON_SURF_AP0_SWP_32BPP | RADEON_SURF_AP1_SWP_32BPP; 2631 flags |= RADEON_SURF_AP0_SWP_32BPP | RADEON_SURF_AP1_SWP_32BPP;
2625 2632
 2633 /* when we aren't tiling, the pitch seems to need to be further divided down - tested on power5 + rn50 server */
2634 if (tiling_flags & (RADEON_TILING_SWAP_16BIT | RADEON_TILING_SWAP_32BIT)) {
2635 if (!(tiling_flags & (RADEON_TILING_MACRO | RADEON_TILING_MICRO)))
2636 if (ASIC_IS_RN50(rdev))
2637 pitch /= 16;
2638 }
2639
2640 /* r100/r200 divide by 16 */
2641 if (rdev->family < CHIP_R300)
2642 flags |= pitch / 16;
2643 else
2644 flags |= pitch / 8;
2645
2646
2626 DRM_DEBUG("writing surface %d %d %x %x\n", reg, flags, offset, offset+obj_size-1); 2647 DRM_DEBUG("writing surface %d %d %x %x\n", reg, flags, offset, offset+obj_size-1);
2627 WREG32(RADEON_SURFACE0_INFO + surf_index, flags); 2648 WREG32(RADEON_SURFACE0_INFO + surf_index, flags);
2628 WREG32(RADEON_SURFACE0_LOWER_BOUND + surf_index, offset); 2649 WREG32(RADEON_SURFACE0_LOWER_BOUND + surf_index, offset);
@@ -3137,33 +3158,6 @@ static inline void r100_cs_track_texture_print(struct r100_cs_track_texture *t)
3137 DRM_ERROR("compress format %d\n", t->compress_format); 3158 DRM_ERROR("compress format %d\n", t->compress_format);
3138} 3159}
3139 3160
3140static int r100_cs_track_cube(struct radeon_device *rdev,
3141 struct r100_cs_track *track, unsigned idx)
3142{
3143 unsigned face, w, h;
3144 struct radeon_bo *cube_robj;
3145 unsigned long size;
3146
3147 for (face = 0; face < 5; face++) {
3148 cube_robj = track->textures[idx].cube_info[face].robj;
3149 w = track->textures[idx].cube_info[face].width;
3150 h = track->textures[idx].cube_info[face].height;
3151
3152 size = w * h;
3153 size *= track->textures[idx].cpp;
3154
3155 size += track->textures[idx].cube_info[face].offset;
3156
3157 if (size > radeon_bo_size(cube_robj)) {
3158 DRM_ERROR("Cube texture offset greater than object size %lu %lu\n",
3159 size, radeon_bo_size(cube_robj));
3160 r100_cs_track_texture_print(&track->textures[idx]);
3161 return -1;
3162 }
3163 }
3164 return 0;
3165}
3166
3167static int r100_track_compress_size(int compress_format, int w, int h) 3161static int r100_track_compress_size(int compress_format, int w, int h)
3168{ 3162{
3169 int block_width, block_height, block_bytes; 3163 int block_width, block_height, block_bytes;
@@ -3194,6 +3188,37 @@ static int r100_track_compress_size(int compress_format, int w, int h)
3194 return sz; 3188 return sz;
3195} 3189}
3196 3190
3191static int r100_cs_track_cube(struct radeon_device *rdev,
3192 struct r100_cs_track *track, unsigned idx)
3193{
3194 unsigned face, w, h;
3195 struct radeon_bo *cube_robj;
3196 unsigned long size;
3197 unsigned compress_format = track->textures[idx].compress_format;
3198
3199 for (face = 0; face < 5; face++) {
3200 cube_robj = track->textures[idx].cube_info[face].robj;
3201 w = track->textures[idx].cube_info[face].width;
3202 h = track->textures[idx].cube_info[face].height;
3203
3204 if (compress_format) {
3205 size = r100_track_compress_size(compress_format, w, h);
3206 } else
3207 size = w * h;
3208 size *= track->textures[idx].cpp;
3209
3210 size += track->textures[idx].cube_info[face].offset;
3211
3212 if (size > radeon_bo_size(cube_robj)) {
3213 DRM_ERROR("Cube texture offset greater than object size %lu %lu\n",
3214 size, radeon_bo_size(cube_robj));
3215 r100_cs_track_texture_print(&track->textures[idx]);
3216 return -1;
3217 }
3218 }
3219 return 0;
3220}
3221
3197static int r100_cs_track_texture_check(struct radeon_device *rdev, 3222static int r100_cs_track_texture_check(struct radeon_device *rdev,
3198 struct r100_cs_track *track) 3223 struct r100_cs_track *track)
3199{ 3224{
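
The relocated r100_cs_track_cube() above now sizes compressed cube faces through r100_track_compress_size(), whose body is unchanged by this diff. For reference, block-compressed formats are accounted per 4x4 texel block rather than per texel; the standalone sketch below shows that arithmetic for DXT1 (8 bytes per block) — it mirrors the idea, not the driver's exact helper.

#include <stdio.h>

/* Sketch: round the image up to whole 4x4 blocks and charge a fixed
 * number of bytes per block.  DXT1 uses 8 bytes per block; DXT3/DXT5
 * would use 16. */
static unsigned long dxt1_size(int w, int h)
{
	int blocks_x = (w + 3) / 4;
	int blocks_y = (h + 3) / 4;

	return (unsigned long)blocks_x * blocks_y * 8;
}

int main(void)
{
	/* A 64x64 DXT1 face: 16 x 16 blocks x 8 bytes = 2048 bytes. */
	printf("%lu\n", dxt1_size(64, 64));
	return 0;
}
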
diff --git a/drivers/gpu/drm/radeon/r200.c b/drivers/gpu/drm/radeon/r200.c
index 85617c311212..0266d72e0a4c 100644
--- a/drivers/gpu/drm/radeon/r200.c
+++ b/drivers/gpu/drm/radeon/r200.c
@@ -415,6 +415,8 @@ int r200_packet0_check(struct radeon_cs_parser *p,
415 /* 2D, 3D, CUBE */ 415 /* 2D, 3D, CUBE */
416 switch (tmp) { 416 switch (tmp) {
417 case 0: 417 case 0:
418 case 3:
419 case 4:
418 case 5: 420 case 5:
419 case 6: 421 case 6:
420 case 7: 422 case 7:
@@ -450,6 +452,7 @@ int r200_packet0_check(struct radeon_cs_parser *p,
450 case R200_TXFORMAT_RGB332: 452 case R200_TXFORMAT_RGB332:
451 case R200_TXFORMAT_Y8: 453 case R200_TXFORMAT_Y8:
452 track->textures[i].cpp = 1; 454 track->textures[i].cpp = 1;
455 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
453 break; 456 break;
454 case R200_TXFORMAT_AI88: 457 case R200_TXFORMAT_AI88:
455 case R200_TXFORMAT_ARGB1555: 458 case R200_TXFORMAT_ARGB1555:
@@ -461,6 +464,7 @@ int r200_packet0_check(struct radeon_cs_parser *p,
461 case R200_TXFORMAT_DVDU88: 464 case R200_TXFORMAT_DVDU88:
462 case R200_TXFORMAT_AVYU4444: 465 case R200_TXFORMAT_AVYU4444:
463 track->textures[i].cpp = 2; 466 track->textures[i].cpp = 2;
467 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
464 break; 468 break;
465 case R200_TXFORMAT_ARGB8888: 469 case R200_TXFORMAT_ARGB8888:
466 case R200_TXFORMAT_RGBA8888: 470 case R200_TXFORMAT_RGBA8888:
@@ -468,6 +472,7 @@ int r200_packet0_check(struct radeon_cs_parser *p,
468 case R200_TXFORMAT_BGR111110: 472 case R200_TXFORMAT_BGR111110:
469 case R200_TXFORMAT_LDVDU8888: 473 case R200_TXFORMAT_LDVDU8888:
470 track->textures[i].cpp = 4; 474 track->textures[i].cpp = 4;
475 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
471 break; 476 break;
472 case R200_TXFORMAT_DXT1: 477 case R200_TXFORMAT_DXT1:
473 track->textures[i].cpp = 1; 478 track->textures[i].cpp = 1;
diff --git a/drivers/gpu/drm/radeon/r300.c b/drivers/gpu/drm/radeon/r300.c
index b2f9efe2897c..7e81db5eb804 100644
--- a/drivers/gpu/drm/radeon/r300.c
+++ b/drivers/gpu/drm/radeon/r300.c
@@ -881,6 +881,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
881 case R300_TX_FORMAT_Y4X4: 881 case R300_TX_FORMAT_Y4X4:
882 case R300_TX_FORMAT_Z3Y3X2: 882 case R300_TX_FORMAT_Z3Y3X2:
883 track->textures[i].cpp = 1; 883 track->textures[i].cpp = 1;
884 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
884 break; 885 break;
885 case R300_TX_FORMAT_X16: 886 case R300_TX_FORMAT_X16:
886 case R300_TX_FORMAT_Y8X8: 887 case R300_TX_FORMAT_Y8X8:
@@ -892,6 +893,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
892 case R300_TX_FORMAT_B8G8_B8G8: 893 case R300_TX_FORMAT_B8G8_B8G8:
893 case R300_TX_FORMAT_G8R8_G8B8: 894 case R300_TX_FORMAT_G8R8_G8B8:
894 track->textures[i].cpp = 2; 895 track->textures[i].cpp = 2;
896 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
895 break; 897 break;
896 case R300_TX_FORMAT_Y16X16: 898 case R300_TX_FORMAT_Y16X16:
897 case R300_TX_FORMAT_Z11Y11X10: 899 case R300_TX_FORMAT_Z11Y11X10:
@@ -902,14 +904,17 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
902 case R300_TX_FORMAT_FL_I32: 904 case R300_TX_FORMAT_FL_I32:
903 case 0x1e: 905 case 0x1e:
904 track->textures[i].cpp = 4; 906 track->textures[i].cpp = 4;
907 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
905 break; 908 break;
906 case R300_TX_FORMAT_W16Z16Y16X16: 909 case R300_TX_FORMAT_W16Z16Y16X16:
907 case R300_TX_FORMAT_FL_R16G16B16A16: 910 case R300_TX_FORMAT_FL_R16G16B16A16:
908 case R300_TX_FORMAT_FL_I32A32: 911 case R300_TX_FORMAT_FL_I32A32:
909 track->textures[i].cpp = 8; 912 track->textures[i].cpp = 8;
913 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
910 break; 914 break;
911 case R300_TX_FORMAT_FL_R32G32B32A32: 915 case R300_TX_FORMAT_FL_R32G32B32A32:
912 track->textures[i].cpp = 16; 916 track->textures[i].cpp = 16;
917 track->textures[i].compress_format = R100_TRACK_COMP_NONE;
913 break; 918 break;
914 case R300_TX_FORMAT_DXT1: 919 case R300_TX_FORMAT_DXT1:
915 track->textures[i].cpp = 1; 920 track->textures[i].cpp = 1;
diff --git a/drivers/gpu/drm/radeon/r420.c b/drivers/gpu/drm/radeon/r420.c
index 4415a5ee5871..e6c89142bb4d 100644
--- a/drivers/gpu/drm/radeon/r420.c
+++ b/drivers/gpu/drm/radeon/r420.c
@@ -45,9 +45,14 @@ void r420_pm_init_profile(struct radeon_device *rdev)
45 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; 45 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
46 /* low sh */ 46 /* low sh */
47 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; 47 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0;
48 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1; 48 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
49 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 49 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
50 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; 50 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
51 /* mid sh */
52 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
53 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
54 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
55 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
51 /* high sh */ 56 /* high sh */
52 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0; 57 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
53 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 58 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
@@ -58,6 +63,11 @@ void r420_pm_init_profile(struct radeon_device *rdev)
58 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 63 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
59 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 64 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
60 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; 65 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
66 /* mid mh */
67 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
68 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
69 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
70 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
61 /* high mh */ 71 /* high mh */
62 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0; 72 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
63 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 73 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
diff --git a/drivers/gpu/drm/radeon/r600.c b/drivers/gpu/drm/radeon/r600.c
index 44e96a2ae25a..3d6645ce2151 100644
--- a/drivers/gpu/drm/radeon/r600.c
+++ b/drivers/gpu/drm/radeon/r600.c
@@ -130,9 +130,14 @@ void r600_pm_get_dynpm_state(struct radeon_device *rdev)
130 break; 130 break;
131 } 131 }
132 } 132 }
133 } else 133 } else {
134 rdev->pm.requested_power_state_index = 134 if (rdev->pm.current_power_state_index == 0)
135 rdev->pm.current_power_state_index - 1; 135 rdev->pm.requested_power_state_index =
136 rdev->pm.num_power_states - 1;
137 else
138 rdev->pm.requested_power_state_index =
139 rdev->pm.current_power_state_index - 1;
140 }
136 } 141 }
137 rdev->pm.requested_clock_mode_index = 0; 142 rdev->pm.requested_clock_mode_index = 0;
138 /* don't use the power state if crtcs are active and no display flag is set */ 143 /* don't use the power state if crtcs are active and no display flag is set */
@@ -291,6 +296,11 @@ void rs780_pm_init_profile(struct radeon_device *rdev)
291 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; 296 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
292 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 297 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
293 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; 298 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
299 /* mid sh */
300 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
301 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
302 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
303 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
294 /* high sh */ 304 /* high sh */
295 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0; 305 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
296 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1; 306 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
@@ -301,6 +311,11 @@ void rs780_pm_init_profile(struct radeon_device *rdev)
301 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0; 311 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
302 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 312 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
303 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; 313 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
314 /* mid mh */
315 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
316 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
317 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
318 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
304 /* high mh */ 319 /* high mh */
305 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0; 320 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
306 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1; 321 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1;
@@ -317,6 +332,11 @@ void rs780_pm_init_profile(struct radeon_device *rdev)
317 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1; 332 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
318 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 333 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
319 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; 334 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
335 /* mid sh */
336 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
337 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
338 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
339 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
320 /* high sh */ 340 /* high sh */
321 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1; 341 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
322 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2; 342 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2;
@@ -327,6 +347,11 @@ void rs780_pm_init_profile(struct radeon_device *rdev)
327 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1; 347 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1;
328 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 348 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
329 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; 349 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
350 /* mid mh */
351 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1;
352 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1;
353 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
354 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
330 /* high mh */ 355 /* high mh */
331 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1; 356 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1;
332 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2; 357 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
@@ -343,6 +368,11 @@ void rs780_pm_init_profile(struct radeon_device *rdev)
343 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2; 368 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2;
344 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 369 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
345 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; 370 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
371 /* mid sh */
372 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2;
373 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2;
374 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
375 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
346 /* high sh */ 376 /* high sh */
347 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2; 377 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2;
348 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3; 378 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3;
@@ -353,6 +383,11 @@ void rs780_pm_init_profile(struct radeon_device *rdev)
353 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0; 383 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
354 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 384 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
355 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; 385 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
386 /* mid mh */
387 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
388 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
389 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
390 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
356 /* high mh */ 391 /* high mh */
357 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2; 392 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
358 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3; 393 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3;
@@ -375,6 +410,11 @@ void r600_pm_init_profile(struct radeon_device *rdev)
375 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 410 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
376 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 411 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
377 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; 412 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
413 /* mid sh */
414 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
415 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
416 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
417 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
378 /* high sh */ 418 /* high sh */
379 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; 419 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
380 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 420 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
@@ -385,6 +425,11 @@ void r600_pm_init_profile(struct radeon_device *rdev)
385 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 425 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
386 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 426 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
387 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; 427 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
428 /* mid mh */
429 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
430 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
431 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
432 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
388 /* high mh */ 433 /* high mh */
389 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; 434 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
390 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; 435 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
@@ -401,7 +446,12 @@ void r600_pm_init_profile(struct radeon_device *rdev)
401 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1; 446 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
402 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1; 447 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
403 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 448 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
404 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 1; 449 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
450 /* mid sh */
451 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
452 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
453 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
454 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
405 /* high sh */ 455 /* high sh */
406 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1; 456 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
407 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1; 457 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
@@ -411,7 +461,12 @@ void r600_pm_init_profile(struct radeon_device *rdev)
411 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2; 461 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
412 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2; 462 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2;
413 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 463 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
414 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 1; 464 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
 465 /* mid mh */
466 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
467 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2;
468 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
469 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
415 /* high mh */ 470 /* high mh */
416 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2; 471 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
417 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2; 472 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
@@ -430,14 +485,30 @@ void r600_pm_init_profile(struct radeon_device *rdev)
430 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 485 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx =
431 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0); 486 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
432 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 487 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
433 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 1; 488 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
434 } else { 489 } else {
435 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 490 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx =
436 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0); 491 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
437 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 492 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx =
438 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0); 493 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
439 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; 494 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
440 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 1; 495 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
496 }
497 /* mid sh */
498 if (rdev->flags & RADEON_IS_MOBILITY) {
499 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx =
500 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
501 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx =
502 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
503 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
504 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
505 } else {
506 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx =
507 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
508 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx =
509 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
510 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
511 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
441 } 512 }
442 /* high sh */ 513 /* high sh */
443 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 514 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx =
@@ -453,14 +524,30 @@ void r600_pm_init_profile(struct radeon_device *rdev)
453 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 524 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx =
454 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1); 525 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
455 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 526 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
456 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 2; 527 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
457 } else { 528 } else {
458 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 529 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx =
459 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1); 530 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
460 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 531 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx =
461 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1); 532 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
462 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; 533 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
463 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 1; 534 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
535 }
536 /* mid mh */
537 if (rdev->flags & RADEON_IS_MOBILITY) {
538 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx =
539 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
540 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx =
541 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
542 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
543 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
544 } else {
545 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx =
546 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
547 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx =
548 r600_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
549 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
550 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
464 } 551 }
465 /* high mh */ 552 /* high mh */
466 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 553 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx =
@@ -475,7 +562,18 @@ void r600_pm_init_profile(struct radeon_device *rdev)
475 562
476void r600_pm_misc(struct radeon_device *rdev) 563void r600_pm_misc(struct radeon_device *rdev)
477{ 564{
565 int req_ps_idx = rdev->pm.requested_power_state_index;
566 int req_cm_idx = rdev->pm.requested_clock_mode_index;
567 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
568 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
478 569
570 if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
571 if (voltage->voltage != rdev->pm.current_vddc) {
572 radeon_atom_set_voltage(rdev, voltage->voltage);
573 rdev->pm.current_vddc = voltage->voltage;
574 DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
575 }
576 }
479} 577}
480 578
481bool r600_gui_idle(struct radeon_device *rdev) 579bool r600_gui_idle(struct radeon_device *rdev)
@@ -1004,7 +1102,7 @@ static void r600_mc_program(struct radeon_device *rdev)
1004 WREG32(MC_VM_FB_LOCATION, tmp); 1102 WREG32(MC_VM_FB_LOCATION, tmp);
1005 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); 1103 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
1006 WREG32(HDP_NONSURFACE_INFO, (2 << 7)); 1104 WREG32(HDP_NONSURFACE_INFO, (2 << 7));
1007 WREG32(HDP_NONSURFACE_SIZE, rdev->mc.mc_vram_size | 0x3FF); 1105 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
1008 if (rdev->flags & RADEON_IS_AGP) { 1106 if (rdev->flags & RADEON_IS_AGP) {
1009 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22); 1107 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
1010 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22); 1108 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
@@ -1126,8 +1224,10 @@ int r600_mc_init(struct radeon_device *rdev)
1126 rdev->mc.visible_vram_size = rdev->mc.aper_size; 1224 rdev->mc.visible_vram_size = rdev->mc.aper_size;
1127 r600_vram_gtt_location(rdev, &rdev->mc); 1225 r600_vram_gtt_location(rdev, &rdev->mc);
1128 1226
1129 if (rdev->flags & RADEON_IS_IGP) 1227 if (rdev->flags & RADEON_IS_IGP) {
1228 rs690_pm_info(rdev);
1130 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev); 1229 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
1230 }
1131 radeon_update_bandwidth_info(rdev); 1231 radeon_update_bandwidth_info(rdev);
1132 return 0; 1232 return 0;
1133} 1233}
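
The fleshed-out r600_pm_misc() above only calls into the AtomBIOS voltage table when the requested SW-controlled VDDC differs from the value cached in rdev->pm.current_vddc. Reduced to a standalone compare-and-cache sketch (function names here are stand-ins, not driver API):

#include <stdio.h>
#include <stdint.h>

static uint16_t current_vddc;	/* last value actually programmed */

/* Stand-in for radeon_atom_set_voltage(); just logs in this sketch. */
static void set_voltage(uint16_t mv)
{
	printf("programming VDDC to %u\n", mv);
}

static void pm_misc(uint16_t requested_vddc)
{
	/* Skip zero (unknown) requests and redundant reprogramming. */
	if (requested_vddc && requested_vddc != current_vddc) {
		set_voltage(requested_vddc);
		current_vddc = requested_vddc;
	}
}

int main(void)
{
	pm_misc(1100);	/* programs */
	pm_misc(1100);	/* no-op: already at this level */
	pm_misc(1150);	/* programs again */
	return 0;
}
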
diff --git a/drivers/gpu/drm/radeon/radeon.h b/drivers/gpu/drm/radeon/radeon.h
index 669feb689bfc..ab61aaa887bb 100644
--- a/drivers/gpu/drm/radeon/radeon.h
+++ b/drivers/gpu/drm/radeon/radeon.h
@@ -176,6 +176,8 @@ void radeon_pm_suspend(struct radeon_device *rdev);
176void radeon_pm_resume(struct radeon_device *rdev); 176void radeon_pm_resume(struct radeon_device *rdev);
177void radeon_combios_get_power_modes(struct radeon_device *rdev); 177void radeon_combios_get_power_modes(struct radeon_device *rdev);
178void radeon_atombios_get_power_modes(struct radeon_device *rdev); 178void radeon_atombios_get_power_modes(struct radeon_device *rdev);
179void radeon_atom_set_voltage(struct radeon_device *rdev, u16 level);
180void rs690_pm_info(struct radeon_device *rdev);
179 181
180/* 182/*
181 * Fences. 183 * Fences.
@@ -618,7 +620,8 @@ enum radeon_dynpm_state {
618 DYNPM_STATE_DISABLED, 620 DYNPM_STATE_DISABLED,
619 DYNPM_STATE_MINIMUM, 621 DYNPM_STATE_MINIMUM,
620 DYNPM_STATE_PAUSED, 622 DYNPM_STATE_PAUSED,
621 DYNPM_STATE_ACTIVE 623 DYNPM_STATE_ACTIVE,
624 DYNPM_STATE_SUSPENDED,
622}; 625};
623enum radeon_dynpm_action { 626enum radeon_dynpm_action {
624 DYNPM_ACTION_NONE, 627 DYNPM_ACTION_NONE,
@@ -647,15 +650,18 @@ enum radeon_pm_profile_type {
647 PM_PROFILE_DEFAULT, 650 PM_PROFILE_DEFAULT,
648 PM_PROFILE_AUTO, 651 PM_PROFILE_AUTO,
649 PM_PROFILE_LOW, 652 PM_PROFILE_LOW,
653 PM_PROFILE_MID,
650 PM_PROFILE_HIGH, 654 PM_PROFILE_HIGH,
651}; 655};
652 656
653#define PM_PROFILE_DEFAULT_IDX 0 657#define PM_PROFILE_DEFAULT_IDX 0
654#define PM_PROFILE_LOW_SH_IDX 1 658#define PM_PROFILE_LOW_SH_IDX 1
655#define PM_PROFILE_HIGH_SH_IDX 2 659#define PM_PROFILE_MID_SH_IDX 2
656#define PM_PROFILE_LOW_MH_IDX 3 660#define PM_PROFILE_HIGH_SH_IDX 3
657#define PM_PROFILE_HIGH_MH_IDX 4 661#define PM_PROFILE_LOW_MH_IDX 4
658#define PM_PROFILE_MAX 5 662#define PM_PROFILE_MID_MH_IDX 5
663#define PM_PROFILE_HIGH_MH_IDX 6
664#define PM_PROFILE_MAX 7
659 665
660struct radeon_pm_profile { 666struct radeon_pm_profile {
661 int dpms_off_ps_idx; 667 int dpms_off_ps_idx;
@@ -744,6 +750,7 @@ struct radeon_pm {
744 int default_power_state_index; 750 int default_power_state_index;
745 u32 current_sclk; 751 u32 current_sclk;
746 u32 current_mclk; 752 u32 current_mclk;
753 u32 current_vddc;
747 struct radeon_i2c_chan *i2c_bus; 754 struct radeon_i2c_chan *i2c_bus;
748 /* selected pm method */ 755 /* selected pm method */
749 enum radeon_pm_method pm_method; 756 enum radeon_pm_method pm_method;
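
With the MID entries added, radeon.h now defines seven profile slots: default, then low/mid/high for the single-head (SH) and multi-head (MH) cases. The sketch below only illustrates how such an index could be derived from a load level and a head count; the actual selection logic lives in radeon_pm.c and is not part of this hunk.

#include <stdio.h>

/* Indices as defined in radeon.h after this patch. */
#define PM_PROFILE_DEFAULT_IDX	0
#define PM_PROFILE_LOW_SH_IDX	1
#define PM_PROFILE_MID_SH_IDX	2
#define PM_PROFILE_HIGH_SH_IDX	3
#define PM_PROFILE_LOW_MH_IDX	4
#define PM_PROFILE_MID_MH_IDX	5
#define PM_PROFILE_HIGH_MH_IDX	6
#define PM_PROFILE_MAX		7

enum load { LOAD_LOW, LOAD_MID, LOAD_HIGH };

/* Illustrative mapping: pick the SH or MH block, then offset by load. */
static int profile_index(enum load l, int multi_head)
{
	int base = multi_head ? PM_PROFILE_LOW_MH_IDX : PM_PROFILE_LOW_SH_IDX;

	return base + (int)l;
}

int main(void)
{
	printf("%d %d\n",
	       profile_index(LOAD_MID, 0),	/* 2 == PM_PROFILE_MID_SH_IDX */
	       profile_index(LOAD_HIGH, 1));	/* 6 == PM_PROFILE_HIGH_MH_IDX */
	return 0;
}
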
diff --git a/drivers/gpu/drm/radeon/radeon_asic.c b/drivers/gpu/drm/radeon/radeon_asic.c
index e57df08d4aeb..646f96f97c77 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.c
+++ b/drivers/gpu/drm/radeon/radeon_asic.c
@@ -724,8 +724,8 @@ static struct radeon_asic evergreen_asic = {
724 .irq_set = &evergreen_irq_set, 724 .irq_set = &evergreen_irq_set,
725 .irq_process = &evergreen_irq_process, 725 .irq_process = &evergreen_irq_process,
726 .get_vblank_counter = &evergreen_get_vblank_counter, 726 .get_vblank_counter = &evergreen_get_vblank_counter,
727 .fence_ring_emit = NULL, 727 .fence_ring_emit = &r600_fence_ring_emit,
728 .cs_parse = NULL, 728 .cs_parse = &evergreen_cs_parse,
729 .copy_blit = NULL, 729 .copy_blit = NULL,
730 .copy_dma = NULL, 730 .copy_dma = NULL,
731 .copy = NULL, 731 .copy = NULL,
@@ -780,6 +780,13 @@ int radeon_asic_init(struct radeon_device *rdev)
780 case CHIP_R423: 780 case CHIP_R423:
781 case CHIP_RV410: 781 case CHIP_RV410:
782 rdev->asic = &r420_asic; 782 rdev->asic = &r420_asic;
783 /* handle macs */
784 if (rdev->bios == NULL) {
785 rdev->asic->get_engine_clock = &radeon_legacy_get_engine_clock;
786 rdev->asic->set_engine_clock = &radeon_legacy_set_engine_clock;
787 rdev->asic->get_memory_clock = &radeon_legacy_get_memory_clock;
788 rdev->asic->set_memory_clock = NULL;
789 }
783 break; 790 break;
784 case CHIP_RS400: 791 case CHIP_RS400:
785 case CHIP_RS480: 792 case CHIP_RS480:
diff --git a/drivers/gpu/drm/radeon/radeon_asic.h b/drivers/gpu/drm/radeon/radeon_asic.h
index 5c40a3dfaca2..c0bbaa64157a 100644
--- a/drivers/gpu/drm/radeon/radeon_asic.h
+++ b/drivers/gpu/drm/radeon/radeon_asic.h
@@ -314,6 +314,7 @@ void evergreen_hpd_set_polarity(struct radeon_device *rdev,
314u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc); 314u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
315int evergreen_irq_set(struct radeon_device *rdev); 315int evergreen_irq_set(struct radeon_device *rdev);
316int evergreen_irq_process(struct radeon_device *rdev); 316int evergreen_irq_process(struct radeon_device *rdev);
317extern int evergreen_cs_parse(struct radeon_cs_parser *p);
317extern void evergreen_pm_misc(struct radeon_device *rdev); 318extern void evergreen_pm_misc(struct radeon_device *rdev);
318extern void evergreen_pm_prepare(struct radeon_device *rdev); 319extern void evergreen_pm_prepare(struct radeon_device *rdev);
319extern void evergreen_pm_finish(struct radeon_device *rdev); 320extern void evergreen_pm_finish(struct radeon_device *rdev);
diff --git a/drivers/gpu/drm/radeon/radeon_atombios.c b/drivers/gpu/drm/radeon/radeon_atombios.c
index 24ea683f7cf5..99bd8a9c56b3 100644
--- a/drivers/gpu/drm/radeon/radeon_atombios.c
+++ b/drivers/gpu/drm/radeon/radeon_atombios.c
@@ -1538,7 +1538,8 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1538 rdev->pm.power_state[state_index].pcie_lanes = 1538 rdev->pm.power_state[state_index].pcie_lanes =
1539 power_info->info.asPowerPlayInfo[i].ucNumPciELanes; 1539 power_info->info.asPowerPlayInfo[i].ucNumPciELanes;
1540 misc = le32_to_cpu(power_info->info.asPowerPlayInfo[i].ulMiscInfo); 1540 misc = le32_to_cpu(power_info->info.asPowerPlayInfo[i].ulMiscInfo);
1541 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) { 1541 if ((misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) ||
1542 (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_ACTIVE_HIGH)) {
1542 rdev->pm.power_state[state_index].clock_info[0].voltage.type = 1543 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1543 VOLTAGE_GPIO; 1544 VOLTAGE_GPIO;
1544 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio = 1545 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio =
@@ -1605,7 +1606,8 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1605 power_info->info_2.asPowerPlayInfo[i].ucNumPciELanes; 1606 power_info->info_2.asPowerPlayInfo[i].ucNumPciELanes;
1606 misc = le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMiscInfo); 1607 misc = le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMiscInfo);
1607 misc2 = le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMiscInfo2); 1608 misc2 = le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMiscInfo2);
1608 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) { 1609 if ((misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) ||
1610 (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_ACTIVE_HIGH)) {
1609 rdev->pm.power_state[state_index].clock_info[0].voltage.type = 1611 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1610 VOLTAGE_GPIO; 1612 VOLTAGE_GPIO;
1611 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio = 1613 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio =
@@ -1679,7 +1681,8 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1679 power_info->info_3.asPowerPlayInfo[i].ucNumPciELanes; 1681 power_info->info_3.asPowerPlayInfo[i].ucNumPciELanes;
1680 misc = le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMiscInfo); 1682 misc = le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMiscInfo);
1681 misc2 = le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMiscInfo2); 1683 misc2 = le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMiscInfo2);
1682 if (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) { 1684 if ((misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) ||
1685 (misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_ACTIVE_HIGH)) {
1683 rdev->pm.power_state[state_index].clock_info[0].voltage.type = 1686 rdev->pm.power_state[state_index].clock_info[0].voltage.type =
1684 VOLTAGE_GPIO; 1687 VOLTAGE_GPIO;
1685 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio = 1688 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio =
@@ -1755,9 +1758,22 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1755 rdev->pm.power_state[state_index].misc2 = 0; 1758 rdev->pm.power_state[state_index].misc2 = 0;
1756 } 1759 }
1757 } else { 1760 } else {
1761 int fw_index = GetIndexIntoMasterTable(DATA, FirmwareInfo);
1762 uint8_t fw_frev, fw_crev;
1763 uint16_t fw_data_offset, vddc = 0;
1764 union firmware_info *firmware_info;
1765 ATOM_PPLIB_THERMALCONTROLLER *controller = &power_info->info_4.sThermalController;
1766
1767 if (atom_parse_data_header(mode_info->atom_context, fw_index, NULL,
1768 &fw_frev, &fw_crev, &fw_data_offset)) {
1769 firmware_info =
1770 (union firmware_info *)(mode_info->atom_context->bios +
1771 fw_data_offset);
1772 vddc = firmware_info->info_14.usBootUpVDDCVoltage;
1773 }
1774
1758 /* add the i2c bus for thermal/fan chip */ 1775 /* add the i2c bus for thermal/fan chip */
1759 /* no support for internal controller yet */ 1776 /* no support for internal controller yet */
1760 ATOM_PPLIB_THERMALCONTROLLER *controller = &power_info->info_4.sThermalController;
1761 if (controller->ucType > 0) { 1777 if (controller->ucType > 0) {
1762 if ((controller->ucType == ATOM_PP_THERMALCONTROLLER_RV6xx) || 1778 if ((controller->ucType == ATOM_PP_THERMALCONTROLLER_RV6xx) ||
1763 (controller->ucType == ATOM_PP_THERMALCONTROLLER_RV770) || 1779 (controller->ucType == ATOM_PP_THERMALCONTROLLER_RV770) ||
@@ -1817,10 +1833,7 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1817 /* skip invalid modes */ 1833 /* skip invalid modes */
1818 if (rdev->pm.power_state[state_index].clock_info[mode_index].sclk == 0) 1834 if (rdev->pm.power_state[state_index].clock_info[mode_index].sclk == 0)
1819 continue; 1835 continue;
1820 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type = 1836 /* voltage works differently on IGPs */
1821 VOLTAGE_SW;
1822 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage =
1823 clock_info->usVDDC;
1824 mode_index++; 1837 mode_index++;
1825 } else if (ASIC_IS_DCE4(rdev)) { 1838 } else if (ASIC_IS_DCE4(rdev)) {
1826 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO *clock_info = 1839 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO *clock_info =
@@ -1904,6 +1917,16 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1904 rdev->pm.default_power_state_index = state_index; 1917 rdev->pm.default_power_state_index = state_index;
1905 rdev->pm.power_state[state_index].default_clock_mode = 1918 rdev->pm.power_state[state_index].default_clock_mode =
1906 &rdev->pm.power_state[state_index].clock_info[mode_index - 1]; 1919 &rdev->pm.power_state[state_index].clock_info[mode_index - 1];
 1920 /* patch the table values with the default sclk/mclk from firmware info */
1921 for (j = 0; j < mode_index; j++) {
1922 rdev->pm.power_state[state_index].clock_info[j].mclk =
1923 rdev->clock.default_mclk;
1924 rdev->pm.power_state[state_index].clock_info[j].sclk =
1925 rdev->clock.default_sclk;
1926 if (vddc)
1927 rdev->pm.power_state[state_index].clock_info[j].voltage.voltage =
1928 vddc;
1929 }
1907 } 1930 }
1908 state_index++; 1931 state_index++;
1909 } 1932 }
@@ -1943,6 +1966,7 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
1943 1966
1944 rdev->pm.current_power_state_index = rdev->pm.default_power_state_index; 1967 rdev->pm.current_power_state_index = rdev->pm.default_power_state_index;
1945 rdev->pm.current_clock_mode_index = 0; 1968 rdev->pm.current_clock_mode_index = 0;
1969 rdev->pm.current_vddc = rdev->pm.power_state[rdev->pm.default_power_state_index].clock_info[0].voltage.voltage;
1946} 1970}
1947 1971
1948void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable) 1972void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable)
@@ -1998,6 +2022,42 @@ void radeon_atom_set_memory_clock(struct radeon_device *rdev,
1998 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 2022 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
1999} 2023}
2000 2024
2025union set_voltage {
2026 struct _SET_VOLTAGE_PS_ALLOCATION alloc;
2027 struct _SET_VOLTAGE_PARAMETERS v1;
2028 struct _SET_VOLTAGE_PARAMETERS_V2 v2;
2029};
2030
2031void radeon_atom_set_voltage(struct radeon_device *rdev, u16 level)
2032{
2033 union set_voltage args;
2034 int index = GetIndexIntoMasterTable(COMMAND, SetVoltage);
2035 u8 frev, crev, volt_index = level;
2036
2037 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev))
2038 return;
2039
2040 switch (crev) {
2041 case 1:
2042 args.v1.ucVoltageType = SET_VOLTAGE_TYPE_ASIC_VDDC;
2043 args.v1.ucVoltageMode = SET_ASIC_VOLTAGE_MODE_ALL_SOURCE;
2044 args.v1.ucVoltageIndex = volt_index;
2045 break;
2046 case 2:
2047 args.v2.ucVoltageType = SET_VOLTAGE_TYPE_ASIC_VDDC;
2048 args.v2.ucVoltageMode = SET_ASIC_VOLTAGE_MODE_SET_VOLTAGE;
2049 args.v2.usVoltageLevel = cpu_to_le16(level);
2050 break;
2051 default:
2052 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
2053 return;
2054 }
2055
2056 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
2057}
2058
2059
2060
2001void radeon_atom_initialize_bios_scratch_regs(struct drm_device *dev) 2061void radeon_atom_initialize_bios_scratch_regs(struct drm_device *dev)
2002{ 2062{
2003 struct radeon_device *rdev = dev->dev_private; 2063 struct radeon_device *rdev = dev->dev_private;
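The new radeon_atom_set_voltage() helper above picks the v1 or v2 SetVoltage table layout from the command table revision. A minimal sketch of how a power-management path might drive it, assuming a VOLTAGE_SW-typed entry and the current_vddc tracking added elsewhere in this patch; the helper name example_apply_vddc() and the struct radeon_voltage parameter are illustrative, not part of this diff:

	/* illustrative only: program a new VDDC level if it differs from
	 * the currently cached one */
	static void example_apply_vddc(struct radeon_device *rdev,
				       struct radeon_voltage *voltage)
	{
		if (voltage->type != VOLTAGE_SW)
			return;
		if (voltage->voltage && voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage);
			rdev->pm.current_vddc = voltage->voltage;
		}
	}
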
diff --git a/drivers/gpu/drm/radeon/radeon_bios.c b/drivers/gpu/drm/radeon/radeon_bios.c
index fbba938f8048..2c9213739999 100644
--- a/drivers/gpu/drm/radeon/radeon_bios.c
+++ b/drivers/gpu/drm/radeon/radeon_bios.c
@@ -48,6 +48,10 @@ static bool igp_read_bios_from_vram(struct radeon_device *rdev)
48 resource_size_t vram_base; 48 resource_size_t vram_base;
49 resource_size_t size = 256 * 1024; /* ??? */ 49 resource_size_t size = 256 * 1024; /* ??? */
50 50
51 if (!(rdev->flags & RADEON_IS_IGP))
52 if (!radeon_card_posted(rdev))
53 return false;
54
51 rdev->bios = NULL; 55 rdev->bios = NULL;
52 vram_base = drm_get_resource_start(rdev->ddev, 0); 56 vram_base = drm_get_resource_start(rdev->ddev, 0);
53 bios = ioremap(vram_base, size); 57 bios = ioremap(vram_base, size);
diff --git a/drivers/gpu/drm/radeon/radeon_combios.c b/drivers/gpu/drm/radeon/radeon_combios.c
index 7b5e10d3e9c9..d1c1d8dd93ce 100644
--- a/drivers/gpu/drm/radeon/radeon_combios.c
+++ b/drivers/gpu/drm/radeon/radeon_combios.c
@@ -1411,6 +1411,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1411 rdev->mode_info.connector_table = CT_IMAC_G5_ISIGHT; 1411 rdev->mode_info.connector_table = CT_IMAC_G5_ISIGHT;
1412 } else 1412 } else
1413#endif /* CONFIG_PPC_PMAC */ 1413#endif /* CONFIG_PPC_PMAC */
1414#ifdef CONFIG_PPC64
1415 if (ASIC_IS_RN50(rdev))
1416 rdev->mode_info.connector_table = CT_RN50_POWER;
1417 else
1418#endif
1414 rdev->mode_info.connector_table = CT_GENERIC; 1419 rdev->mode_info.connector_table = CT_GENERIC;
1415 } 1420 }
1416 1421
@@ -1853,6 +1858,33 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
1853 CONNECTOR_OBJECT_ID_SVIDEO, 1858 CONNECTOR_OBJECT_ID_SVIDEO,
1854 &hpd); 1859 &hpd);
1855 break; 1860 break;
1861 case CT_RN50_POWER:
1862 DRM_INFO("Connector Table: %d (rn50-power)\n",
1863 rdev->mode_info.connector_table);
1864 /* VGA - primary dac */
1865 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_VGA_DDC);
1866 hpd.hpd = RADEON_HPD_NONE;
1867 radeon_add_legacy_encoder(dev,
1868 radeon_get_encoder_id(dev,
1869 ATOM_DEVICE_CRT1_SUPPORT,
1870 1),
1871 ATOM_DEVICE_CRT1_SUPPORT);
1872 radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_CRT1_SUPPORT,
1873 DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
1874 CONNECTOR_OBJECT_ID_VGA,
1875 &hpd);
1876 ddc_i2c = combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC);
1877 hpd.hpd = RADEON_HPD_NONE;
1878 radeon_add_legacy_encoder(dev,
1879 radeon_get_encoder_id(dev,
1880 ATOM_DEVICE_CRT2_SUPPORT,
1881 2),
1882 ATOM_DEVICE_CRT2_SUPPORT);
1883 radeon_add_legacy_connector(dev, 1, ATOM_DEVICE_CRT2_SUPPORT,
1884 DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
1885 CONNECTOR_OBJECT_ID_VGA,
1886 &hpd);
1887 break;
1856 default: 1888 default:
1857 DRM_INFO("Connector table: %d (invalid)\n", 1889 DRM_INFO("Connector table: %d (invalid)\n",
1858 rdev->mode_info.connector_table); 1890 rdev->mode_info.connector_table);
@@ -1906,15 +1938,6 @@ static bool radeon_apply_legacy_quirks(struct drm_device *dev,
1906 return false; 1938 return false;
1907 } 1939 }
1908 1940
1909 /* Some RV100 cards with 2 VGA ports show up with DVI+VGA */
1910 if (dev->pdev->device == 0x5159 &&
1911 dev->pdev->subsystem_vendor == 0x1002 &&
1912 dev->pdev->subsystem_device == 0x013a) {
1913 if (*legacy_connector == CONNECTOR_DVI_I_LEGACY)
1914 *legacy_connector = CONNECTOR_CRT_LEGACY;
1915
1916 }
1917
1918 /* X300 card with extra non-existent DVI port */ 1941 /* X300 card with extra non-existent DVI port */
1919 if (dev->pdev->device == 0x5B60 && 1942 if (dev->pdev->device == 0x5B60 &&
1920 dev->pdev->subsystem_vendor == 0x17af && 1943 dev->pdev->subsystem_vendor == 0x17af &&
@@ -2026,6 +2049,7 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
2026 combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC); 2049 combios_setup_i2c_bus(rdev, RADEON_GPIO_CRT2_DDC);
2027 break; 2050 break;
2028 default: 2051 default:
2052 ddc_i2c.valid = false;
2029 break; 2053 break;
2030 } 2054 }
2031 2055
@@ -2339,6 +2363,7 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
2339 if (RBIOS8(tv_info + 6) == 'T') { 2363 if (RBIOS8(tv_info + 6) == 'T') {
2340 if (radeon_apply_legacy_tv_quirks(dev)) { 2364 if (radeon_apply_legacy_tv_quirks(dev)) {
2341 hpd.hpd = RADEON_HPD_NONE; 2365 hpd.hpd = RADEON_HPD_NONE;
2366 ddc_i2c.valid = false;
2342 radeon_add_legacy_encoder(dev, 2367 radeon_add_legacy_encoder(dev,
2343 radeon_get_encoder_id 2368 radeon_get_encoder_id
2344 (dev, 2369 (dev,
@@ -2454,7 +2479,12 @@ default_mode:
2454 rdev->pm.power_state[state_index].clock_info[0].mclk = rdev->clock.default_mclk; 2479 rdev->pm.power_state[state_index].clock_info[0].mclk = rdev->clock.default_mclk;
2455 rdev->pm.power_state[state_index].clock_info[0].sclk = rdev->clock.default_sclk; 2480 rdev->pm.power_state[state_index].clock_info[0].sclk = rdev->clock.default_sclk;
2456 rdev->pm.power_state[state_index].default_clock_mode = &rdev->pm.power_state[state_index].clock_info[0]; 2481 rdev->pm.power_state[state_index].default_clock_mode = &rdev->pm.power_state[state_index].clock_info[0];
2457 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE; 2482 if ((state_index > 0) &&
2483 (rdev->pm.power_state[0].clock_info[0].voltage.type == VOLTAGE_GPIO))
2484 rdev->pm.power_state[state_index].clock_info[0].voltage =
2485 rdev->pm.power_state[0].clock_info[0].voltage;
2486 else
2487 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE;
2458 rdev->pm.power_state[state_index].pcie_lanes = 16; 2488 rdev->pm.power_state[state_index].pcie_lanes = 16;
2459 rdev->pm.power_state[state_index].flags = 0; 2489 rdev->pm.power_state[state_index].flags = 0;
2460 rdev->pm.default_power_state_index = state_index; 2490 rdev->pm.default_power_state_index = state_index;
@@ -3012,6 +3042,14 @@ void radeon_combios_asic_init(struct drm_device *dev)
3012 combios_write_ram_size(dev); 3042 combios_write_ram_size(dev);
3013 } 3043 }
3014 3044
3045 /* quirk for rs4xx HP nx6125 laptop to make it resume
3046 * - it hangs on resume inside the dynclk 1 table.
3047 */
3048 if (rdev->family == CHIP_RS480 &&
3049 rdev->pdev->subsystem_vendor == 0x103c &&
3050 rdev->pdev->subsystem_device == 0x308b)
3051 return;
3052
3015 /* DYN CLK 1 */ 3053 /* DYN CLK 1 */
3016 table = combios_get_table_offset(dev, COMBIOS_DYN_CLK_1_TABLE); 3054 table = combios_get_table_offset(dev, COMBIOS_DYN_CLK_1_TABLE);
3017 if (table) 3055 if (table)
diff --git a/drivers/gpu/drm/radeon/radeon_cursor.c b/drivers/gpu/drm/radeon/radeon_cursor.c
index b7023fff89eb..4eb67c0e0996 100644
--- a/drivers/gpu/drm/radeon/radeon_cursor.c
+++ b/drivers/gpu/drm/radeon/radeon_cursor.c
@@ -194,7 +194,7 @@ unpin:
194fail: 194fail:
195 drm_gem_object_unreference_unlocked(obj); 195 drm_gem_object_unreference_unlocked(obj);
196 196
197 return 0; 197 return ret;
198} 198}
199 199
200int radeon_crtc_cursor_move(struct drm_crtc *crtc, 200int radeon_crtc_cursor_move(struct drm_crtc *crtc,
diff --git a/drivers/gpu/drm/radeon/radeon_device.c b/drivers/gpu/drm/radeon/radeon_device.c
index fdc3fdf78acb..5f317317aba2 100644
--- a/drivers/gpu/drm/radeon/radeon_device.c
+++ b/drivers/gpu/drm/radeon/radeon_device.c
@@ -546,8 +546,10 @@ static void radeon_switcheroo_set_state(struct pci_dev *pdev, enum vga_switchero
546 /* don't suspend or resume card normally */ 546 /* don't suspend or resume card normally */
547 rdev->powered_down = false; 547 rdev->powered_down = false;
548 radeon_resume_kms(dev); 548 radeon_resume_kms(dev);
549 drm_kms_helper_poll_enable(dev);
549 } else { 550 } else {
550 printk(KERN_INFO "radeon: switched off\n"); 551 printk(KERN_INFO "radeon: switched off\n");
552 drm_kms_helper_poll_disable(dev);
551 radeon_suspend_kms(dev, pmm); 553 radeon_suspend_kms(dev, pmm);
552 /* don't suspend or resume card normally */ 554 /* don't suspend or resume card normally */
553 rdev->powered_down = true; 555 rdev->powered_down = true;
@@ -711,6 +713,7 @@ int radeon_suspend_kms(struct drm_device *dev, pm_message_t state)
711{ 713{
712 struct radeon_device *rdev; 714 struct radeon_device *rdev;
713 struct drm_crtc *crtc; 715 struct drm_crtc *crtc;
716 struct drm_connector *connector;
714 int r; 717 int r;
715 718
716 if (dev == NULL || dev->dev_private == NULL) { 719 if (dev == NULL || dev->dev_private == NULL) {
@@ -723,6 +726,12 @@ int radeon_suspend_kms(struct drm_device *dev, pm_message_t state)
723 726
724 if (rdev->powered_down) 727 if (rdev->powered_down)
725 return 0; 728 return 0;
729
730 /* turn off display hw */
731 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
732 drm_helper_connector_dpms(connector, DRM_MODE_DPMS_OFF);
733 }
734
726 /* unpin the front buffers */ 735 /* unpin the front buffers */
727 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 736 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
728 struct radeon_framebuffer *rfb = to_radeon_framebuffer(crtc->fb); 737 struct radeon_framebuffer *rfb = to_radeon_framebuffer(crtc->fb);
@@ -770,6 +779,7 @@ int radeon_suspend_kms(struct drm_device *dev, pm_message_t state)
770 779
771int radeon_resume_kms(struct drm_device *dev) 780int radeon_resume_kms(struct drm_device *dev)
772{ 781{
782 struct drm_connector *connector;
773 struct radeon_device *rdev = dev->dev_private; 783 struct radeon_device *rdev = dev->dev_private;
774 784
775 if (rdev->powered_down) 785 if (rdev->powered_down)
@@ -788,6 +798,12 @@ int radeon_resume_kms(struct drm_device *dev)
788 radeon_resume(rdev); 798 radeon_resume(rdev);
789 radeon_pm_resume(rdev); 799 radeon_pm_resume(rdev);
790 radeon_restore_bios_scratch_regs(rdev); 800 radeon_restore_bios_scratch_regs(rdev);
801
802 /* turn on display hw */
803 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
804 drm_helper_connector_dpms(connector, DRM_MODE_DPMS_ON);
805 }
806
791 radeon_fbdev_set_suspend(rdev, 0); 807 radeon_fbdev_set_suspend(rdev, 0);
792 release_console_sem(); 808 release_console_sem();
793 809
diff --git a/drivers/gpu/drm/radeon/radeon_display.c b/drivers/gpu/drm/radeon/radeon_display.c
index 1006549d1570..8154cdf796e4 100644
--- a/drivers/gpu/drm/radeon/radeon_display.c
+++ b/drivers/gpu/drm/radeon/radeon_display.c
@@ -284,8 +284,7 @@ static const char *connector_names[15] = {
284 "eDP", 284 "eDP",
285}; 285};
286 286
287static const char *hpd_names[7] = { 287static const char *hpd_names[6] = {
288 "NONE",
289 "HPD1", 288 "HPD1",
290 "HPD2", 289 "HPD2",
291 "HPD3", 290 "HPD3",
diff --git a/drivers/gpu/drm/radeon/radeon_drv.c b/drivers/gpu/drm/radeon/radeon_drv.c
index 902d1731a652..e166fe4d7c30 100644
--- a/drivers/gpu/drm/radeon/radeon_drv.c
+++ b/drivers/gpu/drm/radeon/radeon_drv.c
@@ -45,9 +45,10 @@
45 * - 2.2.0 - add r6xx/r7xx const buffer support 45 * - 2.2.0 - add r6xx/r7xx const buffer support
46 * - 2.3.0 - add MSPOS + 3D texture + r500 VAP regs 46 * - 2.3.0 - add MSPOS + 3D texture + r500 VAP regs
47 * - 2.4.0 - add crtc id query 47 * - 2.4.0 - add crtc id query
48 * - 2.5.0 - add get accel 2 to work around ddx breakage for evergreen
48 */ 49 */
49#define KMS_DRIVER_MAJOR 2 50#define KMS_DRIVER_MAJOR 2
50#define KMS_DRIVER_MINOR 4 51#define KMS_DRIVER_MINOR 5
51#define KMS_DRIVER_PATCHLEVEL 0 52#define KMS_DRIVER_PATCHLEVEL 0
52int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags); 53int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags);
53int radeon_driver_unload_kms(struct drm_device *dev); 54int radeon_driver_unload_kms(struct drm_device *dev);
diff --git a/drivers/gpu/drm/radeon/radeon_encoders.c b/drivers/gpu/drm/radeon/radeon_encoders.c
index 1ebb100015b7..e0b30b264c28 100644
--- a/drivers/gpu/drm/radeon/radeon_encoders.c
+++ b/drivers/gpu/drm/radeon/radeon_encoders.c
@@ -1072,6 +1072,8 @@ radeon_atom_encoder_dpms(struct drm_encoder *encoder, int mode)
1072 if (is_dig) { 1072 if (is_dig) {
1073 switch (mode) { 1073 switch (mode) {
1074 case DRM_MODE_DPMS_ON: 1074 case DRM_MODE_DPMS_ON:
1075 if (!ASIC_IS_DCE4(rdev))
1076 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_ENABLE_OUTPUT, 0, 0);
1075 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_DP) { 1077 if (atombios_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_DP) {
1076 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder); 1078 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1077 1079
@@ -1079,8 +1081,6 @@ radeon_atom_encoder_dpms(struct drm_encoder *encoder, int mode)
1079 if (ASIC_IS_DCE4(rdev)) 1081 if (ASIC_IS_DCE4(rdev))
1080 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON); 1082 atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON);
1081 } 1083 }
1082 if (!ASIC_IS_DCE4(rdev))
1083 atombios_dig_transmitter_setup(encoder, ATOM_TRANSMITTER_ACTION_ENABLE_OUTPUT, 0, 0);
1084 break; 1084 break;
1085 case DRM_MODE_DPMS_STANDBY: 1085 case DRM_MODE_DPMS_STANDBY:
1086 case DRM_MODE_DPMS_SUSPEND: 1086 case DRM_MODE_DPMS_SUSPEND:
diff --git a/drivers/gpu/drm/radeon/radeon_fb.c b/drivers/gpu/drm/radeon/radeon_fb.c
index e192acfbf0cd..dc1634bb0c11 100644
--- a/drivers/gpu/drm/radeon/radeon_fb.c
+++ b/drivers/gpu/drm/radeon/radeon_fb.c
@@ -363,6 +363,7 @@ int radeon_fbdev_init(struct radeon_device *rdev)
363{ 363{
364 struct radeon_fbdev *rfbdev; 364 struct radeon_fbdev *rfbdev;
365 int bpp_sel = 32; 365 int bpp_sel = 32;
366 int ret;
366 367
367 /* select 8 bpp console on RN50 or 16MB cards */ 368 /* select 8 bpp console on RN50 or 16MB cards */
368 if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024)) 369 if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024))
@@ -376,9 +377,14 @@ int radeon_fbdev_init(struct radeon_device *rdev)
376 rdev->mode_info.rfbdev = rfbdev; 377 rdev->mode_info.rfbdev = rfbdev;
377 rfbdev->helper.funcs = &radeon_fb_helper_funcs; 378 rfbdev->helper.funcs = &radeon_fb_helper_funcs;
378 379
379 drm_fb_helper_init(rdev->ddev, &rfbdev->helper, 380 ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper,
380 rdev->num_crtc, 381 rdev->num_crtc,
381 RADEONFB_CONN_LIMIT); 382 RADEONFB_CONN_LIMIT);
383 if (ret) {
384 kfree(rfbdev);
385 return ret;
386 }
387
382 drm_fb_helper_single_add_all_connectors(&rfbdev->helper); 388 drm_fb_helper_single_add_all_connectors(&rfbdev->helper);
383 drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel); 389 drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel);
384 return 0; 390 return 0;
diff --git a/drivers/gpu/drm/radeon/radeon_kms.c b/drivers/gpu/drm/radeon/radeon_kms.c
index 04068352ccd2..6a70c0dc7f92 100644
--- a/drivers/gpu/drm/radeon/radeon_kms.c
+++ b/drivers/gpu/drm/radeon/radeon_kms.c
@@ -118,7 +118,11 @@ int radeon_info_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
118 value = rdev->num_z_pipes; 118 value = rdev->num_z_pipes;
119 break; 119 break;
120 case RADEON_INFO_ACCEL_WORKING: 120 case RADEON_INFO_ACCEL_WORKING:
121 value = rdev->accel_working; 121 /* xf86-video-ati 6.13.0 relies on this being false for evergreen */
122 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK))
123 value = false;
124 else
125 value = rdev->accel_working;
122 break; 126 break;
123 case RADEON_INFO_CRTC_FROM_ID: 127 case RADEON_INFO_CRTC_FROM_ID:
124 for (i = 0, found = 0; i < rdev->num_crtc; i++) { 128 for (i = 0, found = 0; i < rdev->num_crtc; i++) {
@@ -134,6 +138,9 @@ int radeon_info_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
134 return -EINVAL; 138 return -EINVAL;
135 } 139 }
136 break; 140 break;
141 case RADEON_INFO_ACCEL_WORKING2:
142 value = rdev->accel_working;
143 break;
137 default: 144 default:
138 DRM_DEBUG("Invalid request %d\n", info->request); 145 DRM_DEBUG("Invalid request %d\n", info->request);
139 return -EINVAL; 146 return -EINVAL;
diff --git a/drivers/gpu/drm/radeon/radeon_legacy_encoders.c b/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
index 5a13b3eeef19..bad77f40a9da 100644
--- a/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
+++ b/drivers/gpu/drm/radeon/radeon_legacy_encoders.c
@@ -928,16 +928,14 @@ static void radeon_legacy_tv_dac_mode_set(struct drm_encoder *encoder,
928 if (ASIC_IS_R300(rdev)) { 928 if (ASIC_IS_R300(rdev)) {
929 gpiopad_a = RREG32(RADEON_GPIOPAD_A) | 1; 929 gpiopad_a = RREG32(RADEON_GPIOPAD_A) | 1;
930 disp_output_cntl = RREG32(RADEON_DISP_OUTPUT_CNTL); 930 disp_output_cntl = RREG32(RADEON_DISP_OUTPUT_CNTL);
931 } 931 } else if (rdev->family != CHIP_R200)
932
933 if (rdev->family == CHIP_R200 || ASIC_IS_R300(rdev))
934 disp_tv_out_cntl = RREG32(RADEON_DISP_TV_OUT_CNTL);
935 else
936 disp_hw_debug = RREG32(RADEON_DISP_HW_DEBUG); 932 disp_hw_debug = RREG32(RADEON_DISP_HW_DEBUG);
937 933 else if (rdev->family == CHIP_R200)
938 if (rdev->family == CHIP_R200)
939 fp2_gen_cntl = RREG32(RADEON_FP2_GEN_CNTL); 934 fp2_gen_cntl = RREG32(RADEON_FP2_GEN_CNTL);
940 935
936 if (rdev->family >= CHIP_R200)
937 disp_tv_out_cntl = RREG32(RADEON_DISP_TV_OUT_CNTL);
938
941 if (is_tv) { 939 if (is_tv) {
942 uint32_t dac_cntl; 940 uint32_t dac_cntl;
943 941
@@ -1002,15 +1000,13 @@ static void radeon_legacy_tv_dac_mode_set(struct drm_encoder *encoder,
1002 if (ASIC_IS_R300(rdev)) { 1000 if (ASIC_IS_R300(rdev)) {
1003 WREG32_P(RADEON_GPIOPAD_A, gpiopad_a, ~1); 1001 WREG32_P(RADEON_GPIOPAD_A, gpiopad_a, ~1);
1004 WREG32(RADEON_DISP_OUTPUT_CNTL, disp_output_cntl); 1002 WREG32(RADEON_DISP_OUTPUT_CNTL, disp_output_cntl);
1005 } 1003 } else if (rdev->family != CHIP_R200)
1004 WREG32(RADEON_DISP_HW_DEBUG, disp_hw_debug);
1005 else if (rdev->family == CHIP_R200)
1006 WREG32(RADEON_FP2_GEN_CNTL, fp2_gen_cntl);
1006 1007
1007 if (rdev->family >= CHIP_R200) 1008 if (rdev->family >= CHIP_R200)
1008 WREG32(RADEON_DISP_TV_OUT_CNTL, disp_tv_out_cntl); 1009 WREG32(RADEON_DISP_TV_OUT_CNTL, disp_tv_out_cntl);
1009 else
1010 WREG32(RADEON_DISP_HW_DEBUG, disp_hw_debug);
1011
1012 if (rdev->family == CHIP_R200)
1013 WREG32(RADEON_FP2_GEN_CNTL, fp2_gen_cntl);
1014 1010
1015 if (is_tv) 1011 if (is_tv)
1016 radeon_legacy_tv_mode_set(encoder, mode, adjusted_mode); 1012 radeon_legacy_tv_mode_set(encoder, mode, adjusted_mode);
@@ -1168,6 +1164,17 @@ static enum drm_connector_status radeon_legacy_tv_dac_detect(struct drm_encoder
1168 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 1164 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1169 struct radeon_encoder_tv_dac *tv_dac = radeon_encoder->enc_priv; 1165 struct radeon_encoder_tv_dac *tv_dac = radeon_encoder->enc_priv;
1170 bool color = true; 1166 bool color = true;
1167 struct drm_crtc *crtc;
1168
1169 /* find out if crtc2 is in use or if this encoder is using it */
1170 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
1171 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1172 if ((radeon_crtc->crtc_id == 1) && crtc->enabled) {
1173 if (encoder->crtc != crtc) {
1174 return connector_status_disconnected;
1175 }
1176 }
1177 }
1171 1178
1172 if (connector->connector_type == DRM_MODE_CONNECTOR_SVIDEO || 1179 if (connector->connector_type == DRM_MODE_CONNECTOR_SVIDEO ||
1173 connector->connector_type == DRM_MODE_CONNECTOR_Composite || 1180 connector->connector_type == DRM_MODE_CONNECTOR_Composite ||
diff --git a/drivers/gpu/drm/radeon/radeon_mode.h b/drivers/gpu/drm/radeon/radeon_mode.h
index 67358baf28b2..95696aa57ac8 100644
--- a/drivers/gpu/drm/radeon/radeon_mode.h
+++ b/drivers/gpu/drm/radeon/radeon_mode.h
@@ -206,6 +206,7 @@ enum radeon_connector_table {
206 CT_MINI_INTERNAL, 206 CT_MINI_INTERNAL,
207 CT_IMAC_G5_ISIGHT, 207 CT_IMAC_G5_ISIGHT,
208 CT_EMAC, 208 CT_EMAC,
209 CT_RN50_POWER,
209}; 210};
210 211
211enum radeon_dvo_chip { 212enum radeon_dvo_chip {
diff --git a/drivers/gpu/drm/radeon/radeon_pm.c b/drivers/gpu/drm/radeon/radeon_pm.c
index a8d162c6f829..115d26b762cc 100644
--- a/drivers/gpu/drm/radeon/radeon_pm.c
+++ b/drivers/gpu/drm/radeon/radeon_pm.c
@@ -33,6 +33,14 @@
33#define RADEON_WAIT_VBLANK_TIMEOUT 200 33#define RADEON_WAIT_VBLANK_TIMEOUT 200
34#define RADEON_WAIT_IDLE_TIMEOUT 200 34#define RADEON_WAIT_IDLE_TIMEOUT 200
35 35
36static const char *radeon_pm_state_type_name[5] = {
37 "Default",
38 "Powersave",
39 "Battery",
40 "Balanced",
41 "Performance",
42};
43
36static void radeon_dynpm_idle_work_handler(struct work_struct *work); 44static void radeon_dynpm_idle_work_handler(struct work_struct *work);
37static int radeon_debugfs_pm_init(struct radeon_device *rdev); 45static int radeon_debugfs_pm_init(struct radeon_device *rdev);
38static bool radeon_pm_in_vbl(struct radeon_device *rdev); 46static bool radeon_pm_in_vbl(struct radeon_device *rdev);
@@ -84,9 +92,9 @@ static void radeon_pm_update_profile(struct radeon_device *rdev)
84 rdev->pm.profile_index = PM_PROFILE_HIGH_SH_IDX; 92 rdev->pm.profile_index = PM_PROFILE_HIGH_SH_IDX;
85 } else { 93 } else {
86 if (rdev->pm.active_crtc_count > 1) 94 if (rdev->pm.active_crtc_count > 1)
87 rdev->pm.profile_index = PM_PROFILE_LOW_MH_IDX; 95 rdev->pm.profile_index = PM_PROFILE_MID_MH_IDX;
88 else 96 else
89 rdev->pm.profile_index = PM_PROFILE_LOW_SH_IDX; 97 rdev->pm.profile_index = PM_PROFILE_MID_SH_IDX;
90 } 98 }
91 break; 99 break;
92 case PM_PROFILE_LOW: 100 case PM_PROFILE_LOW:
@@ -95,6 +103,12 @@ static void radeon_pm_update_profile(struct radeon_device *rdev)
95 else 103 else
96 rdev->pm.profile_index = PM_PROFILE_LOW_SH_IDX; 104 rdev->pm.profile_index = PM_PROFILE_LOW_SH_IDX;
97 break; 105 break;
106 case PM_PROFILE_MID:
107 if (rdev->pm.active_crtc_count > 1)
108 rdev->pm.profile_index = PM_PROFILE_MID_MH_IDX;
109 else
110 rdev->pm.profile_index = PM_PROFILE_MID_SH_IDX;
111 break;
98 case PM_PROFILE_HIGH: 112 case PM_PROFILE_HIGH:
99 if (rdev->pm.active_crtc_count > 1) 113 if (rdev->pm.active_crtc_count > 1)
100 rdev->pm.profile_index = PM_PROFILE_HIGH_MH_IDX; 114 rdev->pm.profile_index = PM_PROFILE_HIGH_MH_IDX;
@@ -127,15 +141,6 @@ static void radeon_unmap_vram_bos(struct radeon_device *rdev)
127 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) 141 if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
128 ttm_bo_unmap_virtual(&bo->tbo); 142 ttm_bo_unmap_virtual(&bo->tbo);
129 } 143 }
130
131 if (rdev->gart.table.vram.robj)
132 ttm_bo_unmap_virtual(&rdev->gart.table.vram.robj->tbo);
133
134 if (rdev->stollen_vga_memory)
135 ttm_bo_unmap_virtual(&rdev->stollen_vga_memory->tbo);
136
137 if (rdev->r600_blit.shader_obj)
138 ttm_bo_unmap_virtual(&rdev->r600_blit.shader_obj->tbo);
139} 144}
140 145
141static void radeon_sync_with_vblank(struct radeon_device *rdev) 146static void radeon_sync_with_vblank(struct radeon_device *rdev)
@@ -151,6 +156,7 @@ static void radeon_sync_with_vblank(struct radeon_device *rdev)
151static void radeon_set_power_state(struct radeon_device *rdev) 156static void radeon_set_power_state(struct radeon_device *rdev)
152{ 157{
153 u32 sclk, mclk; 158 u32 sclk, mclk;
159 bool misc_after = false;
154 160
155 if ((rdev->pm.requested_clock_mode_index == rdev->pm.current_clock_mode_index) && 161 if ((rdev->pm.requested_clock_mode_index == rdev->pm.current_clock_mode_index) &&
156 (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index)) 162 (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index))
@@ -167,55 +173,47 @@ static void radeon_set_power_state(struct radeon_device *rdev)
167 if (mclk > rdev->clock.default_mclk) 173 if (mclk > rdev->clock.default_mclk)
168 mclk = rdev->clock.default_mclk; 174 mclk = rdev->clock.default_mclk;
169 175
170 /* voltage, pcie lanes, etc.*/ 176 /* upvolt before raising clocks, downvolt after lowering clocks */
171 radeon_pm_misc(rdev); 177 if (sclk < rdev->pm.current_sclk)
178 misc_after = true;
172 179
173 if (rdev->pm.pm_method == PM_METHOD_DYNPM) { 180 radeon_sync_with_vblank(rdev);
174 radeon_sync_with_vblank(rdev);
175 181
182 if (rdev->pm.pm_method == PM_METHOD_DYNPM) {
176 if (!radeon_pm_in_vbl(rdev)) 183 if (!radeon_pm_in_vbl(rdev))
177 return; 184 return;
185 }
178 186
179 radeon_pm_prepare(rdev); 187 radeon_pm_prepare(rdev);
180 /* set engine clock */
181 if (sclk != rdev->pm.current_sclk) {
182 radeon_pm_debug_check_in_vbl(rdev, false);
183 radeon_set_engine_clock(rdev, sclk);
184 radeon_pm_debug_check_in_vbl(rdev, true);
185 rdev->pm.current_sclk = sclk;
186 DRM_DEBUG("Setting: e: %d\n", sclk);
187 }
188 188
189 /* set memory clock */ 189 if (!misc_after)
190 if (rdev->asic->set_memory_clock && (mclk != rdev->pm.current_mclk)) { 190 /* voltage, pcie lanes, etc.*/
191 radeon_pm_debug_check_in_vbl(rdev, false); 191 radeon_pm_misc(rdev);
192 radeon_set_memory_clock(rdev, mclk); 192
193 radeon_pm_debug_check_in_vbl(rdev, true); 193 /* set engine clock */
194 rdev->pm.current_mclk = mclk; 194 if (sclk != rdev->pm.current_sclk) {
195 DRM_DEBUG("Setting: m: %d\n", mclk); 195 radeon_pm_debug_check_in_vbl(rdev, false);
196 } 196 radeon_set_engine_clock(rdev, sclk);
197 radeon_pm_finish(rdev); 197 radeon_pm_debug_check_in_vbl(rdev, true);
198 } else { 198 rdev->pm.current_sclk = sclk;
199 /* set engine clock */ 199 DRM_DEBUG("Setting: e: %d\n", sclk);
200 if (sclk != rdev->pm.current_sclk) {
201 radeon_sync_with_vblank(rdev);
202 radeon_pm_prepare(rdev);
203 radeon_set_engine_clock(rdev, sclk);
204 radeon_pm_finish(rdev);
205 rdev->pm.current_sclk = sclk;
206 DRM_DEBUG("Setting: e: %d\n", sclk);
207 }
208 /* set memory clock */
209 if (rdev->asic->set_memory_clock && (mclk != rdev->pm.current_mclk)) {
210 radeon_sync_with_vblank(rdev);
211 radeon_pm_prepare(rdev);
212 radeon_set_memory_clock(rdev, mclk);
213 radeon_pm_finish(rdev);
214 rdev->pm.current_mclk = mclk;
215 DRM_DEBUG("Setting: m: %d\n", mclk);
216 }
217 } 200 }
218 201
202 /* set memory clock */
203 if (rdev->asic->set_memory_clock && (mclk != rdev->pm.current_mclk)) {
204 radeon_pm_debug_check_in_vbl(rdev, false);
205 radeon_set_memory_clock(rdev, mclk);
206 radeon_pm_debug_check_in_vbl(rdev, true);
207 rdev->pm.current_mclk = mclk;
208 DRM_DEBUG("Setting: m: %d\n", mclk);
209 }
210
211 if (misc_after)
212 /* voltage, pcie lanes, etc.*/
213 radeon_pm_misc(rdev);
214
215 radeon_pm_finish(rdev);
216
219 rdev->pm.current_power_state_index = rdev->pm.requested_power_state_index; 217 rdev->pm.current_power_state_index = rdev->pm.requested_power_state_index;
220 rdev->pm.current_clock_mode_index = rdev->pm.requested_clock_mode_index; 218 rdev->pm.current_clock_mode_index = rdev->pm.requested_clock_mode_index;
221 } else 219 } else
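
The reworked radeon_set_power_state() above encodes one ordering rule: raise the voltage before raising clocks, and lower it only after the clocks have dropped. A condensed sketch of just that rule, with the engine/memory clock writes collapsed into a placeholder (set_clocks() is illustrative, not a real kernel helper):

	/* upvolt before raising clocks, downvolt after lowering clocks */
	bool misc_after = (new_sclk < rdev->pm.current_sclk);

	if (!misc_after)
		radeon_pm_misc(rdev);		/* voltage, pcie lanes, etc. */
	set_clocks(rdev, new_sclk, new_mclk);	/* placeholder for the clock programming */
	if (misc_after)
		radeon_pm_misc(rdev);

This keeps the chip from briefly running the higher clocks at a voltage chosen for the lower state.
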
@@ -288,6 +286,42 @@ static void radeon_pm_set_clocks(struct radeon_device *rdev)
288 mutex_unlock(&rdev->ddev->struct_mutex); 286 mutex_unlock(&rdev->ddev->struct_mutex);
289} 287}
290 288
289static void radeon_pm_print_states(struct radeon_device *rdev)
290{
291 int i, j;
292 struct radeon_power_state *power_state;
293 struct radeon_pm_clock_info *clock_info;
294
295 DRM_DEBUG("%d Power State(s)\n", rdev->pm.num_power_states);
296 for (i = 0; i < rdev->pm.num_power_states; i++) {
297 power_state = &rdev->pm.power_state[i];
298 DRM_DEBUG("State %d: %s\n", i,
299 radeon_pm_state_type_name[power_state->type]);
300 if (i == rdev->pm.default_power_state_index)
301 DRM_DEBUG("\tDefault");
302 if ((rdev->flags & RADEON_IS_PCIE) && !(rdev->flags & RADEON_IS_IGP))
303 DRM_DEBUG("\t%d PCIE Lanes\n", power_state->pcie_lanes);
304 if (power_state->flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
305 DRM_DEBUG("\tSingle display only\n");
306 DRM_DEBUG("\t%d Clock Mode(s)\n", power_state->num_clock_modes);
307 for (j = 0; j < power_state->num_clock_modes; j++) {
308 clock_info = &(power_state->clock_info[j]);
309 if (rdev->flags & RADEON_IS_IGP)
310 DRM_DEBUG("\t\t%d e: %d%s\n",
311 j,
312 clock_info->sclk * 10,
313 clock_info->flags & RADEON_PM_MODE_NO_DISPLAY ? "\tNo display only" : "");
314 else
315 DRM_DEBUG("\t\t%d e: %d\tm: %d\tv: %d%s\n",
316 j,
317 clock_info->sclk * 10,
318 clock_info->mclk * 10,
319 clock_info->voltage.voltage,
320 clock_info->flags & RADEON_PM_MODE_NO_DISPLAY ? "\tNo display only" : "");
321 }
322 }
323}
324
291static ssize_t radeon_get_pm_profile(struct device *dev, 325static ssize_t radeon_get_pm_profile(struct device *dev,
292 struct device_attribute *attr, 326 struct device_attribute *attr,
293 char *buf) 327 char *buf)
@@ -318,6 +352,8 @@ static ssize_t radeon_set_pm_profile(struct device *dev,
318 rdev->pm.profile = PM_PROFILE_AUTO; 352 rdev->pm.profile = PM_PROFILE_AUTO;
319 else if (strncmp("low", buf, strlen("low")) == 0) 353 else if (strncmp("low", buf, strlen("low")) == 0)
320 rdev->pm.profile = PM_PROFILE_LOW; 354 rdev->pm.profile = PM_PROFILE_LOW;
355 else if (strncmp("mid", buf, strlen("mid")) == 0)
356 rdev->pm.profile = PM_PROFILE_MID;
321 else if (strncmp("high", buf, strlen("high")) == 0) 357 else if (strncmp("high", buf, strlen("high")) == 0)
322 rdev->pm.profile = PM_PROFILE_HIGH; 358 rdev->pm.profile = PM_PROFILE_HIGH;
323 else { 359 else {
@@ -361,13 +397,20 @@ static ssize_t radeon_set_pm_method(struct device *dev,
361 rdev->pm.dynpm_planned_action = DYNPM_ACTION_DEFAULT; 397 rdev->pm.dynpm_planned_action = DYNPM_ACTION_DEFAULT;
362 mutex_unlock(&rdev->pm.mutex); 398 mutex_unlock(&rdev->pm.mutex);
363 } else if (strncmp("profile", buf, strlen("profile")) == 0) { 399 } else if (strncmp("profile", buf, strlen("profile")) == 0) {
400 bool flush_wq = false;
401
364 mutex_lock(&rdev->pm.mutex); 402 mutex_lock(&rdev->pm.mutex);
365 rdev->pm.pm_method = PM_METHOD_PROFILE; 403 if (rdev->pm.pm_method == PM_METHOD_DYNPM) {
404 cancel_delayed_work(&rdev->pm.dynpm_idle_work);
405 flush_wq = true;
406 }
366 /* disable dynpm */ 407 /* disable dynpm */
367 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED; 408 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED;
368 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE; 409 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE;
369 cancel_delayed_work(&rdev->pm.dynpm_idle_work); 410 rdev->pm.pm_method = PM_METHOD_PROFILE;
370 mutex_unlock(&rdev->pm.mutex); 411 mutex_unlock(&rdev->pm.mutex);
412 if (flush_wq)
413 flush_workqueue(rdev->wq);
371 } else { 414 } else {
372 DRM_ERROR("invalid power method!\n"); 415 DRM_ERROR("invalid power method!\n");
373 goto fail; 416 goto fail;
@@ -382,17 +425,36 @@ static DEVICE_ATTR(power_method, S_IRUGO | S_IWUSR, radeon_get_pm_method, radeon
382 425
383void radeon_pm_suspend(struct radeon_device *rdev) 426void radeon_pm_suspend(struct radeon_device *rdev)
384{ 427{
428 bool flush_wq = false;
429
385 mutex_lock(&rdev->pm.mutex); 430 mutex_lock(&rdev->pm.mutex);
386 cancel_delayed_work(&rdev->pm.dynpm_idle_work); 431 if (rdev->pm.pm_method == PM_METHOD_DYNPM) {
387 rdev->pm.current_power_state_index = -1; 432 cancel_delayed_work(&rdev->pm.dynpm_idle_work);
388 rdev->pm.current_clock_mode_index = -1; 433 if (rdev->pm.dynpm_state == DYNPM_STATE_ACTIVE)
389 rdev->pm.current_sclk = 0; 434 rdev->pm.dynpm_state = DYNPM_STATE_SUSPENDED;
390 rdev->pm.current_mclk = 0; 435 flush_wq = true;
436 }
391 mutex_unlock(&rdev->pm.mutex); 437 mutex_unlock(&rdev->pm.mutex);
438 if (flush_wq)
439 flush_workqueue(rdev->wq);
392} 440}
393 441
394void radeon_pm_resume(struct radeon_device *rdev) 442void radeon_pm_resume(struct radeon_device *rdev)
395{ 443{
444 /* asic init will reset the default power state */
445 mutex_lock(&rdev->pm.mutex);
446 rdev->pm.current_power_state_index = rdev->pm.default_power_state_index;
447 rdev->pm.current_clock_mode_index = 0;
448 rdev->pm.current_sclk = rdev->clock.default_sclk;
449 rdev->pm.current_mclk = rdev->clock.default_mclk;
450 rdev->pm.current_vddc = rdev->pm.power_state[rdev->pm.default_power_state_index].clock_info[0].voltage.voltage;
451 if (rdev->pm.pm_method == PM_METHOD_DYNPM
452 && rdev->pm.dynpm_state == DYNPM_STATE_SUSPENDED) {
453 rdev->pm.dynpm_state = DYNPM_STATE_ACTIVE;
454 queue_delayed_work(rdev->wq, &rdev->pm.dynpm_idle_work,
455 msecs_to_jiffies(RADEON_IDLE_LOOP_MS));
456 }
457 mutex_unlock(&rdev->pm.mutex);
396 radeon_pm_compute_clocks(rdev); 458 radeon_pm_compute_clocks(rdev);
397} 459}
398 460
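
Several hunks in this file (the profile switch in the power_method sysfs handler, radeon_pm_suspend() above, and radeon_pm_fini() below) share the same locking pattern: cancel the dynpm idle work while holding pm.mutex, but call flush_workqueue() only after dropping the mutex, because the idle work handler itself takes pm.mutex. A generic sketch of that pattern, using only names that appear in this diff:

	bool flush_wq = false;

	mutex_lock(&rdev->pm.mutex);
	if (rdev->pm.pm_method == PM_METHOD_DYNPM) {
		cancel_delayed_work(&rdev->pm.dynpm_idle_work);
		flush_wq = true;
	}
	mutex_unlock(&rdev->pm.mutex);
	if (flush_wq)
		flush_workqueue(rdev->wq);	/* safe: pm.mutex is no longer held */
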
@@ -401,32 +463,24 @@ int radeon_pm_init(struct radeon_device *rdev)
401 int ret; 463 int ret;
402 /* default to profile method */ 464 /* default to profile method */
403 rdev->pm.pm_method = PM_METHOD_PROFILE; 465 rdev->pm.pm_method = PM_METHOD_PROFILE;
466 rdev->pm.profile = PM_PROFILE_DEFAULT;
404 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED; 467 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED;
405 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE; 468 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE;
406 rdev->pm.dynpm_can_upclock = true; 469 rdev->pm.dynpm_can_upclock = true;
407 rdev->pm.dynpm_can_downclock = true; 470 rdev->pm.dynpm_can_downclock = true;
408 rdev->pm.current_sclk = 0; 471 rdev->pm.current_sclk = rdev->clock.default_sclk;
409 rdev->pm.current_mclk = 0; 472 rdev->pm.current_mclk = rdev->clock.default_mclk;
410 473
411 if (rdev->bios) { 474 if (rdev->bios) {
412 if (rdev->is_atom_bios) 475 if (rdev->is_atom_bios)
413 radeon_atombios_get_power_modes(rdev); 476 radeon_atombios_get_power_modes(rdev);
414 else 477 else
415 radeon_combios_get_power_modes(rdev); 478 radeon_combios_get_power_modes(rdev);
479 radeon_pm_print_states(rdev);
416 radeon_pm_init_profile(rdev); 480 radeon_pm_init_profile(rdev);
417 rdev->pm.current_power_state_index = -1;
418 rdev->pm.current_clock_mode_index = -1;
419 } 481 }
420 482
421 if (rdev->pm.num_power_states > 1) { 483 if (rdev->pm.num_power_states > 1) {
422 if (rdev->pm.pm_method == PM_METHOD_PROFILE) {
423 mutex_lock(&rdev->pm.mutex);
424 rdev->pm.profile = PM_PROFILE_DEFAULT;
425 radeon_pm_update_profile(rdev);
426 radeon_pm_set_clocks(rdev);
427 mutex_unlock(&rdev->pm.mutex);
428 }
429
430 /* where's the best place to put these? */ 484 /* where's the best place to put these? */
431 ret = device_create_file(rdev->dev, &dev_attr_power_profile); 485 ret = device_create_file(rdev->dev, &dev_attr_power_profile);
432 if (ret) 486 if (ret)
@@ -454,6 +508,8 @@ int radeon_pm_init(struct radeon_device *rdev)
454void radeon_pm_fini(struct radeon_device *rdev) 508void radeon_pm_fini(struct radeon_device *rdev)
455{ 509{
456 if (rdev->pm.num_power_states > 1) { 510 if (rdev->pm.num_power_states > 1) {
511 bool flush_wq = false;
512
457 mutex_lock(&rdev->pm.mutex); 513 mutex_lock(&rdev->pm.mutex);
458 if (rdev->pm.pm_method == PM_METHOD_PROFILE) { 514 if (rdev->pm.pm_method == PM_METHOD_PROFILE) {
459 rdev->pm.profile = PM_PROFILE_DEFAULT; 515 rdev->pm.profile = PM_PROFILE_DEFAULT;
@@ -461,13 +517,16 @@ void radeon_pm_fini(struct radeon_device *rdev)
461 radeon_pm_set_clocks(rdev); 517 radeon_pm_set_clocks(rdev);
462 } else if (rdev->pm.pm_method == PM_METHOD_DYNPM) { 518 } else if (rdev->pm.pm_method == PM_METHOD_DYNPM) {
463 /* cancel work */ 519 /* cancel work */
464 cancel_delayed_work_sync(&rdev->pm.dynpm_idle_work); 520 cancel_delayed_work(&rdev->pm.dynpm_idle_work);
521 flush_wq = true;
465 /* reset default clocks */ 522 /* reset default clocks */
466 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED; 523 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED;
467 rdev->pm.dynpm_planned_action = DYNPM_ACTION_DEFAULT; 524 rdev->pm.dynpm_planned_action = DYNPM_ACTION_DEFAULT;
468 radeon_pm_set_clocks(rdev); 525 radeon_pm_set_clocks(rdev);
469 } 526 }
470 mutex_unlock(&rdev->pm.mutex); 527 mutex_unlock(&rdev->pm.mutex);
528 if (flush_wq)
529 flush_workqueue(rdev->wq);
471 530
472 device_remove_file(rdev->dev, &dev_attr_power_profile); 531 device_remove_file(rdev->dev, &dev_attr_power_profile);
473 device_remove_file(rdev->dev, &dev_attr_power_method); 532 device_remove_file(rdev->dev, &dev_attr_power_method);
@@ -688,12 +747,12 @@ static void radeon_dynpm_idle_work_handler(struct work_struct *work)
688 radeon_pm_get_dynpm_state(rdev); 747 radeon_pm_get_dynpm_state(rdev);
689 radeon_pm_set_clocks(rdev); 748 radeon_pm_set_clocks(rdev);
690 } 749 }
750
751 queue_delayed_work(rdev->wq, &rdev->pm.dynpm_idle_work,
752 msecs_to_jiffies(RADEON_IDLE_LOOP_MS));
691 } 753 }
692 mutex_unlock(&rdev->pm.mutex); 754 mutex_unlock(&rdev->pm.mutex);
693 ttm_bo_unlock_delayed_workqueue(&rdev->mman.bdev, resched); 755 ttm_bo_unlock_delayed_workqueue(&rdev->mman.bdev, resched);
694
695 queue_delayed_work(rdev->wq, &rdev->pm.dynpm_idle_work,
696 msecs_to_jiffies(RADEON_IDLE_LOOP_MS));
697} 756}
698 757
699/* 758/*
@@ -712,6 +771,8 @@ static int radeon_debugfs_pm_info(struct seq_file *m, void *data)
712 seq_printf(m, "default memory clock: %u0 kHz\n", rdev->clock.default_mclk); 771 seq_printf(m, "default memory clock: %u0 kHz\n", rdev->clock.default_mclk);
713 if (rdev->asic->get_memory_clock) 772 if (rdev->asic->get_memory_clock)
714 seq_printf(m, "current memory clock: %u0 kHz\n", radeon_get_memory_clock(rdev)); 773 seq_printf(m, "current memory clock: %u0 kHz\n", radeon_get_memory_clock(rdev));
774 if (rdev->pm.current_vddc)
775 seq_printf(m, "voltage: %u mV\n", rdev->pm.current_vddc);
715 if (rdev->asic->get_pcie_lanes) 776 if (rdev->asic->get_pcie_lanes)
716 seq_printf(m, "PCIE lanes: %d\n", radeon_get_pcie_lanes(rdev)); 777 seq_printf(m, "PCIE lanes: %d\n", radeon_get_pcie_lanes(rdev));
717 778
diff --git a/drivers/gpu/drm/radeon/reg_srcs/evergreen b/drivers/gpu/drm/radeon/reg_srcs/evergreen
new file mode 100644
index 000000000000..f78fd592544d
--- /dev/null
+++ b/drivers/gpu/drm/radeon/reg_srcs/evergreen
@@ -0,0 +1,611 @@
1evergreen 0x9400
20x00008040 WAIT_UNTIL
30x00008044 WAIT_UNTIL_POLL_CNTL
40x00008048 WAIT_UNTIL_POLL_MASK
50x0000804c WAIT_UNTIL_POLL_REFDATA
60x000088B0 VGT_VTX_VECT_EJECT_REG
70x000088C4 VGT_CACHE_INVALIDATION
80x000088D4 VGT_GS_VERTEX_REUSE
90x00008958 VGT_PRIMITIVE_TYPE
100x0000895C VGT_INDEX_TYPE
110x00008970 VGT_NUM_INDICES
120x00008974 VGT_NUM_INSTANCES
130x00008990 VGT_COMPUTE_DIM_X
140x00008994 VGT_COMPUTE_DIM_Y
150x00008998 VGT_COMPUTE_DIM_Z
160x0000899C VGT_COMPUTE_START_X
170x000089A0 VGT_COMPUTE_START_Y
180x000089A4 VGT_COMPUTE_START_Z
190x000089AC VGT_COMPUTE_THREAD_GOURP_SIZE
200x00008A14 PA_CL_ENHANCE
210x00008A60 PA_SC_LINE_STIPPLE_VALUE
220x00008B10 PA_SC_LINE_STIPPLE_STATE
230x00008BF0 PA_SC_ENHANCE
240x00008D8C SQ_DYN_GPR_CNTL_PS_FLUSH_REQ
250x00008C00 SQ_CONFIG
260x00008C04 SQ_GPR_RESOURCE_MGMT_1
270x00008C08 SQ_GPR_RESOURCE_MGMT_2
280x00008C0C SQ_GPR_RESOURCE_MGMT_3
290x00008C10 SQ_GLOBAL_GPR_RESOURCE_MGMT_1
300x00008C14 SQ_GLOBAL_GPR_RESOURCE_MGMT_2
310x00008C18 SQ_THREAD_RESOURCE_MGMT
320x00008C1C SQ_THREAD_RESOURCE_MGMT_2
330x00008C20 SQ_STACK_RESOURCE_MGMT_1
340x00008C24 SQ_STACK_RESOURCE_MGMT_2
350x00008C28 SQ_STACK_RESOURCE_MGMT_3
360x00008DF8 SQ_CONST_MEM_BASE
370x00008E48 SQ_EX_ALLOC_TABLE_SLOTS
380x00009100 SPI_CONFIG_CNTL
390x0000913C SPI_CONFIG_CNTL_1
400x00009700 VC_CNTL
410x00009714 VC_ENHANCE
420x00009830 DB_DEBUG
430x00009834 DB_DEBUG2
440x00009838 DB_DEBUG3
450x0000983C DB_DEBUG4
460x00009854 DB_WATERMARKS
470x0000A400 TD_PS_BORDER_COLOR_INDEX
480x0000A404 TD_PS_BORDER_COLOR_RED
490x0000A408 TD_PS_BORDER_COLOR_GREEN
500x0000A40C TD_PS_BORDER_COLOR_BLUE
510x0000A410 TD_PS_BORDER_COLOR_ALPHA
520x0000A414 TD_VS_BORDER_COLOR_INDEX
530x0000A418 TD_VS_BORDER_COLOR_RED
540x0000A41C TD_VS_BORDER_COLOR_GREEN
550x0000A420 TD_VS_BORDER_COLOR_BLUE
560x0000A424 TD_VS_BORDER_COLOR_ALPHA
570x0000A428 TD_GS_BORDER_COLOR_INDEX
580x0000A42C TD_GS_BORDER_COLOR_RED
590x0000A430 TD_GS_BORDER_COLOR_GREEN
600x0000A434 TD_GS_BORDER_COLOR_BLUE
610x0000A438 TD_GS_BORDER_COLOR_ALPHA
620x0000A43C TD_HS_BORDER_COLOR_INDEX
630x0000A440 TD_HS_BORDER_COLOR_RED
640x0000A444 TD_HS_BORDER_COLOR_GREEN
650x0000A448 TD_HS_BORDER_COLOR_BLUE
660x0000A44C TD_HS_BORDER_COLOR_ALPHA
670x0000A450 TD_LS_BORDER_COLOR_INDEX
680x0000A454 TD_LS_BORDER_COLOR_RED
690x0000A458 TD_LS_BORDER_COLOR_GREEN
700x0000A45C TD_LS_BORDER_COLOR_BLUE
710x0000A460 TD_LS_BORDER_COLOR_ALPHA
720x0000A464 TD_CS_BORDER_COLOR_INDEX
730x0000A468 TD_CS_BORDER_COLOR_RED
740x0000A46C TD_CS_BORDER_COLOR_GREEN
750x0000A470 TD_CS_BORDER_COLOR_BLUE
760x0000A474 TD_CS_BORDER_COLOR_ALPHA
770x00028000 DB_RENDER_CONTROL
780x00028004 DB_COUNT_CONTROL
790x0002800C DB_RENDER_OVERRIDE
800x00028010 DB_RENDER_OVERRIDE2
810x00028028 DB_STENCIL_CLEAR
820x0002802C DB_DEPTH_CLEAR
830x00028030 PA_SC_SCREEN_SCISSOR_TL
840x00028034 PA_SC_SCREEN_SCISSOR_BR
850x0002805C DB_DEPTH_SLICE
860x00028140 SQ_ALU_CONST_BUFFER_SIZE_PS_0
870x00028144 SQ_ALU_CONST_BUFFER_SIZE_PS_1
880x00028148 SQ_ALU_CONST_BUFFER_SIZE_PS_2
890x0002814C SQ_ALU_CONST_BUFFER_SIZE_PS_3
900x00028150 SQ_ALU_CONST_BUFFER_SIZE_PS_4
910x00028154 SQ_ALU_CONST_BUFFER_SIZE_PS_5
920x00028158 SQ_ALU_CONST_BUFFER_SIZE_PS_6
930x0002815C SQ_ALU_CONST_BUFFER_SIZE_PS_7
940x00028160 SQ_ALU_CONST_BUFFER_SIZE_PS_8
950x00028164 SQ_ALU_CONST_BUFFER_SIZE_PS_9
960x00028168 SQ_ALU_CONST_BUFFER_SIZE_PS_10
970x0002816C SQ_ALU_CONST_BUFFER_SIZE_PS_11
980x00028170 SQ_ALU_CONST_BUFFER_SIZE_PS_12
990x00028174 SQ_ALU_CONST_BUFFER_SIZE_PS_13
1000x00028178 SQ_ALU_CONST_BUFFER_SIZE_PS_14
1010x0002817C SQ_ALU_CONST_BUFFER_SIZE_PS_15
1020x00028180 SQ_ALU_CONST_BUFFER_SIZE_VS_0
1030x00028184 SQ_ALU_CONST_BUFFER_SIZE_VS_1
1040x00028188 SQ_ALU_CONST_BUFFER_SIZE_VS_2
1050x0002818C SQ_ALU_CONST_BUFFER_SIZE_VS_3
1060x00028190 SQ_ALU_CONST_BUFFER_SIZE_VS_4
1070x00028194 SQ_ALU_CONST_BUFFER_SIZE_VS_5
1080x00028198 SQ_ALU_CONST_BUFFER_SIZE_VS_6
1090x0002819C SQ_ALU_CONST_BUFFER_SIZE_VS_7
1100x000281A0 SQ_ALU_CONST_BUFFER_SIZE_VS_8
1110x000281A4 SQ_ALU_CONST_BUFFER_SIZE_VS_9
1120x000281A8 SQ_ALU_CONST_BUFFER_SIZE_VS_10
1130x000281AC SQ_ALU_CONST_BUFFER_SIZE_VS_11
1140x000281B0 SQ_ALU_CONST_BUFFER_SIZE_VS_12
1150x000281B4 SQ_ALU_CONST_BUFFER_SIZE_VS_13
1160x000281B8 SQ_ALU_CONST_BUFFER_SIZE_VS_14
1170x000281BC SQ_ALU_CONST_BUFFER_SIZE_VS_15
1180x000281C0 SQ_ALU_CONST_BUFFER_SIZE_GS_0
1190x000281C4 SQ_ALU_CONST_BUFFER_SIZE_GS_1
1200x000281C8 SQ_ALU_CONST_BUFFER_SIZE_GS_2
1210x000281CC SQ_ALU_CONST_BUFFER_SIZE_GS_3
1220x000281D0 SQ_ALU_CONST_BUFFER_SIZE_GS_4
1230x000281D4 SQ_ALU_CONST_BUFFER_SIZE_GS_5
1240x000281D8 SQ_ALU_CONST_BUFFER_SIZE_GS_6
1250x000281DC SQ_ALU_CONST_BUFFER_SIZE_GS_7
1260x000281E0 SQ_ALU_CONST_BUFFER_SIZE_GS_8
1270x000281E4 SQ_ALU_CONST_BUFFER_SIZE_GS_9
1280x000281E8 SQ_ALU_CONST_BUFFER_SIZE_GS_10
1290x000281EC SQ_ALU_CONST_BUFFER_SIZE_GS_11
1300x000281F0 SQ_ALU_CONST_BUFFER_SIZE_GS_12
1310x000281F4 SQ_ALU_CONST_BUFFER_SIZE_GS_13
1320x000281F8 SQ_ALU_CONST_BUFFER_SIZE_GS_14
1330x000281FC SQ_ALU_CONST_BUFFER_SIZE_GS_15
1340x00028200 PA_SC_WINDOW_OFFSET
1350x00028204 PA_SC_WINDOW_SCISSOR_TL
1360x00028208 PA_SC_WINDOW_SCISSOR_BR
1370x0002820C PA_SC_CLIPRECT_RULE
1380x00028210 PA_SC_CLIPRECT_0_TL
1390x00028214 PA_SC_CLIPRECT_0_BR
1400x00028218 PA_SC_CLIPRECT_1_TL
1410x0002821C PA_SC_CLIPRECT_1_BR
1420x00028220 PA_SC_CLIPRECT_2_TL
1430x00028224 PA_SC_CLIPRECT_2_BR
1440x00028228 PA_SC_CLIPRECT_3_TL
1450x0002822C PA_SC_CLIPRECT_3_BR
1460x00028230 PA_SC_EDGERULE
1470x00028234 PA_SU_HARDWARE_SCREEN_OFFSET
1480x00028240 PA_SC_GENERIC_SCISSOR_TL
1490x00028244 PA_SC_GENERIC_SCISSOR_BR
1500x00028250 PA_SC_VPORT_SCISSOR_0_TL
1510x00028254 PA_SC_VPORT_SCISSOR_0_BR
1520x00028258 PA_SC_VPORT_SCISSOR_1_TL
1530x0002825C PA_SC_VPORT_SCISSOR_1_BR
1540x00028260 PA_SC_VPORT_SCISSOR_2_TL
1550x00028264 PA_SC_VPORT_SCISSOR_2_BR
1560x00028268 PA_SC_VPORT_SCISSOR_3_TL
1570x0002826C PA_SC_VPORT_SCISSOR_3_BR
1580x00028270 PA_SC_VPORT_SCISSOR_4_TL
1590x00028274 PA_SC_VPORT_SCISSOR_4_BR
1600x00028278 PA_SC_VPORT_SCISSOR_5_TL
1610x0002827C PA_SC_VPORT_SCISSOR_5_BR
1620x00028280 PA_SC_VPORT_SCISSOR_6_TL
1630x00028284 PA_SC_VPORT_SCISSOR_6_BR
1640x00028288 PA_SC_VPORT_SCISSOR_7_TL
1650x0002828C PA_SC_VPORT_SCISSOR_7_BR
1660x00028290 PA_SC_VPORT_SCISSOR_8_TL
1670x00028294 PA_SC_VPORT_SCISSOR_8_BR
1680x00028298 PA_SC_VPORT_SCISSOR_9_TL
1690x0002829C PA_SC_VPORT_SCISSOR_9_BR
1700x000282A0 PA_SC_VPORT_SCISSOR_10_TL
1710x000282A4 PA_SC_VPORT_SCISSOR_10_BR
1720x000282A8 PA_SC_VPORT_SCISSOR_11_TL
1730x000282AC PA_SC_VPORT_SCISSOR_11_BR
1740x000282B0 PA_SC_VPORT_SCISSOR_12_TL
1750x000282B4 PA_SC_VPORT_SCISSOR_12_BR
1760x000282B8 PA_SC_VPORT_SCISSOR_13_TL
1770x000282BC PA_SC_VPORT_SCISSOR_13_BR
1780x000282C0 PA_SC_VPORT_SCISSOR_14_TL
1790x000282C4 PA_SC_VPORT_SCISSOR_14_BR
1800x000282C8 PA_SC_VPORT_SCISSOR_15_TL
1810x000282CC PA_SC_VPORT_SCISSOR_15_BR
1820x000282D0 PA_SC_VPORT_ZMIN_0
1830x000282D4 PA_SC_VPORT_ZMAX_0
1840x000282D8 PA_SC_VPORT_ZMIN_1
1850x000282DC PA_SC_VPORT_ZMAX_1
1860x000282E0 PA_SC_VPORT_ZMIN_2
1870x000282E4 PA_SC_VPORT_ZMAX_2
1880x000282E8 PA_SC_VPORT_ZMIN_3
1890x000282EC PA_SC_VPORT_ZMAX_3
1900x000282F0 PA_SC_VPORT_ZMIN_4
1910x000282F4 PA_SC_VPORT_ZMAX_4
1920x000282F8 PA_SC_VPORT_ZMIN_5
1930x000282FC PA_SC_VPORT_ZMAX_5
1940x00028300 PA_SC_VPORT_ZMIN_6
1950x00028304 PA_SC_VPORT_ZMAX_6
1960x00028308 PA_SC_VPORT_ZMIN_7
1970x0002830C PA_SC_VPORT_ZMAX_7
1980x00028310 PA_SC_VPORT_ZMIN_8
1990x00028314 PA_SC_VPORT_ZMAX_8
2000x00028318 PA_SC_VPORT_ZMIN_9
2010x0002831C PA_SC_VPORT_ZMAX_9
2020x00028320 PA_SC_VPORT_ZMIN_10
2030x00028324 PA_SC_VPORT_ZMAX_10
2040x00028328 PA_SC_VPORT_ZMIN_11
2050x0002832C PA_SC_VPORT_ZMAX_11
2060x00028330 PA_SC_VPORT_ZMIN_12
2070x00028334 PA_SC_VPORT_ZMAX_12
2080x00028338 PA_SC_VPORT_ZMIN_13
2090x0002833C PA_SC_VPORT_ZMAX_13
2100x00028340 PA_SC_VPORT_ZMIN_14
2110x00028344 PA_SC_VPORT_ZMAX_14
2120x00028348 PA_SC_VPORT_ZMIN_15
2130x0002834C PA_SC_VPORT_ZMAX_15
2140x00028350 SX_MISC
2150x00028380 SQ_VTX_SEMANTIC_0
2160x00028384 SQ_VTX_SEMANTIC_1
2170x00028388 SQ_VTX_SEMANTIC_2
2180x0002838C SQ_VTX_SEMANTIC_3
2190x00028390 SQ_VTX_SEMANTIC_4
2200x00028394 SQ_VTX_SEMANTIC_5
2210x00028398 SQ_VTX_SEMANTIC_6
2220x0002839C SQ_VTX_SEMANTIC_7
2230x000283A0 SQ_VTX_SEMANTIC_8
2240x000283A4 SQ_VTX_SEMANTIC_9
2250x000283A8 SQ_VTX_SEMANTIC_10
2260x000283AC SQ_VTX_SEMANTIC_11
2270x000283B0 SQ_VTX_SEMANTIC_12
2280x000283B4 SQ_VTX_SEMANTIC_13
2290x000283B8 SQ_VTX_SEMANTIC_14
2300x000283BC SQ_VTX_SEMANTIC_15
2310x000283C0 SQ_VTX_SEMANTIC_16
2320x000283C4 SQ_VTX_SEMANTIC_17
2330x000283C8 SQ_VTX_SEMANTIC_18
2340x000283CC SQ_VTX_SEMANTIC_19
2350x000283D0 SQ_VTX_SEMANTIC_20
2360x000283D4 SQ_VTX_SEMANTIC_21
2370x000283D8 SQ_VTX_SEMANTIC_22
2380x000283DC SQ_VTX_SEMANTIC_23
2390x000283E0 SQ_VTX_SEMANTIC_24
2400x000283E4 SQ_VTX_SEMANTIC_25
2410x000283E8 SQ_VTX_SEMANTIC_26
2420x000283EC SQ_VTX_SEMANTIC_27
2430x000283F0 SQ_VTX_SEMANTIC_28
2440x000283F4 SQ_VTX_SEMANTIC_29
2450x000283F8 SQ_VTX_SEMANTIC_30
2460x000283FC SQ_VTX_SEMANTIC_31
2470x00028400 VGT_MAX_VTX_INDX
2480x00028404 VGT_MIN_VTX_INDX
2490x00028408 VGT_INDX_OFFSET
2500x0002840C VGT_MULTI_PRIM_IB_RESET_INDX
2510x00028410 SX_ALPHA_TEST_CONTROL
2520x00028414 CB_BLEND_RED
2530x00028418 CB_BLEND_GREEN
2540x0002841C CB_BLEND_BLUE
2550x00028420 CB_BLEND_ALPHA
2560x00028430 DB_STENCILREFMASK
2570x00028434 DB_STENCILREFMASK_BF
2580x00028438 SX_ALPHA_REF
0x0002843C PA_CL_VPORT_XSCALE_0
0x00028440 PA_CL_VPORT_XOFFSET_0
0x00028444 PA_CL_VPORT_YSCALE_0
0x00028448 PA_CL_VPORT_YOFFSET_0
0x0002844C PA_CL_VPORT_ZSCALE_0
0x00028450 PA_CL_VPORT_ZOFFSET_0
0x00028454 PA_CL_VPORT_XSCALE_1
0x00028458 PA_CL_VPORT_XOFFSET_1
0x0002845C PA_CL_VPORT_YSCALE_1
0x00028460 PA_CL_VPORT_YOFFSET_1
0x00028464 PA_CL_VPORT_ZSCALE_1
0x00028468 PA_CL_VPORT_ZOFFSET_1
0x0002846C PA_CL_VPORT_XSCALE_2
0x00028470 PA_CL_VPORT_XOFFSET_2
0x00028474 PA_CL_VPORT_YSCALE_2
0x00028478 PA_CL_VPORT_YOFFSET_2
0x0002847C PA_CL_VPORT_ZSCALE_2
0x00028480 PA_CL_VPORT_ZOFFSET_2
0x00028484 PA_CL_VPORT_XSCALE_3
0x00028488 PA_CL_VPORT_XOFFSET_3
0x0002848C PA_CL_VPORT_YSCALE_3
0x00028490 PA_CL_VPORT_YOFFSET_3
0x00028494 PA_CL_VPORT_ZSCALE_3
0x00028498 PA_CL_VPORT_ZOFFSET_3
0x0002849C PA_CL_VPORT_XSCALE_4
0x000284A0 PA_CL_VPORT_XOFFSET_4
0x000284A4 PA_CL_VPORT_YSCALE_4
0x000284A8 PA_CL_VPORT_YOFFSET_4
0x000284AC PA_CL_VPORT_ZSCALE_4
0x000284B0 PA_CL_VPORT_ZOFFSET_4
0x000284B4 PA_CL_VPORT_XSCALE_5
0x000284B8 PA_CL_VPORT_XOFFSET_5
0x000284BC PA_CL_VPORT_YSCALE_5
0x000284C0 PA_CL_VPORT_YOFFSET_5
0x000284C4 PA_CL_VPORT_ZSCALE_5
0x000284C8 PA_CL_VPORT_ZOFFSET_5
0x000284CC PA_CL_VPORT_XSCALE_6
0x000284D0 PA_CL_VPORT_XOFFSET_6
0x000284D4 PA_CL_VPORT_YSCALE_6
0x000284D8 PA_CL_VPORT_YOFFSET_6
0x000284DC PA_CL_VPORT_ZSCALE_6
0x000284E0 PA_CL_VPORT_ZOFFSET_6
0x000284E4 PA_CL_VPORT_XSCALE_7
0x000284E8 PA_CL_VPORT_XOFFSET_7
0x000284EC PA_CL_VPORT_YSCALE_7
0x000284F0 PA_CL_VPORT_YOFFSET_7
0x000284F4 PA_CL_VPORT_ZSCALE_7
0x000284F8 PA_CL_VPORT_ZOFFSET_7
0x000284FC PA_CL_VPORT_XSCALE_8
0x00028500 PA_CL_VPORT_XOFFSET_8
0x00028504 PA_CL_VPORT_YSCALE_8
0x00028508 PA_CL_VPORT_YOFFSET_8
0x0002850C PA_CL_VPORT_ZSCALE_8
0x00028510 PA_CL_VPORT_ZOFFSET_8
0x00028514 PA_CL_VPORT_XSCALE_9
0x00028518 PA_CL_VPORT_XOFFSET_9
0x0002851C PA_CL_VPORT_YSCALE_9
0x00028520 PA_CL_VPORT_YOFFSET_9
0x00028524 PA_CL_VPORT_ZSCALE_9
0x00028528 PA_CL_VPORT_ZOFFSET_9
0x0002852C PA_CL_VPORT_XSCALE_10
0x00028530 PA_CL_VPORT_XOFFSET_10
0x00028534 PA_CL_VPORT_YSCALE_10
0x00028538 PA_CL_VPORT_YOFFSET_10
0x0002853C PA_CL_VPORT_ZSCALE_10
0x00028540 PA_CL_VPORT_ZOFFSET_10
0x00028544 PA_CL_VPORT_XSCALE_11
0x00028548 PA_CL_VPORT_XOFFSET_11
0x0002854C PA_CL_VPORT_YSCALE_11
0x00028550 PA_CL_VPORT_YOFFSET_11
0x00028554 PA_CL_VPORT_ZSCALE_11
0x00028558 PA_CL_VPORT_ZOFFSET_11
0x0002855C PA_CL_VPORT_XSCALE_12
0x00028560 PA_CL_VPORT_XOFFSET_12
0x00028564 PA_CL_VPORT_YSCALE_12
0x00028568 PA_CL_VPORT_YOFFSET_12
0x0002856C PA_CL_VPORT_ZSCALE_12
0x00028570 PA_CL_VPORT_ZOFFSET_12
0x00028574 PA_CL_VPORT_XSCALE_13
0x00028578 PA_CL_VPORT_XOFFSET_13
0x0002857C PA_CL_VPORT_YSCALE_13
0x00028580 PA_CL_VPORT_YOFFSET_13
0x00028584 PA_CL_VPORT_ZSCALE_13
0x00028588 PA_CL_VPORT_ZOFFSET_13
0x0002858C PA_CL_VPORT_XSCALE_14
0x00028590 PA_CL_VPORT_XOFFSET_14
0x00028594 PA_CL_VPORT_YSCALE_14
0x00028598 PA_CL_VPORT_YOFFSET_14
0x0002859C PA_CL_VPORT_ZSCALE_14
0x000285A0 PA_CL_VPORT_ZOFFSET_14
0x000285A4 PA_CL_VPORT_XSCALE_15
0x000285A8 PA_CL_VPORT_XOFFSET_15
0x000285AC PA_CL_VPORT_YSCALE_15
0x000285B0 PA_CL_VPORT_YOFFSET_15
0x000285B4 PA_CL_VPORT_ZSCALE_15
0x000285B8 PA_CL_VPORT_ZOFFSET_15
0x000285BC PA_CL_UCP_0_X
0x000285C0 PA_CL_UCP_0_Y
0x000285C4 PA_CL_UCP_0_Z
0x000285C8 PA_CL_UCP_0_W
0x000285CC PA_CL_UCP_1_X
0x000285D0 PA_CL_UCP_1_Y
0x000285D4 PA_CL_UCP_1_Z
0x000285D8 PA_CL_UCP_1_W
0x000285DC PA_CL_UCP_2_X
0x000285E0 PA_CL_UCP_2_Y
0x000285E4 PA_CL_UCP_2_Z
0x000285E8 PA_CL_UCP_2_W
0x000285EC PA_CL_UCP_3_X
0x000285F0 PA_CL_UCP_3_Y
0x000285F4 PA_CL_UCP_3_Z
0x000285F8 PA_CL_UCP_3_W
0x000285FC PA_CL_UCP_4_X
0x00028600 PA_CL_UCP_4_Y
0x00028604 PA_CL_UCP_4_Z
0x00028608 PA_CL_UCP_4_W
0x0002860C PA_CL_UCP_5_X
0x00028610 PA_CL_UCP_5_Y
0x00028614 PA_CL_UCP_5_Z
0x00028618 PA_CL_UCP_5_W
0x0002861C SPI_VS_OUT_ID_0
0x00028620 SPI_VS_OUT_ID_1
0x00028624 SPI_VS_OUT_ID_2
0x00028628 SPI_VS_OUT_ID_3
0x0002862C SPI_VS_OUT_ID_4
0x00028630 SPI_VS_OUT_ID_5
0x00028634 SPI_VS_OUT_ID_6
0x00028638 SPI_VS_OUT_ID_7
0x0002863C SPI_VS_OUT_ID_8
0x00028640 SPI_VS_OUT_ID_9
0x00028644 SPI_PS_INPUT_CNTL_0
0x00028648 SPI_PS_INPUT_CNTL_1
0x0002864C SPI_PS_INPUT_CNTL_2
0x00028650 SPI_PS_INPUT_CNTL_3
0x00028654 SPI_PS_INPUT_CNTL_4
0x00028658 SPI_PS_INPUT_CNTL_5
0x0002865C SPI_PS_INPUT_CNTL_6
0x00028660 SPI_PS_INPUT_CNTL_7
0x00028664 SPI_PS_INPUT_CNTL_8
0x00028668 SPI_PS_INPUT_CNTL_9
0x0002866C SPI_PS_INPUT_CNTL_10
0x00028670 SPI_PS_INPUT_CNTL_11
0x00028674 SPI_PS_INPUT_CNTL_12
0x00028678 SPI_PS_INPUT_CNTL_13
0x0002867C SPI_PS_INPUT_CNTL_14
0x00028680 SPI_PS_INPUT_CNTL_15
0x00028684 SPI_PS_INPUT_CNTL_16
0x00028688 SPI_PS_INPUT_CNTL_17
0x0002868C SPI_PS_INPUT_CNTL_18
0x00028690 SPI_PS_INPUT_CNTL_19
0x00028694 SPI_PS_INPUT_CNTL_20
0x00028698 SPI_PS_INPUT_CNTL_21
0x0002869C SPI_PS_INPUT_CNTL_22
0x000286A0 SPI_PS_INPUT_CNTL_23
0x000286A4 SPI_PS_INPUT_CNTL_24
0x000286A8 SPI_PS_INPUT_CNTL_25
0x000286AC SPI_PS_INPUT_CNTL_26
0x000286B0 SPI_PS_INPUT_CNTL_27
0x000286B4 SPI_PS_INPUT_CNTL_28
0x000286B8 SPI_PS_INPUT_CNTL_29
0x000286BC SPI_PS_INPUT_CNTL_30
0x000286C0 SPI_PS_INPUT_CNTL_31
0x000286C4 SPI_VS_OUT_CONFIG
0x000286C8 SPI_THREAD_GROUPING
0x000286CC SPI_PS_IN_CONTROL_0
0x000286D0 SPI_PS_IN_CONTROL_1
0x000286D4 SPI_INTERP_CONTROL_0
0x000286D8 SPI_INPUT_Z
0x000286DC SPI_FOG_CNTL
0x000286E0 SPI_BARYC_CNTL
0x000286E4 SPI_PS_IN_CONTROL_2
0x000286E8 SPI_COMPUTE_INPUT_CNTL
0x000286EC SPI_COMPUTE_NUM_THREAD_X
0x000286F0 SPI_COMPUTE_NUM_THREAD_Y
0x000286F4 SPI_COMPUTE_NUM_THREAD_Z
0x000286F8 GDS_ADDR_SIZE
0x00028780 CB_BLEND0_CONTROL
0x00028784 CB_BLEND1_CONTROL
0x00028788 CB_BLEND2_CONTROL
0x0002878C CB_BLEND3_CONTROL
0x00028790 CB_BLEND4_CONTROL
0x00028794 CB_BLEND5_CONTROL
0x00028798 CB_BLEND6_CONTROL
0x0002879C CB_BLEND7_CONTROL
0x000287CC CS_COPY_STATE
0x000287D0 GFX_COPY_STATE
0x000287D4 PA_CL_POINT_X_RAD
0x000287D8 PA_CL_POINT_Y_RAD
0x000287DC PA_CL_POINT_SIZE
0x000287E0 PA_CL_POINT_CULL_RAD
0x00028808 CB_COLOR_CONTROL
0x0002880C DB_SHADER_CONTROL
0x00028810 PA_CL_CLIP_CNTL
0x00028814 PA_SU_SC_MODE_CNTL
0x00028818 PA_CL_VTE_CNTL
0x0002881C PA_CL_VS_OUT_CNTL
0x00028820 PA_CL_NANINF_CNTL
0x00028824 PA_SU_LINE_STIPPLE_CNTL
0x00028828 PA_SU_LINE_STIPPLE_SCALE
0x0002882C PA_SU_PRIM_FILTER_CNTL
0x00028838 SQ_DYN_GPR_RESOURCE_LIMIT_1
0x00028844 SQ_PGM_RESOURCES_PS
0x00028848 SQ_PGM_RESOURCES_2_PS
0x0002884C SQ_PGM_EXPORTS_PS
0x00028860 SQ_PGM_RESOURCES_VS
0x00028864 SQ_PGM_RESOURCES_2_VS
0x00028878 SQ_PGM_RESOURCES_GS
0x0002887C SQ_PGM_RESOURCES_2_GS
0x00028890 SQ_PGM_RESOURCES_ES
0x00028894 SQ_PGM_RESOURCES_2_ES
0x000288A8 SQ_PGM_RESOURCES_FS
0x000288BC SQ_PGM_RESOURCES_HS
0x000288C0 SQ_PGM_RESOURCES_2_HS
0x000288D4 SQ_PGM_RESOURCES_LS
0x000288D8 SQ_PGM_RESOURCES_2_LS
0x000288E8 SQ_LDS_ALLOC
0x000288EC SQ_LDS_ALLOC_PS
0x000288F0 SQ_VTX_SEMANTIC_CLEAR
0x00028A00 PA_SU_POINT_SIZE
0x00028A04 PA_SU_POINT_MINMAX
0x00028A08 PA_SU_LINE_CNTL
0x00028A0C PA_SC_LINE_STIPPLE
0x00028A10 VGT_OUTPUT_PATH_CNTL
0x00028A14 VGT_HOS_CNTL
0x00028A18 VGT_HOS_MAX_TESS_LEVEL
0x00028A1C VGT_HOS_MIN_TESS_LEVEL
0x00028A20 VGT_HOS_REUSE_DEPTH
0x00028A24 VGT_GROUP_PRIM_TYPE
0x00028A28 VGT_GROUP_FIRST_DECR
0x00028A2C VGT_GROUP_DECR
0x00028A30 VGT_GROUP_VECT_0_CNTL
0x00028A34 VGT_GROUP_VECT_1_CNTL
0x00028A38 VGT_GROUP_VECT_0_FMT_CNTL
0x00028A3C VGT_GROUP_VECT_1_FMT_CNTL
0x00028A40 VGT_GS_MODE
0x00028A48 PA_SC_MODE_CNTL_0
0x00028A4C PA_SC_MODE_CNTL_1
0x00028A50 VGT_ENHANCE
0x00028A54 VGT_GS_PER_ES
0x00028A58 VGT_ES_PER_GS
0x00028A5C VGT_GS_PER_VS
0x00028A6C VGT_GS_OUT_PRIM_TYPE
0x00028A84 VGT_PRIMITIVEID_EN
0x00028A94 VGT_MULTI_PRIM_IB_RESET_EN
0x00028AA0 VGT_INSTANCE_STEP_RATE_0
0x00028AA4 VGT_INSTANCE_STEP_RATE_1
0x00028AB4 VGT_REUSE_OFF
0x00028AB8 VGT_VTX_CNT_EN
0x00028ABC DB_HTILE_SURFACE
0x00028AC0 DB_SRESULTS_COMPARE_STATE0
0x00028AC4 DB_SRESULTS_COMPARE_STATE1
0x00028AC8 DB_PRELOAD_CONTROL
0x00028B38 VGT_GS_MAX_VERT_OUT
0x00028B54 VGT_SHADER_STAGES_EN
0x00028B58 VGT_LS_HS_CONFIG
0x00028B5C VGT_LS_SIZE
0x00028B60 VGT_HS_SIZE
0x00028B64 VGT_LS_HS_ALLOC
0x00028B68 VGT_HS_PATCH_CONST
0x00028B6C VGT_TF_PARAM
0x00028B70 DB_ALPHA_TO_MASK
0x00028B74 VGT_DISPATCH_INITIATOR
0x00028B78 PA_SU_POLY_OFFSET_DB_FMT_CNTL
0x00028B7C PA_SU_POLY_OFFSET_CLAMP
0x00028B80 PA_SU_POLY_OFFSET_FRONT_SCALE
0x00028B84 PA_SU_POLY_OFFSET_FRONT_OFFSET
0x00028B88 PA_SU_POLY_OFFSET_BACK_SCALE
0x00028B8C PA_SU_POLY_OFFSET_BACK_OFFSET
0x00028B74 VGT_GS_INSTANCE_CNT
0x00028C00 PA_SC_LINE_CNTL
0x00028C08 PA_SU_VTX_CNTL
0x00028C0C PA_CL_GB_VERT_CLIP_ADJ
0x00028C10 PA_CL_GB_VERT_DISC_ADJ
0x00028C14 PA_CL_GB_HORZ_CLIP_ADJ
0x00028C18 PA_CL_GB_HORZ_DISC_ADJ
0x00028C1C PA_SC_AA_SAMPLE_LOCS_0
0x00028C20 PA_SC_AA_SAMPLE_LOCS_1
0x00028C24 PA_SC_AA_SAMPLE_LOCS_2
0x00028C28 PA_SC_AA_SAMPLE_LOCS_3
0x00028C2C PA_SC_AA_SAMPLE_LOCS_4
0x00028C30 PA_SC_AA_SAMPLE_LOCS_5
0x00028C34 PA_SC_AA_SAMPLE_LOCS_6
0x00028C38 PA_SC_AA_SAMPLE_LOCS_7
0x00028C3C PA_SC_AA_MASK
0x00028C8C CB_COLOR0_CLEAR_WORD0
0x00028C90 CB_COLOR0_CLEAR_WORD1
0x00028C94 CB_COLOR0_CLEAR_WORD2
0x00028C98 CB_COLOR0_CLEAR_WORD3
0x00028CC8 CB_COLOR1_CLEAR_WORD0
0x00028CCC CB_COLOR1_CLEAR_WORD1
0x00028CD0 CB_COLOR1_CLEAR_WORD2
0x00028CD4 CB_COLOR1_CLEAR_WORD3
0x00028D04 CB_COLOR2_CLEAR_WORD0
0x00028D08 CB_COLOR2_CLEAR_WORD1
0x00028D0C CB_COLOR2_CLEAR_WORD2
0x00028D10 CB_COLOR2_CLEAR_WORD3
0x00028D40 CB_COLOR3_CLEAR_WORD0
0x00028D44 CB_COLOR3_CLEAR_WORD1
0x00028D48 CB_COLOR3_CLEAR_WORD2
0x00028D4C CB_COLOR3_CLEAR_WORD3
0x00028D7C CB_COLOR4_CLEAR_WORD0
0x00028D80 CB_COLOR4_CLEAR_WORD1
0x00028D84 CB_COLOR4_CLEAR_WORD2
0x00028D88 CB_COLOR4_CLEAR_WORD3
0x00028DB8 CB_COLOR5_CLEAR_WORD0
0x00028DBC CB_COLOR5_CLEAR_WORD1
0x00028DC0 CB_COLOR5_CLEAR_WORD2
0x00028DC4 CB_COLOR5_CLEAR_WORD3
0x00028DF4 CB_COLOR6_CLEAR_WORD0
0x00028DF8 CB_COLOR6_CLEAR_WORD1
0x00028DFC CB_COLOR6_CLEAR_WORD2
0x00028E00 CB_COLOR6_CLEAR_WORD3
0x00028E30 CB_COLOR7_CLEAR_WORD0
0x00028E34 CB_COLOR7_CLEAR_WORD1
0x00028E38 CB_COLOR7_CLEAR_WORD2
0x00028E3C CB_COLOR7_CLEAR_WORD3
0x00028F80 SQ_ALU_CONST_BUFFER_SIZE_HS_0
0x00028F84 SQ_ALU_CONST_BUFFER_SIZE_HS_1
0x00028F88 SQ_ALU_CONST_BUFFER_SIZE_HS_2
0x00028F8C SQ_ALU_CONST_BUFFER_SIZE_HS_3
0x00028F90 SQ_ALU_CONST_BUFFER_SIZE_HS_4
0x00028F94 SQ_ALU_CONST_BUFFER_SIZE_HS_5
0x00028F98 SQ_ALU_CONST_BUFFER_SIZE_HS_6
0x00028F9C SQ_ALU_CONST_BUFFER_SIZE_HS_7
0x00028FA0 SQ_ALU_CONST_BUFFER_SIZE_HS_8
0x00028FA4 SQ_ALU_CONST_BUFFER_SIZE_HS_9
0x00028FA8 SQ_ALU_CONST_BUFFER_SIZE_HS_10
0x00028FAC SQ_ALU_CONST_BUFFER_SIZE_HS_11
0x00028FB0 SQ_ALU_CONST_BUFFER_SIZE_HS_12
0x00028FB4 SQ_ALU_CONST_BUFFER_SIZE_HS_13
0x00028FB8 SQ_ALU_CONST_BUFFER_SIZE_HS_14
0x00028FBC SQ_ALU_CONST_BUFFER_SIZE_HS_15
0x00028FC0 SQ_ALU_CONST_BUFFER_SIZE_LS_0
0x00028FC4 SQ_ALU_CONST_BUFFER_SIZE_LS_1
0x00028FC8 SQ_ALU_CONST_BUFFER_SIZE_LS_2
0x00028FCC SQ_ALU_CONST_BUFFER_SIZE_LS_3
0x00028FD0 SQ_ALU_CONST_BUFFER_SIZE_LS_4
0x00028FD4 SQ_ALU_CONST_BUFFER_SIZE_LS_5
0x00028FD8 SQ_ALU_CONST_BUFFER_SIZE_LS_6
0x00028FDC SQ_ALU_CONST_BUFFER_SIZE_LS_7
0x00028FE0 SQ_ALU_CONST_BUFFER_SIZE_LS_8
0x00028FE4 SQ_ALU_CONST_BUFFER_SIZE_LS_9
0x00028FE8 SQ_ALU_CONST_BUFFER_SIZE_LS_10
0x00028FEC SQ_ALU_CONST_BUFFER_SIZE_LS_11
0x00028FF0 SQ_ALU_CONST_BUFFER_SIZE_LS_12
0x00028FF4 SQ_ALU_CONST_BUFFER_SIZE_LS_13
0x00028FF8 SQ_ALU_CONST_BUFFER_SIZE_LS_14
0x00028FFC SQ_ALU_CONST_BUFFER_SIZE_LS_15
0x0003CFF0 SQ_VTX_BASE_VTX_LOC
0x0003CFF4 SQ_VTX_START_INST_LOC
0x0003FF00 SQ_TEX_SAMPLER_CLEAR
0x0003FF04 SQ_TEX_RESOURCE_CLEAR
0x0003FF08 SQ_LOOP_BOOL_CLEAR
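The entries above close out the new reg_srcs/evergreen safe-register list. At build time a list like this is turned into a generated bitmap header (evergreen_reg_safe.h in this series) that the command-stream checker consults before letting a userspace command buffer write a register. The sketch below only illustrates that lookup pattern; the table name, its size, and the helper are stand-ins, not the generated code.

#include <stdbool.h>
#include <stdint.h>

/* Hypothetical stand-in for the generated bitmap: one bit per register
 * dword, 32 registers (128 bytes of register space) per 32-bit word. */
static const uint32_t evergreen_reg_safe_bm[4096] = { 0 /* filled in by the generator */ };

/* Return true if a register offset appears in the safe list.  This mirrors
 * the usual "reg >> 7 selects the word, (reg >> 2) & 31 selects the bit"
 * layout; it is a sketch, not the checker's actual entry point. */
static bool reg_is_safe(uint32_t reg)
{
	uint32_t word = reg >> 7;
	uint32_t bit = 1u << ((reg >> 2) & 31);

	if (word >= sizeof(evergreen_reg_safe_bm) / sizeof(evergreen_reg_safe_bm[0]))
		return false;
	return (evergreen_reg_safe_bm[word] & bit) != 0;
}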
diff --git a/drivers/gpu/drm/radeon/rs600.c b/drivers/gpu/drm/radeon/rs600.c
index 79887cac5b54..7bb4c3e52f3b 100644
--- a/drivers/gpu/drm/radeon/rs600.c
+++ b/drivers/gpu/drm/radeon/rs600.c
@@ -74,7 +74,8 @@ void rs600_pm_misc(struct radeon_device *rdev)
 			if (voltage->delay)
 				udelay(voltage->delay);
 		}
-	}
+	} else if (voltage->type == VOLTAGE_VDDC)
+		radeon_atom_set_voltage(rdev, voltage->vddc_id);
 
 	dyn_pwrmgt_sclk_length = RREG32_PLL(DYN_PWRMGT_SCLK_LENGTH);
 	dyn_pwrmgt_sclk_length &= ~REDUCED_POWER_SCLK_HILEN(0xf);
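The rs600_pm_misc() hunk above extends the voltage handling: GPIO-controlled voltages keep their existing path, while VDDC-type voltages are now forwarded to the ATOM voltage table through radeon_atom_set_voltage(). A compact sketch of that dispatch follows, with stand-in types so it compiles on its own; only the VOLTAGE_VDDC branch and the radeon_atom_set_voltage() call are taken from the diff.

/* Stand-in types for illustration only. */
enum sketch_voltage_type { SKETCH_VOLTAGE_GPIO, SKETCH_VOLTAGE_VDDC };

struct sketch_voltage {
	enum sketch_voltage_type type;
	unsigned char vddc_id;	/* index handed to the ATOM voltage table */
};

/* Dispatch sketch: the GPIO branch is the pre-existing path (elided),
 * the VDDC branch is what this hunk adds. */
static void sketch_apply_voltage(const struct sketch_voltage *voltage,
				 void (*set_vddc)(unsigned char vddc_id))
{
	if (voltage->type == SKETCH_VOLTAGE_GPIO) {
		/* toggle the voltage GPIO and optionally delay, as before */
	} else if (voltage->type == SKETCH_VOLTAGE_VDDC) {
		set_vddc(voltage->vddc_id);	/* radeon_atom_set_voltage() in the driver */
	}
}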
diff --git a/drivers/gpu/drm/radeon/rs690.c b/drivers/gpu/drm/radeon/rs690.c
index bcc33195ebc2..f4f0a61bcdce 100644
--- a/drivers/gpu/drm/radeon/rs690.c
+++ b/drivers/gpu/drm/radeon/rs690.c
@@ -79,7 +79,13 @@ void rs690_pm_info(struct radeon_device *rdev)
 		tmp.full = dfixed_const(100);
 		rdev->pm.igp_sideport_mclk.full = dfixed_const(info->info.ulBootUpMemoryClock);
 		rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp);
-		rdev->pm.igp_system_mclk.full = dfixed_const(le16_to_cpu(info->info.usK8MemoryClock));
+		if (info->info.usK8MemoryClock)
+			rdev->pm.igp_system_mclk.full = dfixed_const(le16_to_cpu(info->info.usK8MemoryClock));
+		else if (rdev->clock.default_mclk) {
+			rdev->pm.igp_system_mclk.full = dfixed_const(rdev->clock.default_mclk);
+			rdev->pm.igp_system_mclk.full = dfixed_div(rdev->pm.igp_system_mclk, tmp);
+		} else
+			rdev->pm.igp_system_mclk.full = dfixed_const(400);
 		rdev->pm.igp_ht_link_clk.full = dfixed_const(le16_to_cpu(info->info.usFSBClock));
 		rdev->pm.igp_ht_link_width.full = dfixed_const(info->info.ucHTLinkWidth);
 		break;
@@ -87,34 +93,31 @@ void rs690_pm_info(struct radeon_device *rdev)
 		tmp.full = dfixed_const(100);
 		rdev->pm.igp_sideport_mclk.full = dfixed_const(info->info_v2.ulBootUpSidePortClock);
 		rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp);
-		rdev->pm.igp_system_mclk.full = dfixed_const(info->info_v2.ulBootUpUMAClock);
+		if (info->info_v2.ulBootUpUMAClock)
+			rdev->pm.igp_system_mclk.full = dfixed_const(info->info_v2.ulBootUpUMAClock);
+		else if (rdev->clock.default_mclk)
+			rdev->pm.igp_system_mclk.full = dfixed_const(rdev->clock.default_mclk);
+		else
+			rdev->pm.igp_system_mclk.full = dfixed_const(66700);
 		rdev->pm.igp_system_mclk.full = dfixed_div(rdev->pm.igp_system_mclk, tmp);
 		rdev->pm.igp_ht_link_clk.full = dfixed_const(info->info_v2.ulHTLinkFreq);
 		rdev->pm.igp_ht_link_clk.full = dfixed_div(rdev->pm.igp_ht_link_clk, tmp);
 		rdev->pm.igp_ht_link_width.full = dfixed_const(le16_to_cpu(info->info_v2.usMinHTLinkWidth));
 		break;
 	default:
-		tmp.full = dfixed_const(100);
 		/* We assume the slower possible clock ie worst case */
-		/* DDR 333Mhz */
-		rdev->pm.igp_sideport_mclk.full = dfixed_const(333);
-		/* FIXME: system clock ? */
-		rdev->pm.igp_system_mclk.full = dfixed_const(100);
-		rdev->pm.igp_system_mclk.full = dfixed_div(rdev->pm.igp_system_mclk, tmp);
-		rdev->pm.igp_ht_link_clk.full = dfixed_const(200);
+		rdev->pm.igp_sideport_mclk.full = dfixed_const(200);
+		rdev->pm.igp_system_mclk.full = dfixed_const(200);
+		rdev->pm.igp_ht_link_clk.full = dfixed_const(1000);
 		rdev->pm.igp_ht_link_width.full = dfixed_const(8);
 		DRM_ERROR("No integrated system info for your GPU, using safe default\n");
 		break;
 	}
 } else {
-	tmp.full = dfixed_const(100);
 	/* We assume the slower possible clock ie worst case */
-	/* DDR 333Mhz */
-	rdev->pm.igp_sideport_mclk.full = dfixed_const(333);
-	/* FIXME: system clock ? */
-	rdev->pm.igp_system_mclk.full = dfixed_const(100);
-	rdev->pm.igp_system_mclk.full = dfixed_div(rdev->pm.igp_system_mclk, tmp);
-	rdev->pm.igp_ht_link_clk.full = dfixed_const(200);
+	rdev->pm.igp_sideport_mclk.full = dfixed_const(200);
+	rdev->pm.igp_system_mclk.full = dfixed_const(200);
+	rdev->pm.igp_ht_link_clk.full = dfixed_const(1000);
 	rdev->pm.igp_ht_link_width.full = dfixed_const(8);
 	DRM_ERROR("No integrated system info for your GPU, using safe default\n");
 }
@@ -228,10 +231,6 @@ void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
 	fixed20_12 a, b, c;
 	fixed20_12 pclk, request_fifo_depth, tolerable_latency, estimated_width;
 	fixed20_12 consumption_time, line_time, chunk_time, read_delay_latency;
-	/* FIXME: detect IGP with sideport memory, i don't think there is any
-	 * such product available
-	 */
-	bool sideport = false;
 
 	if (!crtc->base.enabled) {
 		/* FIXME: wouldn't it better to set priority mark to maximum */
@@ -300,7 +299,7 @@ void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
 
 	/* Maximun bandwidth is the minimun bandwidth of all component */
 	rdev->pm.max_bandwidth = rdev->pm.core_bandwidth;
-	if (sideport) {
+	if (rdev->mc.igp_sideport_enabled) {
 		if (rdev->pm.max_bandwidth.full > rdev->pm.sideport_bandwidth.full &&
 		    rdev->pm.sideport_bandwidth.full)
 			rdev->pm.max_bandwidth = rdev->pm.sideport_bandwidth;
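Taken together, the rs690_pm_info() hunks replace the single unconditional read of the BIOS-reported system memory clock with a fallback chain, swap the old 333/100/200 worst-case defaults for 200/200/1000, and key the sideport-bandwidth path off rdev->mc.igp_sideport_enabled instead of a hard-coded false. Below is a minimal sketch of that fallback order, ignoring the fixed-point scaling (dfixed_div by 100) the real code applies to some of the values; the helper name is invented for illustration.

#include <stdint.h>

/* Fallback order introduced above for the integrated (UMA) system memory
 * clock: trust the ATOM integrated-info table first, then the board's
 * default memory clock, and only then a conservative hard-coded value. */
static uint32_t pick_igp_system_mclk(uint32_t bios_mclk,	/* from the integrated-info table */
				     uint32_t default_mclk,	/* rdev->clock.default_mclk */
				     uint32_t last_resort)	/* 400 or 66700 in the hunks */
{
	if (bios_mclk)
		return bios_mclk;
	if (default_mclk)
		return default_mclk;
	return last_resort;
}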
diff --git a/drivers/gpu/drm/radeon/rv770.c b/drivers/gpu/drm/radeon/rv770.c
index 253f24aec031..b7fd82064922 100644
--- a/drivers/gpu/drm/radeon/rv770.c
+++ b/drivers/gpu/drm/radeon/rv770.c
@@ -44,7 +44,18 @@ void rv770_fini(struct radeon_device *rdev);
 
 void rv770_pm_misc(struct radeon_device *rdev)
 {
-
+	int req_ps_idx = rdev->pm.requested_power_state_index;
+	int req_cm_idx = rdev->pm.requested_clock_mode_index;
+	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
+	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
+
+	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
+		if (voltage->voltage != rdev->pm.current_vddc) {
+			radeon_atom_set_voltage(rdev, voltage->voltage);
+			rdev->pm.current_vddc = voltage->voltage;
+			DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
+		}
+	}
 }
 
 /*
@@ -213,7 +224,7 @@ static void rv770_mc_program(struct radeon_device *rdev)
 	WREG32(MC_VM_FB_LOCATION, tmp);
 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
 	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
-	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
+	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
 	if (rdev->flags & RADEON_IS_AGP) {
 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);