aboutsummaryrefslogtreecommitdiffstats
path: root/drivers
diff options
context:
space:
mode:
authorLucas Stach <l.stach@pengutronix.de>2018-01-22 06:38:52 -0500
committerLucas Stach <l.stach@pengutronix.de>2018-03-09 06:23:48 -0500
commitf8433f9e1066363582e49eda0d6ddeaa08a842ca (patch)
treec404bec499d72e102127e83d871bd5fa86db52ec /drivers
parent1137bef67a11d2e5a4f44ce87b6e4dd1fb7a0163 (diff)
drm/etnaviv: add PTA handling to MMUv2
The Page Table Array is a new first level structure above the MTLB available on GPUs with the security feature. Use the PTA to set up the MMU when the security related states are handled by the kernel driver. Signed-off-by: Lucas Stach <l.stach@pengutronix.de>
Diffstat (limited to 'drivers')
-rw-r--r--drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c76
1 file changed, 75 insertions, 1 deletion
diff --git a/drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c b/drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
index 6e7c89247dc6..9752dbd5d28b 100644
--- a/drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
+++ b/drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
@@ -40,6 +40,9 @@
40 40
41struct etnaviv_iommuv2_domain { 41struct etnaviv_iommuv2_domain {
42 struct etnaviv_iommu_domain base; 42 struct etnaviv_iommu_domain base;
43 /* P(age) T(able) A(rray) */
44 u64 *pta_cpu;
45 dma_addr_t pta_dma;
43 /* M(aster) TLB aka first level pagetable */ 46 /* M(aster) TLB aka first level pagetable */
44 u32 *mtlb_cpu; 47 u32 *mtlb_cpu;
45 dma_addr_t mtlb_dma; 48 dma_addr_t mtlb_dma;
@@ -114,6 +117,15 @@ static int etnaviv_iommuv2_init(struct etnaviv_iommuv2_domain *etnaviv_domain)
114 for (i = 0; i < SZ_4K / 4; i++) 117 for (i = 0; i < SZ_4K / 4; i++)
115 *p++ = 0xdead55aa; 118 *p++ = 0xdead55aa;
116 119
120 etnaviv_domain->pta_cpu = dma_alloc_coherent(etnaviv_domain->base.dev,
121 SZ_4K,
122 &etnaviv_domain->pta_dma,
123 GFP_KERNEL);
124 if (!etnaviv_domain->pta_cpu) {
125 ret = -ENOMEM;
126 goto fail_mem;
127 }
128
117 etnaviv_domain->mtlb_cpu = dma_alloc_coherent(etnaviv_domain->base.dev, 129 etnaviv_domain->mtlb_cpu = dma_alloc_coherent(etnaviv_domain->base.dev,
118 SZ_4K, 130 SZ_4K,
119 &etnaviv_domain->mtlb_dma, 131 &etnaviv_domain->mtlb_dma,
@@ -150,6 +162,11 @@ fail_mem:
150 etnaviv_domain->base.bad_page_cpu, 162 etnaviv_domain->base.bad_page_cpu,
151 etnaviv_domain->base.bad_page_dma); 163 etnaviv_domain->base.bad_page_dma);
152 164
165 if (etnaviv_domain->pta_cpu)
166 dma_free_coherent(etnaviv_domain->base.dev, SZ_4K,
167 etnaviv_domain->pta_cpu,
168 etnaviv_domain->pta_dma);
169
153 if (etnaviv_domain->mtlb_cpu) 170 if (etnaviv_domain->mtlb_cpu)
154 dma_free_coherent(etnaviv_domain->base.dev, SZ_4K, 171 dma_free_coherent(etnaviv_domain->base.dev, SZ_4K,
155 etnaviv_domain->mtlb_cpu, 172 etnaviv_domain->mtlb_cpu,
@@ -176,6 +193,10 @@ static void etnaviv_iommuv2_domain_free(struct etnaviv_iommu_domain *domain)
176 etnaviv_domain->base.bad_page_dma); 193 etnaviv_domain->base.bad_page_dma);
177 194
178 dma_free_coherent(etnaviv_domain->base.dev, SZ_4K, 195 dma_free_coherent(etnaviv_domain->base.dev, SZ_4K,
196 etnaviv_domain->pta_cpu,
197 etnaviv_domain->pta_dma);
198
199 dma_free_coherent(etnaviv_domain->base.dev, SZ_4K,
179 etnaviv_domain->mtlb_cpu, 200 etnaviv_domain->mtlb_cpu,
180 etnaviv_domain->mtlb_dma); 201 etnaviv_domain->mtlb_dma);
181 202
@@ -216,7 +237,7 @@ static void etnaviv_iommuv2_dump(struct etnaviv_iommu_domain *domain, void *buf)
216 memcpy(buf, etnaviv_domain->stlb_cpu[i], SZ_4K); 237 memcpy(buf, etnaviv_domain->stlb_cpu[i], SZ_4K);
217} 238}
218 239
219void etnaviv_iommuv2_restore(struct etnaviv_gpu *gpu) 240static void etnaviv_iommuv2_restore_nonsec(struct etnaviv_gpu *gpu)
220{ 241{
221 struct etnaviv_iommuv2_domain *etnaviv_domain = 242 struct etnaviv_iommuv2_domain *etnaviv_domain =
222 to_etnaviv_domain(gpu->mmu->domain); 243 to_etnaviv_domain(gpu->mmu->domain);
@@ -236,6 +257,59 @@ void etnaviv_iommuv2_restore(struct etnaviv_gpu *gpu)
236 gpu_write(gpu, VIVS_MMUv2_CONTROL, VIVS_MMUv2_CONTROL_ENABLE); 257 gpu_write(gpu, VIVS_MMUv2_CONTROL, VIVS_MMUv2_CONTROL_ENABLE);
237} 258}
238 259
260static void etnaviv_iommuv2_restore_sec(struct etnaviv_gpu *gpu)
261{
262 struct etnaviv_iommuv2_domain *etnaviv_domain =
263 to_etnaviv_domain(gpu->mmu->domain);
264 u16 prefetch;
265
266 /* If the MMU is already enabled the state is still there. */
267 if (gpu_read(gpu, VIVS_MMUv2_SEC_CONTROL) & VIVS_MMUv2_SEC_CONTROL_ENABLE)
268 return;
269
270 gpu_write(gpu, VIVS_MMUv2_PTA_ADDRESS_LOW,
271 lower_32_bits(etnaviv_domain->pta_dma));
272 gpu_write(gpu, VIVS_MMUv2_PTA_ADDRESS_HIGH,
273 upper_32_bits(etnaviv_domain->pta_dma));
274 gpu_write(gpu, VIVS_MMUv2_PTA_CONTROL, VIVS_MMUv2_PTA_CONTROL_ENABLE);
275
276 gpu_write(gpu, VIVS_MMUv2_NONSEC_SAFE_ADDR_LOW,
277 lower_32_bits(etnaviv_domain->base.bad_page_dma));
278 gpu_write(gpu, VIVS_MMUv2_SEC_SAFE_ADDR_LOW,
279 lower_32_bits(etnaviv_domain->base.bad_page_dma));
280 gpu_write(gpu, VIVS_MMUv2_SAFE_ADDRESS_CONFIG,
281 VIVS_MMUv2_SAFE_ADDRESS_CONFIG_NON_SEC_SAFE_ADDR_HIGH(
282 upper_32_bits(etnaviv_domain->base.bad_page_dma)) |
283 VIVS_MMUv2_SAFE_ADDRESS_CONFIG_SEC_SAFE_ADDR_HIGH(
284 upper_32_bits(etnaviv_domain->base.bad_page_dma)));
285
286 etnaviv_domain->pta_cpu[0] = etnaviv_domain->mtlb_dma |
287 VIVS_MMUv2_CONFIGURATION_MODE_MODE4_K;
288
289 /* trigger a PTA load through the FE */
290 prefetch = etnaviv_buffer_config_pta(gpu);
291 etnaviv_gpu_start_fe(gpu, (u32)etnaviv_cmdbuf_get_pa(&gpu->buffer),
292 prefetch);
293 etnaviv_gpu_wait_idle(gpu, 100);
294
295 gpu_write(gpu, VIVS_MMUv2_SEC_CONTROL, VIVS_MMUv2_SEC_CONTROL_ENABLE);
296}
297
298void etnaviv_iommuv2_restore(struct etnaviv_gpu *gpu)
299{
300 switch (gpu->sec_mode) {
301 case ETNA_SEC_NONE:
302 etnaviv_iommuv2_restore_nonsec(gpu);
303 break;
304 case ETNA_SEC_KERNEL:
305 etnaviv_iommuv2_restore_sec(gpu);
306 break;
307 default:
308 WARN(1, "unhandled GPU security mode\n");
309 break;
310 }
311}
312
239static const struct etnaviv_iommu_domain_ops etnaviv_iommuv2_ops = { 313static const struct etnaviv_iommu_domain_ops etnaviv_iommuv2_ops = {
240 .free = etnaviv_iommuv2_domain_free, 314 .free = etnaviv_iommuv2_domain_free,
241 .map = etnaviv_iommuv2_map, 315 .map = etnaviv_iommuv2_map,