Diffstat (limited to 'drivers/dma/dmatest.c')
 drivers/dma/dmatest.c | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)
diff --git a/drivers/dma/dmatest.c b/drivers/dma/dmatest.c
index 64b048d7fba7..a2c8904b63ea 100644
--- a/drivers/dma/dmatest.c
+++ b/drivers/dma/dmatest.c
@@ -242,6 +242,13 @@ static inline void unmap_dst(struct device *dev, dma_addr_t *addr, size_t len,
 		dma_unmap_single(dev, addr[count], len, DMA_BIDIRECTIONAL);
 }
 
+static unsigned int min_odd(unsigned int x, unsigned int y)
+{
+	unsigned int val = min(x, y);
+
+	return val % 2 ? val : val - 1;
+}
+
 /*
  * This function repeatedly tests DMA transfers of various lengths and
  * offsets for a given operation type until it is told to exit by
@@ -262,6 +269,7 @@ static int dmatest_func(void *data)
 	struct dmatest_thread	*thread = data;
 	struct dmatest_done	done = { .wait = &done_wait };
 	struct dma_chan		*chan;
+	struct dma_device	*dev;
 	const char		*thread_name;
 	unsigned int		src_off, dst_off, len;
 	unsigned int		error_count;
@@ -283,13 +291,16 @@ static int dmatest_func(void *data)
 
 	smp_rmb();
 	chan = thread->chan;
+	dev = chan->device;
 	if (thread->type == DMA_MEMCPY)
 		src_cnt = dst_cnt = 1;
 	else if (thread->type == DMA_XOR) {
-		src_cnt = xor_sources | 1; /* force odd to ensure dst = src */
+		/* force odd to ensure dst = src */
+		src_cnt = min_odd(xor_sources | 1, dev->max_xor);
 		dst_cnt = 1;
 	} else if (thread->type == DMA_PQ) {
-		src_cnt = pq_sources | 1; /* force odd to ensure dst = src */
+		/* force odd to ensure dst = src */
+		src_cnt = min_odd(pq_sources | 1, dma_maxpq(dev, 0));
 		dst_cnt = 2;
 		for (i = 0; i < src_cnt; i++)
 			pq_coefs[i] = 1;
@@ -327,7 +338,6 @@ static int dmatest_func(void *data)
 
 	while (!kthread_should_stop()
 	       && !(iterations && total_tests >= iterations)) {
-		struct dma_device *dev = chan->device;
 		struct dma_async_tx_descriptor *tx = NULL;
 		dma_addr_t dma_srcs[src_cnt];
 		dma_addr_t dma_dsts[dst_cnt];
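
[Note on the hunks above: min_odd() clamps the user-supplied source count (xor_sources or pq_sources, forced odd) to what the channel's device reports it can handle (dev->max_xor for XOR, dma_maxpq(dev, 0) for PQ), while keeping the result odd so the destination buffer never aliases a source. The dma_device pointer is now fetched once before the test loop, which is why the per-iteration declaration is dropped. A minimal stand-alone sketch of the same arithmetic, using a plain MIN macro in place of the kernel's min() and hypothetical sample values, is:

	#include <stdio.h>

	#define MIN(x, y) ((x) < (y) ? (x) : (y))

	/* same arithmetic as the min_odd() helper added above:
	 * take the smaller of the two values, then round down to odd */
	static unsigned int min_odd(unsigned int x, unsigned int y)
	{
		unsigned int val = MIN(x, y);

		return val % 2 ? val : val - 1;
	}

	int main(void)
	{
		/* e.g. xor_sources | 1 == 9 but the device's max_xor is 4 -> 3 */
		printf("%u\n", min_odd(9, 4));
		/* e.g. pq_sources | 1 == 3 and dma_maxpq() allows 8 -> 3 */
		printf("%u\n", min_odd(3, 8));
		return 0;
	}
]
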
@@ -526,7 +536,9 @@ err_srcs:
 			thread_name, total_tests, failed_tests, ret);
 
 	/* terminate all transfers on specified channels */
-	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+	if (ret)
+		dmaengine_terminate_all(chan);
+
 	if (iterations > 0)
 		while (!kthread_should_stop()) {
 			DECLARE_WAIT_QUEUE_HEAD_ONSTACK(wait_dmatest_exit);
@@ -551,7 +563,7 @@ static void dmatest_cleanup_channel(struct dmatest_chan *dtc)
 	}
 
 	/* terminate all transfers on specified channels */
-	dtc->chan->device->device_control(dtc->chan, DMA_TERMINATE_ALL, 0);
+	dmaengine_terminate_all(dtc->chan);
 
 	kfree(dtc);
 }
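
[Note on the last two hunks: the open-coded device_control() calls are replaced by the dmaengine_terminate_all() helper, and the per-thread teardown now only terminates the channel when the test actually failed (ret != 0). The helper is a thin inline wrapper over the same DMA_TERMINATE_ALL control operation; its approximate shape for this kernel generation is sketched below (the real definition lives in include/linux/dmaengine.h):

	/* approximate shape of the helper the patch switches to; behaves
	 * like the removed chan->device->device_control() call */
	static inline int dmaengine_terminate_all(struct dma_chan *chan)
	{
		return chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
	}
]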