author		Linus Torvalds <torvalds@linux-foundation.org>	2009-04-03 15:13:45 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2009-04-03 15:13:45 -0400
commit		133e2a3164771454aa326859c2b293687189b553 (patch)
tree		4e21f63be087738d7ffe7526d41e15140fc63ff0 /drivers/dma/dmatest.c
parent		20bec8ab1458c24bed0d5492ee15d87807fc415a (diff)
parent		8c6db1bbf80123839ec87bdd6cb364aea384623d (diff)
Merge branch 'next' of git://git.kernel.org/pub/scm/linux/kernel/git/djbw/async_tx
* 'next' of git://git.kernel.org/pub/scm/linux/kernel/git/djbw/async_tx:
dma: Add SoF and EoF debugging to ipu_idmac.c, minor cleanup
dw_dmac: add cyclic API to DW DMA driver
dmaengine: Add privatecnt to revert DMA_PRIVATE property
dmatest: add dma interrupts and callbacks
dmatest: add xor test
dmaengine: allow dma support for async_tx to be toggled
async_tx: provide __async_inline for HAS_DMA=n archs
dmaengine: kill some unused headers
dmaengine: initialize tx_list in dma_async_tx_descriptor_init
dma: i.MX31 IPU DMA robustness improvements
dma: improve section assignment in i.MX31 IPU DMA driver
dma: ipu_idmac driver cosmetic clean-up
dmaengine: fail device registration if channel registration fails
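
Two of the commits above rework drivers/dma/dmatest.c: "dmatest: add dma interrupts and callbacks" replaces the driver's old msleep() polling loop with a completion signalled from the DMA interrupt, and "dmatest: add xor test" adds a second per-thread operation type (the module gains an xor_sources parameter, so an invocation along the lines of `modprobe dmatest xor_sources=5` would exercise it; treat that command as illustrative, not documented here). The following is a minimal sketch of the callback/wait pattern, not code from the merge; the demo_ names are invented, and "chan"/"tx" are assumed to come from dma_request_channel() and a device_prep_dma_*() call made with DMA_PREP_INTERRUPT set.

/*
 * Sketch only: illustrates the interrupt/callback wait sequence that
 * dmatest_func() adopts in the diff below.
 */
#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/jiffies.h>

static void demo_dma_callback(void *arg)
{
	complete(arg);		/* runs from the DMA completion interrupt */
}

static int demo_wait_for_xfer(struct dma_chan *chan,
			      struct dma_async_tx_descriptor *tx)
{
	struct completion cmp;
	dma_cookie_t cookie;
	unsigned long tmo = msecs_to_jiffies(3000);

	init_completion(&cmp);
	tx->callback = demo_dma_callback;
	tx->callback_param = &cmp;

	cookie = tx->tx_submit(tx);
	if (dma_submit_error(cookie))
		return -EIO;

	/* kick the engine, then sleep until the callback or a 3 s timeout */
	dma_async_issue_pending(chan);
	if (!wait_for_completion_timeout(&cmp, tmo))
		return -ETIMEDOUT;

	/* DMA_SUCCESS is the status name used by this era of dmaengine */
	if (dma_async_is_tx_complete(chan, cookie, NULL, NULL) != DMA_SUCCESS)
		return -EIO;

	return 0;
}

dmatest_func() applies the same sequence per test iteration, as the diff below shows.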
Diffstat (limited to 'drivers/dma/dmatest.c')
-rw-r--r--	drivers/dma/dmatest.c	307
1 file changed, 214 insertions(+), 93 deletions(-)
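
Most of the churn below comes from turning the single srcbuf/dstbuf pair into NULL-terminated buffer arrays (thread->srcs and thread->dsts), so the same init and verify helpers can walk one buffer for the memcpy test or several for the xor test. A rough, hypothetical illustration of that convention follows; demo_alloc_bufs() and demo_fill_bufs() are invented names, not functions from the patch.

#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>

/* Allocate cnt buffers of "size" bytes plus a terminating NULL slot. */
static u8 **demo_alloc_bufs(unsigned int cnt, size_t size)
{
	u8 **bufs;
	unsigned int i;

	bufs = kcalloc(cnt + 1, sizeof(u8 *), GFP_KERNEL);
	if (!bufs)
		return NULL;

	for (i = 0; i < cnt; i++) {
		bufs[i] = kmalloc(size, GFP_KERNEL);
		if (!bufs[i])
			goto err;
	}
	bufs[i] = NULL;		/* terminator, as dmatest_func() sets up */
	return bufs;

err:
	while (i--)
		kfree(bufs[i]);
	kfree(bufs);
	return NULL;
}

/* Walk the array until the NULL terminator, like dmatest_init_srcs(). */
static void demo_fill_bufs(u8 **bufs, size_t size, u8 pattern)
{
	u8 *buf;

	for (; (buf = *bufs); bufs++)
		memset(buf, pattern, size);
}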
diff --git a/drivers/dma/dmatest.c b/drivers/dma/dmatest.c
index e190d8b30700..a27c0fb1bc11 100644
--- a/drivers/dma/dmatest.c
+++ b/drivers/dma/dmatest.c
@@ -38,6 +38,11 @@ module_param(max_channels, uint, S_IRUGO);
 MODULE_PARM_DESC(max_channels,
 		"Maximum number of channels to use (default: all)");
 
+static unsigned int xor_sources = 3;
+module_param(xor_sources, uint, S_IRUGO);
+MODULE_PARM_DESC(xor_sources,
+		"Number of xor source buffers (default: 3)");
+
 /*
  * Initialization patterns. All bytes in the source buffer has bit 7
  * set, all bytes in the destination buffer has bit 7 cleared.
@@ -59,8 +64,9 @@ struct dmatest_thread {
 	struct list_head	node;
 	struct task_struct	*task;
 	struct dma_chan		*chan;
-	u8			*srcbuf;
-	u8			*dstbuf;
+	u8			**srcs;
+	u8			**dsts;
+	enum dma_transaction_type type;
 };
 
 struct dmatest_chan {
@@ -98,30 +104,37 @@ static unsigned long dmatest_random(void)
 	return buf;
 }
 
-static void dmatest_init_srcbuf(u8 *buf, unsigned int start, unsigned int len)
+static void dmatest_init_srcs(u8 **bufs, unsigned int start, unsigned int len)
 {
 	unsigned int i;
-
-	for (i = 0; i < start; i++)
-		buf[i] = PATTERN_SRC | (~i & PATTERN_COUNT_MASK);
-	for ( ; i < start + len; i++)
-		buf[i] = PATTERN_SRC | PATTERN_COPY
-			| (~i & PATTERN_COUNT_MASK);;
-	for ( ; i < test_buf_size; i++)
-		buf[i] = PATTERN_SRC | (~i & PATTERN_COUNT_MASK);
+	u8 *buf;
+
+	for (; (buf = *bufs); bufs++) {
+		for (i = 0; i < start; i++)
+			buf[i] = PATTERN_SRC | (~i & PATTERN_COUNT_MASK);
+		for ( ; i < start + len; i++)
+			buf[i] = PATTERN_SRC | PATTERN_COPY
+				| (~i & PATTERN_COUNT_MASK);;
+		for ( ; i < test_buf_size; i++)
+			buf[i] = PATTERN_SRC | (~i & PATTERN_COUNT_MASK);
+		buf++;
+	}
 }
 
-static void dmatest_init_dstbuf(u8 *buf, unsigned int start, unsigned int len)
+static void dmatest_init_dsts(u8 **bufs, unsigned int start, unsigned int len)
 {
 	unsigned int i;
-
-	for (i = 0; i < start; i++)
-		buf[i] = PATTERN_DST | (~i & PATTERN_COUNT_MASK);
-	for ( ; i < start + len; i++)
-		buf[i] = PATTERN_DST | PATTERN_OVERWRITE
-			| (~i & PATTERN_COUNT_MASK);
-	for ( ; i < test_buf_size; i++)
-		buf[i] = PATTERN_DST | (~i & PATTERN_COUNT_MASK);
+	u8 *buf;
+
+	for (; (buf = *bufs); bufs++) {
+		for (i = 0; i < start; i++)
+			buf[i] = PATTERN_DST | (~i & PATTERN_COUNT_MASK);
+		for ( ; i < start + len; i++)
+			buf[i] = PATTERN_DST | PATTERN_OVERWRITE
+				| (~i & PATTERN_COUNT_MASK);
+		for ( ; i < test_buf_size; i++)
+			buf[i] = PATTERN_DST | (~i & PATTERN_COUNT_MASK);
+	}
 }
 
 static void dmatest_mismatch(u8 actual, u8 pattern, unsigned int index,
@@ -150,23 +163,30 @@ static unsigned int dmatest_verify(u8 *buf, unsigned int start,
 			thread_name, index, expected, actual);
 }
 
-static unsigned int dmatest_verify(u8 *buf, unsigned int start,
+static unsigned int dmatest_verify(u8 **bufs, unsigned int start,
 		unsigned int end, unsigned int counter, u8 pattern,
 		bool is_srcbuf)
 {
 	unsigned int i;
 	unsigned int error_count = 0;
 	u8 actual;
-
-	for (i = start; i < end; i++) {
-		actual = buf[i];
-		if (actual != (pattern | (~counter & PATTERN_COUNT_MASK))) {
-			if (error_count < 32)
-				dmatest_mismatch(actual, pattern, i, counter,
-						is_srcbuf);
-			error_count++;
+	u8 expected;
+	u8 *buf;
+	unsigned int counter_orig = counter;
+
+	for (; (buf = *bufs); bufs++) {
+		counter = counter_orig;
+		for (i = start; i < end; i++) {
+			actual = buf[i];
+			expected = pattern | (~counter & PATTERN_COUNT_MASK);
+			if (actual != expected) {
+				if (error_count < 32)
+					dmatest_mismatch(actual, pattern, i,
+							 counter, is_srcbuf);
+				error_count++;
+			}
+			counter++;
 		}
-		counter++;
 	}
 
 	if (error_count > 32)
@@ -176,12 +196,17 @@ static unsigned int dmatest_verify(u8 *buf, unsigned int start,
 	return error_count;
 }
 
+static void dmatest_callback(void *completion)
+{
+	complete(completion);
+}
+
 /*
  * This function repeatedly tests DMA transfers of various lengths and
- * offsets until it is told to exit by kthread_stop(). There may be
- * multiple threads running this function in parallel for a single
- * channel, and there may be multiple channels being tested in
- * parallel.
+ * offsets for a given operation type until it is told to exit by
+ * kthread_stop(). There may be multiple threads running this function
+ * in parallel for a single channel, and there may be multiple channels
+ * being tested in parallel.
  *
  * Before each test, the source and destination buffer is initialized
  * with a known pattern. This pattern is different depending on
@@ -201,25 +226,57 @@ static int dmatest_func(void *data)
 	unsigned int	total_tests = 0;
 	dma_cookie_t	cookie;
 	enum dma_status	status;
+	enum dma_ctrl_flags flags;
 	int		ret;
+	int		src_cnt;
+	int		dst_cnt;
+	int		i;
 
 	thread_name = current->comm;
 
 	ret = -ENOMEM;
-	thread->srcbuf = kmalloc(test_buf_size, GFP_KERNEL);
-	if (!thread->srcbuf)
-		goto err_srcbuf;
-	thread->dstbuf = kmalloc(test_buf_size, GFP_KERNEL);
-	if (!thread->dstbuf)
-		goto err_dstbuf;
 
 	smp_rmb();
 	chan = thread->chan;
+	if (thread->type == DMA_MEMCPY)
+		src_cnt = dst_cnt = 1;
+	else if (thread->type == DMA_XOR) {
+		src_cnt = xor_sources | 1; /* force odd to ensure dst = src */
+		dst_cnt = 1;
+	} else
+		goto err_srcs;
+
+	thread->srcs = kcalloc(src_cnt+1, sizeof(u8 *), GFP_KERNEL);
+	if (!thread->srcs)
+		goto err_srcs;
+	for (i = 0; i < src_cnt; i++) {
+		thread->srcs[i] = kmalloc(test_buf_size, GFP_KERNEL);
+		if (!thread->srcs[i])
+			goto err_srcbuf;
+	}
+	thread->srcs[i] = NULL;
+
+	thread->dsts = kcalloc(dst_cnt+1, sizeof(u8 *), GFP_KERNEL);
+	if (!thread->dsts)
+		goto err_dsts;
+	for (i = 0; i < dst_cnt; i++) {
+		thread->dsts[i] = kmalloc(test_buf_size, GFP_KERNEL);
+		if (!thread->dsts[i])
+			goto err_dstbuf;
+	}
+	thread->dsts[i] = NULL;
+
+	set_user_nice(current, 10);
+
+	flags = DMA_CTRL_ACK | DMA_COMPL_SKIP_DEST_UNMAP | DMA_PREP_INTERRUPT;
 
 	while (!kthread_should_stop()) {
 		struct dma_device *dev = chan->device;
-		struct dma_async_tx_descriptor *tx;
-		dma_addr_t dma_src, dma_dest;
+		struct dma_async_tx_descriptor *tx = NULL;
+		dma_addr_t dma_srcs[src_cnt];
+		dma_addr_t dma_dsts[dst_cnt];
+		struct completion cmp;
+		unsigned long tmo = msecs_to_jiffies(3000);
 
 		total_tests++;
 
@@ -227,22 +284,41 @@ static int dmatest_func(void *data)
 		src_off = dmatest_random() % (test_buf_size - len + 1);
 		dst_off = dmatest_random() % (test_buf_size - len + 1);
 
-		dmatest_init_srcbuf(thread->srcbuf, src_off, len);
-		dmatest_init_dstbuf(thread->dstbuf, dst_off, len);
+		dmatest_init_srcs(thread->srcs, src_off, len);
+		dmatest_init_dsts(thread->dsts, dst_off, len);
 
-		dma_src = dma_map_single(dev->dev, thread->srcbuf + src_off,
-					 len, DMA_TO_DEVICE);
+		for (i = 0; i < src_cnt; i++) {
+			u8 *buf = thread->srcs[i] + src_off;
+
+			dma_srcs[i] = dma_map_single(dev->dev, buf, len,
+						     DMA_TO_DEVICE);
+		}
 		/* map with DMA_BIDIRECTIONAL to force writeback/invalidate */
-		dma_dest = dma_map_single(dev->dev, thread->dstbuf,
-					  test_buf_size, DMA_BIDIRECTIONAL);
+		for (i = 0; i < dst_cnt; i++) {
+			dma_dsts[i] = dma_map_single(dev->dev, thread->dsts[i],
+						     test_buf_size,
+						     DMA_BIDIRECTIONAL);
+		}
+
+		if (thread->type == DMA_MEMCPY)
+			tx = dev->device_prep_dma_memcpy(chan,
+							 dma_dsts[0] + dst_off,
+							 dma_srcs[0], len,
+							 flags);
+		else if (thread->type == DMA_XOR)
+			tx = dev->device_prep_dma_xor(chan,
+						      dma_dsts[0] + dst_off,
+						      dma_srcs, xor_sources,
+						      len, flags);
 
-		tx = dev->device_prep_dma_memcpy(chan, dma_dest + dst_off,
-						 dma_src, len,
-						 DMA_CTRL_ACK | DMA_COMPL_SKIP_DEST_UNMAP);
 		if (!tx) {
-			dma_unmap_single(dev->dev, dma_src, len, DMA_TO_DEVICE);
-			dma_unmap_single(dev->dev, dma_dest,
-					 test_buf_size, DMA_BIDIRECTIONAL);
+			for (i = 0; i < src_cnt; i++)
+				dma_unmap_single(dev->dev, dma_srcs[i], len,
+						 DMA_TO_DEVICE);
+			for (i = 0; i < dst_cnt; i++)
+				dma_unmap_single(dev->dev, dma_dsts[i],
+						 test_buf_size,
+						 DMA_BIDIRECTIONAL);
 			pr_warning("%s: #%u: prep error with src_off=0x%x "
 					"dst_off=0x%x len=0x%x\n",
 					thread_name, total_tests - 1,
@@ -251,7 +327,10 @@ static int dmatest_func(void *data)
 			failed_tests++;
 			continue;
 		}
-		tx->callback = NULL;
+
+		init_completion(&cmp);
+		tx->callback = dmatest_callback;
+		tx->callback_param = &cmp;
 		cookie = tx->tx_submit(tx);
 
 		if (dma_submit_error(cookie)) {
@@ -263,44 +342,50 @@ static int dmatest_func(void *data)
 			failed_tests++;
 			continue;
 		}
-		dma_async_memcpy_issue_pending(chan);
+		dma_async_issue_pending(chan);
 
-		do {
-			msleep(1);
-			status = dma_async_memcpy_complete(
-					chan, cookie, NULL, NULL);
-		} while (status == DMA_IN_PROGRESS);
+		tmo = wait_for_completion_timeout(&cmp, tmo);
+		status = dma_async_is_tx_complete(chan, cookie, NULL, NULL);
 
-		if (status == DMA_ERROR) {
-			pr_warning("%s: #%u: error during copy\n",
+		if (tmo == 0) {
+			pr_warning("%s: #%u: test timed out\n",
 					thread_name, total_tests - 1);
+			failed_tests++;
+			continue;
+		} else if (status != DMA_SUCCESS) {
+			pr_warning("%s: #%u: got completion callback,"
+				   " but status is \'%s\'\n",
+				   thread_name, total_tests - 1,
+				   status == DMA_ERROR ? "error" : "in progress");
 			failed_tests++;
 			continue;
 		}
+
 		/* Unmap by myself (see DMA_COMPL_SKIP_DEST_UNMAP above) */
-		dma_unmap_single(dev->dev, dma_dest,
-				 test_buf_size, DMA_BIDIRECTIONAL);
+		for (i = 0; i < dst_cnt; i++)
+			dma_unmap_single(dev->dev, dma_dsts[i], test_buf_size,
+					 DMA_BIDIRECTIONAL);
 
 		error_count = 0;
 
 		pr_debug("%s: verifying source buffer...\n", thread_name);
-		error_count += dmatest_verify(thread->srcbuf, 0, src_off,
+		error_count += dmatest_verify(thread->srcs, 0, src_off,
 				0, PATTERN_SRC, true);
-		error_count += dmatest_verify(thread->srcbuf, src_off,
+		error_count += dmatest_verify(thread->srcs, src_off,
 				src_off + len, src_off,
 				PATTERN_SRC | PATTERN_COPY, true);
-		error_count += dmatest_verify(thread->srcbuf, src_off + len,
+		error_count += dmatest_verify(thread->srcs, src_off + len,
 				test_buf_size, src_off + len,
 				PATTERN_SRC, true);
 
 		pr_debug("%s: verifying dest buffer...\n",
 				thread->task->comm);
-		error_count += dmatest_verify(thread->dstbuf, 0, dst_off,
+		error_count += dmatest_verify(thread->dsts, 0, dst_off,
 				0, PATTERN_DST, false);
-		error_count += dmatest_verify(thread->dstbuf, dst_off,
+		error_count += dmatest_verify(thread->dsts, dst_off,
 				dst_off + len, src_off,
 				PATTERN_SRC | PATTERN_COPY, false);
-		error_count += dmatest_verify(thread->dstbuf, dst_off + len,
+		error_count += dmatest_verify(thread->dsts, dst_off + len,
 				test_buf_size, dst_off + len,
 				PATTERN_DST, false);
 
@@ -319,10 +404,16 @@ static int dmatest_func(void *data)
 	}
 
 	ret = 0;
-	kfree(thread->dstbuf);
+	for (i = 0; thread->dsts[i]; i++)
+		kfree(thread->dsts[i]);
 err_dstbuf:
-	kfree(thread->srcbuf);
+	kfree(thread->dsts);
+err_dsts:
+	for (i = 0; thread->srcs[i]; i++)
+		kfree(thread->srcs[i]);
 err_srcbuf:
+	kfree(thread->srcs);
+err_srcs:
 	pr_notice("%s: terminating after %u tests, %u failures (status %d)\n",
 			thread_name, total_tests, failed_tests, ret);
 	return ret;
@@ -344,35 +435,36 @@ static void dmatest_cleanup_channel(struct dmatest_chan *dtc)
 	kfree(dtc);
 }
 
-static int dmatest_add_channel(struct dma_chan *chan)
+static int dmatest_add_threads(struct dmatest_chan *dtc, enum dma_transaction_type type)
 {
-	struct dmatest_chan	*dtc;
-	struct dmatest_thread	*thread;
-	unsigned int		i;
-
-	dtc = kmalloc(sizeof(struct dmatest_chan), GFP_KERNEL);
-	if (!dtc) {
-		pr_warning("dmatest: No memory for %s\n", dma_chan_name(chan));
-		return -ENOMEM;
-	}
+	struct dmatest_thread *thread;
+	struct dma_chan *chan = dtc->chan;
+	char *op;
+	unsigned int i;
 
-	dtc->chan = chan;
-	INIT_LIST_HEAD(&dtc->threads);
+	if (type == DMA_MEMCPY)
+		op = "copy";
+	else if (type == DMA_XOR)
+		op = "xor";
+	else
+		return -EINVAL;
 
 	for (i = 0; i < threads_per_chan; i++) {
 		thread = kzalloc(sizeof(struct dmatest_thread), GFP_KERNEL);
 		if (!thread) {
-			pr_warning("dmatest: No memory for %s-test%u\n",
-					dma_chan_name(chan), i);
+			pr_warning("dmatest: No memory for %s-%s%u\n",
+				   dma_chan_name(chan), op, i);
+
 			break;
 		}
 		thread->chan = dtc->chan;
+		thread->type = type;
 		smp_wmb();
-		thread->task = kthread_run(dmatest_func, thread, "%s-test%u",
-				dma_chan_name(chan), i);
+		thread->task = kthread_run(dmatest_func, thread, "%s-%s%u",
+				dma_chan_name(chan), op, i);
 		if (IS_ERR(thread->task)) {
-			pr_warning("dmatest: Failed to run thread %s-test%u\n",
-					dma_chan_name(chan), i);
+			pr_warning("dmatest: Failed to run thread %s-%s%u\n",
+					dma_chan_name(chan), op, i);
 			kfree(thread);
 			break;
 		}
@@ -382,7 +474,36 @@ static int dmatest_add_channel(struct dma_chan *chan)
 		list_add_tail(&thread->node, &dtc->threads);
 	}
 
-	pr_info("dmatest: Started %u threads using %s\n", i, dma_chan_name(chan));
+	return i;
+}
+
+static int dmatest_add_channel(struct dma_chan *chan)
+{
+	struct dmatest_chan	*dtc;
+	struct dma_device	*dma_dev = chan->device;
+	unsigned int		thread_count = 0;
+	unsigned int		cnt;
+
+	dtc = kmalloc(sizeof(struct dmatest_chan), GFP_KERNEL);
+	if (!dtc) {
+		pr_warning("dmatest: No memory for %s\n", dma_chan_name(chan));
+		return -ENOMEM;
+	}
+
+	dtc->chan = chan;
+	INIT_LIST_HEAD(&dtc->threads);
+
+	if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) {
+		cnt = dmatest_add_threads(dtc, DMA_MEMCPY);
+		thread_count += cnt > 0 ?: 0;
+	}
+	if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) {
+		cnt = dmatest_add_threads(dtc, DMA_XOR);
+		thread_count += cnt > 0 ?: 0;
+	}
+
+	pr_info("dmatest: Started %u threads using %s\n",
+		thread_count, dma_chan_name(chan));
 
 	list_add_tail(&dtc->node, &dmatest_channels);
 	nr_channels++;