Diffstat (limited to 'crypto/async_tx')
 crypto/async_tx/async_memcpy.c |  3 ++-
 crypto/async_tx/async_memset.c |  3 ++-
 crypto/async_tx/async_xor.c    | 10 ++++++----
 3 files changed, 10 insertions(+), 6 deletions(-)
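Every hunk below applies the same conversion: the bare cb_fn != NULL boolean that used to be passed to the driver's prep routine is replaced by an unsigned long flags word derived once at the top of the block. A minimal, self-contained sketch of that idiom follows; the DMA_PREP_INTERRUPT value and the callback typedef are stand-ins approximating include/linux/dmaengine.h (that header is outside this path-limited diff), and async_tx_prep_flags is a hypothetical helper name used only for illustration.

/*
 * Sketch only: stand-in definitions approximating include/linux/dmaengine.h
 * so the idiom reads on its own; treat them as assumptions, not as part of
 * this diff.
 */
#define DMA_PREP_INTERRUPT	(1UL << 0)

typedef void (*dma_async_tx_callback)(void *dma_async_param);

/*
 * Hypothetical helper showing the conversion each call site performs:
 * request a completion interrupt only when a callback was supplied.
 */
static unsigned long async_tx_prep_flags(dma_async_tx_callback cb_fn)
{
	return cb_fn ? DMA_PREP_INTERRUPT : 0;
}

In the diff itself the ternary is written inline as a local dma_prep_flags variable at each call site rather than factored into a helper.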
diff --git a/crypto/async_tx/async_memcpy.c b/crypto/async_tx/async_memcpy.c
index faca0bc52068..25dcf33bbc2d 100644
--- a/crypto/async_tx/async_memcpy.c
+++ b/crypto/async_tx/async_memcpy.c
@@ -52,6 +52,7 @@ async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset,
 
 	if (device) {
 		dma_addr_t dma_dest, dma_src;
+		unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
 
 		dma_dest = dma_map_page(device->dev, dest, dest_offset, len,
 					DMA_FROM_DEVICE);
@@ -60,7 +61,7 @@ async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset,
 				       DMA_TO_DEVICE);
 
 		tx = device->device_prep_dma_memcpy(chan, dma_dest, dma_src,
-						     len, cb_fn != NULL);
+						     len, dma_prep_flags);
 	}
 
 	if (tx) {
diff --git a/crypto/async_tx/async_memset.c b/crypto/async_tx/async_memset.c
index 0c94851cfd37..8e98ab0cd37c 100644
--- a/crypto/async_tx/async_memset.c
+++ b/crypto/async_tx/async_memset.c
@@ -52,12 +52,13 @@ async_memset(struct page *dest, int val, unsigned int offset,
 
 	if (device) {
 		dma_addr_t dma_dest;
+		unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
 
 		dma_dest = dma_map_page(device->dev, dest, offset, len,
 					DMA_FROM_DEVICE);
 
 		tx = device->device_prep_dma_memset(chan, dma_dest, val, len,
-						     cb_fn != NULL);
+						     dma_prep_flags);
 	}
 
 	if (tx) {
diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c
index 12cba1a4205b..68d2fe4465d8 100644
--- a/crypto/async_tx/async_xor.c
+++ b/crypto/async_tx/async_xor.c
@@ -45,6 +45,7 @@ do_async_xor(struct dma_device *device,
 	dma_addr_t *dma_src = (dma_addr_t *) src_list;
 	struct dma_async_tx_descriptor *tx;
 	int i;
+	unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
 
 	pr_debug("%s: len: %zu\n", __FUNCTION__, len);
 
@@ -60,7 +61,7 @@ do_async_xor(struct dma_device *device,
 	 * in case they can not provide a descriptor
 	 */
 	tx = device->device_prep_dma_xor(chan, dma_dest, dma_src, src_cnt, len,
-					 cb_fn != NULL);
+					 dma_prep_flags);
 	if (!tx) {
 		if (depend_tx)
 			dma_wait_for_async_tx(depend_tx);
@@ -68,7 +69,7 @@ do_async_xor(struct dma_device *device,
 		while (!tx)
 			tx = device->device_prep_dma_xor(chan, dma_dest,
 							 dma_src, src_cnt, len,
-							 cb_fn != NULL);
+							 dma_prep_flags);
 	}
 
 	async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
@@ -268,6 +269,7 @@ async_xor_zero_sum(struct page *dest, struct page **src_list,
 
 	if (device) {
 		dma_addr_t *dma_src = (dma_addr_t *) src_list;
+		unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
 		int i;
 
 		pr_debug("%s: (async) len: %zu\n", __FUNCTION__, len);
@@ -278,7 +280,7 @@ async_xor_zero_sum(struct page *dest, struct page **src_list,
 
 		tx = device->device_prep_dma_zero_sum(chan, dma_src, src_cnt,
 						      len, result,
-						      cb_fn != NULL);
+						      dma_prep_flags);
 		if (!tx) {
 			if (depend_tx)
 				dma_wait_for_async_tx(depend_tx);
@@ -286,7 +288,7 @@ async_xor_zero_sum(struct page *dest, struct page **src_list,
 			while (!tx)
 				tx = device->device_prep_dma_zero_sum(chan,
 					dma_src, src_cnt, len, result,
-					cb_fn != NULL);
+					dma_prep_flags);
 		}
 
 		async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
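On the provider side, the change these callers rely on (not visible in this path-limited diff) is that each device_prep_dma_* hook now takes a flags word in place of an int_en boolean, which leaves room for further DMA_PREP_* bits without another prototype change. A hedged, self-contained sketch of how a driver might consume the flag; every identifier other than DMA_PREP_INTERRUPT is invented for illustration, and even that value is an assumption mirroring dmaengine.h.

#define DMA_PREP_INTERRUPT	(1UL << 0)	/* assumed to mirror dmaengine.h */
#define EXAMPLE_DESC_INT_EN	(1U << 3)	/* hypothetical hardware control bit */

struct example_desc {
	unsigned int hw_ctrl;			/* hypothetical descriptor control word */
};

/* Arm the completion interrupt only when the caller asked for one. */
static void example_desc_set_interrupt(struct example_desc *desc,
				       unsigned long prep_flags)
{
	if (prep_flags & DMA_PREP_INTERRUPT)
		desc->hw_ctrl |= EXAMPLE_DESC_INT_EN;
	else
		desc->hw_ctrl &= ~EXAMPLE_DESC_INT_EN;
}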