| author | Patrick McHardy <kaber@trash.net> | 2008-05-07 10:37:29 -0400 |
|---|---|---|
| committer | Herbert Xu <herbert@gondor.apana.org.au> | 2008-07-10 08:35:12 -0400 |
| commit | 692af5da779e018fc6a3b480b67adb33e3c6e1f0 (patch) | |
| tree | a34beb5f12814c803d2c186e2210ac6c0f8dc1cd /drivers | |
| parent | 85e7e60b856141cc9831e11cdfc8e9265886abac (diff) | |
[HIFN]: Have HW invalidate src and dest descriptors after processing
The descriptors need to be invalidated after processing for ring
cleanup to work properly and to avoid using an old destination
descriptor when the src and cmd descriptors are already set up
and the dst descriptor isn't.
Signed-off-by: Patrick McHardy <kaber@trash.net>
Acked-by: Evgeniy Polyakov <johnpol@2ka.mipt.ru>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
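
To make the reasoning above concrete: HIFN_D_NOINVALID asks the engine to leave a descriptor untouched after processing, so dropping it lets the hardware invalidate each src and dst entry (presumably by clearing HIFN_D_VALID) once it has been consumed. The sketch below is illustrative only, not code from the driver; it assumes the HIFN_D_* flag definitions from drivers/crypto/hifn_795x.c, and the helper name and its arguments are made up for this example.

```c
#include <linux/types.h>
#include <asm/byteorder.h>

/* Illustrative sketch (assumes the HIFN_D_* definitions from
 * drivers/crypto/hifn_795x.c): how this patch changes the control word
 * written into a source descriptor. */
static __le32 sketch_src_ctrl(unsigned int size, bool after_patch)
{
	u32 flags = HIFN_D_VALID | HIFN_D_MASKDONEIRQ | HIFN_D_LAST;

	/* Before the patch the driver also set HIFN_D_NOINVALID, telling the
	 * engine to leave the descriptor alone after processing.  Without it,
	 * the engine invalidates the entry when it is done, so software can
	 * tell processed slots from pending ones. */
	if (!after_patch)
		flags |= HIFN_D_NOINVALID;

	return __cpu_to_le32(size | flags);
}
```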
Diffstat (limited to 'drivers')
-rw-r--r-- | drivers/crypto/hifn_795x.c | 8 |
1 files changed, 4 insertions, 4 deletions
diff --git a/drivers/crypto/hifn_795x.c b/drivers/crypto/hifn_795x.c
index c9fe18d5348e..459d283b94c5 100644
--- a/drivers/crypto/hifn_795x.c
+++ b/drivers/crypto/hifn_795x.c
@@ -1297,7 +1297,7 @@ static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page,
 
 	dma->srcr[idx].p = __cpu_to_le32(addr);
 	dma->srcr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
-			HIFN_D_MASKDONEIRQ | HIFN_D_NOINVALID | HIFN_D_LAST);
+			HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
 
 	if (++idx == HIFN_D_SRC_RSIZE) {
 		dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID |
@@ -1325,7 +1325,7 @@ static void hifn_setup_res_desc(struct hifn_device *dev)
 			HIFN_D_VALID | HIFN_D_LAST);
 	/*
 	 * dma->resr[dma->resi].l = __cpu_to_le32(HIFN_MAX_RESULT | HIFN_D_VALID |
-	 *					HIFN_D_LAST | HIFN_D_NOINVALID);
+	 *					HIFN_D_LAST);
 	 */
 
 	if (++dma->resi == HIFN_D_RES_RSIZE) {
@@ -1354,12 +1354,12 @@ static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
 	idx = dma->dsti;
 	dma->dstr[idx].p = __cpu_to_le32(addr);
 	dma->dstr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
-			HIFN_D_MASKDONEIRQ | HIFN_D_NOINVALID | HIFN_D_LAST);
+			HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
 
 	if (++idx == HIFN_D_DST_RSIZE) {
 		dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID |
 				HIFN_D_JUMP | HIFN_D_MASKDONEIRQ |
-				HIFN_D_LAST | HIFN_D_NOINVALID);
+				HIFN_D_LAST);
 		idx = 0;
 	}
 	dma->dsti = idx;
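
As a follow-up, a minimal sketch of why this matters for ring cleanup, tying back to the commit message: once the engine is allowed to invalidate entries, a cleanup pass can reclaim slots simply by watching for HIFN_D_VALID to be cleared. The struct, field names, and function below are simplified stand-ins, not the driver's own cleanup code; only the flag and ring-size macros are assumed from hifn_795x.c.

```c
#include <linux/types.h>
#include <asm/byteorder.h>

/* Simplified stand-in for the hifn_795x source ring; assumes the
 * HIFN_D_VALID and HIFN_D_SRC_RSIZE definitions from
 * drivers/crypto/hifn_795x.c.  The extra slot at index HIFN_D_SRC_RSIZE
 * holds the JUMP descriptor, mirroring the setup code in the patch. */
struct sketch_src_ring {
	struct {
		volatile __le32 p;	/* DMA address of the data buffer */
		volatile __le32 l;	/* length and control flags */
	} srcr[HIFN_D_SRC_RSIZE + 1];
	unsigned int srck;		/* next slot to try to reclaim */
};

/* Walk forward from the oldest in-flight slot and stop at the first entry
 * the engine has not consumed yet.  This only works because the engine now
 * clears HIFN_D_VALID after processing; with HIFN_D_NOINVALID set, processed
 * and pending slots would look identical and a stale destination descriptor
 * could be picked up again. */
static unsigned int sketch_reclaim_src(struct sketch_src_ring *ring,
				       unsigned int in_flight)
{
	unsigned int freed = 0;

	while (freed < in_flight) {
		if (ring->srcr[ring->srck].l & __cpu_to_le32(HIFN_D_VALID))
			break;			/* still owned by the engine */

		if (++ring->srck == HIFN_D_SRC_RSIZE)
			ring->srck = 0;		/* wrap, skipping the JUMP slot */
		freed++;
	}

	return freed;				/* slots now free for reuse */
}
```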