author		Herbert Xu <herbert@gondor.apana.org.au>	2006-08-12 07:56:17 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2006-09-20 21:41:52 -0400
commit		5c64097aa0f6dc4f27718ef47ca9a12538d62860 (patch)
tree		d8c0cd3358464f589c9f2778b7be348f73db6950 /crypto/cipher.c
parent		f28776a369b12f9a03a822a8e1090ed670a41f4f (diff)
[CRYPTO] scatterwalk: Prepare for block ciphers
This patch prepares the scatterwalk code for use by the new block cipher
type.
Firstly, it halves the size of scatter_walk on 32-bit platforms. This
is important, as we allocate at least two of these objects on the stack
for each block cipher operation.
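
For illustration, the slimmed-down walker only needs to remember where
it is in the scatterlist; a minimal sketch of the reduced state, treating
the exact field layout as an assumption rather than a quote of the patch:

    /* Sketch: just a cursor into the scatterlist -- two words on 32-bit,
     * with no cached page pointer, kmap address or per-page lengths. */
    struct scatter_walk {
            struct scatterlist *sg;   /* current scatterlist entry */
            unsigned int offset;      /* byte offset within that entry */
    };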
It also exports the symbols since the block cipher code can be built as
a module.
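
For a block cipher built as a module to link against these helpers, the
symbols have to be exported; a hedged sketch of what that amounts to
(the precise set of exported helpers is an assumption here):

    /* Sketch: expose the walk helpers to modular users. */
    EXPORT_SYMBOL_GPL(scatterwalk_start);
    EXPORT_SYMBOL_GPL(scatterwalk_map);
    EXPORT_SYMBOL_GPL(scatterwalk_copychunks);
    EXPORT_SYMBOL_GPL(scatterwalk_done);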
Finally, there is a hack in scatterwalk_unmap that relies on progress
having been made. Unfortunately, for hardware crypto we cannot guarantee
that progress will be made, since the hardware can fail.
So this also gets rid of the hack by not advancing the address returned
by scatterwalk_map.
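
Concretely, a caller now keeps the address returned by scatterwalk_map,
hands that same address back to scatterwalk_unmap, and advances the walk
separately by however much progress was actually made; a minimal sketch
of the resulting pattern (variable names are illustrative, error
handling elided):

    u8 *vaddr = scatterwalk_map(&walk, 0);  /* map; remember the address */
    /* ... process up to nbytes at vaddr, leaving vaddr itself untouched ... */
    scatterwalk_unmap(vaddr, 0);            /* unmap the exact same address */
    scatterwalk_advance(&walk, nbytes);     /* advance only by real progress */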
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/cipher.c')
-rw-r--r--	crypto/cipher.c	27
1 file changed, 13 insertions, 14 deletions
diff --git a/crypto/cipher.c b/crypto/cipher.c
index d8ca0ec8d0be..326461780673 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -45,15 +45,10 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
 	u8 buffer[bsize * 2 + alignmask];
 	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	u8 *dst = src + bsize;
-	unsigned int n;
-
-	n = scatterwalk_copychunks(src, in, bsize, 0);
-	scatterwalk_advance(in, n);
 
+	scatterwalk_copychunks(src, in, bsize, 0);
 	desc->prfn(desc, dst, src, bsize);
-
-	n = scatterwalk_copychunks(dst, out, bsize, 1);
-	scatterwalk_advance(out, n);
+	scatterwalk_copychunks(dst, out, bsize, 1);
 
 	return bsize;
 }
@@ -64,12 +59,16 @@ static inline unsigned int crypt_fast(const struct cipher_desc *desc,
 				       unsigned int nbytes, u8 *tmp)
 {
 	u8 *src, *dst;
+	u8 *real_src, *real_dst;
+
+	real_src = scatterwalk_map(in, 0);
+	real_dst = scatterwalk_map(out, 1);
 
-	src = in->data;
-	dst = scatterwalk_samebuf(in, out) ? src : out->data;
+	src = real_src;
+	dst = scatterwalk_samebuf(in, out) ? src : real_dst;
 
 	if (tmp) {
-		memcpy(tmp, in->data, nbytes);
+		memcpy(tmp, src, nbytes);
 		src = tmp;
 		dst = tmp;
 	}
@@ -77,7 +76,10 @@ static inline unsigned int crypt_fast(const struct cipher_desc *desc,
 	nbytes = desc->prfn(desc, dst, src, nbytes);
 
 	if (tmp)
-		memcpy(out->data, tmp, nbytes);
+		memcpy(real_dst, tmp, nbytes);
+
+	scatterwalk_unmap(real_src, 0);
+	scatterwalk_unmap(real_dst, 1);
 
 	scatterwalk_advance(in, nbytes);
 	scatterwalk_advance(out, nbytes);
@@ -126,9 +128,6 @@ static int crypt(const struct cipher_desc *desc,
 		tmp = (u8 *)buffer;
 	}
 
-	scatterwalk_map(&walk_in, 0);
-	scatterwalk_map(&walk_out, 1);
-
 	n = scatterwalk_clamp(&walk_in, n);
 	n = scatterwalk_clamp(&walk_out, n);
 