author		Herbert Xu <herbert@gondor.apana.org.au>	2007-11-20 07:32:56 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-01-10 16:16:20 -0500
commit		3f8214ea335e422702340d7e835921e78367f99d
tree		ef8b7ef5c7e2d079e05e2a048db22eb183498235 /crypto
parent		d0b9007a27206fe944d9db72e13dab157b8e118c
[CRYPTO] ctr: Use crypto_inc and crypto_xor
This patch replaces CTR's custom counter-increment and xor helpers with the generic crypto_inc() and crypto_xor() functions.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
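For context, crypto_inc() increments a big-endian counter in place and crypto_xor() XORs one buffer into another; both are generic helpers declared in include/crypto/algapi.h, with u32-at-a-time fast paths on suitably aligned data. A rough user-space model of the byte-wise semantics they preserve (model_inc/model_xor are illustrative names, not the kernel implementation):

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for the kernel's crypto_inc()/crypto_xor();
 * the real helpers also take u32-at-a-time fast paths on aligned
 * buffers, which is why this patch widens the alignmask below. */
static void model_inc(uint8_t *a, unsigned int size)
{
	/* Big-endian increment with wrap-around, as the removed
	 * __ctr_inc_byte()/ctr_inc_quad() pair used to do. */
	for (; size; size--)
		if (++a[size - 1])
			break;
}

static void model_xor(uint8_t *dst, const uint8_t *src, unsigned int size)
{
	while (size--)
		*dst++ ^= *src++;
}

int main(void)
{
	uint8_t ctr[4] = { 0x00, 0x00, 0x00, 0xff };
	uint8_t buf[4] = { 0xaa, 0xbb, 0xcc, 0xdd };

	model_inc(ctr, sizeof(ctr));		/* 000000ff -> 00000100 */
	model_xor(buf, ctr, sizeof(buf));	/* buf ^= ctr */
	printf("%02x%02x%02x%02x\n", ctr[0], ctr[1], ctr[2], ctr[3]);
	return 0;
}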
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/ctr.c	71
1 file changed, 16 insertions(+), 55 deletions(-)
diff --git a/crypto/ctr.c b/crypto/ctr.c
index b974a9f9b879..b816e959fa55 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -31,51 +31,6 @@ struct crypto_ctr_ctx {
 	u8 *nonce;
 };
 
-static inline void __ctr_inc_byte(u8 *a, unsigned int size)
-{
-	u8 *b = (a + size);
-	u8 c;
-
-	for (; size; size--) {
-		c = *--b + 1;
-		*b = c;
-		if (c)
-			break;
-	}
-}
-
-static void ctr_inc_quad(u8 *a, unsigned int size)
-{
-	__be32 *b = (__be32 *)(a + size);
-	u32 c;
-
-	for (; size >= 4; size -= 4) {
-		c = be32_to_cpu(*--b) + 1;
-		*b = cpu_to_be32(c);
-		if (c)
-			return;
-	}
-
-	__ctr_inc_byte(a, size);
-}
-
-static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
-{
-	for (; bs; bs--)
-		*a++ ^= *b++;
-}
-
-static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
-{
-	u32 *a = (u32 *)dst;
-	u32 *b = (u32 *)src;
-
-	for (; bs >= 4; bs -= 4)
-		*a++ ^= *b++;
-
-	xor_byte((u8 *)a, (u8 *)b, bs);
-}
-
 static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
 			     unsigned int keylen)
 {
@@ -111,7 +66,8 @@ static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
 	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
 		   crypto_cipher_alg(tfm)->cia_encrypt;
 	unsigned int bsize = crypto_cipher_blocksize(tfm);
-	unsigned long alignmask = crypto_cipher_alignmask(tfm);
+	unsigned long alignmask = crypto_cipher_alignmask(tfm) |
+				  (__alignof__(u32) - 1);
 	u8 ks[bsize + alignmask];
 	u8 *keystream = (u8 *)ALIGN((unsigned long)ks, alignmask + 1);
 	u8 *src = walk->src.virt.addr;
@@ -121,13 +77,13 @@ static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
 	do {
 		/* create keystream */
 		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
-		xor_quad(keystream, src, min(nbytes, bsize));
+		crypto_xor(keystream, src, min(nbytes, bsize));
 
 		/* copy result into dst */
 		memcpy(dst, keystream, min(nbytes, bsize));
 
 		/* increment counter in counterblock */
-		ctr_inc_quad(ctrblk + (bsize - countersize), countersize);
+		crypto_inc(ctrblk + bsize - countersize, countersize);
 
 		if (nbytes < bsize)
 			break;
@@ -148,7 +104,8 @@ static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
 	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
 		   crypto_cipher_alg(tfm)->cia_encrypt;
 	unsigned int bsize = crypto_cipher_blocksize(tfm);
-	unsigned long alignmask = crypto_cipher_alignmask(tfm);
+	unsigned long alignmask = crypto_cipher_alignmask(tfm) |
+				  (__alignof__(u32) - 1);
 	unsigned int nbytes = walk->nbytes;
 	u8 *src = walk->src.virt.addr;
 	u8 ks[bsize + alignmask];
@@ -157,10 +114,10 @@ static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
 	do {
 		/* create keystream */
 		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
-		xor_quad(src, keystream, min(nbytes, bsize));
+		crypto_xor(src, keystream, min(nbytes, bsize));
 
 		/* increment counter in counterblock */
-		ctr_inc_quad(ctrblk + (bsize - countersize), countersize);
+		crypto_inc(ctrblk + bsize - countersize, countersize);
 
 		if (nbytes < bsize)
 			break;
@@ -184,7 +141,8 @@ static int crypto_ctr_crypt(struct blkcipher_desc *desc,
 	unsigned int bsize = crypto_cipher_blocksize(child);
 	struct ctr_instance_ctx *ictx =
 		crypto_instance_ctx(crypto_tfm_alg_instance(&tfm->base));
-	unsigned long alignmask = crypto_cipher_alignmask(child);
+	unsigned long alignmask = crypto_cipher_alignmask(child) |
+				  (__alignof__(u32) - 1);
 	u8 cblk[bsize + alignmask];
 	u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1);
 	int err;
@@ -198,8 +156,7 @@ static int crypto_ctr_crypt(struct blkcipher_desc *desc,
 	memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize);
 
 	/* initialize counter portion of counter block */
-	ctr_inc_quad(counterblk + (bsize - ictx->countersize),
-		     ictx->countersize);
+	crypto_inc(counterblk + bsize - ictx->countersize, ictx->countersize);
 
 	while (walk.nbytes) {
 		if (walk.src.virt.addr == walk.dst.virt.addr)
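Note that if the counter bytes are zero when this first crypto_inc() runs, the counter field comes out as a big-endian 1 — the value CTR mode per RFC 3686 starts counting from. A toy illustration of that first increment:

#include <stdint.h>
#include <stdio.h>

/* Byte-wise big-endian increment, as in the earlier sketch. */
static void be_inc(uint8_t *a, unsigned int size)
{
	for (; size; size--)
		if (++a[size - 1])
			break;
}

int main(void)
{
	uint8_t counter[4] = { 0 };	/* zeroed counter field */

	be_inc(counter, sizeof(counter));
	printf("%02x %02x %02x %02x\n",
	       counter[0], counter[1], counter[2], counter[3]);
	/* prints: 00 00 00 01 */
	return 0;
}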
@@ -284,6 +241,10 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
 	    (countersize > alg->cra_blocksize) || (countersize < 4))
 		goto out_put_alg;
 
+	/* If this is false we'd fail the alignment of crypto_inc. */
+	if ((alg->cra_blocksize - countersize) % 4)
+		goto out_put_alg;
+
 	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
 	err = -ENOMEM;
 	if (!inst)
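The new check rejects instantiations where the counter's offset within the block, cra_blocksize - countersize, is not a multiple of 4: the counter block is only guaranteed u32 alignment at its base, so crypto_inc()'s word-wise loop would otherwise run on a misaligned pointer. A quick sketch of the arithmetic for a hypothetical 16-byte block cipher:

#include <stdio.h>

int main(void)
{
	unsigned int bsize = 16;	/* e.g. a 16-byte block cipher */
	unsigned int sizes[] = { 4, 7, 8, 12, 16 };

	for (unsigned int i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++) {
		unsigned int cs = sizes[i];
		printf("countersize %2u -> counter offset %2u -> %s\n",
		       cs, bsize - cs,
		       (bsize - cs) % 4 ? "rejected" : "accepted");
	}
	return 0;
}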
@@ -316,7 +277,7 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
 	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
 	inst->alg.cra_priority = alg->cra_priority;
 	inst->alg.cra_blocksize = 1;
-	inst->alg.cra_alignmask = 3;
+	inst->alg.cra_alignmask = __alignof__(u32) - 1;
 	inst->alg.cra_type = &crypto_blkcipher_type;
 
 	inst->alg.cra_blkcipher.ivsize = ivsize;
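Finally, the hard-coded alignmask of 3 becomes __alignof__(u32) - 1 — the same value wherever u32 needs 4-byte alignment, but now spelled in terms of the requirement it encodes. A one-liner to confirm the value on a given toolchain (user-space stand-in using uint32_t):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Usually prints 3: a u32 needs 4-byte alignment, and an
	 * alignmask is the required alignment minus one. */
	printf("%u\n", (unsigned int)(__alignof__(uint32_t) - 1));
	return 0;
}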