 crypto/ctr.c         | 334
 crypto/gcm.c         |   7
 crypto/tcrypt.c      |   8
 include/crypto/ctr.h |  20
 4 files changed, 233 insertions(+), 136 deletions(-)
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 57da7d0affcb..1052b318aed2 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -11,6 +11,7 @@
  */
 
 #include <crypto/algapi.h>
+#include <crypto/ctr.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -19,16 +20,13 @@
 #include <linux/scatterlist.h>
 #include <linux/slab.h>
 
-struct ctr_instance_ctx {
-        struct crypto_spawn alg;
-        unsigned int noncesize;
-        unsigned int ivsize;
-        unsigned int countersize;
-};
-
 struct crypto_ctr_ctx {
         struct crypto_cipher *child;
-        u8 *nonce;
+};
+
+struct crypto_rfc3686_ctx {
+        struct crypto_blkcipher *child;
+        u8 nonce[CTR_RFC3686_NONCE_SIZE];
 };
 
 static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
@@ -36,18 +34,7 @@ static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
 {
         struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
         struct crypto_cipher *child = ctx->child;
-        struct ctr_instance_ctx *ictx =
-                crypto_instance_ctx(crypto_tfm_alg_instance(parent));
-        unsigned int noncelen = ictx->noncesize;
-        int err = 0;
-
-        /* the nonce is stored in bytes at end of key */
-        if (keylen < noncelen)
-                return -EINVAL;
-
-        memcpy(ctx->nonce, key + (keylen - noncelen), noncelen);
-
-        keylen -= noncelen;
+        int err;
 
         crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
         crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
@@ -60,11 +47,13 @@ static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
 }
 
 static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
-                                   struct crypto_cipher *tfm, u8 *ctrblk,
-                                   unsigned int countersize)
+                                   struct crypto_cipher *tfm)
 {
         unsigned int bsize = crypto_cipher_blocksize(tfm);
-        u8 *keystream = ctrblk + bsize;
+        unsigned long alignmask = crypto_cipher_alignmask(tfm);
+        u8 *ctrblk = walk->iv;
+        u8 tmp[bsize + alignmask];
+        u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
         u8 *src = walk->src.virt.addr;
         u8 *dst = walk->dst.virt.addr;
         unsigned int nbytes = walk->nbytes;
@@ -72,15 +61,17 @@ static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
         crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
         crypto_xor(keystream, src, nbytes);
         memcpy(dst, keystream, nbytes);
+
+        crypto_inc(ctrblk, bsize);
 }
 
 static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
-                                    struct crypto_cipher *tfm, u8 *ctrblk,
-                                    unsigned int countersize)
+                                    struct crypto_cipher *tfm)
 {
         void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
            crypto_cipher_alg(tfm)->cia_encrypt;
         unsigned int bsize = crypto_cipher_blocksize(tfm);
+        u8 *ctrblk = walk->iv;
         u8 *src = walk->src.virt.addr;
         u8 *dst = walk->dst.virt.addr;
         unsigned int nbytes = walk->nbytes;
@@ -91,7 +82,7 @@ static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
                 crypto_xor(dst, src, bsize);
 
                 /* increment counter in counterblock */
-                crypto_inc(ctrblk + bsize - countersize, countersize);
+                crypto_inc(ctrblk, bsize);
 
                 src += bsize;
                 dst += bsize;
@@ -101,15 +92,17 @@ static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
 }
 
 static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
-                                    struct crypto_cipher *tfm, u8 *ctrblk,
-                                    unsigned int countersize)
+                                    struct crypto_cipher *tfm)
 {
         void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
            crypto_cipher_alg(tfm)->cia_encrypt;
         unsigned int bsize = crypto_cipher_blocksize(tfm);
+        unsigned long alignmask = crypto_cipher_alignmask(tfm);
         unsigned int nbytes = walk->nbytes;
+        u8 *ctrblk = walk->iv;
         u8 *src = walk->src.virt.addr;
-        u8 *keystream = ctrblk + bsize;
+        u8 tmp[bsize + alignmask];
+        u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
 
         do {
                 /* create keystream */
@@ -117,7 +110,7 @@ static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
                 crypto_xor(src, keystream, bsize);
 
                 /* increment counter in counterblock */
-                crypto_inc(ctrblk + bsize - countersize, countersize);
+                crypto_inc(ctrblk, bsize);
 
                 src += bsize;
         } while ((nbytes -= bsize) >= bsize);
@@ -134,41 +127,22 @@ static int crypto_ctr_crypt(struct blkcipher_desc *desc,
         struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
         struct crypto_cipher *child = ctx->child;
         unsigned int bsize = crypto_cipher_blocksize(child);
-        struct ctr_instance_ctx *ictx =
-                crypto_instance_ctx(crypto_tfm_alg_instance(&tfm->base));
-        unsigned long alignmask = crypto_cipher_alignmask(child) |
-                                  (__alignof__(u32) - 1);
-        u8 cblk[bsize * 2 + alignmask];
-        u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1);
         int err;
 
         blkcipher_walk_init(&walk, dst, src, nbytes);
         err = blkcipher_walk_virt_block(desc, &walk, bsize);
 
-        /* set up counter block */
-        memset(counterblk, 0 , bsize);
-        memcpy(counterblk, ctx->nonce, ictx->noncesize);
-        memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize);
-
-        /* initialize counter portion of counter block */
-        crypto_inc(counterblk + bsize - ictx->countersize, ictx->countersize);
-
         while (walk.nbytes >= bsize) {
                 if (walk.src.virt.addr == walk.dst.virt.addr)
-                        nbytes = crypto_ctr_crypt_inplace(&walk, child,
-                                                          counterblk,
-                                                          ictx->countersize);
+                        nbytes = crypto_ctr_crypt_inplace(&walk, child);
                 else
-                        nbytes = crypto_ctr_crypt_segment(&walk, child,
-                                                          counterblk,
-                                                          ictx->countersize);
+                        nbytes = crypto_ctr_crypt_segment(&walk, child);
 
                 err = blkcipher_walk_done(desc, &walk, nbytes);
         }
 
         if (walk.nbytes) {
-                crypto_ctr_crypt_final(&walk, child, counterblk,
-                                       ictx->countersize);
+                crypto_ctr_crypt_final(&walk, child);
                 err = blkcipher_walk_done(desc, &walk, 0);
         }
 
@@ -178,15 +152,11 @@ static int crypto_ctr_crypt(struct blkcipher_desc *desc,
 static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
 {
         struct crypto_instance *inst = (void *)tfm->__crt_alg;
-        struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);
+        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
         struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
         struct crypto_cipher *cipher;
 
-        ctx->nonce = kzalloc(ictx->noncesize, GFP_KERNEL);
-        if (!ctx->nonce)
-                return -ENOMEM;
-
-        cipher = crypto_spawn_cipher(&ictx->alg);
+        cipher = crypto_spawn_cipher(spawn);
         if (IS_ERR(cipher))
                 return PTR_ERR(cipher);
 
@@ -199,7 +169,6 @@ static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
 {
         struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
-        kfree(ctx->nonce);
         crypto_free_cipher(ctx->child);
 }
 
@@ -207,10 +176,6 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
 {
         struct crypto_instance *inst;
         struct crypto_alg *alg;
-        struct ctr_instance_ctx *ictx;
-        unsigned int noncesize;
-        unsigned int ivsize;
-        unsigned int countersize;
         int err;
 
         err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
@@ -222,71 +187,28 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
         if (IS_ERR(alg))
                 return ERR_PTR(PTR_ERR(alg));
 
-        err = crypto_attr_u32(tb[2], &noncesize);
-        if (err)
-                goto out_put_alg;
-
-        err = crypto_attr_u32(tb[3], &ivsize);
-        if (err)
-                goto out_put_alg;
-
-        err = crypto_attr_u32(tb[4], &countersize);
-        if (err)
-                goto out_put_alg;
-
-        /* verify size of nonce + iv + counter
-         * counter must be >= 4 bytes.
-         */
+        /* Block size must be >= 4 bytes. */
         err = -EINVAL;
-        if (((noncesize + ivsize + countersize) < alg->cra_blocksize) ||
-            ((noncesize + ivsize) > alg->cra_blocksize) ||
-            (countersize > alg->cra_blocksize) || (countersize < 4))
+        if (alg->cra_blocksize < 4)
                 goto out_put_alg;
 
         /* If this is false we'd fail the alignment of crypto_inc. */
-        if ((alg->cra_blocksize - countersize) % 4)
+        if (alg->cra_blocksize % 4)
                 goto out_put_alg;
 
-        inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
-        err = -ENOMEM;
-        if (!inst)
-                goto out_put_alg;
-
-        err = -ENAMETOOLONG;
-        if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
-                     "ctr(%s,%u,%u,%u)", alg->cra_name, noncesize,
-                     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
-                goto err_free_inst;
-        }
-
-        if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
-                     "ctr(%s,%u,%u,%u)", alg->cra_driver_name, noncesize,
-                     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
-                goto err_free_inst;
-        }
-
-        ictx = crypto_instance_ctx(inst);
-        ictx->noncesize = noncesize;
-        ictx->ivsize = ivsize;
-        ictx->countersize = countersize;
-
-        err = crypto_init_spawn(&ictx->alg, alg, inst,
-                                CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
-        if (err)
-                goto err_free_inst;
+        inst = crypto_alloc_instance("ctr", alg);
+        if (IS_ERR(inst))
+                goto out;
 
-        err = 0;
         inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
         inst->alg.cra_priority = alg->cra_priority;
         inst->alg.cra_blocksize = 1;
         inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
         inst->alg.cra_type = &crypto_blkcipher_type;
 
-        inst->alg.cra_blkcipher.ivsize = ivsize;
-        inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize
-                                              + noncesize;
-        inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize
-                                              + noncesize;
+        inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
+        inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
+        inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
 
         inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);
 
@@ -297,24 +219,18 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
         inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
         inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
 
-err_free_inst:
-        if (err)
-                kfree(inst);
-
-out_put_alg:
+out:
         crypto_mod_put(alg);
-
-        if (err)
-                inst = ERR_PTR(err);
-
         return inst;
+
+out_put_alg:
+        inst = ERR_PTR(err);
+        goto out;
 }
 
 static void crypto_ctr_free(struct crypto_instance *inst)
 {
-        struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);
-
-        crypto_drop_spawn(&ictx->alg);
+        crypto_drop_spawn(crypto_instance_ctx(inst));
         kfree(inst);
 }
 
@@ -325,13 +241,174 @@ static struct crypto_template crypto_ctr_tmpl = {
         .module = THIS_MODULE,
 };
 
+static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
+                                 unsigned int keylen)
+{
+        struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
+        struct crypto_blkcipher *child = ctx->child;
+        int err;
+
+        /* the nonce is stored in bytes at end of key */
+        if (keylen < CTR_RFC3686_NONCE_SIZE)
+                return -EINVAL;
+
+        memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
+               CTR_RFC3686_NONCE_SIZE);
+
+        keylen -= CTR_RFC3686_NONCE_SIZE;
+
+        crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+        crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
+                                          CRYPTO_TFM_REQ_MASK);
+        err = crypto_blkcipher_setkey(child, key, keylen);
+        crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
+                                     CRYPTO_TFM_RES_MASK);
+
+        return err;
+}
+
+static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
+                                struct scatterlist *dst,
+                                struct scatterlist *src, unsigned int nbytes)
+{
+        struct crypto_blkcipher *tfm = desc->tfm;
+        struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
+        struct crypto_blkcipher *child = ctx->child;
+        unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
+        u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
+        u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
+        u8 *info = desc->info;
+        int err;
+
+        /* set up counter block */
+        memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
+        memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);
+
+        /* initialize counter portion of counter block */
+        *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
+                cpu_to_be32(1);
+
+        desc->tfm = child;
+        desc->info = iv;
+        err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+        desc->tfm = tfm;
+        desc->info = info;
+
+        return err;
+}
+
+static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
+{
+        struct crypto_instance *inst = (void *)tfm->__crt_alg;
+        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+        struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
+        struct crypto_blkcipher *cipher;
+
+        cipher = crypto_spawn_blkcipher(spawn);
+        if (IS_ERR(cipher))
+                return PTR_ERR(cipher);
+
+        ctx->child = cipher;
+
+        return 0;
+}
+
+static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
+{
+        struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
+
+        crypto_free_blkcipher(ctx->child);
+}
+
+static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
+{
+        struct crypto_instance *inst;
+        struct crypto_alg *alg;
+        int err;
+
+        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
+        if (err)
+                return ERR_PTR(err);
+
+        alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
+                              CRYPTO_ALG_TYPE_MASK);
+        err = PTR_ERR(alg);
+        if (IS_ERR(alg))
+                return ERR_PTR(err);
+
+        /* We only support 16-byte blocks. */
+        err = -EINVAL;
+        if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
+                goto out_put_alg;
+
+        /* Not a stream cipher? */
+        if (alg->cra_blocksize != 1)
+                goto out_put_alg;
+
+        inst = crypto_alloc_instance("rfc3686", alg);
+        if (IS_ERR(inst))
+                goto out;
+
+        inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
+        inst->alg.cra_priority = alg->cra_priority;
+        inst->alg.cra_blocksize = 1;
+        inst->alg.cra_alignmask = alg->cra_alignmask;
+        inst->alg.cra_type = &crypto_blkcipher_type;
+
+        inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
+        inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
+                                              + CTR_RFC3686_NONCE_SIZE;
+        inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
+                                              + CTR_RFC3686_NONCE_SIZE;
+
+        inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
+
+        inst->alg.cra_init = crypto_rfc3686_init_tfm;
+        inst->alg.cra_exit = crypto_rfc3686_exit_tfm;
+
+        inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
+        inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
+        inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;
+
+out:
+        crypto_mod_put(alg);
+        return inst;
+
+out_put_alg:
+        inst = ERR_PTR(err);
+        goto out;
+}
+
+static struct crypto_template crypto_rfc3686_tmpl = {
+        .name = "rfc3686",
+        .alloc = crypto_rfc3686_alloc,
+        .free = crypto_ctr_free,
+        .module = THIS_MODULE,
+};
+
 static int __init crypto_ctr_module_init(void)
 {
-        return crypto_register_template(&crypto_ctr_tmpl);
+        int err;
+
+        err = crypto_register_template(&crypto_ctr_tmpl);
+        if (err)
+                goto out;
+
+        err = crypto_register_template(&crypto_rfc3686_tmpl);
+        if (err)
+                goto out_drop_ctr;
+
+out:
+        return err;
+
+out_drop_ctr:
+        crypto_unregister_template(&crypto_ctr_tmpl);
+        goto out;
 }
 
 static void __exit crypto_ctr_module_exit(void)
 {
+        crypto_unregister_template(&crypto_rfc3686_tmpl);
         crypto_unregister_template(&crypto_ctr_tmpl);
 }
 
@@ -340,3 +417,4 @@ module_exit(crypto_ctr_module_exit);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("CTR Counter block mode");
+MODULE_ALIAS("rfc3686");
diff --git a/crypto/gcm.c b/crypto/gcm.c
index 08183171913c..c54d478948a0 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -160,7 +160,7 @@ static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
 
 static inline void crypto_gcm_set_counter(u8 *counterblock, u32 value)
 {
-        *((u32 *)&counterblock[12]) = cpu_to_be32(value);
+        *((u32 *)&counterblock[12]) = cpu_to_be32(value + 1);
 }
 
 static int crypto_gcm_encrypt_counter(struct crypto_aead *aead, u8 *block,
@@ -400,9 +400,8 @@ static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
                 return inst;
 
         inst = ERR_PTR(ENAMETOOLONG);
-        if (snprintf(
-                ctr_name, CRYPTO_MAX_ALG_NAME,
-                "ctr(%s,0,16,4)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
+        if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
+                     cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
                 return inst;
 
         ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 0cfb8ebb22ba..1142b4998c84 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -1193,9 +1193,9 @@ static void do_test(void)
                             AES_XTS_ENC_TEST_VECTORS);
                 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
                             AES_XTS_DEC_TEST_VECTORS);
-                test_cipher("ctr(aes,4,8,4)", ENCRYPT, aes_ctr_enc_tv_template,
+                test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
                             AES_CTR_ENC_TEST_VECTORS);
-                test_cipher("ctr(aes,4,8,4)", DECRYPT, aes_ctr_dec_tv_template,
+                test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
                             AES_CTR_DEC_TEST_VECTORS);
                 test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
                           AES_GCM_ENC_TEST_VECTORS);
@@ -1394,9 +1394,9 @@ static void do_test(void)
                             AES_XTS_ENC_TEST_VECTORS);
                 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
                             AES_XTS_DEC_TEST_VECTORS);
-                test_cipher("ctr(aes,4,8,4)", ENCRYPT, aes_ctr_enc_tv_template,
+                test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
                             AES_CTR_ENC_TEST_VECTORS);
-                test_cipher("ctr(aes,4,8,4)", DECRYPT, aes_ctr_dec_tv_template,
+                test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
                             AES_CTR_DEC_TEST_VECTORS);
                 break;
 
diff --git a/include/crypto/ctr.h b/include/crypto/ctr.h
new file mode 100644
index 000000000000..4180fc080e3b
--- /dev/null
+++ b/include/crypto/ctr.h
@@ -0,0 +1,20 @@
+/*
+ * CTR: Counter mode
+ *
+ * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#ifndef _CRYPTO_CTR_H
+#define _CRYPTO_CTR_H
+
+#define CTR_RFC3686_NONCE_SIZE 4
+#define CTR_RFC3686_IV_SIZE 8
+#define CTR_RFC3686_BLOCK_SIZE 16
+
+#endif /* _CRYPTO_CTR_H */