path: root/crypto/seqiv.c
author	Herbert Xu <herbert@gondor.apana.org.au>	2007-12-11 23:27:26 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-01-10 16:16:52 -0500
commit	14df4d80433b8413f901e80880c39e8759b8418f (patch)
tree	9d7e899bf2b3a9d71cd95eef9bcd40b284b6e265 /crypto/seqiv.c
parent	5b6d2d7fdf806f2b5a9352416f9e670911fc4748 (diff)
[CRYPTO] seqiv: Add AEAD support
This patch adds support for using seqiv with AEAD algorithms. This is useful for those AEAD algorithms that perform authentication before encryption, because the IV generated by the underlying encryption algorithm won't be available for authentication.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
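For readers skimming the diff below: both the existing ablkcipher path and the new AEAD path now derive the IV the same way, via the new seqiv_geniv() helper, by writing the 64-bit sequence number big-endian into the tail of the IV and XORing it with a per-transform random salt. The following is a rough user-space sketch of that derivation; build_seqiv() and the standalone types are illustrative only and not part of the patch.

/*
 * Standalone sketch (not kernel code) of the IV construction done by
 * seqiv_geniv() in the patch: zero any leading bytes the 64-bit counter
 * does not cover, store the sequence number big-endian at the end of the
 * IV, then mask the whole IV with the per-transform salt.
 */
#include <stdint.h>
#include <string.h>

static void build_seqiv(uint8_t *iv, const uint8_t *salt,
			uint64_t seq, size_t ivsize)
{
	uint8_t be[8];
	size_t len = ivsize;
	size_t i;

	/* Zero the leading bytes the counter does not cover. */
	if (ivsize > sizeof(be)) {
		memset(iv, 0, ivsize - sizeof(be));
		len = sizeof(be);
	}

	/* Emulate cpu_to_be64(): most significant byte first. */
	for (i = 0; i < sizeof(be); i++)
		be[i] = (uint8_t)(seq >> (8 * (7 - i)));

	/* Place the counter at the end of the IV, as seqiv_geniv() does. */
	memcpy(iv + ivsize - len, be, len);

	/* Emulate crypto_xor(): mask the IV with the salt. */
	for (i = 0; i < ivsize; i++)
		iv[i] ^= salt[i];
}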
Diffstat (limited to 'crypto/seqiv.c')
-rw-r--r--	crypto/seqiv.c	191
1 file changed, 175 insertions(+), 16 deletions(-)
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
index 9c2d80d77a0a..b903aab31577 100644
--- a/crypto/seqiv.c
+++ b/crypto/seqiv.c
@@ -13,6 +13,7 @@
  *
  */
 
+#include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/err.h>
 #include <linux/init.h>
@@ -53,6 +54,46 @@ static void seqiv_complete(struct crypto_async_request *base, int err)
 	skcipher_givcrypt_complete(req, err);
 }
 
+static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
+{
+	struct aead_request *subreq = aead_givcrypt_reqctx(req);
+	struct crypto_aead *geniv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (err)
+		goto out;
+
+	geniv = aead_givcrypt_reqtfm(req);
+	memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));
+
+out:
+	kfree(subreq->iv);
+}
+
+static void seqiv_aead_complete(struct crypto_async_request *base, int err)
+{
+	struct aead_givcrypt_request *req = base->data;
+
+	seqiv_aead_complete2(req, err);
+	aead_givcrypt_complete(req, err);
+}
+
+static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
+			unsigned int ivsize)
+{
+	unsigned int len = ivsize;
+
+	if (ivsize > sizeof(u64)) {
+		memset(info, 0, ivsize - sizeof(u64));
+		len = sizeof(u64);
+	}
+	seq = cpu_to_be64(seq);
+	memcpy(info + ivsize - len, &seq, len);
+	crypto_xor(info, ctx->salt, ivsize);
+}
+
 static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
 {
 	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
@@ -61,9 +102,7 @@ static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
 	crypto_completion_t complete;
 	void *data;
 	u8 *info;
-	__be64 seq;
 	unsigned int ivsize;
-	unsigned int len;
 	int err;
 
 	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
@@ -91,15 +130,7 @@ static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
 	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
 				     req->creq.nbytes, info);
 
-	len = ivsize;
-	if (ivsize > sizeof(u64)) {
-		memset(info, 0, ivsize - sizeof(u64));
-		len = sizeof(u64);
-	}
-	seq = cpu_to_be64(req->seq);
-	memcpy(info + ivsize - len, &seq, len);
-	crypto_xor(info, ctx->salt, ivsize);
-
+	seqiv_geniv(ctx, info, req->seq, ivsize);
 	memcpy(req->giv, info, ivsize);
 
 	err = crypto_ablkcipher_encrypt(subreq);
@@ -108,6 +139,52 @@ static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
 	return err;
 }
 
+static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
+{
+	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
+	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
+	struct aead_request *areq = &req->areq;
+	struct aead_request *subreq = aead_givcrypt_reqctx(req);
+	crypto_completion_t complete;
+	void *data;
+	u8 *info;
+	unsigned int ivsize;
+	int err;
+
+	aead_request_set_tfm(subreq, aead_geniv_base(geniv));
+
+	complete = areq->base.complete;
+	data = areq->base.data;
+	info = areq->iv;
+
+	ivsize = crypto_aead_ivsize(geniv);
+
+	if (unlikely(!IS_ALIGNED((unsigned long)info,
+				 crypto_aead_alignmask(geniv) + 1))) {
+		info = kmalloc(ivsize, areq->base.flags &
+				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
+								  GFP_ATOMIC);
+		if (!info)
+			return -ENOMEM;
+
+		complete = seqiv_aead_complete;
+		data = req;
+	}
+
+	aead_request_set_callback(subreq, areq->base.flags, complete, data);
+	aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
+			       info);
+	aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);
+
+	seqiv_geniv(ctx, info, req->seq, ivsize);
+	memcpy(req->giv, info, ivsize);
+
+	err = crypto_aead_encrypt(subreq);
+	if (unlikely(info != areq->iv))
+		seqiv_aead_complete2(req, err);
+	return err;
+}
+
 static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
 {
 	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
@@ -126,6 +203,24 @@ unlock:
 	return seqiv_givencrypt(req);
 }
 
+static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req)
+{
+	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
+	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
+
+	spin_lock_bh(&ctx->lock);
+	if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first)
+		goto unlock;
+
+	crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt;
+	get_random_bytes(ctx->salt, crypto_aead_ivsize(geniv));
+
+unlock:
+	spin_unlock_bh(&ctx->lock);
+
+	return seqiv_aead_givencrypt(req);
+}
+
 static int seqiv_init(struct crypto_tfm *tfm)
 {
 	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
@@ -138,13 +233,26 @@ static int seqiv_init(struct crypto_tfm *tfm)
 	return skcipher_geniv_init(tfm);
 }
 
+static int seqiv_aead_init(struct crypto_tfm *tfm)
+{
+	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
+	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
+
+	spin_lock_init(&ctx->lock);
+
+	tfm->crt_aead.reqsize = sizeof(struct aead_request);
+
+	return aead_geniv_init(tfm);
+}
+
 static struct crypto_template seqiv_tmpl;
 
-static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
+static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
 {
 	struct crypto_instance *inst;
 
 	inst = skcipher_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
+
 	if (IS_ERR(inst))
 		goto out;
 
@@ -153,19 +261,70 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
 	inst->alg.cra_init = seqiv_init;
 	inst->alg.cra_exit = skcipher_geniv_exit;
 
-	inst->alg.cra_alignmask |= __alignof__(u32) - 1;
-
-	inst->alg.cra_ctxsize = sizeof(struct seqiv_ctx);
 	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
 
 out:
 	return inst;
 }
 
+static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb)
+{
+	struct crypto_instance *inst;
+
+	inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
+
+	if (IS_ERR(inst))
+		goto out;
+
+	inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;
+
+	inst->alg.cra_init = seqiv_aead_init;
+	inst->alg.cra_exit = aead_geniv_exit;
+
+	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
+
+out:
+	return inst;
+}
+
+static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
+{
+	struct crypto_attr_type *algt;
+	struct crypto_instance *inst;
+	int err;
+
+	algt = crypto_get_attr_type(tb);
+	err = PTR_ERR(algt);
+	if (IS_ERR(algt))
+		return ERR_PTR(err);
+
+	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
+		inst = seqiv_ablkcipher_alloc(tb);
+	else
+		inst = seqiv_aead_alloc(tb);
+
+	if (IS_ERR(inst))
+		goto out;
+
+	inst->alg.cra_alignmask |= __alignof__(u32) - 1;
+	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
+
+out:
+	return inst;
+}
+
+static void seqiv_free(struct crypto_instance *inst)
+{
+	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
+		skcipher_geniv_free(inst);
+	else
+		aead_geniv_free(inst);
+}
+
 static struct crypto_template seqiv_tmpl = {
 	.name = "seqiv",
 	.alloc = seqiv_alloc,
-	.free = skcipher_geniv_free,
+	.free = seqiv_free,
 	.module = THIS_MODULE,
 };
 
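A note on the final hunk: seqiv_alloc() and seqiv_free() now dispatch on the requested algorithm type with the masked XOR shown in the diff, which is zero only when the caller asked for an AEAD; the common post-dispatch code then applies the alignmask and adds the shared seqiv_ctx size for both paths, which is why those lines moved out of the ablkcipher allocator. A standalone illustration of that test is below; the constants and the helper name are placeholders, only the masked-XOR pattern mirrors the patch.

/*
 * Standalone illustration (not kernel code) of the type test used by
 * seqiv_alloc() and seqiv_free().  The constant values are placeholders
 * for the kernel's CRYPTO_ALG_TYPE_* definitions.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ALG_TYPE_MASK		0x0000000fU	/* placeholder for CRYPTO_ALG_TYPE_MASK */
#define ALG_TYPE_BLKCIPHER	0x00000004U	/* placeholder */
#define ALG_TYPE_AEAD		0x00000003U	/* placeholder for CRYPTO_ALG_TYPE_AEAD */

/* Non-zero (true) when the type bits differ from AEAD: take the ablkcipher path. */
static bool wants_ablkcipher(uint32_t type)
{
	return (type ^ ALG_TYPE_AEAD) & ALG_TYPE_MASK;
}

int main(void)
{
	/* Prints 0 for an AEAD request, 1 for a block-cipher request. */
	printf("AEAD request      -> ablkcipher path? %d\n",
	       wants_ablkcipher(ALG_TYPE_AEAD));
	printf("blkcipher request -> ablkcipher path? %d\n",
	       wants_ablkcipher(ALG_TYPE_BLKCIPHER));
	return 0;
}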