author    Herbert Xu <herbert@gondor.apana.org.au>    2007-12-17 08:42:08 -0500
committer Herbert Xu <herbert@gondor.apana.org.au>    2008-01-10 16:16:42 -0500
commit    84c911523020a2e39b307a2da26ee1886b7214fe (patch)
tree      49ff26b7da1bce6672c8bc2b0810115df9f8d8c7 /crypto
parent    5311f248b7764ba8b59e6d477355f766e5609686 (diff)
[CRYPTO] gcm: Add support for async ciphers
This patch adds the necessary changes for GCM to be used with async
ciphers.  This would allow it to be used with hardware devices that
support CTR.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
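The heart of the change is that crypto_gcm_setkey() now drives the CTR
cipher through the asynchronous ablkcipher interface and, when the driver
goes async, parks on a completion until the callback reports the result.
A minimal sketch of that wait pattern as the patch applies it (the my_*
names are illustrative, not part of the patch):

/* Kick off the cipher request; if the driver went asynchronous
 * (-EINPROGRESS/-EBUSY), sleep on a completion fired by the callback.
 * Assumes process context; struct my_result mirrors the patch's
 * struct crypto_gcm_setkey_result. */
struct my_result {
        int err;
        struct completion completion;
};

static void my_done(struct crypto_async_request *req, int err)
{
        struct my_result *result = req->data;

        if (err == -EINPROGRESS)        /* backlog notification, not final */
                return;

        result->err = err;
        complete(&result->completion);
}

static int my_encrypt_and_wait(struct ablkcipher_request *req,
                               struct my_result *result)
{
        int err;

        init_completion(&result->completion);
        err = crypto_ablkcipher_encrypt(req);
        if (err == -EINPROGRESS || err == -EBUSY) {
                err = wait_for_completion_interruptible(&result->completion);
                if (!err)
                        err = result->err;
        }
        return err;
}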
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/gcm.c | 190
1 file changed, 112 insertions(+), 78 deletions(-)
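Most of the churn below replaces the synchronous
crypto_gcm_encrypt_counter() helper: the counter-0 block that masks the
authentication tag is now encrypted by the same asynchronous CTR request
as the payload, by chaining the per-request 16-byte auth_tag buffer ahead
of the caller's scatterlist. A minimal sketch of that chaining, with names
taken from the patch (error handling and the enclosing function elided):

/* Two-entry scatterlist: entry 0 is the in-context auth_tag buffer,
 * entry 1 chains to the caller's data.  One CTR request over
 * cryptlen + 16 bytes then produces the tag mask and the ciphertext
 * together, with no separate synchronous encryption step. */
sg_init_table(pctx->src, 2);
sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
scatterwalk_sg_chain(pctx->src, 2, req->src);

ablkcipher_request_set_crypt(abreq, pctx->src, pctx->src,
                             cryptlen + sizeof(pctx->auth_tag),
                             req->iv);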
diff --git a/crypto/gcm.c b/crypto/gcm.c
index c54d478948a0..f6bee6f209a6 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -11,6 +11,7 @@
 #include <crypto/algapi.h>
 #include <crypto/gf128mul.h>
 #include <crypto/scatterwalk.h>
+#include <linux/completion.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -38,11 +39,17 @@ struct crypto_gcm_ghash_ctx {
 struct crypto_gcm_req_priv_ctx {
         u8 auth_tag[16];
         u8 iauth_tag[16];
-        u8 counter[16];
+        struct scatterlist src[2];
+        struct scatterlist dst[2];
         struct crypto_gcm_ghash_ctx ghash;
         struct ablkcipher_request abreq;
 };
 
+struct crypto_gcm_setkey_result {
+        int err;
+        struct completion completion;
+};
+
 static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
         struct aead_request *req)
 {
@@ -158,33 +165,15 @@ static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
         crypto_xor(dst, buf, 16);
 }
 
-static inline void crypto_gcm_set_counter(u8 *counterblock, u32 value)
+static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
 {
-        *((u32 *)&counterblock[12]) = cpu_to_be32(value + 1);
-}
-
-static int crypto_gcm_encrypt_counter(struct crypto_aead *aead, u8 *block,
-                                      u32 value, const u8 *iv)
-{
-        struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
-        struct crypto_ablkcipher *ctr = ctx->ctr;
-        struct ablkcipher_request req;
-        struct scatterlist sg;
-        u8 counterblock[16];
-
-        if (iv == NULL)
-                memset(counterblock, 0, 12);
-        else
-                memcpy(counterblock, iv, 12);
-
-        crypto_gcm_set_counter(counterblock, value);
-
-        sg_init_one(&sg, block, 16);
-        ablkcipher_request_set_tfm(&req, ctr);
-        ablkcipher_request_set_crypt(&req, &sg, &sg, 16, counterblock);
-        ablkcipher_request_set_callback(&req, 0, NULL, NULL);
-        memset(block, 0, 16);
-        return crypto_ablkcipher_encrypt(&req);
+        struct crypto_gcm_setkey_result *result = req->data;
+
+        if (err == -EINPROGRESS)
+                return;
+
+        result->err = err;
+        complete(&result->completion);
 }
 
 static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
@@ -192,10 +181,16 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 {
         struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
         struct crypto_ablkcipher *ctr = ctx->ctr;
-        int alignmask = crypto_ablkcipher_alignmask(ctr);
-        u8 alignbuf[16+alignmask];
-        u8 *hash = (u8 *)ALIGN((unsigned long)alignbuf, alignmask+1);
-        int err = 0;
+        struct {
+                be128 hash;
+                u8 iv[8];
+
+                struct crypto_gcm_setkey_result result;
+
+                struct scatterlist sg[1];
+                struct ablkcipher_request req;
+        } *data;
+        int err;
 
         crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
         crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
@@ -203,62 +198,86 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 
         err = crypto_ablkcipher_setkey(ctr, key, keylen);
         if (err)
-                goto out;
+                return err;
 
         crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
                                     CRYPTO_TFM_RES_MASK);
 
-        err = crypto_gcm_encrypt_counter(aead, hash, -1, NULL);
+        data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
+                       GFP_KERNEL);
+        if (!data)
+                return -ENOMEM;
+
+        init_completion(&data->result.completion);
+        sg_init_one(data->sg, &data->hash, sizeof(data->hash));
+        ablkcipher_request_set_tfm(&data->req, ctr);
+        ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
+                                                    CRYPTO_TFM_REQ_MAY_BACKLOG,
+                                        crypto_gcm_setkey_done,
+                                        &data->result);
+        ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
+                                     sizeof(data->hash), data->iv);
+
+        err = crypto_ablkcipher_encrypt(&data->req);
+        if (err == -EINPROGRESS || err == -EBUSY) {
+                err = wait_for_completion_interruptible(
+                        &data->result.completion);
+                if (!err)
+                        err = data->result.err;
+        }
+
         if (err)
                 goto out;
 
         if (ctx->gf128 != NULL)
                 gf128mul_free_4k(ctx->gf128);
 
-        ctx->gf128 = gf128mul_init_4k_lle((be128 *)hash);
+        ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
 
         if (ctx->gf128 == NULL)
                 err = -ENOMEM;
 
- out:
+out:
+        kfree(data);
         return err;
 }
 
-static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
-                                 struct aead_request *req,
-                                 unsigned int cryptlen,
-                                 void (*done)(struct crypto_async_request *,
-                                              int))
+static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
+                                  struct aead_request *req,
+                                  unsigned int cryptlen)
 {
         struct crypto_aead *aead = crypto_aead_reqtfm(req);
         struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
         struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
         u32 flags = req->base.tfm->crt_flags;
-        u8 *auth_tag = pctx->auth_tag;
-        u8 *counter = pctx->counter;
         struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
-        int err = 0;
+        struct scatterlist *dst;
+        __be32 counter = cpu_to_be32(1);
+
+        memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
+        memcpy(req->iv + 12, &counter, 4);
+
+        sg_init_table(pctx->src, 2);
+        sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
+        scatterwalk_sg_chain(pctx->src, 2, req->src);
+
+        dst = pctx->src;
+        if (req->src != req->dst) {
+                sg_init_table(pctx->dst, 2);
+                sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
+                scatterwalk_sg_chain(pctx->dst, 2, req->dst);
+                dst = pctx->dst;
+        }
 
         ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
-        ablkcipher_request_set_callback(ablk_req, aead_request_flags(req),
-                                        done, req);
-        ablkcipher_request_set_crypt(ablk_req, req->src, req->dst,
-                                     cryptlen, counter);
-
-        err = crypto_gcm_encrypt_counter(aead, auth_tag, 0, req->iv);
-        if (err)
-                goto out;
-
-        memcpy(counter, req->iv, 12);
-        crypto_gcm_set_counter(counter, 1);
+        ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
+                                     cryptlen + sizeof(pctx->auth_tag),
+                                     req->iv);
 
         crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
 
         crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
         crypto_gcm_ghash_flush(ghash);
-
- out:
-        return err;
 }
 
 static int crypto_gcm_hash(struct aead_request *req)
@@ -291,25 +310,44 @@ static int crypto_gcm_encrypt(struct aead_request *req)
 {
         struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
         struct ablkcipher_request *abreq = &pctx->abreq;
-        int err = 0;
+        int err;
+
+        crypto_gcm_init_crypt(abreq, req, req->cryptlen);
+        ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+                                        crypto_gcm_encrypt_done, req);
 
-        err = crypto_gcm_init_crypt(abreq, req, req->cryptlen,
-                                    crypto_gcm_encrypt_done);
+        err = crypto_ablkcipher_encrypt(abreq);
         if (err)
                 return err;
 
-        if (req->cryptlen) {
-                err = crypto_ablkcipher_encrypt(abreq);
-                if (err)
-                        return err;
-        }
-
         return crypto_gcm_hash(req);
 }
 
+static int crypto_gcm_verify(struct aead_request *req)
+{
+        struct crypto_aead *aead = crypto_aead_reqtfm(req);
+        struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+        struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+        u8 *auth_tag = pctx->auth_tag;
+        u8 *iauth_tag = pctx->iauth_tag;
+        unsigned int authsize = crypto_aead_authsize(aead);
+        unsigned int cryptlen = req->cryptlen - authsize;
+
+        crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
+
+        authsize = crypto_aead_authsize(aead);
+        scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
+        return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
+}
+
 static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
 {
-        aead_request_complete(areq->data, err);
+        struct aead_request *req = areq->data;
+
+        if (!err)
+                err = crypto_gcm_verify(req);
+
+        aead_request_complete(req, err);
 }
 
 static int crypto_gcm_decrypt(struct aead_request *req)
@@ -317,8 +355,6 @@ static int crypto_gcm_decrypt(struct aead_request *req)
         struct crypto_aead *aead = crypto_aead_reqtfm(req);
         struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
         struct ablkcipher_request *abreq = &pctx->abreq;
-        u8 *auth_tag = pctx->auth_tag;
-        u8 *iauth_tag = pctx->iauth_tag;
         struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
         unsigned int cryptlen = req->cryptlen;
         unsigned int authsize = crypto_aead_authsize(aead);
@@ -328,19 +364,17 @@ static int crypto_gcm_decrypt(struct aead_request *req)
                 return -EINVAL;
         cryptlen -= authsize;
 
-        err = crypto_gcm_init_crypt(abreq, req, cryptlen,
-                                    crypto_gcm_decrypt_done);
-        if (err)
-                return err;
+        crypto_gcm_init_crypt(abreq, req, cryptlen);
+        ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+                                        crypto_gcm_decrypt_done, req);
 
         crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
-        crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
 
-        scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
-        if (memcmp(iauth_tag, auth_tag, authsize))
-                return -EBADMSG;
+        err = crypto_ablkcipher_decrypt(abreq);
+        if (err)
+                return err;
 
-        return crypto_ablkcipher_decrypt(abreq);
+        return crypto_gcm_verify(req);
 }
 
 static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
@@ -436,7 +470,7 @@ static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
         inst->alg.cra_blocksize = 16;
         inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
         inst->alg.cra_type = &crypto_aead_type;
-        inst->alg.cra_aead.ivsize = 12;
+        inst->alg.cra_aead.ivsize = 16;
         inst->alg.cra_aead.maxauthsize = 16;
         inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
         inst->alg.cra_init = crypto_gcm_init_tfm;
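One caller-visible consequence sits in the last hunk: cra_aead.ivsize
grows from 12 to 16, because crypto_gcm_init_crypt() now writes the
initial big-endian block counter into bytes 12-15 of the caller's IV
buffer instead of keeping a private counter block. A sketch of the
resulting IV contract, assuming a caller-held 96-bit nonce (buffer and
variable names are illustrative):

u8 iv[16];                          /* ivsize is now 16, not 12 */

memcpy(iv, nonce, 12);              /* 96-bit GCM nonce from the caller */
/* iv[12..15] need not be initialized: GCM overwrites them with
 * cpu_to_be32(1) before handing the whole block to the CTR cipher. */
aead_request_set_crypt(req, src, dst, cryptlen, iv);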