author		Herbert Xu <herbert@gondor.apana.org.au>	2007-12-04 03:17:50 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-01-10 16:16:31 -0500
commit		6160b289929c0b622e64aa36106d8e6e53fcd826 (patch)
tree		d8968b0e4aea9d9ef80459d344cd9f4a3e6221b2 /crypto/gcm.c
parent		8df213d9b520a4b58b7a8f7f2200324d4e40363d (diff)
[CRYPTO] gcm: Fix ICV handling
The crypto_aead convention for ICVs is to include them directly in the
output. If we decide to change this in the future, we can make the ICV
(if the algorithm has an explicit one) available in the request itself.

For now no algorithm needs this, so this patch changes gcm to conform
to this convention. It also adjusts the tcrypt aead tests to take this
into account.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
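To make the convention concrete, here is a minimal caller-side sketch of the buffer layout it implies: on encryption the generated ICV is written into the destination directly behind the ciphertext, and on decryption the request's cryptlen covers ciphertext plus ICV. The helper below is hypothetical (it is not part of this patch or of tcrypt), assumes the 2.6.24-era AEAD API, uses a single in-place buffer of ptlen + 16 bytes, and ignores async completion.

```c
/*
 * Hypothetical demo, not part of the patch: encrypt ptlen bytes in place
 * in buf (which must hold ptlen + 16 bytes), then decrypt them again.
 * iv must be crypto_aead_ivsize(tfm) bytes.  -EINPROGRESS from async
 * implementations is not handled here.
 */
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int gcm_icv_layout_demo(const u8 *key, unsigned int keylen,
			       u8 *iv, u8 *buf, unsigned int ptlen)
{
	unsigned int authsize = 16;	/* full GCM tag */
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_aead(tfm);
		return -ENOMEM;
	}

	err = crypto_aead_setkey(tfm, key, keylen);
	if (!err)
		err = crypto_aead_setauthsize(tfm, authsize);

	/* One buffer used in place for both src and dst. */
	sg_init_one(&sg, buf, ptlen + authsize);
	aead_request_set_assoc(req, NULL, 0);	/* no associated data */

	/*
	 * Encrypt: cryptlen is the plaintext length only; the cipher
	 * writes the ICV into dst directly behind the ciphertext.
	 */
	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
	if (!err)
		err = crypto_aead_encrypt(req);

	/*
	 * Decrypt: cryptlen now covers ciphertext plus ICV; gcm reads
	 * the trailing tag, verifies it and returns -EINVAL on mismatch.
	 */
	aead_request_set_crypt(req, &sg, &sg, ptlen + authsize, iv);
	if (!err)
		err = crypto_aead_decrypt(req);

	aead_request_free(req);
	crypto_free_aead(tfm);
	return err;
}
```

A real user would also set a completion callback with aead_request_set_callback() and wait for requests that return -EINPROGRESS.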
Diffstat (limited to 'crypto/gcm.c')
-rw-r--r--	crypto/gcm.c	68
1 file changed, 40 insertions(+), 28 deletions(-)
diff --git a/crypto/gcm.c b/crypto/gcm.c
index 5681c7957b88..ed8a6261b346 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -36,6 +36,7 @@ struct crypto_gcm_ghash_ctx {
 
 struct crypto_gcm_req_priv_ctx {
 	u8 auth_tag[16];
+	u8 iauth_tag[16];
 	u8 counter[16];
 	struct crypto_gcm_ghash_ctx ghash;
 };
@@ -89,6 +90,9 @@ static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
 	u8 *src;
 	int n;
 
+	if (!len)
+		return;
+
 	scatterwalk_start(&walk, sg);
 
 	while (len) {
@@ -211,9 +215,10 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 }
 
 static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
 				 struct aead_request *req,
-				 void (*done)(struct crypto_async_request *,
-					      int))
+				 unsigned int cryptlen,
+				 void (*done)(struct crypto_async_request *,
+					      int))
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
@@ -228,7 +233,7 @@ static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
 	ablkcipher_request_set_callback(ablk_req, aead_request_flags(req),
 					done, req);
 	ablkcipher_request_set_crypt(ablk_req, req->src, req->dst,
-				     req->cryptlen, counter);
+				     cryptlen, counter);
 
 	err = crypto_gcm_encrypt_counter(aead, auth_tag, 0, req->iv);
 	if (err)
@@ -239,18 +244,16 @@ static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
 
 	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
 
-	if (req->assoclen) {
-		crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
-		crypto_gcm_ghash_flush(ghash);
-	}
+	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
+	crypto_gcm_ghash_flush(ghash);
 
 out:
 	return err;
 }
 
-static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
+static int crypto_gcm_hash(struct aead_request *req)
 {
-	struct aead_request *req = areq->data;
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_req_priv_ctx *pctx = aead_request_ctx(req);
 	u8 *auth_tag = pctx->auth_tag;
 	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
@@ -259,18 +262,28 @@ static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
 	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
 				   auth_tag);
 
+	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
+				 crypto_aead_authsize(aead), 1);
+	return 0;
+}
+
+static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
+{
+	struct aead_request *req = areq->data;
+
+	if (!err)
+		err = crypto_gcm_hash(req);
+
 	aead_request_complete(req, err);
 }
 
 static int crypto_gcm_encrypt(struct aead_request *req)
 {
 	struct ablkcipher_request abreq;
-	struct crypto_gcm_req_priv_ctx *pctx = aead_request_ctx(req);
-	u8 *auth_tag = pctx->auth_tag;
-	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
 	int err = 0;
 
-	err = crypto_gcm_init_crypt(&abreq, req, crypto_gcm_encrypt_done);
+	err = crypto_gcm_init_crypt(&abreq, req, req->cryptlen,
+				    crypto_gcm_encrypt_done);
 	if (err)
 		return err;
 
@@ -278,14 +291,9 @@ static int crypto_gcm_encrypt(struct aead_request *req)
 		err = crypto_ablkcipher_encrypt(&abreq);
 		if (err)
 			return err;
-
-		crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
 	}
 
-	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
-				   auth_tag);
-
-	return err;
+	return crypto_gcm_hash(req);
 }
 
 static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
@@ -296,25 +304,29 @@ static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
 static int crypto_gcm_decrypt(struct aead_request *req)
 {
 	struct ablkcipher_request abreq;
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_gcm_req_priv_ctx *pctx = aead_request_ctx(req);
 	u8 *auth_tag = pctx->auth_tag;
+	u8 *iauth_tag = pctx->iauth_tag;
 	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
-	u8 tag[16];
+	unsigned int cryptlen = req->cryptlen;
+	unsigned int authsize = crypto_aead_authsize(aead);
 	int err;
 
-	if (!req->cryptlen)
+	if (cryptlen < authsize)
 		return -EINVAL;
+	cryptlen -= authsize;
 
-	memcpy(tag, auth_tag, 16);
-	err = crypto_gcm_init_crypt(&abreq, req, crypto_gcm_decrypt_done);
+	err = crypto_gcm_init_crypt(&abreq, req, cryptlen,
+				    crypto_gcm_decrypt_done);
 	if (err)
 		return err;
 
-	crypto_gcm_ghash_update_sg(ghash, req->src, req->cryptlen);
-	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
-				   auth_tag);
+	crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
+	crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
 
-	if (memcmp(tag, auth_tag, 16))
+	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
+	if (memcmp(iauth_tag, auth_tag, authsize))
 		return -EINVAL;
 
 	return crypto_ablkcipher_decrypt(&abreq);
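For reference, the scatterwalk_map_and_copy() helper this patch leans on copies bytes between a linear buffer and a scatterlist at a given offset; its last argument selects the direction, which is why the encrypt path passes 1 (write the computed tag into req->dst behind the ciphertext) and the decrypt path passes 0 (read the received tag out of req->src for comparison). Its prototype, as provided by the crypto scatterwalk code of this era:

```c
/* Copy nbytes between the linear buffer buf and the scatterlist sg,
 * starting at byte offset start: out != 0 copies buf -> sg (appending
 * the ICV on encrypt), out == 0 copies sg -> buf (extracting the
 * received ICV on decrypt). */
void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
			      unsigned int start, unsigned int nbytes,
			      int out);
```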