aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorJoy Latten <latten@austin.ibm.com>2007-11-07 09:59:47 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2008-01-10 16:16:08 -0500
commit41fdab3dd385dde36caae60ed2df82aecb7a32f0 (patch)
treeaed4c4e7630cebc8b66d33fa6e26ec20f564bbd8
parentd3e7480572bf882dee5baa2891bccbfa3db0b1a1 (diff)
[CRYPTO] ctr: Add countersize
This patch adds countersize to CTR mode. The template is now ctr(algo,noncesize,ivsize,countersize). For example, ctr(aes,4,8,4) indicates the counterblock will be composed of a salt/nonce that is 4 bytes, an iv that is 8 bytes and the counter is 4 bytes. When noncesize + ivsize < blocksize, CTR initializes the last (blocksize - noncesize - ivsize) bytes of the block to zero. Otherwise the counter block is composed of the IV (and nonce if necessary). If noncesize + ivsize == blocksize, then this indicates that the user is passing in the entire counterblock. Thus countersize indicates the number of bytes in the counterblock to use as the counter for incrementing. CTR will increment the counter portion by 1, and begin encryption with that value. Note that CTR assumes the counter portion of the block that will be incremented is stored in big endian. Signed-off-by: Joy Latten <latten@austin.ibm.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--crypto/ctr.c32
-rw-r--r--crypto/tcrypt.c8
2 files changed, 25 insertions, 15 deletions
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 810d5ec2d5d2..b974a9f9b879 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -23,6 +23,7 @@ struct ctr_instance_ctx {
23 struct crypto_spawn alg; 23 struct crypto_spawn alg;
24 unsigned int noncesize; 24 unsigned int noncesize;
25 unsigned int ivsize; 25 unsigned int ivsize;
26 unsigned int countersize;
26}; 27};
27 28
28struct crypto_ctr_ctx { 29struct crypto_ctr_ctx {
@@ -186,7 +187,6 @@ static int crypto_ctr_crypt(struct blkcipher_desc *desc,
186 unsigned long alignmask = crypto_cipher_alignmask(child); 187 unsigned long alignmask = crypto_cipher_alignmask(child);
187 u8 cblk[bsize + alignmask]; 188 u8 cblk[bsize + alignmask];
188 u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1); 189 u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1);
189 unsigned int countersize;
190 int err; 190 int err;
191 191
192 blkcipher_walk_init(&walk, dst, src, nbytes); 192 blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -198,18 +198,18 @@ static int crypto_ctr_crypt(struct blkcipher_desc *desc,
198 memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize); 198 memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize);
199 199
200 /* initialize counter portion of counter block */ 200 /* initialize counter portion of counter block */
201 countersize = bsize - ictx->noncesize - ictx->ivsize; 201 ctr_inc_quad(counterblk + (bsize - ictx->countersize),
202 ctr_inc_quad(counterblk + (bsize - countersize), countersize); 202 ictx->countersize);
203 203
204 while (walk.nbytes) { 204 while (walk.nbytes) {
205 if (walk.src.virt.addr == walk.dst.virt.addr) 205 if (walk.src.virt.addr == walk.dst.virt.addr)
206 nbytes = crypto_ctr_crypt_inplace(&walk, child, 206 nbytes = crypto_ctr_crypt_inplace(&walk, child,
207 counterblk, 207 counterblk,
208 countersize); 208 ictx->countersize);
209 else 209 else
210 nbytes = crypto_ctr_crypt_segment(&walk, child, 210 nbytes = crypto_ctr_crypt_segment(&walk, child,
211 counterblk, 211 counterblk,
212 countersize); 212 ictx->countersize);
213 213
214 err = blkcipher_walk_done(desc, &walk, nbytes); 214 err = blkcipher_walk_done(desc, &walk, nbytes);
215 } 215 }
@@ -251,6 +251,7 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
251 struct ctr_instance_ctx *ictx; 251 struct ctr_instance_ctx *ictx;
252 unsigned int noncesize; 252 unsigned int noncesize;
253 unsigned int ivsize; 253 unsigned int ivsize;
254 unsigned int countersize;
254 int err; 255 int err;
255 256
256 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); 257 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
@@ -270,9 +271,17 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
270 if (err) 271 if (err)
271 goto out_put_alg; 272 goto out_put_alg;
272 273
273 /* verify size of nonce + iv + counter */ 274 err = crypto_attr_u32(tb[4], &countersize);
275 if (err)
276 goto out_put_alg;
277
278 /* verify size of nonce + iv + counter
279 * counter must be >= 4 bytes.
280 */
274 err = -EINVAL; 281 err = -EINVAL;
275 if ((noncesize + ivsize) >= alg->cra_blocksize) 282 if (((noncesize + ivsize + countersize) < alg->cra_blocksize) ||
283 ((noncesize + ivsize) > alg->cra_blocksize) ||
284 (countersize > alg->cra_blocksize) || (countersize < 4))
276 goto out_put_alg; 285 goto out_put_alg;
277 286
278 inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL); 287 inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
@@ -282,20 +291,21 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
282 291
283 err = -ENAMETOOLONG; 292 err = -ENAMETOOLONG;
284 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, 293 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
285 "ctr(%s,%u,%u)", alg->cra_name, noncesize, 294 "ctr(%s,%u,%u,%u)", alg->cra_name, noncesize,
286 ivsize) >= CRYPTO_MAX_ALG_NAME) { 295 ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
287 goto err_free_inst; 296 goto err_free_inst;
288 } 297 }
289 298
290 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, 299 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
291 "ctr(%s,%u,%u)", alg->cra_driver_name, noncesize, 300 "ctr(%s,%u,%u,%u)", alg->cra_driver_name, noncesize,
292 ivsize) >= CRYPTO_MAX_ALG_NAME) { 301 ivsize, countersize) >= CRYPTO_MAX_ALG_NAME) {
293 goto err_free_inst; 302 goto err_free_inst;
294 } 303 }
295 304
296 ictx = crypto_instance_ctx(inst); 305 ictx = crypto_instance_ctx(inst);
297 ictx->noncesize = noncesize; 306 ictx->noncesize = noncesize;
298 ictx->ivsize = ivsize; 307 ictx->ivsize = ivsize;
308 ictx->countersize = countersize;
299 309
300 err = crypto_init_spawn(&ictx->alg, alg, inst, 310 err = crypto_init_spawn(&ictx->alg, alg, inst,
301 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); 311 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 640cbcad32a1..aa84bc4f2313 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -969,9 +969,9 @@ static void do_test(void)
969 AES_XTS_ENC_TEST_VECTORS); 969 AES_XTS_ENC_TEST_VECTORS);
970 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template, 970 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
971 AES_XTS_DEC_TEST_VECTORS); 971 AES_XTS_DEC_TEST_VECTORS);
972 test_cipher("ctr(aes,4,8)", ENCRYPT, aes_ctr_enc_tv_template, 972 test_cipher("ctr(aes,4,8,4)", ENCRYPT, aes_ctr_enc_tv_template,
973 AES_CTR_ENC_TEST_VECTORS); 973 AES_CTR_ENC_TEST_VECTORS);
974 test_cipher("ctr(aes,4,8)", DECRYPT, aes_ctr_dec_tv_template, 974 test_cipher("ctr(aes,4,8,4)", DECRYPT, aes_ctr_dec_tv_template,
975 AES_CTR_DEC_TEST_VECTORS); 975 AES_CTR_DEC_TEST_VECTORS);
976 976
977 //CAST5 977 //CAST5
@@ -1160,9 +1160,9 @@ static void do_test(void)
1160 AES_XTS_ENC_TEST_VECTORS); 1160 AES_XTS_ENC_TEST_VECTORS);
1161 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template, 1161 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
1162 AES_XTS_DEC_TEST_VECTORS); 1162 AES_XTS_DEC_TEST_VECTORS);
1163 test_cipher("ctr(aes,4,8)", ENCRYPT, aes_ctr_enc_tv_template, 1163 test_cipher("ctr(aes,4,8,4)", ENCRYPT, aes_ctr_enc_tv_template,
1164 AES_CTR_ENC_TEST_VECTORS); 1164 AES_CTR_ENC_TEST_VECTORS);
1165 test_cipher("ctr(aes,4,8)", DECRYPT, aes_ctr_dec_tv_template, 1165 test_cipher("ctr(aes,4,8,4)", DECRYPT, aes_ctr_dec_tv_template,
1166 AES_CTR_DEC_TEST_VECTORS); 1166 AES_CTR_DEC_TEST_VECTORS);
1167 break; 1167 break;
1168 1168