summary | refs | log | tree | commit | diff | stats
path: root/crypto/ctr.c
diff options
context:
space:
mode:
author: Eric Biggers <ebiggers@google.com> 2019-01-03 23:16:17 -0500
committer: Herbert Xu <herbert@gondor.apana.org.au> 2019-01-11 01:16:57 -0500
commit: 11f14630c4b379279ae3b063ba474d3290914333 (patch)
tree: db4a0b5a7038e3d0f4085661f58d602bd72e78f2 /crypto/ctr.c
parent: 03b8302ddaad4cf59c0f4f1d60d6a9b4baa3b136 (diff)
crypto: ctr - convert to skcipher API
Convert the CTR template from the deprecated "blkcipher" API to the "skcipher" API, taking advantage of skcipher_alloc_instance_simple() to simplify it considerably. Signed-off-by: Eric Biggers <ebiggers@google.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/ctr.c')
-rw-r--r-- crypto/ctr.c | 160
1 file changed, 41 insertions, 119 deletions
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 30f3946efc6d..4c743a96faa4 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -17,14 +17,8 @@
17#include <linux/init.h> 17#include <linux/init.h>
18#include <linux/kernel.h> 18#include <linux/kernel.h>
19#include <linux/module.h> 19#include <linux/module.h>
20#include <linux/random.h>
21#include <linux/scatterlist.h>
22#include <linux/slab.h> 20#include <linux/slab.h>
23 21
24struct crypto_ctr_ctx {
25 struct crypto_cipher *child;
26};
27
28struct crypto_rfc3686_ctx { 22struct crypto_rfc3686_ctx {
29 struct crypto_skcipher *child; 23 struct crypto_skcipher *child;
30 u8 nonce[CTR_RFC3686_NONCE_SIZE]; 24 u8 nonce[CTR_RFC3686_NONCE_SIZE];
@@ -35,24 +29,7 @@ struct crypto_rfc3686_req_ctx {
35 struct skcipher_request subreq CRYPTO_MINALIGN_ATTR; 29 struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
36}; 30};
37 31
38static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key, 32static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
39 unsigned int keylen)
40{
41 struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
42 struct crypto_cipher *child = ctx->child;
43 int err;
44
45 crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
46 crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
47 CRYPTO_TFM_REQ_MASK);
48 err = crypto_cipher_setkey(child, key, keylen);
49 crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
50 CRYPTO_TFM_RES_MASK);
51
52 return err;
53}
54
55static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
56 struct crypto_cipher *tfm) 33 struct crypto_cipher *tfm)
57{ 34{
58 unsigned int bsize = crypto_cipher_blocksize(tfm); 35 unsigned int bsize = crypto_cipher_blocksize(tfm);
@@ -70,7 +47,7 @@ static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
70 crypto_inc(ctrblk, bsize); 47 crypto_inc(ctrblk, bsize);
71} 48}
72 49
73static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk, 50static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
74 struct crypto_cipher *tfm) 51 struct crypto_cipher *tfm)
75{ 52{
76 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 53 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
@@ -96,7 +73,7 @@ static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
96 return nbytes; 73 return nbytes;
97} 74}
98 75
99static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk, 76static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
100 struct crypto_cipher *tfm) 77 struct crypto_cipher *tfm)
101{ 78{
102 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 79 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
@@ -123,135 +100,80 @@ static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
123 return nbytes; 100 return nbytes;
124} 101}
125 102
126static int crypto_ctr_crypt(struct blkcipher_desc *desc, 103static int crypto_ctr_crypt(struct skcipher_request *req)
127 struct scatterlist *dst, struct scatterlist *src,
128 unsigned int nbytes)
129{ 104{
130 struct blkcipher_walk walk; 105 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
131 struct crypto_blkcipher *tfm = desc->tfm; 106 struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
132 struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm); 107 const unsigned int bsize = crypto_cipher_blocksize(cipher);
133 struct crypto_cipher *child = ctx->child; 108 struct skcipher_walk walk;
134 unsigned int bsize = crypto_cipher_blocksize(child); 109 unsigned int nbytes;
135 int err; 110 int err;
136 111
137 blkcipher_walk_init(&walk, dst, src, nbytes); 112 err = skcipher_walk_virt(&walk, req, false);
138 err = blkcipher_walk_virt_block(desc, &walk, bsize);
139 113
140 while (walk.nbytes >= bsize) { 114 while (walk.nbytes >= bsize) {
141 if (walk.src.virt.addr == walk.dst.virt.addr) 115 if (walk.src.virt.addr == walk.dst.virt.addr)
142 nbytes = crypto_ctr_crypt_inplace(&walk, child); 116 nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
143 else 117 else
144 nbytes = crypto_ctr_crypt_segment(&walk, child); 118 nbytes = crypto_ctr_crypt_segment(&walk, cipher);
145 119
146 err = blkcipher_walk_done(desc, &walk, nbytes); 120 err = skcipher_walk_done(&walk, nbytes);
147 } 121 }
148 122
149 if (walk.nbytes) { 123 if (walk.nbytes) {
150 crypto_ctr_crypt_final(&walk, child); 124 crypto_ctr_crypt_final(&walk, cipher);
151 err = blkcipher_walk_done(desc, &walk, 0); 125 err = skcipher_walk_done(&walk, 0);
152 } 126 }
153 127
154 return err; 128 return err;
155} 129}
156 130
157static int crypto_ctr_init_tfm(struct crypto_tfm *tfm) 131static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
158{ 132{
159 struct crypto_instance *inst = (void *)tfm->__crt_alg; 133 struct skcipher_instance *inst;
160 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
161 struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
162 struct crypto_cipher *cipher;
163
164 cipher = crypto_spawn_cipher(spawn);
165 if (IS_ERR(cipher))
166 return PTR_ERR(cipher);
167
168 ctx->child = cipher;
169
170 return 0;
171}
172
173static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
174{
175 struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
176
177 crypto_free_cipher(ctx->child);
178}
179
180static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
181{
182 struct crypto_instance *inst;
183 struct crypto_attr_type *algt;
184 struct crypto_alg *alg; 134 struct crypto_alg *alg;
185 u32 mask;
186 int err; 135 int err;
187 136
188 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER); 137 inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
189 if (err) 138 if (IS_ERR(inst))
190 return ERR_PTR(err); 139 return PTR_ERR(inst);
191
192 algt = crypto_get_attr_type(tb);
193 if (IS_ERR(algt))
194 return ERR_CAST(algt);
195
196 mask = CRYPTO_ALG_TYPE_MASK |
197 crypto_requires_off(algt->type, algt->mask,
198 CRYPTO_ALG_NEED_FALLBACK);
199
200 alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, mask);
201 if (IS_ERR(alg))
202 return ERR_CAST(alg);
203 140
204 /* Block size must be >= 4 bytes. */ 141 /* Block size must be >= 4 bytes. */
205 err = -EINVAL; 142 err = -EINVAL;
206 if (alg->cra_blocksize < 4) 143 if (alg->cra_blocksize < 4)
207 goto out_put_alg; 144 goto out_free_inst;
208 145
209 /* If this is false we'd fail the alignment of crypto_inc. */ 146 /* If this is false we'd fail the alignment of crypto_inc. */
210 if (alg->cra_blocksize % 4) 147 if (alg->cra_blocksize % 4)
211 goto out_put_alg; 148 goto out_free_inst;
212
213 inst = crypto_alloc_instance("ctr", alg);
214 if (IS_ERR(inst))
215 goto out;
216
217 inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
218 inst->alg.cra_priority = alg->cra_priority;
219 inst->alg.cra_blocksize = 1;
220 inst->alg.cra_alignmask = alg->cra_alignmask;
221 inst->alg.cra_type = &crypto_blkcipher_type;
222 149
223 inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; 150 /* CTR mode is a stream cipher. */
224 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; 151 inst->alg.base.cra_blocksize = 1;
225 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
226
227 inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);
228 152
229 inst->alg.cra_init = crypto_ctr_init_tfm; 153 /*
230 inst->alg.cra_exit = crypto_ctr_exit_tfm; 154 * To simplify the implementation, configure the skcipher walk to only
155 * give a partial block at the very end, never earlier.
156 */
157 inst->alg.chunksize = alg->cra_blocksize;
231 158
232 inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey; 159 inst->alg.encrypt = crypto_ctr_crypt;
233 inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt; 160 inst->alg.decrypt = crypto_ctr_crypt;
234 inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
235 161
236out: 162 err = skcipher_register_instance(tmpl, inst);
237 crypto_mod_put(alg); 163 if (err)
238 return inst; 164 goto out_free_inst;
165 goto out_put_alg;
239 166
167out_free_inst:
168 inst->free(inst);
240out_put_alg: 169out_put_alg:
241 inst = ERR_PTR(err); 170 crypto_mod_put(alg);
242 goto out; 171 return err;
243}
244
245static void crypto_ctr_free(struct crypto_instance *inst)
246{
247 crypto_drop_spawn(crypto_instance_ctx(inst));
248 kfree(inst);
249} 172}
250 173
251static struct crypto_template crypto_ctr_tmpl = { 174static struct crypto_template crypto_ctr_tmpl = {
252 .name = "ctr", 175 .name = "ctr",
253 .alloc = crypto_ctr_alloc, 176 .create = crypto_ctr_create,
254 .free = crypto_ctr_free,
255 .module = THIS_MODULE, 177 .module = THIS_MODULE,
256}; 178};
257 179
@@ -480,6 +402,6 @@ module_init(crypto_ctr_module_init);
480module_exit(crypto_ctr_module_exit); 402module_exit(crypto_ctr_module_exit);
481 403
482MODULE_LICENSE("GPL"); 404MODULE_LICENSE("GPL");
483MODULE_DESCRIPTION("CTR Counter block mode"); 405MODULE_DESCRIPTION("CTR block cipher mode of operation");
484MODULE_ALIAS_CRYPTO("rfc3686"); 406MODULE_ALIAS_CRYPTO("rfc3686");
485MODULE_ALIAS_CRYPTO("ctr"); 407MODULE_ALIAS_CRYPTO("ctr");