-rw-r--r--  crypto/chainiv.c                   | 208
-rw-r--r--  include/crypto/internal/skcipher.h |  13
2 files changed, 213 insertions(+), 8 deletions(-)
diff --git a/crypto/chainiv.c b/crypto/chainiv.c
index 38868d160b47..d17fa0454dc3 100644
--- a/crypto/chainiv.c
+++ b/crypto/chainiv.c
@@ -16,16 +16,34 @@
 #include <crypto/internal/skcipher.h>
 #include <linux/err.h>
 #include <linux/init.h>
+#include <linux/kernel.h>
 #include <linux/module.h>
 #include <linux/random.h>
 #include <linux/spinlock.h>
 #include <linux/string.h>
+#include <linux/workqueue.h>
+
+enum {
+        CHAINIV_STATE_INUSE = 0,
+};
 
 struct chainiv_ctx {
         spinlock_t lock;
         char iv[];
 };
 
+struct async_chainiv_ctx {
+        unsigned long state;
+
+        spinlock_t lock;
+        int err;
+
+        struct crypto_queue queue;
+        struct work_struct postponed;
+
+        char iv[];
+};
+
 static int chainiv_givencrypt(struct skcipher_givcrypt_request *req)
 {
         struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
@@ -80,26 +98,187 @@ unlock:
         return chainiv_givencrypt(req);
 }
 
+static int chainiv_init_common(struct crypto_tfm *tfm)
+{
+        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);
+
+        return skcipher_geniv_init(tfm);
+}
+
 static int chainiv_init(struct crypto_tfm *tfm)
 {
-        struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
-        struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+        struct chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
 
         spin_lock_init(&ctx->lock);
 
-        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);
+        return chainiv_init_common(tfm);
+}
 
-        return skcipher_geniv_init(tfm);
+static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx)
+{
+        int queued;
+
+        if (!ctx->queue.qlen) {
+                smp_mb__before_clear_bit();
+                clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
+
+                if (!ctx->queue.qlen ||
+                    test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+                        goto out;
+        }
+
+        queued = schedule_work(&ctx->postponed);
+        BUG_ON(!queued);
+
+out:
+        return ctx->err;
+}
+
+static int async_chainiv_postpone_request(struct skcipher_givcrypt_request *req)
+{
+        struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+        struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+        int err;
+
+        spin_lock_bh(&ctx->lock);
+        err = skcipher_enqueue_givcrypt(&ctx->queue, req);
+        spin_unlock_bh(&ctx->lock);
+
+        if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+                return err;
+
+        ctx->err = err;
+        return async_chainiv_schedule_work(ctx);
+}
+
+static int async_chainiv_givencrypt_tail(struct skcipher_givcrypt_request *req)
+{
+        struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+        struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+        struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
+        unsigned int ivsize = crypto_ablkcipher_ivsize(geniv);
+
+        memcpy(req->giv, ctx->iv, ivsize);
+        memcpy(subreq->info, ctx->iv, ivsize);
+
+        ctx->err = crypto_ablkcipher_encrypt(subreq);
+        if (ctx->err)
+                goto out;
+
+        memcpy(ctx->iv, subreq->info, ivsize);
+
+out:
+        return async_chainiv_schedule_work(ctx);
+}
+
+static int async_chainiv_givencrypt(struct skcipher_givcrypt_request *req)
+{
+        struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+        struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+        struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
+
+        ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
+        ablkcipher_request_set_callback(subreq, req->creq.base.flags,
+                                        req->creq.base.complete,
+                                        req->creq.base.data);
+        ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
+                                     req->creq.nbytes, req->creq.info);
+
+        if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+                goto postpone;
+
+        if (ctx->queue.qlen) {
+                clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
+                goto postpone;
+        }
+
+        return async_chainiv_givencrypt_tail(req);
+
+postpone:
+        return async_chainiv_postpone_request(req);
+}
+
+static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
+{
+        struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
+        struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
+
+        if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
+                goto out;
+
+        if (crypto_ablkcipher_crt(geniv)->givencrypt !=
+            async_chainiv_givencrypt_first)
+                goto unlock;
+
+        crypto_ablkcipher_crt(geniv)->givencrypt = async_chainiv_givencrypt;
+        get_random_bytes(ctx->iv, crypto_ablkcipher_ivsize(geniv));
+
+unlock:
+        clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
+
+out:
+        return async_chainiv_givencrypt(req);
+}
+
+static void async_chainiv_do_postponed(struct work_struct *work)
+{
+        struct async_chainiv_ctx *ctx = container_of(work,
+                                                     struct async_chainiv_ctx,
+                                                     postponed);
+        struct skcipher_givcrypt_request *req;
+        struct ablkcipher_request *subreq;
+
+        /* Only handle one request at a time to avoid hogging keventd. */
+        spin_lock_bh(&ctx->lock);
+        req = skcipher_dequeue_givcrypt(&ctx->queue);
+        spin_unlock_bh(&ctx->lock);
+
+        if (!req) {
+                async_chainiv_schedule_work(ctx);
+                return;
+        }
+
+        subreq = skcipher_givcrypt_reqctx(req);
+        subreq->base.flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
+
+        async_chainiv_givencrypt_tail(req);
+}
+
+static int async_chainiv_init(struct crypto_tfm *tfm)
+{
+        struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
+
+        spin_lock_init(&ctx->lock);
+
+        crypto_init_queue(&ctx->queue, 100);
+        INIT_WORK(&ctx->postponed, async_chainiv_do_postponed);
+
+        return chainiv_init_common(tfm);
+}
+
+static void async_chainiv_exit(struct crypto_tfm *tfm)
+{
+        struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
+
+        BUG_ON(test_bit(CHAINIV_STATE_INUSE, &ctx->state) || ctx->queue.qlen);
+
+        skcipher_geniv_exit(tfm);
 }
 
 static struct crypto_template chainiv_tmpl;
 
 static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
 {
+        struct crypto_attr_type *algt;
         struct crypto_instance *inst;
+        int err;
 
-        inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0,
-                                    CRYPTO_ALG_ASYNC);
+        algt = crypto_get_attr_type(tb);
+        err = PTR_ERR(algt);
+        if (IS_ERR(algt))
+                return ERR_PTR(err);
+
+        inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0, 0);
         if (IS_ERR(inst))
                 goto out;
 
@@ -108,8 +287,21 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
         inst->alg.cra_init = chainiv_init;
         inst->alg.cra_exit = skcipher_geniv_exit;
 
-        inst->alg.cra_ctxsize = sizeof(struct chainiv_ctx) +
-                                inst->alg.cra_ablkcipher.ivsize;
+        inst->alg.cra_ctxsize = sizeof(struct chainiv_ctx);
+
+        if (!crypto_requires_sync(algt->type, algt->mask)) {
+                inst->alg.cra_flags |= CRYPTO_ALG_ASYNC;
+
+                inst->alg.cra_ablkcipher.givencrypt =
+                        async_chainiv_givencrypt_first;
+
+                inst->alg.cra_init = async_chainiv_init;
+                inst->alg.cra_exit = async_chainiv_exit;
+
+                inst->alg.cra_ctxsize = sizeof(struct async_chainiv_ctx);
+        }
+
+        inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
 
 out:
         return inst;
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
index 0053f34764ff..2ba42cd7d6aa 100644
--- a/include/crypto/internal/skcipher.h
+++ b/include/crypto/internal/skcipher.h
@@ -70,6 +70,19 @@ static inline struct crypto_ablkcipher *skcipher_geniv_cipher(
         return crypto_ablkcipher_crt(geniv)->base;
 }
 
+static inline int skcipher_enqueue_givcrypt(
+        struct crypto_queue *queue, struct skcipher_givcrypt_request *request)
+{
+        return ablkcipher_enqueue_request(queue, &request->creq);
+}
+
+static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt(
+        struct crypto_queue *queue)
+{
+        return container_of(ablkcipher_dequeue_request(queue),
+                            struct skcipher_givcrypt_request, creq);
+}
+
 static inline void *skcipher_givcrypt_reqctx(
         struct skcipher_givcrypt_request *req)
 {
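
The core of the patch is the lock-free handoff around CHAINIV_STATE_INUSE: a submitter that finds the bit set postpones its request on a queue, and the current owner re-checks that queue only after clearing the bit, so a request enqueued at the last moment is never stranded. The following is a minimal standalone sketch of that pattern, not kernel code: it uses C11 atomics in userspace, and the counter and "worker" are simplified stand-ins for crypto_queue and the workqueue, assumed here purely for illustration.

/* Sketch of the INUSE-bit handoff used by the async chainiv path. */
#include <stdatomic.h>
#include <stdio.h>

static atomic_flag inuse = ATOMIC_FLAG_INIT;  /* stands in for CHAINIV_STATE_INUSE */
static atomic_int  qlen;                      /* stands in for ctx->queue.qlen     */

static void schedule_worker(void)
{
        printf("worker scheduled to drain %d postponed request(s)\n",
               atomic_load(&qlen));
}

/* Owner path: after finishing a request, release the bit or hand off. */
static void finish_and_handoff(void)
{
        if (atomic_load(&qlen) == 0) {
                /* Release ownership; this mirrors smp_mb__before_clear_bit()
                 * followed by clear_bit(). */
                atomic_flag_clear_explicit(&inuse, memory_order_release);

                /* Re-check after clearing: a submitter may have enqueued in
                 * the meantime.  If the queue is still empty, or someone else
                 * already re-took the bit, there is nothing left to do. */
                if (atomic_load(&qlen) == 0 ||
                    atomic_flag_test_and_set_explicit(&inuse,
                                                      memory_order_acquire))
                        return;
        }

        /* Queue is non-empty and we own the bit: punt to the worker. */
        schedule_worker();
}

/* Submitter path: process inline if idle, otherwise postpone. */
static void submit(void)
{
        if (!atomic_flag_test_and_set_explicit(&inuse, memory_order_acquire)) {
                printf("processed inline\n");
                finish_and_handoff();
                return;
        }

        /* Busy: postpone, then try the bit once more in case the owner
         * released it while we were enqueueing. */
        atomic_fetch_add(&qlen, 1);
        printf("postponed (generator busy)\n");
        if (!atomic_flag_test_and_set_explicit(&inuse, memory_order_acquire))
                finish_and_handoff();
}

int main(void)
{
        submit();                          /* idle: handled inline */
        atomic_flag_test_and_set(&inuse);  /* pretend an owner is active */
        submit();                          /* busy: gets postponed */
        finish_and_handoff();              /* owner finishes, finds the queue */
        return 0;
}

The sketch deliberately omits the real code's ordering guard (async_chainiv_givencrypt clears the bit and postpones when the queue is non-empty) and error plumbing through ctx->err; it only illustrates why the "clear, then re-check" step makes the slow path safe without spinning on a lock.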