author     Herbert Xu <herbert@gondor.apana.org.au>   2016-07-12 01:17:31 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>   2016-07-18 05:35:36 -0400
commit     4e6c3df4d729f85997cbf276bfa8ffd8579b8e77
tree       3293b9ab0a019308724c8b44430cadc0a3b4488d
parent     eb9bc8e7afaa9f062105dad55ec1c0663d961bb3
crypto: skcipher - Add low-level skcipher interface
This patch allows skcipher algorithms and instances to be created and registered with the crypto API. They are accessible through the top-level skcipher interface, along with ablkcipher/blkcipher algorithms and instances.

This patch also introduces a new parameter called chunk size which is meant for ciphers such as CTR and CTS which ostensibly can handle arbitrary lengths, but still behave like block ciphers in that you can only process a partial block at the very end.

For these ciphers the block size will continue to be set to 1 as it is now while the chunk size will be set to the underlying block size.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
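For illustration only (not part of this patch): a driver built on the new low-level interface fills in a struct skcipher_alg and registers it with crypto_register_skcipher(). The algorithm name and callbacks in the sketch below are hypothetical, and the encrypt/decrypt bodies are stubs; a real CTR-like cipher would walk the request scatterlists there. Note how cra_blocksize stays 1 while chunksize carries the underlying block size, which skcipher_prepare_alg() would otherwise default to cra_blocksize.

#include <crypto/internal/skcipher.h>
#include <linux/module.h>

/* Hypothetical setkey: a real driver would expand the key into its
 * tfm context here.
 */
static int example_setkey(struct crypto_skcipher *tfm, const u8 *key,
                          unsigned int keylen)
{
        if (keylen < 16 || keylen > 32)
                return -EINVAL;
        return 0;
}

/* Hypothetical encrypt/decrypt stub: a real implementation would walk
 * req->src and req->dst in chunksize-sized steps.
 */
static int example_crypt(struct skcipher_request *req)
{
        return -ENOSYS;
}

static struct skcipher_alg example_ctr_alg = {
        .base = {
                .cra_name        = "ctr(example)",        /* hypothetical */
                .cra_driver_name = "ctr-example-generic", /* hypothetical */
                .cra_priority    = 100,
                .cra_blocksize   = 1,   /* stream-cipher view */
                .cra_module      = THIS_MODULE,
        },
        .min_keysize = 16,
        .max_keysize = 32,
        .ivsize      = 16,
        .chunksize   = 16,              /* underlying block size */
        .setkey      = example_setkey,
        .encrypt     = example_crypt,
        .decrypt     = example_crypt,
};

static int __init example_init(void)
{
        return crypto_register_skcipher(&example_ctr_alg);
}

static void __exit example_exit(void)
{
        crypto_unregister_skcipher(&example_ctr_alg);
}

module_init(example_init);
module_exit(example_exit);
MODULE_LICENSE("GPL");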
-rw-r--r--   crypto/skcipher.c                     196
-rw-r--r--   include/crypto/internal/skcipher.h     87
-rw-r--r--   include/crypto/skcipher.h              130
-rw-r--r--   include/linux/crypto.h                   1
4 files changed, 407 insertions(+), 7 deletions(-)
diff --git a/crypto/skcipher.c b/crypto/skcipher.c
index 69230e9d4ac9..d248008e7f7b 100644
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c
@@ -16,7 +16,11 @@
 
 #include <crypto/internal/skcipher.h>
 #include <linux/bug.h>
+#include <linux/cryptouser.h>
 #include <linux/module.h>
+#include <linux/rtnetlink.h>
+#include <linux/seq_file.h>
+#include <net/netlink.h>
 
 #include "internal.h"
 
@@ -25,10 +29,11 @@ static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
 	if (alg->cra_type == &crypto_blkcipher_type)
 		return sizeof(struct crypto_blkcipher *);
 
-	BUG_ON(alg->cra_type != &crypto_ablkcipher_type &&
-	       alg->cra_type != &crypto_givcipher_type);
+	if (alg->cra_type == &crypto_ablkcipher_type ||
+	    alg->cra_type == &crypto_givcipher_type)
+		return sizeof(struct crypto_ablkcipher *);
 
-	return sizeof(struct crypto_ablkcipher *);
+	return crypto_alg_extsize(alg);
 }
 
 static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
@@ -216,26 +221,118 @@ static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
 	return 0;
 }
 
+static void crypto_skcipher_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
+	struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);
+
+	alg->exit(skcipher);
+}
+
 static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
 {
+	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
+	struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);
+
 	if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type)
 		return crypto_init_skcipher_ops_blkcipher(tfm);
 
-	BUG_ON(tfm->__crt_alg->cra_type != &crypto_ablkcipher_type &&
-	       tfm->__crt_alg->cra_type != &crypto_givcipher_type);
+	if (tfm->__crt_alg->cra_type == &crypto_ablkcipher_type ||
+	    tfm->__crt_alg->cra_type == &crypto_givcipher_type)
+		return crypto_init_skcipher_ops_ablkcipher(tfm);
+
+	skcipher->setkey = alg->setkey;
+	skcipher->encrypt = alg->encrypt;
+	skcipher->decrypt = alg->decrypt;
+	skcipher->ivsize = alg->ivsize;
+	skcipher->keysize = alg->max_keysize;
+
+	if (alg->exit)
+		skcipher->base.exit = crypto_skcipher_exit_tfm;
 
-	return crypto_init_skcipher_ops_ablkcipher(tfm);
+	if (alg->init)
+		return alg->init(skcipher);
+
+	return 0;
+}
+
+static void crypto_skcipher_free_instance(struct crypto_instance *inst)
+{
+	struct skcipher_instance *skcipher =
+		container_of(inst, struct skcipher_instance, s.base);
+
+	skcipher->free(skcipher);
+}
+
+static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
+	__attribute__ ((unused));
+static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
+{
+	struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
+						     base);
+
+	seq_printf(m, "type         : skcipher\n");
+	seq_printf(m, "async        : %s\n",
+		   alg->cra_flags & CRYPTO_ALG_ASYNC ? "yes" : "no");
+	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
+	seq_printf(m, "min keysize  : %u\n", skcipher->min_keysize);
+	seq_printf(m, "max keysize  : %u\n", skcipher->max_keysize);
+	seq_printf(m, "ivsize       : %u\n", skcipher->ivsize);
+	seq_printf(m, "chunksize    : %u\n", skcipher->chunksize);
 }
 
+#ifdef CONFIG_NET
+static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+	struct crypto_report_blkcipher rblkcipher;
+	struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
+						     base);
+
+	strncpy(rblkcipher.type, "skcipher", sizeof(rblkcipher.type));
+	strncpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv));
+
+	rblkcipher.blocksize = alg->cra_blocksize;
+	rblkcipher.min_keysize = skcipher->min_keysize;
+	rblkcipher.max_keysize = skcipher->max_keysize;
+	rblkcipher.ivsize = skcipher->ivsize;
+
+	if (nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
+		    sizeof(struct crypto_report_blkcipher), &rblkcipher))
+		goto nla_put_failure;
+	return 0;
+
+nla_put_failure:
+	return -EMSGSIZE;
+}
+#else
+static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+	return -ENOSYS;
+}
+#endif
+
 static const struct crypto_type crypto_skcipher_type2 = {
 	.extsize = crypto_skcipher_extsize,
 	.init_tfm = crypto_skcipher_init_tfm,
+	.free = crypto_skcipher_free_instance,
+#ifdef CONFIG_PROC_FS
+	.show = crypto_skcipher_show,
+#endif
+	.report = crypto_skcipher_report,
 	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
 	.maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK,
-	.type = CRYPTO_ALG_TYPE_BLKCIPHER,
+	.type = CRYPTO_ALG_TYPE_SKCIPHER,
 	.tfmsize = offsetof(struct crypto_skcipher, base),
 };
 
+int crypto_grab_skcipher2(struct crypto_skcipher_spawn *spawn,
+			  const char *name, u32 type, u32 mask)
+{
+	spawn->base.frontend = &crypto_skcipher_type2;
+	return crypto_grab_spawn(&spawn->base, name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_grab_skcipher2);
+
 struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
 					      u32 type, u32 mask)
 {
@@ -243,5 +340,90 @@ struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);
 
+int crypto_has_skcipher2(const char *alg_name, u32 type, u32 mask)
+{
+	return crypto_type_has_alg(alg_name, &crypto_skcipher_type2,
+				   type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_has_skcipher2);
+
+static int skcipher_prepare_alg(struct skcipher_alg *alg)
+{
+	struct crypto_alg *base = &alg->base;
+
+	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8)
+		return -EINVAL;
+
+	if (!alg->chunksize)
+		alg->chunksize = base->cra_blocksize;
+
+	base->cra_type = &crypto_skcipher_type2;
+	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+	base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;
+
+	return 0;
+}
+
+int crypto_register_skcipher(struct skcipher_alg *alg)
+{
+	struct crypto_alg *base = &alg->base;
+	int err;
+
+	err = skcipher_prepare_alg(alg);
+	if (err)
+		return err;
+
+	return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_skcipher);
+
+void crypto_unregister_skcipher(struct skcipher_alg *alg)
+{
+	crypto_unregister_alg(&alg->base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_skcipher);
+
+int crypto_register_skciphers(struct skcipher_alg *algs, int count)
+{
+	int i, ret;
+
+	for (i = 0; i < count; i++) {
+		ret = crypto_register_skcipher(&algs[i]);
+		if (ret)
+			goto err;
+	}
+
+	return 0;
+
+err:
+	for (--i; i >= 0; --i)
+		crypto_unregister_skcipher(&algs[i]);
+
+	return ret;
+}
+EXPORT_SYMBOL_GPL(crypto_register_skciphers);
+
+void crypto_unregister_skciphers(struct skcipher_alg *algs, int count)
+{
+	int i;
+
+	for (i = count - 1; i >= 0; --i)
+		crypto_unregister_skcipher(&algs[i]);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_skciphers);
+
+int skcipher_register_instance(struct crypto_template *tmpl,
+			       struct skcipher_instance *inst)
+{
+	int err;
+
+	err = skcipher_prepare_alg(&inst->alg);
+	if (err)
+		return err;
+
+	return crypto_register_instance(tmpl, skcipher_crypto_instance(inst));
+}
+EXPORT_SYMBOL_GPL(skcipher_register_instance);
+
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Symmetric key cipher type");
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
index 2cf7a61ece59..ce6619c339fe 100644
--- a/include/crypto/internal/skcipher.h
+++ b/include/crypto/internal/skcipher.h
@@ -19,12 +19,46 @@
 
 struct rtattr;
 
+struct skcipher_instance {
+	void (*free)(struct skcipher_instance *inst);
+	union {
+		struct {
+			char head[offsetof(struct skcipher_alg, base)];
+			struct crypto_instance base;
+		} s;
+		struct skcipher_alg alg;
+	};
+};
+
 struct crypto_skcipher_spawn {
 	struct crypto_spawn base;
 };
 
 extern const struct crypto_type crypto_givcipher_type;
 
+static inline struct crypto_instance *skcipher_crypto_instance(
+	struct skcipher_instance *inst)
+{
+	return &inst->s.base;
+}
+
+static inline struct skcipher_instance *skcipher_alg_instance(
+	struct crypto_skcipher *skcipher)
+{
+	return container_of(crypto_skcipher_alg(skcipher),
+			    struct skcipher_instance, alg);
+}
+
+static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
+{
+	return crypto_instance_ctx(skcipher_crypto_instance(inst));
+}
+
+static inline void skcipher_request_complete(struct skcipher_request *req, int err)
+{
+	req->base.complete(&req->base, err);
+}
+
 static inline void crypto_set_skcipher_spawn(
 	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
 {
@@ -33,6 +67,8 @@ static inline void crypto_set_skcipher_spawn(
 
 int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
 			 u32 type, u32 mask);
+int crypto_grab_skcipher2(struct crypto_skcipher_spawn *spawn,
+			  const char *name, u32 type, u32 mask);
 
 struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type, u32 mask);
 
@@ -47,6 +83,12 @@ static inline struct crypto_alg *crypto_skcipher_spawn_alg(
 	return spawn->base.alg;
 }
 
+static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
+	struct crypto_skcipher_spawn *spawn)
+{
+	return container_of(spawn->base.alg, struct skcipher_alg, base);
+}
+
 static inline struct crypto_ablkcipher *crypto_spawn_skcipher(
 	struct crypto_skcipher_spawn *spawn)
 {
@@ -55,6 +97,25 @@ static inline struct crypto_ablkcipher *crypto_spawn_skcipher(
 			    crypto_skcipher_mask(0)));
 }
 
+static inline struct crypto_skcipher *crypto_spawn_skcipher2(
+	struct crypto_skcipher_spawn *spawn)
+{
+	return crypto_spawn_tfm2(&spawn->base);
+}
+
+static inline void crypto_skcipher_set_reqsize(
+	struct crypto_skcipher *skcipher, unsigned int reqsize)
+{
+	skcipher->reqsize = reqsize;
+}
+
+int crypto_register_skcipher(struct skcipher_alg *alg);
+void crypto_unregister_skcipher(struct skcipher_alg *alg);
+int crypto_register_skciphers(struct skcipher_alg *algs, int count);
+void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
+int skcipher_register_instance(struct crypto_template *tmpl,
+			       struct skcipher_instance *inst);
+
 int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req);
 int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req);
 const char *crypto_default_geniv(const struct crypto_alg *alg);
@@ -122,5 +183,31 @@ static inline u32 skcipher_request_flags(struct skcipher_request *req)
 	return req->base.flags;
 }
 
+static inline unsigned int crypto_skcipher_alg_min_keysize(
+	struct skcipher_alg *alg)
+{
+	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_BLKCIPHER)
+		return alg->base.cra_blkcipher.min_keysize;
+
+	if (alg->base.cra_ablkcipher.encrypt)
+		return alg->base.cra_ablkcipher.min_keysize;
+
+	return alg->min_keysize;
+}
+
+static inline unsigned int crypto_skcipher_alg_max_keysize(
+	struct skcipher_alg *alg)
+{
+	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_BLKCIPHER)
+		return alg->base.cra_blkcipher.max_keysize;
+
+	if (alg->base.cra_ablkcipher.encrypt)
+		return alg->base.cra_ablkcipher.max_keysize;
+
+	return alg->max_keysize;
+}
+
 #endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */
 
diff --git a/include/crypto/skcipher.h b/include/crypto/skcipher.h
index 0f987f50bb52..a381f57ea695 100644
--- a/include/crypto/skcipher.h
+++ b/include/crypto/skcipher.h
@@ -65,6 +65,75 @@ struct crypto_skcipher {
 	struct crypto_tfm base;
 };
 
+/**
+ * struct skcipher_alg - symmetric key cipher definition
+ * @min_keysize: Minimum key size supported by the transformation. This is the
+ *		 smallest key length supported by this transformation algorithm.
+ *		 This must be set to one of the pre-defined values as this is
+ *		 not hardware specific. Possible values for this field can be
+ *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
+ * @max_keysize: Maximum key size supported by the transformation. This is the
+ *		 largest key length supported by this transformation algorithm.
+ *		 This must be set to one of the pre-defined values as this is
+ *		 not hardware specific. Possible values for this field can be
+ *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
+ * @setkey: Set key for the transformation. This function is used to either
+ *	    program a supplied key into the hardware or store the key in the
+ *	    transformation context for programming it later. Note that this
+ *	    function does modify the transformation context. This function can
+ *	    be called multiple times during the existence of the transformation
+ *	    object, so one must make sure the key is properly reprogrammed into
+ *	    the hardware. This function is also responsible for checking the key
+ *	    length for validity. In case a software fallback was put in place in
+ *	    the @cra_init call, this function might need to use the fallback if
+ *	    the algorithm doesn't support all of the key sizes.
+ * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
+ *	     the supplied scatterlist containing the blocks of data. The crypto
+ *	     API consumer is responsible for aligning the entries of the
+ *	     scatterlist properly and making sure the chunks are correctly
+ *	     sized. In case a software fallback was put in place in the
+ *	     @cra_init call, this function might need to use the fallback if
+ *	     the algorithm doesn't support all of the key sizes. In case the
+ *	     key was stored in transformation context, the key might need to be
+ *	     re-programmed into the hardware in this function. This function
+ *	     shall not modify the transformation context, as this function may
+ *	     be called in parallel with the same transformation object.
+ * @decrypt: Decrypt a single block. This is a reverse counterpart to @encrypt
+ *	     and the conditions are exactly the same.
+ * @init: Initialize the cryptographic transformation object. This function
+ *	  is used to initialize the cryptographic transformation object.
+ *	  This function is called only once at the instantiation time, right
+ *	  after the transformation context was allocated. In case the
+ *	  cryptographic hardware has some special requirements which need to
+ *	  be handled by software, this function shall check for the precise
+ *	  requirement of the transformation and put any software fallbacks
+ *	  in place.
+ * @exit: Deinitialize the cryptographic transformation object. This is a
+ *	  counterpart to @init, used to remove various changes set in
+ *	  @init.
+ * @ivsize: IV size applicable for transformation. The consumer must provide an
+ *	    IV of exactly that size to perform the encrypt or decrypt operation.
+ * @chunksize: Equal to the block size except for stream ciphers such as
+ *	       CTR where it is set to the underlying block size.
+ *
+ * All fields except @ivsize are mandatory and must be filled.
+ */
+struct skcipher_alg {
+	int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
+	              unsigned int keylen);
+	int (*encrypt)(struct skcipher_request *req);
+	int (*decrypt)(struct skcipher_request *req);
+	int (*init)(struct crypto_skcipher *tfm);
+	void (*exit)(struct crypto_skcipher *tfm);
+
+	unsigned int min_keysize;
+	unsigned int max_keysize;
+	unsigned int ivsize;
+	unsigned int chunksize;
+
+	struct crypto_alg base;
+};
+
 #define SKCIPHER_REQUEST_ON_STACK(name, tfm) \
 	char __##name##_desc[sizeof(struct skcipher_request) + \
 		crypto_skcipher_reqsize(tfm)] CRYPTO_MINALIGN_ATTR; \
@@ -231,12 +300,43 @@ static inline int crypto_has_skcipher(const char *alg_name, u32 type,
 				     crypto_skcipher_mask(mask));
 }
 
+/**
+ * crypto_has_skcipher2() - Search for the availability of an skcipher.
+ * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
+ *	      skcipher
+ * @type: specifies the type of the skcipher
+ * @mask: specifies the mask for the skcipher
+ *
+ * Return: true when the skcipher is known to the kernel crypto API; false
+ *	   otherwise
+ */
+int crypto_has_skcipher2(const char *alg_name, u32 type, u32 mask);
+
 static inline const char *crypto_skcipher_driver_name(
 	struct crypto_skcipher *tfm)
 {
 	return crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
 }
 
+static inline struct skcipher_alg *crypto_skcipher_alg(
+	struct crypto_skcipher *tfm)
+{
+	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
+			    struct skcipher_alg, base);
+}
+
+static inline unsigned int crypto_skcipher_alg_ivsize(struct skcipher_alg *alg)
+{
+	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_BLKCIPHER)
+		return alg->base.cra_blkcipher.ivsize;
+
+	if (alg->base.cra_ablkcipher.encrypt)
+		return alg->base.cra_ablkcipher.ivsize;
+
+	return alg->ivsize;
+}
+
 /**
  * crypto_skcipher_ivsize() - obtain IV size
  * @tfm: cipher handle
@@ -251,6 +351,36 @@ static inline unsigned int crypto_skcipher_ivsize(struct crypto_skcipher *tfm)
 	return tfm->ivsize;
 }
 
+static inline unsigned int crypto_skcipher_alg_chunksize(
+	struct skcipher_alg *alg)
+{
+	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+	    CRYPTO_ALG_TYPE_BLKCIPHER)
+		return alg->base.cra_blocksize;
+
+	if (alg->base.cra_ablkcipher.encrypt)
+		return alg->base.cra_blocksize;
+
+	return alg->chunksize;
+}
+
+/**
+ * crypto_skcipher_chunksize() - obtain chunk size
+ * @tfm: cipher handle
+ *
+ * The block size is set to one for ciphers such as CTR. However,
+ * you still need to provide incremental updates in multiples of
+ * the underlying block size as the IV does not have sub-block
+ * granularity. This is known in this API as the chunk size.
+ *
+ * Return: chunk size in bytes
+ */
+static inline unsigned int crypto_skcipher_chunksize(
+	struct crypto_skcipher *tfm)
+{
+	return crypto_skcipher_alg_chunksize(crypto_skcipher_alg(tfm));
+}
+
 /**
  * crypto_skcipher_blocksize() - obtain block size of cipher
  * @tfm: cipher handle
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 992cfc2e5df1..37a652d1639d 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -47,6 +47,7 @@
 #define CRYPTO_ALG_TYPE_AEAD		0x00000003
 #define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
 #define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
+#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
 #define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
 #define CRYPTO_ALG_TYPE_KPP		0x00000008
 #define CRYPTO_ALG_TYPE_RNG		0x0000000c
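
As a closing illustration (again, not part of this patch), a hedged consumer-side sketch of the chunk-size semantics documented above: with a CTR-style algorithm the reported block size is 1, but non-final updates still have to be multiples of crypto_skcipher_chunksize(). The helper name and the alignment policy it enforces are hypothetical.

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/types.h>

/* Hypothetical helper: allocate a cipher by name and check that a non-final
 * update length is aligned to the cipher's chunk size.
 */
static int example_check_chunk_alignment(const char *alg_name,
                                          unsigned int len, bool final)
{
        struct crypto_skcipher *tfm;
        unsigned int chunk;
        int ret = 0;

        tfm = crypto_alloc_skcipher(alg_name, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        chunk = crypto_skcipher_chunksize(tfm);

        /* Only the final chunk may be shorter than a full chunk. */
        if (!final && len % chunk)
                ret = -EINVAL;

        crypto_free_skcipher(tfm);
        return ret;
}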