author:    Herbert Xu <herbert@gondor.apana.org.au>  2006-08-21 10:07:53 -0400
committer: Herbert Xu <herbert@gondor.apana.org.au>  2006-09-20 21:41:52 -0400
commit:    5cde0af2a9825dd1edaca233bd9590566579ef21 (patch)
tree:      e396297e3a2436d4a6ac77de63f95f2328c7a0fe /include
parent:    5c64097aa0f6dc4f27718ef47ca9a12538d62860 (diff)
[CRYPTO] cipher: Added block cipher type
This patch adds a new cipher type: the block cipher. Unlike the current cipher
algorithms, which operate on a single block at a time, block ciphers operate on
an arbitrarily long linear area of data. As the interface is still block-based,
any data remaining at the end that cannot form a complete block is skipped.
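For orientation (this sketch is not part of the patch), a caller might drive the new interface through the wrappers added to include/linux/crypto.h below. The algorithm name "cbc(aes)", the helper name example_encrypt_buffer(), and the in-place single-buffer setup are illustrative assumptions only; a mode that needs an IV would also program it with crypto_blkcipher_set_iv() first.

```c
/*
 * Illustrative only: encrypt one contiguous buffer in place with the
 * new block cipher interface.  "cbc(aes)" is an assumed algorithm name;
 * any registered blkcipher would do.
 */
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_encrypt_buffer(const u8 *key, unsigned int keylen,
				  u8 *buf, unsigned int len)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	desc.tfm = tfm;
	desc.flags = 0;

	/* One sg entry covering the whole buffer; src == dst for in-place. */
	sg_init_one(&sg, buf, len);
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);

out:
	crypto_free_blkcipher(tfm);
	return err;
}
```

Note that crypto_blkcipher_encrypt() points desc->info at the transform's own IV, while the *_iv variants leave desc->info for the caller to supply.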
The block cipher differs from the existing cipher implementation in one major
way: the scatterlist (sg) walking is now performed by the algorithm itself
rather than by the cipher mid-layer. This is needed for drivers that support
sg lists directly. It also improves performance for all algorithms, as it
reduces the total number of indirect calls by one.
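To illustrate the new walking scheme (again, not part of the patch), a blkcipher ->encrypt() implementation might drive the walk helpers declared in include/crypto/algapi.h below roughly as follows; the chunk-processing helper example_process() is hypothetical.

```c
/*
 * Sketch of an algorithm-side ->encrypt() using the new walk helpers.
 * example_process() stands in for the real per-block cipher work.
 */
#include <linux/crypto.h>
#include <linux/string.h>
#include <crypto/algapi.h>

static void example_process(u8 *dst, const u8 *src, unsigned int nbytes)
{
	memcpy(dst, src, nbytes);	/* placeholder transform */
}

static int example_blk_encrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst,
			       struct scatterlist *src,
			       unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	/* Each iteration yields one contiguous, block-aligned chunk. */
	while ((nbytes = walk.nbytes)) {
		example_process(walk.dst.virt.addr, walk.src.virt.addr,
				nbytes);
		/* 0 == no bytes of this chunk left unprocessed. */
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}
```

Hardware that operates on physical addresses would use blkcipher_walk_phys() instead and consult the phys page/offset pairs in the walk structure.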
In the future, the existing cipher algorithm type will be converted to a
single-block-only interface. This will be done once all existing users have
switched over to the new block cipher type.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'include')
 -rw-r--r--  include/crypto/algapi.h |  65
 -rw-r--r--  include/linux/crypto.h  | 179
 2 files changed, 244 insertions(+), 0 deletions(-)
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index f21ae672e8a8..f3946baf0c07 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -55,6 +55,34 @@ struct scatter_walk {
 	unsigned int offset;
 };
 
+struct blkcipher_walk {
+	union {
+		struct {
+			struct page *page;
+			unsigned long offset;
+		} phys;
+
+		struct {
+			u8 *page;
+			u8 *addr;
+		} virt;
+	} src, dst;
+
+	struct scatter_walk in;
+	unsigned int nbytes;
+
+	struct scatter_walk out;
+	unsigned int total;
+
+	void *page;
+	u8 *buffer;
+	u8 *iv;
+
+	int flags;
+};
+
+extern const struct crypto_type crypto_blkcipher_type;
+
 int crypto_register_template(struct crypto_template *tmpl);
 void crypto_unregister_template(struct crypto_template *tmpl);
 struct crypto_template *crypto_lookup_template(const char *name);
@@ -69,15 +97,52 @@ struct crypto_alg *crypto_get_attr_alg(void *param, unsigned int len,
 struct crypto_instance *crypto_alloc_instance(const char *name,
 					      struct crypto_alg *alg);
 
+int blkcipher_walk_done(struct blkcipher_desc *desc,
+			struct blkcipher_walk *walk, int err);
+int blkcipher_walk_virt(struct blkcipher_desc *desc,
+			struct blkcipher_walk *walk);
+int blkcipher_walk_phys(struct blkcipher_desc *desc,
+			struct blkcipher_walk *walk);
+
+static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
+{
+	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
+	unsigned long align = crypto_tfm_alg_alignmask(tfm);
+
+	if (align <= crypto_tfm_ctx_alignment())
+		align = 1;
+	return (void *)ALIGN(addr, align);
+}
+
 static inline void *crypto_instance_ctx(struct crypto_instance *inst)
 {
 	return inst->__ctx;
 }
 
+static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
+{
+	return crypto_tfm_ctx(&tfm->base);
+}
+
+static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
+{
+	return crypto_tfm_ctx_aligned(&tfm->base);
+}
+
 static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
 {
 	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
 }
 
+static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
+				       struct scatterlist *dst,
+				       struct scatterlist *src,
+				       unsigned int nbytes)
+{
+	walk->in.sg = src;
+	walk->out.sg = dst;
+	walk->total = nbytes;
+}
+
 #endif	/* _CRYPTO_ALGAPI_H */
 
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index fdecee83878c..5a5466d518e8 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -32,6 +32,7 @@
 #define CRYPTO_ALG_TYPE_MASK		0x0000000f
 #define CRYPTO_ALG_TYPE_CIPHER		0x00000001
 #define CRYPTO_ALG_TYPE_DIGEST		0x00000002
+#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000003
 #define CRYPTO_ALG_TYPE_COMPRESS	0x00000004
 
 #define CRYPTO_ALG_LARVAL		0x00000010
@@ -89,9 +90,16 @@
 #endif
 
 struct scatterlist;
+struct crypto_blkcipher;
 struct crypto_tfm;
 struct crypto_type;
 
+struct blkcipher_desc {
+	struct crypto_blkcipher *tfm;
+	void *info;
+	u32 flags;
+};
+
 struct cipher_desc {
 	struct crypto_tfm *tfm;
 	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
@@ -104,6 +112,21 @@ struct cipher_desc {
  * Algorithms: modular crypto algorithm implementations, managed
  * via crypto_register_alg() and crypto_unregister_alg().
  */
+struct blkcipher_alg {
+	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
+		      unsigned int keylen);
+	int (*encrypt)(struct blkcipher_desc *desc,
+		       struct scatterlist *dst, struct scatterlist *src,
+		       unsigned int nbytes);
+	int (*decrypt)(struct blkcipher_desc *desc,
+		       struct scatterlist *dst, struct scatterlist *src,
+		       unsigned int nbytes);
+
+	unsigned int min_keysize;
+	unsigned int max_keysize;
+	unsigned int ivsize;
+};
+
 struct cipher_alg {
 	unsigned int cia_min_keysize;
 	unsigned int cia_max_keysize;
@@ -143,6 +166,7 @@ struct compress_alg {
 			      unsigned int slen, u8 *dst, unsigned int *dlen);
 };
 
+#define cra_blkcipher	cra_u.blkcipher
 #define cra_cipher	cra_u.cipher
 #define cra_digest	cra_u.digest
 #define cra_compress	cra_u.compress
@@ -165,6 +189,7 @@ struct crypto_alg {
 	const struct crypto_type *cra_type;
 
 	union {
+		struct blkcipher_alg blkcipher;
 		struct cipher_alg cipher;
 		struct digest_alg digest;
 		struct compress_alg compress;
@@ -201,6 +226,16 @@ static inline int crypto_alg_available(const char *name, u32 flags)
  * crypto_free_*(), as well as the various helpers below.
  */
 
+struct blkcipher_tfm {
+	void *iv;
+	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
+		      unsigned int keylen);
+	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
+		       struct scatterlist *src, unsigned int nbytes);
+	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
+		       struct scatterlist *src, unsigned int nbytes);
+};
+
 struct cipher_tfm {
 	void *cit_iv;
 	unsigned int cit_ivsize;
@@ -251,6 +286,7 @@ struct compress_tfm {
 			      u8 *dst, unsigned int *dlen);
 };
 
+#define crt_blkcipher	crt_u.blkcipher
 #define crt_cipher	crt_u.cipher
 #define crt_digest	crt_u.digest
 #define crt_compress	crt_u.compress
@@ -260,6 +296,7 @@ struct crypto_tfm {
 	u32 crt_flags;
 	
 	union {
+		struct blkcipher_tfm blkcipher;
 		struct cipher_tfm cipher;
 		struct digest_tfm digest;
 		struct compress_tfm compress;
@@ -272,6 +309,10 @@ struct crypto_tfm {
 
 #define crypto_cipher crypto_tfm
 
+struct crypto_blkcipher {
+	struct crypto_tfm base;
+};
+
 enum {
 	CRYPTOA_UNSPEC,
 	CRYPTOA_ALG,
@@ -380,6 +421,144 @@ static inline unsigned int crypto_tfm_ctx_alignment(void)
 /*
  * API wrappers.
  */
+static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
+	struct crypto_tfm *tfm)
+{
+	return (struct crypto_blkcipher *)tfm;
+}
+
+static inline struct crypto_blkcipher *crypto_blkcipher_cast(
+	struct crypto_tfm *tfm)
+{
+	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
+	return __crypto_blkcipher_cast(tfm);
+}
+
+static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
+	const char *alg_name, u32 type, u32 mask)
+{
+	type &= ~CRYPTO_ALG_TYPE_MASK;
+	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
+	mask |= CRYPTO_ALG_TYPE_MASK;
+
+	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
+}
+
+static inline struct crypto_tfm *crypto_blkcipher_tfm(
+	struct crypto_blkcipher *tfm)
+{
+	return &tfm->base;
+}
+
+static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
+{
+	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
+}
+
+static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
+{
+	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
+}
+
+static inline struct blkcipher_tfm *crypto_blkcipher_crt(
+	struct crypto_blkcipher *tfm)
+{
+	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
+}
+
+static inline struct blkcipher_alg *crypto_blkcipher_alg(
+	struct crypto_blkcipher *tfm)
+{
+	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
+}
+
+static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
+{
+	return crypto_blkcipher_alg(tfm)->ivsize;
+}
+
+static inline unsigned int crypto_blkcipher_blocksize(
+	struct crypto_blkcipher *tfm)
+{
+	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
+}
+
+static inline unsigned int crypto_blkcipher_alignmask(
+	struct crypto_blkcipher *tfm)
+{
+	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
+}
+
+static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
+{
+	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
+}
+
+static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
+					      u32 flags)
+{
+	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
+}
+
+static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
+						u32 flags)
+{
+	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
+}
+
+static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
+					  const u8 *key, unsigned int keylen)
+{
+	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
+						 key, keylen);
+}
+
+static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
+					   struct scatterlist *dst,
+					   struct scatterlist *src,
+					   unsigned int nbytes)
+{
+	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
+	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
+}
+
+static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
+					      struct scatterlist *dst,
+					      struct scatterlist *src,
+					      unsigned int nbytes)
+{
+	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
+}
+
+static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
+					   struct scatterlist *dst,
+					   struct scatterlist *src,
+					   unsigned int nbytes)
+{
+	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
+	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
+}
+
+static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
+					      struct scatterlist *dst,
+					      struct scatterlist *src,
+					      unsigned int nbytes)
+{
+	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
+}
+
+static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
+					   const u8 *src, unsigned int len)
+{
+	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
+}
+
+static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
+					   u8 *dst, unsigned int len)
+{
+	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
+}
+
 static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
 {
 	return (struct crypto_cipher *)tfm;