Diffstat (limited to 'arch')
-rw-r--r--  arch/i386/crypto/Makefile                |    3
-rw-r--r--  arch/i386/crypto/aes.c                   |    3
-rw-r--r--  arch/i386/crypto/twofish-i586-asm.S      |  335
-rw-r--r--  arch/i386/crypto/twofish.c               |   97
-rw-r--r--  arch/s390/crypto/aes_s390.c              |  285
-rw-r--r--  arch/s390/crypto/crypt_s390.h            |    3
-rw-r--r--  arch/s390/crypto/des_s390.c              |  559
-rw-r--r--  arch/s390/crypto/sha1_s390.c             |    2
-rw-r--r--  arch/s390/crypto/sha256_s390.c           |    2
-rw-r--r--  arch/x86_64/crypto/Makefile              |    3
-rw-r--r--  arch/x86_64/crypto/aes.c                 |    5
-rw-r--r--  arch/x86_64/crypto/twofish-x86_64-asm.S  |  324
-rw-r--r--  arch/x86_64/crypto/twofish.c             |   97
13 files changed, 1429 insertions, 289 deletions
diff --git a/arch/i386/crypto/Makefile b/arch/i386/crypto/Makefile
index 103c353d0a63..3fd19af18e34 100644
--- a/arch/i386/crypto/Makefile
+++ b/arch/i386/crypto/Makefile
@@ -5,5 +5,8 @@
5# 5#
6 6
7obj-$(CONFIG_CRYPTO_AES_586) += aes-i586.o 7obj-$(CONFIG_CRYPTO_AES_586) += aes-i586.o
8obj-$(CONFIG_CRYPTO_TWOFISH_586) += twofish-i586.o
8 9
9aes-i586-y := aes-i586-asm.o aes.o 10aes-i586-y := aes-i586-asm.o aes.o
11twofish-i586-y := twofish-i586-asm.o twofish.o
12
diff --git a/arch/i386/crypto/aes.c b/arch/i386/crypto/aes.c
index d3806daa3de3..49aad9397f10 100644
--- a/arch/i386/crypto/aes.c
+++ b/arch/i386/crypto/aes.c
@@ -379,12 +379,13 @@ static void gen_tabs(void)
379} 379}
380 380
381static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 381static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
382 unsigned int key_len, u32 *flags) 382 unsigned int key_len)
383{ 383{
384 int i; 384 int i;
385 u32 ss[8]; 385 u32 ss[8];
386 struct aes_ctx *ctx = crypto_tfm_ctx(tfm); 386 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
387 const __le32 *key = (const __le32 *)in_key; 387 const __le32 *key = (const __le32 *)in_key;
388 u32 *flags = &tfm->crt_flags;
388 389
389 /* encryption schedule */ 390 /* encryption schedule */
390 391
diff --git a/arch/i386/crypto/twofish-i586-asm.S b/arch/i386/crypto/twofish-i586-asm.S
new file mode 100644
index 000000000000..39b98ed2c1b9
--- /dev/null
+++ b/arch/i386/crypto/twofish-i586-asm.S
@@ -0,0 +1,335 @@
1/***************************************************************************
2* Copyright (C) 2006 by Joachim Fritschi, <jfritschi@freenet.de> *
3* *
4* This program is free software; you can redistribute it and/or modify *
5* it under the terms of the GNU General Public License as published by *
6* the Free Software Foundation; either version 2 of the License, or *
7* (at your option) any later version. *
8* *
9* This program is distributed in the hope that it will be useful, *
10* but WITHOUT ANY WARRANTY; without even the implied warranty of *
11* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
12* GNU General Public License for more details. *
13* *
14* You should have received a copy of the GNU General Public License *
15* along with this program; if not, write to the *
16* Free Software Foundation, Inc., *
17* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
18***************************************************************************/
19
20.file "twofish-i586-asm.S"
21.text
22
23#include <asm/asm-offsets.h>
24
25/* return address at 0 */
26
27#define in_blk 12 /* input byte array address parameter*/
28#define out_blk 8 /* output byte array address parameter*/
29#define tfm 4 /* Twofish context structure */
30
31#define a_offset 0
32#define b_offset 4
33#define c_offset 8
34#define d_offset 12
35
36/* Structure of the crypto context struct*/
37
38#define s0 0 /* S0 Array 256 Words each */
39#define s1 1024 /* S1 Array */
40#define s2 2048 /* S2 Array */
41#define s3 3072 /* S3 Array */
42#define w 4096 /* 8 whitening keys (word) */
43#define k 4128 /* key 1-32 ( word ) */
44
45/* define a few register aliases to allow macro substitution */
46
47#define R0D %eax
48#define R0B %al
49#define R0H %ah
50
51#define R1D %ebx
52#define R1B %bl
53#define R1H %bh
54
55#define R2D %ecx
56#define R2B %cl
57#define R2H %ch
58
59#define R3D %edx
60#define R3B %dl
61#define R3H %dh
62
63
64/* performs input whitening */
65#define input_whitening(src,context,offset)\
66 xor w+offset(context), src;
67
68/* performs output whitening */
69#define output_whitening(src,context,offset)\
70 xor w+16+offset(context), src;
71
72/*
73 * a: input register containing a (rotated 16)
74 * b: input register containing b
75 * c: input register containing c
76 * d: input register containing d (already rol $1)
77 * operations on a and b are interleaved to increase performance
78 */
79#define encrypt_round(a,b,c,d,round)\
80 push d ## D;\
81 movzx b ## B, %edi;\
82 mov s1(%ebp,%edi,4),d ## D;\
83 movzx a ## B, %edi;\
84 mov s2(%ebp,%edi,4),%esi;\
85 movzx b ## H, %edi;\
86 ror $16, b ## D;\
87 xor s2(%ebp,%edi,4),d ## D;\
88 movzx a ## H, %edi;\
89 ror $16, a ## D;\
90 xor s3(%ebp,%edi,4),%esi;\
91 movzx b ## B, %edi;\
92 xor s3(%ebp,%edi,4),d ## D;\
93 movzx a ## B, %edi;\
94 xor (%ebp,%edi,4), %esi;\
95 movzx b ## H, %edi;\
96 ror $15, b ## D;\
97 xor (%ebp,%edi,4), d ## D;\
98 movzx a ## H, %edi;\
99 xor s1(%ebp,%edi,4),%esi;\
100 pop %edi;\
101 add d ## D, %esi;\
102 add %esi, d ## D;\
103 add k+round(%ebp), %esi;\
104 xor %esi, c ## D;\
105 rol $15, c ## D;\
106 add k+4+round(%ebp),d ## D;\
107 xor %edi, d ## D;
108
109/*
110 * a: input register containing a (rotated 16)
111 * b: input register containing b
112 * c: input register containing c
113 * d: input register containing d (already rol $1)
114 * operations on a and b are interleaved to increase performance
115 * last round has different rotations for the output preparation
116 */
117#define encrypt_last_round(a,b,c,d,round)\
118 push d ## D;\
119 movzx b ## B, %edi;\
120 mov s1(%ebp,%edi,4),d ## D;\
121 movzx a ## B, %edi;\
122 mov s2(%ebp,%edi,4),%esi;\
123 movzx b ## H, %edi;\
124 ror $16, b ## D;\
125 xor s2(%ebp,%edi,4),d ## D;\
126 movzx a ## H, %edi;\
127 ror $16, a ## D;\
128 xor s3(%ebp,%edi,4),%esi;\
129 movzx b ## B, %edi;\
130 xor s3(%ebp,%edi,4),d ## D;\
131 movzx a ## B, %edi;\
132 xor (%ebp,%edi,4), %esi;\
133 movzx b ## H, %edi;\
134 ror $16, b ## D;\
135 xor (%ebp,%edi,4), d ## D;\
136 movzx a ## H, %edi;\
137 xor s1(%ebp,%edi,4),%esi;\
138 pop %edi;\
139 add d ## D, %esi;\
140 add %esi, d ## D;\
141 add k+round(%ebp), %esi;\
142 xor %esi, c ## D;\
143 ror $1, c ## D;\
144 add k+4+round(%ebp),d ## D;\
145 xor %edi, d ## D;
146
147/*
148 * a: input register containing a
149 * b: input register containing b (rotated 16)
150 * c: input register containing c
151 * d: input register containing d (already rol $1)
152 * operations on a and b are interleaved to increase performance
153 */
154#define decrypt_round(a,b,c,d,round)\
155 push c ## D;\
156 movzx a ## B, %edi;\
157 mov (%ebp,%edi,4), c ## D;\
158 movzx b ## B, %edi;\
159 mov s3(%ebp,%edi,4),%esi;\
160 movzx a ## H, %edi;\
161 ror $16, a ## D;\
162 xor s1(%ebp,%edi,4),c ## D;\
163 movzx b ## H, %edi;\
164 ror $16, b ## D;\
165 xor (%ebp,%edi,4), %esi;\
166 movzx a ## B, %edi;\
167 xor s2(%ebp,%edi,4),c ## D;\
168 movzx b ## B, %edi;\
169 xor s1(%ebp,%edi,4),%esi;\
170 movzx a ## H, %edi;\
171 ror $15, a ## D;\
172 xor s3(%ebp,%edi,4),c ## D;\
173 movzx b ## H, %edi;\
174 xor s2(%ebp,%edi,4),%esi;\
175 pop %edi;\
176 add %esi, c ## D;\
177 add c ## D, %esi;\
178 add k+round(%ebp), c ## D;\
179 xor %edi, c ## D;\
180 add k+4+round(%ebp),%esi;\
181 xor %esi, d ## D;\
182 rol $15, d ## D;
183
184/*
185 * a: input register containing a
186 * b: input register containing b (rotated 16)
187 * c: input register containing c
188 * d: input register containing d (already rol $1)
189 * operations on a and b are interleaved to increase performance
190 * last round has different rotations for the output preparation
191 */
192#define decrypt_last_round(a,b,c,d,round)\
193 push c ## D;\
194 movzx a ## B, %edi;\
195 mov (%ebp,%edi,4), c ## D;\
196 movzx b ## B, %edi;\
197 mov s3(%ebp,%edi,4),%esi;\
198 movzx a ## H, %edi;\
199 ror $16, a ## D;\
200 xor s1(%ebp,%edi,4),c ## D;\
201 movzx b ## H, %edi;\
202 ror $16, b ## D;\
203 xor (%ebp,%edi,4), %esi;\
204 movzx a ## B, %edi;\
205 xor s2(%ebp,%edi,4),c ## D;\
206 movzx b ## B, %edi;\
207 xor s1(%ebp,%edi,4),%esi;\
208 movzx a ## H, %edi;\
209 ror $16, a ## D;\
210 xor s3(%ebp,%edi,4),c ## D;\
211 movzx b ## H, %edi;\
212 xor s2(%ebp,%edi,4),%esi;\
213 pop %edi;\
214 add %esi, c ## D;\
215 add c ## D, %esi;\
216 add k+round(%ebp), c ## D;\
217 xor %edi, c ## D;\
218 add k+4+round(%ebp),%esi;\
219 xor %esi, d ## D;\
220 ror $1, d ## D;
221
222.align 4
223.global twofish_enc_blk
224.global twofish_dec_blk
225
226twofish_enc_blk:
227 push %ebp /* save registers according to calling convention*/
228 push %ebx
229 push %esi
230 push %edi
231
232 mov tfm + 16(%esp), %ebp /* abuse the base pointer: set new base pointer to the crypto tfm */
233 add $crypto_tfm_ctx_offset, %ebp /* ctx address */
234 mov in_blk+16(%esp),%edi /* input address in edi */
235
236 mov (%edi), %eax
237 mov b_offset(%edi), %ebx
238 mov c_offset(%edi), %ecx
239 mov d_offset(%edi), %edx
240 input_whitening(%eax,%ebp,a_offset)
241 ror $16, %eax
242 input_whitening(%ebx,%ebp,b_offset)
243 input_whitening(%ecx,%ebp,c_offset)
244 input_whitening(%edx,%ebp,d_offset)
245 rol $1, %edx
246
247 encrypt_round(R0,R1,R2,R3,0);
248 encrypt_round(R2,R3,R0,R1,8);
249 encrypt_round(R0,R1,R2,R3,2*8);
250 encrypt_round(R2,R3,R0,R1,3*8);
251 encrypt_round(R0,R1,R2,R3,4*8);
252 encrypt_round(R2,R3,R0,R1,5*8);
253 encrypt_round(R0,R1,R2,R3,6*8);
254 encrypt_round(R2,R3,R0,R1,7*8);
255 encrypt_round(R0,R1,R2,R3,8*8);
256 encrypt_round(R2,R3,R0,R1,9*8);
257 encrypt_round(R0,R1,R2,R3,10*8);
258 encrypt_round(R2,R3,R0,R1,11*8);
259 encrypt_round(R0,R1,R2,R3,12*8);
260 encrypt_round(R2,R3,R0,R1,13*8);
261 encrypt_round(R0,R1,R2,R3,14*8);
262 encrypt_last_round(R2,R3,R0,R1,15*8);
263
264 output_whitening(%eax,%ebp,c_offset)
265 output_whitening(%ebx,%ebp,d_offset)
266 output_whitening(%ecx,%ebp,a_offset)
267 output_whitening(%edx,%ebp,b_offset)
268 mov out_blk+16(%esp),%edi;
269 mov %eax, c_offset(%edi)
270 mov %ebx, d_offset(%edi)
271 mov %ecx, (%edi)
272 mov %edx, b_offset(%edi)
273
274 pop %edi
275 pop %esi
276 pop %ebx
277 pop %ebp
278 mov $1, %eax
279 ret
280
281twofish_dec_blk:
282 push %ebp /* save registers according to calling convention*/
283 push %ebx
284 push %esi
285 push %edi
286
287
288 mov tfm + 16(%esp), %ebp /* abuse the base pointer: set new base pointer to the crypto tfm */
289 add $crypto_tfm_ctx_offset, %ebp /* ctx address */
290 mov in_blk+16(%esp),%edi /* input address in edi */
291
292 mov (%edi), %eax
293 mov b_offset(%edi), %ebx
294 mov c_offset(%edi), %ecx
295 mov d_offset(%edi), %edx
296 output_whitening(%eax,%ebp,a_offset)
297 output_whitening(%ebx,%ebp,b_offset)
298 ror $16, %ebx
299 output_whitening(%ecx,%ebp,c_offset)
300 output_whitening(%edx,%ebp,d_offset)
301 rol $1, %ecx
302
303 decrypt_round(R0,R1,R2,R3,15*8);
304 decrypt_round(R2,R3,R0,R1,14*8);
305 decrypt_round(R0,R1,R2,R3,13*8);
306 decrypt_round(R2,R3,R0,R1,12*8);
307 decrypt_round(R0,R1,R2,R3,11*8);
308 decrypt_round(R2,R3,R0,R1,10*8);
309 decrypt_round(R0,R1,R2,R3,9*8);
310 decrypt_round(R2,R3,R0,R1,8*8);
311 decrypt_round(R0,R1,R2,R3,7*8);
312 decrypt_round(R2,R3,R0,R1,6*8);
313 decrypt_round(R0,R1,R2,R3,5*8);
314 decrypt_round(R2,R3,R0,R1,4*8);
315 decrypt_round(R0,R1,R2,R3,3*8);
316 decrypt_round(R2,R3,R0,R1,2*8);
317 decrypt_round(R0,R1,R2,R3,1*8);
318 decrypt_last_round(R2,R3,R0,R1,0);
319
320 input_whitening(%eax,%ebp,c_offset)
321 input_whitening(%ebx,%ebp,d_offset)
322 input_whitening(%ecx,%ebp,a_offset)
323 input_whitening(%edx,%ebp,b_offset)
324 mov out_blk+16(%esp),%edi;
325 mov %eax, c_offset(%edi)
326 mov %ebx, d_offset(%edi)
327 mov %ecx, (%edi)
328 mov %edx, b_offset(%edi)
329
330 pop %edi
331 pop %esi
332 pop %ebx
333 pop %ebp
334 mov $1, %eax
335 ret
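For orientation, the round macros above implement the standard Twofish round. Below is a C sketch of what encrypt_round() computes, assuming the canonical cipher from the Schneier et al. paper (the assembler keeps the block words in a rotated representation between rounds, which is why its rotate constants differ from the textbook 1 and 8; rol32/ror32 are the usual <linux/bitops.h> helpers, and t0/t1/r are u32 locals). s0..s3 are the key-dependent 256-entry u32 tables and k[] the round keys, per the context layout defined at the top of the file:

	static inline u32 g(const u32 *s0, const u32 *s1, const u32 *s2,
			    const u32 *s3, u32 x)
	{
		return s0[x & 0xff] ^ s1[(x >> 8) & 0xff] ^
		       s2[(x >> 16) & 0xff] ^ s3[x >> 24];
	}

	/* one encryption round r on block words a, b, c, d */
	t0 = g(s0, s1, s2, s3, a);
	t1 = g(s0, s1, s2, s3, rol32(b, 8));
	t0 += t1;			/* pseudo-Hadamard transform */
	t1 += t0;			/* now g(a) + 2*g(rol32(b, 8)) */
	c = ror32(c ^ (t0 + k[2 * r + 8]), 1);
	d = rol32(d, 1) ^ (t1 + k[2 * r + 9]);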
diff --git a/arch/i386/crypto/twofish.c b/arch/i386/crypto/twofish.c
new file mode 100644
index 000000000000..e3004dfe9c7a
--- /dev/null
+++ b/arch/i386/crypto/twofish.c
@@ -0,0 +1,97 @@
1/*
2 * Glue Code for optimized 586 assembler version of TWOFISH
3 *
4 * Originally Twofish for GPG
5 * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998
6 * 256-bit key length added March 20, 1999
7 * Some modifications to reduce the text size by Werner Koch, April, 1998
8 * Ported to the kerneli patch by Marc Mutz <Marc@Mutz.com>
9 * Ported to CryptoAPI by Colin Slater <hoho@tacomeat.net>
10 *
11 * The original author has disclaimed all copyright interest in this
12 * code and thus put it in the public domain. The subsequent authors
13 * have put this under the GNU General Public License.
14 *
15 * This program is free software; you can redistribute it and/or modify
16 * it under the terms of the GNU General Public License as published by
17 * the Free Software Foundation; either version 2 of the License, or
18 * (at your option) any later version.
19 *
20 * This program is distributed in the hope that it will be useful,
21 * but WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 * GNU General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, write to the Free Software
27 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
28 * USA
29 *
30 * This code is a "clean room" implementation, written from the paper
31 * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey,
32 * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available
33 * through http://www.counterpane.com/twofish.html
34 *
35 * For background information on multiplication in finite fields, used for
36 * the matrix operations in the key schedule, see the book _Contemporary
37 * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the
38 * Third Edition.
39 */
40
41#include <crypto/twofish.h>
42#include <linux/crypto.h>
43#include <linux/init.h>
44#include <linux/module.h>
45#include <linux/types.h>
46
47
48asmlinkage void twofish_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
49asmlinkage void twofish_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
50
51static void twofish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
52{
53 twofish_enc_blk(tfm, dst, src);
54}
55
56static void twofish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
57{
58 twofish_dec_blk(tfm, dst, src);
59}
60
61static struct crypto_alg alg = {
62 .cra_name = "twofish",
63 .cra_driver_name = "twofish-i586",
64 .cra_priority = 200,
65 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
66 .cra_blocksize = TF_BLOCK_SIZE,
67 .cra_ctxsize = sizeof(struct twofish_ctx),
68 .cra_alignmask = 3,
69 .cra_module = THIS_MODULE,
70 .cra_list = LIST_HEAD_INIT(alg.cra_list),
71 .cra_u = {
72 .cipher = {
73 .cia_min_keysize = TF_MIN_KEY_SIZE,
74 .cia_max_keysize = TF_MAX_KEY_SIZE,
75 .cia_setkey = twofish_setkey,
76 .cia_encrypt = twofish_encrypt,
77 .cia_decrypt = twofish_decrypt
78 }
79 }
80};
81
82static int __init init(void)
83{
84 return crypto_register_alg(&alg);
85}
86
87static void __exit fini(void)
88{
89 crypto_unregister_alg(&alg);
90}
91
92module_init(init);
93module_exit(fini);
94
95MODULE_LICENSE("GPL");
96MODULE_DESCRIPTION ("Twofish Cipher Algorithm, i586 asm optimized");
97MODULE_ALIAS("twofish");
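Once registered, the cipher is reachable by name. A hedged usage sketch (error handling trimmed; the single-block helpers assume the crypto API of this kernel series):

	struct crypto_cipher *tfm;
	u8 key[16], in[TF_BLOCK_SIZE], out[TF_BLOCK_SIZE];

	/* resolves to twofish-i586 when its cra_priority (200) wins */
	tfm = crypto_alloc_cipher("twofish", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	crypto_cipher_setkey(tfm, key, sizeof(key));	/* 128-bit key */
	crypto_cipher_encrypt_one(tfm, out, in);	/* one 16-byte block */
	crypto_free_cipher(tfm);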
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 5713c7e5bd16..15c9eec02928 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -16,9 +16,9 @@
16 * 16 *
17 */ 17 */
18 18
19#include <crypto/algapi.h>
19#include <linux/module.h> 20#include <linux/module.h>
20#include <linux/init.h> 21#include <linux/init.h>
21#include <linux/crypto.h>
22#include "crypt_s390.h" 22#include "crypt_s390.h"
23 23
24#define AES_MIN_KEY_SIZE 16 24#define AES_MIN_KEY_SIZE 16
@@ -34,13 +34,16 @@ int has_aes_256 = 0;
34struct s390_aes_ctx { 34struct s390_aes_ctx {
35 u8 iv[AES_BLOCK_SIZE]; 35 u8 iv[AES_BLOCK_SIZE];
36 u8 key[AES_MAX_KEY_SIZE]; 36 u8 key[AES_MAX_KEY_SIZE];
37 long enc;
38 long dec;
37 int key_len; 39 int key_len;
38}; 40};
39 41
40static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 42static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
41 unsigned int key_len, u32 *flags) 43 unsigned int key_len)
42{ 44{
43 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); 45 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
46 u32 *flags = &tfm->crt_flags;
44 47
45 switch (key_len) { 48 switch (key_len) {
46 case 16: 49 case 16:
@@ -110,133 +113,206 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
110 } 113 }
111} 114}
112 115
113static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
114 const u8 *in, unsigned int nbytes)
115{
116 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
117 int ret;
118 116
119 /* only use complete blocks */ 117static struct crypto_alg aes_alg = {
120 nbytes &= ~(AES_BLOCK_SIZE - 1); 118 .cra_name = "aes",
119 .cra_driver_name = "aes-s390",
120 .cra_priority = CRYPT_S390_PRIORITY,
121 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
122 .cra_blocksize = AES_BLOCK_SIZE,
123 .cra_ctxsize = sizeof(struct s390_aes_ctx),
124 .cra_module = THIS_MODULE,
125 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
126 .cra_u = {
127 .cipher = {
128 .cia_min_keysize = AES_MIN_KEY_SIZE,
129 .cia_max_keysize = AES_MAX_KEY_SIZE,
130 .cia_setkey = aes_set_key,
131 .cia_encrypt = aes_encrypt,
132 .cia_decrypt = aes_decrypt,
133 }
134 }
135};
136
137static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
138 unsigned int key_len)
139{
140 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
121 141
122 switch (sctx->key_len) { 142 switch (key_len) {
123 case 16: 143 case 16:
124 ret = crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes); 144 sctx->enc = KM_AES_128_ENCRYPT;
125 BUG_ON((ret < 0) || (ret != nbytes)); 145 sctx->dec = KM_AES_128_DECRYPT;
126 break; 146 break;
127 case 24: 147 case 24:
128 ret = crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, nbytes); 148 sctx->enc = KM_AES_192_ENCRYPT;
129 BUG_ON((ret < 0) || (ret != nbytes)); 149 sctx->dec = KM_AES_192_DECRYPT;
130 break; 150 break;
131 case 32: 151 case 32:
132 ret = crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, nbytes); 152 sctx->enc = KM_AES_256_ENCRYPT;
133 BUG_ON((ret < 0) || (ret != nbytes)); 153 sctx->dec = KM_AES_256_DECRYPT;
134 break; 154 break;
135 } 155 }
136 return nbytes; 156
157 return aes_set_key(tfm, in_key, key_len);
137} 158}
138 159
139static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out, 160static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
140 const u8 *in, unsigned int nbytes) 161 struct blkcipher_walk *walk)
141{ 162{
142 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm); 163 int ret = blkcipher_walk_virt(desc, walk);
143 int ret; 164 unsigned int nbytes;
144 165
145 /* only use complete blocks */ 166 while ((nbytes = walk->nbytes)) {
146 nbytes &= ~(AES_BLOCK_SIZE - 1); 167 /* only use complete blocks */
168 unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
169 u8 *out = walk->dst.virt.addr;
170 u8 *in = walk->src.virt.addr;
147 171
148 switch (sctx->key_len) { 172 ret = crypt_s390_km(func, param, out, in, n);
149 case 16: 173 BUG_ON((ret < 0) || (ret != n));
150 ret = crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, nbytes); 174
151 BUG_ON((ret < 0) || (ret != nbytes)); 175 nbytes &= AES_BLOCK_SIZE - 1;
152 break; 176 ret = blkcipher_walk_done(desc, walk, nbytes);
153 case 24:
154 ret = crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, nbytes);
155 BUG_ON((ret < 0) || (ret != nbytes));
156 break;
157 case 32:
158 ret = crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, nbytes);
159 BUG_ON((ret < 0) || (ret != nbytes));
160 break;
161 } 177 }
162 return nbytes; 178
179 return ret;
163} 180}
164 181
165static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out, 182static int ecb_aes_encrypt(struct blkcipher_desc *desc,
166 const u8 *in, unsigned int nbytes) 183 struct scatterlist *dst, struct scatterlist *src,
184 unsigned int nbytes)
167{ 185{
168 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm); 186 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
169 int ret; 187 struct blkcipher_walk walk;
170 188
171 /* only use complete blocks */ 189 blkcipher_walk_init(&walk, dst, src, nbytes);
172 nbytes &= ~(AES_BLOCK_SIZE - 1); 190 return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
191}
173 192
174 memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE); 193static int ecb_aes_decrypt(struct blkcipher_desc *desc,
175 switch (sctx->key_len) { 194 struct scatterlist *dst, struct scatterlist *src,
195 unsigned int nbytes)
196{
197 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
198 struct blkcipher_walk walk;
199
200 blkcipher_walk_init(&walk, dst, src, nbytes);
201 return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
202}
203
204static struct crypto_alg ecb_aes_alg = {
205 .cra_name = "ecb(aes)",
206 .cra_driver_name = "ecb-aes-s390",
207 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
208 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
209 .cra_blocksize = AES_BLOCK_SIZE,
210 .cra_ctxsize = sizeof(struct s390_aes_ctx),
211 .cra_type = &crypto_blkcipher_type,
212 .cra_module = THIS_MODULE,
213 .cra_list = LIST_HEAD_INIT(ecb_aes_alg.cra_list),
214 .cra_u = {
215 .blkcipher = {
216 .min_keysize = AES_MIN_KEY_SIZE,
217 .max_keysize = AES_MAX_KEY_SIZE,
218 .setkey = ecb_aes_set_key,
219 .encrypt = ecb_aes_encrypt,
220 .decrypt = ecb_aes_decrypt,
221 }
222 }
223};
224
225static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
226 unsigned int key_len)
227{
228 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
229
230 switch (key_len) {
176 case 16: 231 case 16:
177 ret = crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in, nbytes); 232 sctx->enc = KMC_AES_128_ENCRYPT;
178 BUG_ON((ret < 0) || (ret != nbytes)); 233 sctx->dec = KMC_AES_128_DECRYPT;
179 break; 234 break;
180 case 24: 235 case 24:
181 ret = crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in, nbytes); 236 sctx->enc = KMC_AES_192_ENCRYPT;
182 BUG_ON((ret < 0) || (ret != nbytes)); 237 sctx->dec = KMC_AES_192_DECRYPT;
183 break; 238 break;
184 case 32: 239 case 32:
185 ret = crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in, nbytes); 240 sctx->enc = KMC_AES_256_ENCRYPT;
186 BUG_ON((ret < 0) || (ret != nbytes)); 241 sctx->dec = KMC_AES_256_DECRYPT;
187 break; 242 break;
188 } 243 }
189 memcpy(desc->info, &sctx->iv, AES_BLOCK_SIZE);
190 244
191 return nbytes; 245 return aes_set_key(tfm, in_key, key_len);
192} 246}
193 247
194static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out, 248static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
195 const u8 *in, unsigned int nbytes) 249 struct blkcipher_walk *walk)
196{ 250{
197 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm); 251 int ret = blkcipher_walk_virt(desc, walk);
198 int ret; 252 unsigned int nbytes = walk->nbytes;
199 253
200 /* only use complete blocks */ 254 if (!nbytes)
201 nbytes &= ~(AES_BLOCK_SIZE - 1); 255 goto out;
202 256
203 memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE); 257 memcpy(param, walk->iv, AES_BLOCK_SIZE);
204 switch (sctx->key_len) { 258 do {
205 case 16: 259 /* only use complete blocks */
206 ret = crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in, nbytes); 260 unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
207 BUG_ON((ret < 0) || (ret != nbytes)); 261 u8 *out = walk->dst.virt.addr;
208 break; 262 u8 *in = walk->src.virt.addr;
209 case 24: 263
210 ret = crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in, nbytes); 264 ret = crypt_s390_kmc(func, param, out, in, n);
211 BUG_ON((ret < 0) || (ret != nbytes)); 265 BUG_ON((ret < 0) || (ret != n));
212 break; 266
213 case 32: 267 nbytes &= AES_BLOCK_SIZE - 1;
214 ret = crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in, nbytes); 268 ret = blkcipher_walk_done(desc, walk, nbytes);
215 BUG_ON((ret < 0) || (ret != nbytes)); 269 } while ((nbytes = walk->nbytes));
216 break; 270 memcpy(walk->iv, param, AES_BLOCK_SIZE);
217 } 271
218 return nbytes; 272out:
273 return ret;
219} 274}
220 275
276static int cbc_aes_encrypt(struct blkcipher_desc *desc,
277 struct scatterlist *dst, struct scatterlist *src,
278 unsigned int nbytes)
279{
280 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
281 struct blkcipher_walk walk;
221 282
222static struct crypto_alg aes_alg = { 283 blkcipher_walk_init(&walk, dst, src, nbytes);
223 .cra_name = "aes", 284 return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
224 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 285}
286
287static int cbc_aes_decrypt(struct blkcipher_desc *desc,
288 struct scatterlist *dst, struct scatterlist *src,
289 unsigned int nbytes)
290{
291 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
292 struct blkcipher_walk walk;
293
294 blkcipher_walk_init(&walk, dst, src, nbytes);
295 return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
296}
297
298static struct crypto_alg cbc_aes_alg = {
299 .cra_name = "cbc(aes)",
300 .cra_driver_name = "cbc-aes-s390",
301 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
302 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
225 .cra_blocksize = AES_BLOCK_SIZE, 303 .cra_blocksize = AES_BLOCK_SIZE,
226 .cra_ctxsize = sizeof(struct s390_aes_ctx), 304 .cra_ctxsize = sizeof(struct s390_aes_ctx),
305 .cra_type = &crypto_blkcipher_type,
227 .cra_module = THIS_MODULE, 306 .cra_module = THIS_MODULE,
228 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), 307 .cra_list = LIST_HEAD_INIT(cbc_aes_alg.cra_list),
229 .cra_u = { 308 .cra_u = {
230 .cipher = { 309 .blkcipher = {
231 .cia_min_keysize = AES_MIN_KEY_SIZE, 310 .min_keysize = AES_MIN_KEY_SIZE,
232 .cia_max_keysize = AES_MAX_KEY_SIZE, 311 .max_keysize = AES_MAX_KEY_SIZE,
233 .cia_setkey = aes_set_key, 312 .ivsize = AES_BLOCK_SIZE,
234 .cia_encrypt = aes_encrypt, 313 .setkey = cbc_aes_set_key,
235 .cia_decrypt = aes_decrypt, 314 .encrypt = cbc_aes_encrypt,
236 .cia_encrypt_ecb = aes_encrypt_ecb, 315 .decrypt = cbc_aes_decrypt,
237 .cia_decrypt_ecb = aes_decrypt_ecb,
238 .cia_encrypt_cbc = aes_encrypt_cbc,
239 .cia_decrypt_cbc = aes_decrypt_cbc,
240 } 316 }
241 } 317 }
242}; 318};
@@ -256,13 +332,40 @@ static int __init aes_init(void)
256 return -ENOSYS; 332 return -ENOSYS;
257 333
258 ret = crypto_register_alg(&aes_alg); 334 ret = crypto_register_alg(&aes_alg);
259 if (ret != 0) 335 if (ret != 0) {
260 printk(KERN_INFO "crypt_s390: aes_s390 couldn't be loaded.\n"); 336 printk(KERN_INFO "crypt_s390: aes-s390 couldn't be loaded.\n");
337 goto aes_err;
338 }
339
340 ret = crypto_register_alg(&ecb_aes_alg);
341 if (ret != 0) {
342 printk(KERN_INFO
343 "crypt_s390: ecb-aes-s390 couldn't be loaded.\n");
344 goto ecb_aes_err;
345 }
346
347 ret = crypto_register_alg(&cbc_aes_alg);
348 if (ret != 0) {
349 printk(KERN_INFO
350 "crypt_s390: cbc-aes-s390 couldn't be loaded.\n");
351 goto cbc_aes_err;
352 }
353
354out:
261 return ret; 355 return ret;
356
357cbc_aes_err:
358 crypto_unregister_alg(&ecb_aes_alg);
359ecb_aes_err:
360 crypto_unregister_alg(&aes_alg);
361aes_err:
362 goto out;
262} 363}
263 364
264static void __exit aes_fini(void) 365static void __exit aes_fini(void)
265{ 366{
367 crypto_unregister_alg(&cbc_aes_alg);
368 crypto_unregister_alg(&ecb_aes_alg);
266 crypto_unregister_alg(&aes_alg); 369 crypto_unregister_alg(&aes_alg);
267} 370}
268 371
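With this conversion, ECB and CBC stop being cia_* hooks on the cipher and become standalone blkcipher algorithms that walk scatterlists. A sketch of how a caller might drive the new "cbc(aes)" instance, assuming the blkcipher API introduced in this series (key/iv/buffer setup and error handling trimmed):

	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;

	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	desc.tfm = tfm;
	desc.flags = 0;
	crypto_blkcipher_setkey(tfm, key, 16);
	crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
	sg_init_one(&sg, buf, len);	/* len: multiple of AES_BLOCK_SIZE */
	crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
	crypto_free_blkcipher(tfm);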
diff --git a/arch/s390/crypto/crypt_s390.h b/arch/s390/crypto/crypt_s390.h
index d1c259a7fe33..efd836c2e4a6 100644
--- a/arch/s390/crypto/crypt_s390.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -20,6 +20,9 @@
20#define CRYPT_S390_OP_MASK 0xFF00 20#define CRYPT_S390_OP_MASK 0xFF00
21#define CRYPT_S390_FUNC_MASK 0x00FF 21#define CRYPT_S390_FUNC_MASK 0x00FF
22 22
23#define CRYPT_S390_PRIORITY 300
24#define CRYPT_S390_COMPOSITE_PRIORITY 400
25
23/* s390 cryptographic operations */ 26/* s390 cryptographic operations */
24enum crypt_s390_operations { 27enum crypt_s390_operations {
25 CRYPT_S390_KM = 0x0100, 28 CRYPT_S390_KM = 0x0100,
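These two constants feed the cra_priority fields added throughout this patch: when several algorithms register under the same cra_name (for example "aes"), the API prefers the highest priority, so the CPACF-backed s390 implementations (300, and 400 for the composite ecb/cbc modes) are chosen ahead of the generic C versions, which register with lower priority.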
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index b3f7496a79b4..2aba04852fe3 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -13,9 +13,10 @@
13 * (at your option) any later version. 13 * (at your option) any later version.
14 * 14 *
15 */ 15 */
16
17#include <crypto/algapi.h>
16#include <linux/init.h> 18#include <linux/init.h>
17#include <linux/module.h> 19#include <linux/module.h>
18#include <linux/crypto.h>
19 20
20#include "crypt_s390.h" 21#include "crypt_s390.h"
21#include "crypto_des.h" 22#include "crypto_des.h"
@@ -45,9 +46,10 @@ struct crypt_s390_des3_192_ctx {
45}; 46};
46 47
47static int des_setkey(struct crypto_tfm *tfm, const u8 *key, 48static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
48 unsigned int keylen, u32 *flags) 49 unsigned int keylen)
49{ 50{
50 struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm); 51 struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
52 u32 *flags = &tfm->crt_flags;
51 int ret; 53 int ret;
52 54
53 /* test if key is valid (not a weak key) */ 55 /* test if key is valid (not a weak key) */
@@ -71,85 +73,159 @@ static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
71 crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE); 73 crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
72} 74}
73 75
74static unsigned int des_encrypt_ecb(const struct cipher_desc *desc, u8 *out, 76static struct crypto_alg des_alg = {
75 const u8 *in, unsigned int nbytes) 77 .cra_name = "des",
78 .cra_driver_name = "des-s390",
79 .cra_priority = CRYPT_S390_PRIORITY,
80 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
81 .cra_blocksize = DES_BLOCK_SIZE,
82 .cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
83 .cra_module = THIS_MODULE,
84 .cra_list = LIST_HEAD_INIT(des_alg.cra_list),
85 .cra_u = {
86 .cipher = {
87 .cia_min_keysize = DES_KEY_SIZE,
88 .cia_max_keysize = DES_KEY_SIZE,
89 .cia_setkey = des_setkey,
90 .cia_encrypt = des_encrypt,
91 .cia_decrypt = des_decrypt,
92 }
93 }
94};
95
96static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
97 void *param, struct blkcipher_walk *walk)
76{ 98{
77 struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm); 99 int ret = blkcipher_walk_virt(desc, walk);
78 int ret; 100 unsigned int nbytes;
101
102 while ((nbytes = walk->nbytes)) {
103 /* only use complete blocks */
104 unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
105 u8 *out = walk->dst.virt.addr;
106 u8 *in = walk->src.virt.addr;
79 107
80 /* only use complete blocks */ 108 ret = crypt_s390_km(func, param, out, in, n);
81 nbytes &= ~(DES_BLOCK_SIZE - 1); 109 BUG_ON((ret < 0) || (ret != n));
82 ret = crypt_s390_km(KM_DEA_ENCRYPT, sctx->key, out, in, nbytes);
83 BUG_ON((ret < 0) || (ret != nbytes));
84 110
85 return nbytes; 111 nbytes &= DES_BLOCK_SIZE - 1;
112 ret = blkcipher_walk_done(desc, walk, nbytes);
113 }
114
115 return ret;
86} 116}
87 117
88static unsigned int des_decrypt_ecb(const struct cipher_desc *desc, u8 *out, 118static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
89 const u8 *in, unsigned int nbytes) 119 void *param, struct blkcipher_walk *walk)
90{ 120{
91 struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm); 121 int ret = blkcipher_walk_virt(desc, walk);
92 int ret; 122 unsigned int nbytes = walk->nbytes;
123
124 if (!nbytes)
125 goto out;
126
127 memcpy(param, walk->iv, DES_BLOCK_SIZE);
128 do {
129 /* only use complete blocks */
130 unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
131 u8 *out = walk->dst.virt.addr;
132 u8 *in = walk->src.virt.addr;
93 133
94 /* only use complete blocks */ 134 ret = crypt_s390_kmc(func, param, out, in, n);
95 nbytes &= ~(DES_BLOCK_SIZE - 1); 135 BUG_ON((ret < 0) || (ret != n));
96 ret = crypt_s390_km(KM_DEA_DECRYPT, sctx->key, out, in, nbytes);
97 BUG_ON((ret < 0) || (ret != nbytes));
98 136
99 return nbytes; 137 nbytes &= DES_BLOCK_SIZE - 1;
138 ret = blkcipher_walk_done(desc, walk, nbytes);
139 } while ((nbytes = walk->nbytes));
140 memcpy(walk->iv, param, DES_BLOCK_SIZE);
141
142out:
143 return ret;
100} 144}
101 145
102static unsigned int des_encrypt_cbc(const struct cipher_desc *desc, u8 *out, 146static int ecb_des_encrypt(struct blkcipher_desc *desc,
103 const u8 *in, unsigned int nbytes) 147 struct scatterlist *dst, struct scatterlist *src,
148 unsigned int nbytes)
104{ 149{
105 struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm); 150 struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
106 int ret; 151 struct blkcipher_walk walk;
107 152
108 /* only use complete blocks */ 153 blkcipher_walk_init(&walk, dst, src, nbytes);
109 nbytes &= ~(DES_BLOCK_SIZE - 1); 154 return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, sctx->key, &walk);
155}
110 156
111 memcpy(sctx->iv, desc->info, DES_BLOCK_SIZE); 157static int ecb_des_decrypt(struct blkcipher_desc *desc,
112 ret = crypt_s390_kmc(KMC_DEA_ENCRYPT, &sctx->iv, out, in, nbytes); 158 struct scatterlist *dst, struct scatterlist *src,
113 BUG_ON((ret < 0) || (ret != nbytes)); 159 unsigned int nbytes)
160{
161 struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
162 struct blkcipher_walk walk;
114 163
115 memcpy(desc->info, sctx->iv, DES_BLOCK_SIZE); 164 blkcipher_walk_init(&walk, dst, src, nbytes);
116 return nbytes; 165 return ecb_desall_crypt(desc, KM_DEA_DECRYPT, sctx->key, &walk);
117} 166}
118 167
119static unsigned int des_decrypt_cbc(const struct cipher_desc *desc, u8 *out, 168static struct crypto_alg ecb_des_alg = {
120 const u8 *in, unsigned int nbytes) 169 .cra_name = "ecb(des)",
170 .cra_driver_name = "ecb-des-s390",
171 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
172 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
173 .cra_blocksize = DES_BLOCK_SIZE,
174 .cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
175 .cra_type = &crypto_blkcipher_type,
176 .cra_module = THIS_MODULE,
177 .cra_list = LIST_HEAD_INIT(ecb_des_alg.cra_list),
178 .cra_u = {
179 .blkcipher = {
180 .min_keysize = DES_KEY_SIZE,
181 .max_keysize = DES_KEY_SIZE,
182 .setkey = des_setkey,
183 .encrypt = ecb_des_encrypt,
184 .decrypt = ecb_des_decrypt,
185 }
186 }
187};
188
189static int cbc_des_encrypt(struct blkcipher_desc *desc,
190 struct scatterlist *dst, struct scatterlist *src,
191 unsigned int nbytes)
121{ 192{
122 struct crypt_s390_des_ctx *sctx = crypto_tfm_ctx(desc->tfm); 193 struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
123 int ret; 194 struct blkcipher_walk walk;
124 195
125 /* only use complete blocks */ 196 blkcipher_walk_init(&walk, dst, src, nbytes);
126 nbytes &= ~(DES_BLOCK_SIZE - 1); 197 return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, sctx->iv, &walk);
198}
127 199
128 memcpy(&sctx->iv, desc->info, DES_BLOCK_SIZE); 200static int cbc_des_decrypt(struct blkcipher_desc *desc,
129 ret = crypt_s390_kmc(KMC_DEA_DECRYPT, &sctx->iv, out, in, nbytes); 201 struct scatterlist *dst, struct scatterlist *src,
130 BUG_ON((ret < 0) || (ret != nbytes)); 202 unsigned int nbytes)
203{
204 struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
205 struct blkcipher_walk walk;
131 206
132 return nbytes; 207 blkcipher_walk_init(&walk, dst, src, nbytes);
208 return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, sctx->iv, &walk);
133} 209}
134 210
135static struct crypto_alg des_alg = { 211static struct crypto_alg cbc_des_alg = {
136 .cra_name = "des", 212 .cra_name = "cbc(des)",
137 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 213 .cra_driver_name = "cbc-des-s390",
214 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
215 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
138 .cra_blocksize = DES_BLOCK_SIZE, 216 .cra_blocksize = DES_BLOCK_SIZE,
139 .cra_ctxsize = sizeof(struct crypt_s390_des_ctx), 217 .cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
218 .cra_type = &crypto_blkcipher_type,
140 .cra_module = THIS_MODULE, 219 .cra_module = THIS_MODULE,
141 .cra_list = LIST_HEAD_INIT(des_alg.cra_list), 220 .cra_list = LIST_HEAD_INIT(cbc_des_alg.cra_list),
142 .cra_u = { 221 .cra_u = {
143 .cipher = { 222 .blkcipher = {
144 .cia_min_keysize = DES_KEY_SIZE, 223 .min_keysize = DES_KEY_SIZE,
145 .cia_max_keysize = DES_KEY_SIZE, 224 .max_keysize = DES_KEY_SIZE,
146 .cia_setkey = des_setkey, 225 .ivsize = DES_BLOCK_SIZE,
147 .cia_encrypt = des_encrypt, 226 .setkey = des_setkey,
148 .cia_decrypt = des_decrypt, 227 .encrypt = cbc_des_encrypt,
149 .cia_encrypt_ecb = des_encrypt_ecb, 228 .decrypt = cbc_des_decrypt,
150 .cia_decrypt_ecb = des_decrypt_ecb,
151 .cia_encrypt_cbc = des_encrypt_cbc,
152 .cia_decrypt_cbc = des_decrypt_cbc,
153 } 229 }
154 } 230 }
155}; 231};
@@ -167,11 +243,12 @@ static struct crypto_alg des_alg = {
167 * 243 *
168 */ 244 */
169static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key, 245static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
170 unsigned int keylen, u32 *flags) 246 unsigned int keylen)
171{ 247{
172 int i, ret; 248 int i, ret;
173 struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm); 249 struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
174 const u8* temp_key = key; 250 const u8 *temp_key = key;
251 u32 *flags = &tfm->crt_flags;
175 252
176 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) { 253 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
177 *flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED; 254 *flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
@@ -202,89 +279,111 @@ static void des3_128_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
202 DES3_128_BLOCK_SIZE); 279 DES3_128_BLOCK_SIZE);
203} 280}
204 281
205static unsigned int des3_128_encrypt_ecb(const struct cipher_desc *desc, 282static struct crypto_alg des3_128_alg = {
206 u8 *out, const u8 *in, 283 .cra_name = "des3_ede128",
207 unsigned int nbytes) 284 .cra_driver_name = "des3_ede128-s390",
208{ 285 .cra_priority = CRYPT_S390_PRIORITY,
209 struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm); 286 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
210 int ret; 287 .cra_blocksize = DES3_128_BLOCK_SIZE,
288 .cra_ctxsize = sizeof(struct crypt_s390_des3_128_ctx),
289 .cra_module = THIS_MODULE,
290 .cra_list = LIST_HEAD_INIT(des3_128_alg.cra_list),
291 .cra_u = {
292 .cipher = {
293 .cia_min_keysize = DES3_128_KEY_SIZE,
294 .cia_max_keysize = DES3_128_KEY_SIZE,
295 .cia_setkey = des3_128_setkey,
296 .cia_encrypt = des3_128_encrypt,
297 .cia_decrypt = des3_128_decrypt,
298 }
299 }
300};
211 301
212 /* only use complete blocks */ 302static int ecb_des3_128_encrypt(struct blkcipher_desc *desc,
213 nbytes &= ~(DES3_128_BLOCK_SIZE - 1); 303 struct scatterlist *dst,
214 ret = crypt_s390_km(KM_TDEA_128_ENCRYPT, sctx->key, out, in, nbytes); 304 struct scatterlist *src, unsigned int nbytes)
215 BUG_ON((ret < 0) || (ret != nbytes)); 305{
306 struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
307 struct blkcipher_walk walk;
216 308
217 return nbytes; 309 blkcipher_walk_init(&walk, dst, src, nbytes);
310 return ecb_desall_crypt(desc, KM_TDEA_128_ENCRYPT, sctx->key, &walk);
218} 311}
219 312
220static unsigned int des3_128_decrypt_ecb(const struct cipher_desc *desc, 313static int ecb_des3_128_decrypt(struct blkcipher_desc *desc,
221 u8 *out, const u8 *in, 314 struct scatterlist *dst,
222 unsigned int nbytes) 315 struct scatterlist *src, unsigned int nbytes)
223{ 316{
224 struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm); 317 struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
225 int ret; 318 struct blkcipher_walk walk;
226 319
227 /* only use complete blocks */ 320 blkcipher_walk_init(&walk, dst, src, nbytes);
228 nbytes &= ~(DES3_128_BLOCK_SIZE - 1); 321 return ecb_desall_crypt(desc, KM_TDEA_128_DECRYPT, sctx->key, &walk);
229 ret = crypt_s390_km(KM_TDEA_128_DECRYPT, sctx->key, out, in, nbytes);
230 BUG_ON((ret < 0) || (ret != nbytes));
231
232 return nbytes;
233} 322}
234 323
235static unsigned int des3_128_encrypt_cbc(const struct cipher_desc *desc, 324static struct crypto_alg ecb_des3_128_alg = {
236 u8 *out, const u8 *in, 325 .cra_name = "ecb(des3_ede128)",
237 unsigned int nbytes) 326 .cra_driver_name = "ecb-des3_ede128-s390",
238{ 327 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
239 struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm); 328 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
240 int ret; 329 .cra_blocksize = DES3_128_BLOCK_SIZE,
241 330 .cra_ctxsize = sizeof(struct crypt_s390_des3_128_ctx),
242 /* only use complete blocks */ 331 .cra_type = &crypto_blkcipher_type,
243 nbytes &= ~(DES3_128_BLOCK_SIZE - 1); 332 .cra_module = THIS_MODULE,
333 .cra_list = LIST_HEAD_INIT(
334 ecb_des3_128_alg.cra_list),
335 .cra_u = {
336 .blkcipher = {
337 .min_keysize = DES3_128_KEY_SIZE,
338 .max_keysize = DES3_128_KEY_SIZE,
339 .setkey = des3_128_setkey,
340 .encrypt = ecb_des3_128_encrypt,
341 .decrypt = ecb_des3_128_decrypt,
342 }
343 }
344};
244 345
245 memcpy(sctx->iv, desc->info, DES3_128_BLOCK_SIZE); 346static int cbc_des3_128_encrypt(struct blkcipher_desc *desc,
246 ret = crypt_s390_kmc(KMC_TDEA_128_ENCRYPT, &sctx->iv, out, in, nbytes); 347 struct scatterlist *dst,
247 BUG_ON((ret < 0) || (ret != nbytes)); 348 struct scatterlist *src, unsigned int nbytes)
349{
350 struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
351 struct blkcipher_walk walk;
248 352
249 memcpy(desc->info, sctx->iv, DES3_128_BLOCK_SIZE); 353 blkcipher_walk_init(&walk, dst, src, nbytes);
250 return nbytes; 354 return cbc_desall_crypt(desc, KMC_TDEA_128_ENCRYPT, sctx->iv, &walk);
251} 355}
252 356
253static unsigned int des3_128_decrypt_cbc(const struct cipher_desc *desc, 357static int cbc_des3_128_decrypt(struct blkcipher_desc *desc,
254 u8 *out, const u8 *in, 358 struct scatterlist *dst,
255 unsigned int nbytes) 359 struct scatterlist *src, unsigned int nbytes)
256{ 360{
257 struct crypt_s390_des3_128_ctx *sctx = crypto_tfm_ctx(desc->tfm); 361 struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
258 int ret; 362 struct blkcipher_walk walk;
259
260 /* only use complete blocks */
261 nbytes &= ~(DES3_128_BLOCK_SIZE - 1);
262
263 memcpy(&sctx->iv, desc->info, DES3_128_BLOCK_SIZE);
264 ret = crypt_s390_kmc(KMC_TDEA_128_DECRYPT, &sctx->iv, out, in, nbytes);
265 BUG_ON((ret < 0) || (ret != nbytes));
266 363
267 return nbytes; 364 blkcipher_walk_init(&walk, dst, src, nbytes);
365 return cbc_desall_crypt(desc, KMC_TDEA_128_DECRYPT, sctx->iv, &walk);
268} 366}
269 367
270static struct crypto_alg des3_128_alg = { 368static struct crypto_alg cbc_des3_128_alg = {
271 .cra_name = "des3_ede128", 369 .cra_name = "cbc(des3_ede128)",
272 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 370 .cra_driver_name = "cbc-des3_ede128-s390",
371 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
372 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
273 .cra_blocksize = DES3_128_BLOCK_SIZE, 373 .cra_blocksize = DES3_128_BLOCK_SIZE,
274 .cra_ctxsize = sizeof(struct crypt_s390_des3_128_ctx), 374 .cra_ctxsize = sizeof(struct crypt_s390_des3_128_ctx),
375 .cra_type = &crypto_blkcipher_type,
275 .cra_module = THIS_MODULE, 376 .cra_module = THIS_MODULE,
276 .cra_list = LIST_HEAD_INIT(des3_128_alg.cra_list), 377 .cra_list = LIST_HEAD_INIT(
378 cbc_des3_128_alg.cra_list),
277 .cra_u = { 379 .cra_u = {
278 .cipher = { 380 .blkcipher = {
279 .cia_min_keysize = DES3_128_KEY_SIZE, 381 .min_keysize = DES3_128_KEY_SIZE,
280 .cia_max_keysize = DES3_128_KEY_SIZE, 382 .max_keysize = DES3_128_KEY_SIZE,
281 .cia_setkey = des3_128_setkey, 383 .ivsize = DES3_128_BLOCK_SIZE,
282 .cia_encrypt = des3_128_encrypt, 384 .setkey = des3_128_setkey,
283 .cia_decrypt = des3_128_decrypt, 385 .encrypt = cbc_des3_128_encrypt,
284 .cia_encrypt_ecb = des3_128_encrypt_ecb, 386 .decrypt = cbc_des3_128_decrypt,
285 .cia_decrypt_ecb = des3_128_decrypt_ecb,
286 .cia_encrypt_cbc = des3_128_encrypt_cbc,
287 .cia_decrypt_cbc = des3_128_decrypt_cbc,
288 } 387 }
289 } 388 }
290}; 389};
@@ -303,11 +402,12 @@ static struct crypto_alg des3_128_alg = {
303 * 402 *
304 */ 403 */
305static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key, 404static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
306 unsigned int keylen, u32 *flags) 405 unsigned int keylen)
307{ 406{
308 int i, ret; 407 int i, ret;
309 struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm); 408 struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
310 const u8* temp_key = key; 409 const u8 *temp_key = key;
410 u32 *flags = &tfm->crt_flags;
311 411
312 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) && 412 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
313 memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2], 413 memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
@@ -341,89 +441,111 @@ static void des3_192_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
341 DES3_192_BLOCK_SIZE); 441 DES3_192_BLOCK_SIZE);
342} 442}
343 443
344static unsigned int des3_192_encrypt_ecb(const struct cipher_desc *desc, 444static struct crypto_alg des3_192_alg = {
345 u8 *out, const u8 *in, 445 .cra_name = "des3_ede",
346 unsigned int nbytes) 446 .cra_driver_name = "des3_ede-s390",
347{ 447 .cra_priority = CRYPT_S390_PRIORITY,
348 struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm); 448 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
349 int ret; 449 .cra_blocksize = DES3_192_BLOCK_SIZE,
450 .cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
451 .cra_module = THIS_MODULE,
452 .cra_list = LIST_HEAD_INIT(des3_192_alg.cra_list),
453 .cra_u = {
454 .cipher = {
455 .cia_min_keysize = DES3_192_KEY_SIZE,
456 .cia_max_keysize = DES3_192_KEY_SIZE,
457 .cia_setkey = des3_192_setkey,
458 .cia_encrypt = des3_192_encrypt,
459 .cia_decrypt = des3_192_decrypt,
460 }
461 }
462};
350 463
351 /* only use complete blocks */ 464static int ecb_des3_192_encrypt(struct blkcipher_desc *desc,
352 nbytes &= ~(DES3_192_BLOCK_SIZE - 1); 465 struct scatterlist *dst,
353 ret = crypt_s390_km(KM_TDEA_192_ENCRYPT, sctx->key, out, in, nbytes); 466 struct scatterlist *src, unsigned int nbytes)
354 BUG_ON((ret < 0) || (ret != nbytes)); 467{
468 struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
469 struct blkcipher_walk walk;
355 470
356 return nbytes; 471 blkcipher_walk_init(&walk, dst, src, nbytes);
472 return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, sctx->key, &walk);
357} 473}
358 474
359static unsigned int des3_192_decrypt_ecb(const struct cipher_desc *desc, 475static int ecb_des3_192_decrypt(struct blkcipher_desc *desc,
360 u8 *out, const u8 *in, 476 struct scatterlist *dst,
361 unsigned int nbytes) 477 struct scatterlist *src, unsigned int nbytes)
362{ 478{
363 struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm); 479 struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
364 int ret; 480 struct blkcipher_walk walk;
365
366 /* only use complete blocks */
367 nbytes &= ~(DES3_192_BLOCK_SIZE - 1);
368 ret = crypt_s390_km(KM_TDEA_192_DECRYPT, sctx->key, out, in, nbytes);
369 BUG_ON((ret < 0) || (ret != nbytes));
370 481
371 return nbytes; 482 blkcipher_walk_init(&walk, dst, src, nbytes);
483 return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, sctx->key, &walk);
372} 484}
373 485
374static unsigned int des3_192_encrypt_cbc(const struct cipher_desc *desc, 486static struct crypto_alg ecb_des3_192_alg = {
375 u8 *out, const u8 *in, 487 .cra_name = "ecb(des3_ede)",
376 unsigned int nbytes) 488 .cra_driver_name = "ecb-des3_ede-s390",
377{ 489 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
378 struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm); 490 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
379 int ret; 491 .cra_blocksize = DES3_192_BLOCK_SIZE,
380 492 .cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
381 /* only use complete blocks */ 493 .cra_type = &crypto_blkcipher_type,
382 nbytes &= ~(DES3_192_BLOCK_SIZE - 1); 494 .cra_module = THIS_MODULE,
495 .cra_list = LIST_HEAD_INIT(
496 ecb_des3_192_alg.cra_list),
497 .cra_u = {
498 .blkcipher = {
499 .min_keysize = DES3_192_KEY_SIZE,
500 .max_keysize = DES3_192_KEY_SIZE,
501 .setkey = des3_192_setkey,
502 .encrypt = ecb_des3_192_encrypt,
503 .decrypt = ecb_des3_192_decrypt,
504 }
505 }
506};
383 507
384 memcpy(sctx->iv, desc->info, DES3_192_BLOCK_SIZE); 508static int cbc_des3_192_encrypt(struct blkcipher_desc *desc,
385 ret = crypt_s390_kmc(KMC_TDEA_192_ENCRYPT, &sctx->iv, out, in, nbytes); 509 struct scatterlist *dst,
386 BUG_ON((ret < 0) || (ret != nbytes)); 510 struct scatterlist *src, unsigned int nbytes)
511{
512 struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
513 struct blkcipher_walk walk;
387 514
388 memcpy(desc->info, sctx->iv, DES3_192_BLOCK_SIZE); 515 blkcipher_walk_init(&walk, dst, src, nbytes);
389 return nbytes; 516 return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, sctx->iv, &walk);
390} 517}
391 518
392static unsigned int des3_192_decrypt_cbc(const struct cipher_desc *desc, 519static int cbc_des3_192_decrypt(struct blkcipher_desc *desc,
393 u8 *out, const u8 *in, 520 struct scatterlist *dst,
394 unsigned int nbytes) 521 struct scatterlist *src, unsigned int nbytes)
395{ 522{
396 struct crypt_s390_des3_192_ctx *sctx = crypto_tfm_ctx(desc->tfm); 523 struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
397 int ret; 524 struct blkcipher_walk walk;
398 525
399 /* only use complete blocks */ 526 blkcipher_walk_init(&walk, dst, src, nbytes);
400 nbytes &= ~(DES3_192_BLOCK_SIZE - 1); 527 return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, sctx->iv, &walk);
401
402 memcpy(&sctx->iv, desc->info, DES3_192_BLOCK_SIZE);
403 ret = crypt_s390_kmc(KMC_TDEA_192_DECRYPT, &sctx->iv, out, in, nbytes);
404 BUG_ON((ret < 0) || (ret != nbytes));
405
406 return nbytes;
407} 528}
408 529
409static struct crypto_alg des3_192_alg = { 530static struct crypto_alg cbc_des3_192_alg = {
410 .cra_name = "des3_ede", 531 .cra_name = "cbc(des3_ede)",
411 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 532 .cra_driver_name = "cbc-des3_ede-s390",
533 .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
534 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
412 .cra_blocksize = DES3_192_BLOCK_SIZE, 535 .cra_blocksize = DES3_192_BLOCK_SIZE,
413 .cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx), 536 .cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
537 .cra_type = &crypto_blkcipher_type,
414 .cra_module = THIS_MODULE, 538 .cra_module = THIS_MODULE,
415 .cra_list = LIST_HEAD_INIT(des3_192_alg.cra_list), 539 .cra_list = LIST_HEAD_INIT(
540 cbc_des3_192_alg.cra_list),
416 .cra_u = { 541 .cra_u = {
417 .cipher = { 542 .blkcipher = {
418 .cia_min_keysize = DES3_192_KEY_SIZE, 543 .min_keysize = DES3_192_KEY_SIZE,
419 .cia_max_keysize = DES3_192_KEY_SIZE, 544 .max_keysize = DES3_192_KEY_SIZE,
420 .cia_setkey = des3_192_setkey, 545 .ivsize = DES3_192_BLOCK_SIZE,
421 .cia_encrypt = des3_192_encrypt, 546 .setkey = des3_192_setkey,
422 .cia_decrypt = des3_192_decrypt, 547 .encrypt = cbc_des3_192_encrypt,
423 .cia_encrypt_ecb = des3_192_encrypt_ecb, 548 .decrypt = cbc_des3_192_decrypt,
424 .cia_decrypt_ecb = des3_192_decrypt_ecb,
425 .cia_encrypt_cbc = des3_192_encrypt_cbc,
426 .cia_decrypt_cbc = des3_192_decrypt_cbc,
427 } 549 }
428 } 550 }
429}; 551};
@@ -437,22 +559,69 @@ static int init(void)
437 !crypt_s390_func_available(KM_TDEA_192_ENCRYPT)) 559 !crypt_s390_func_available(KM_TDEA_192_ENCRYPT))
438 return -ENOSYS; 560 return -ENOSYS;
439 561
440 ret |= (crypto_register_alg(&des_alg) == 0) ? 0:1; 562 ret = crypto_register_alg(&des_alg);
441 ret |= (crypto_register_alg(&des3_128_alg) == 0) ? 0:2; 563 if (ret)
442 ret |= (crypto_register_alg(&des3_192_alg) == 0) ? 0:4; 564 goto des_err;
443 if (ret) { 565 ret = crypto_register_alg(&ecb_des_alg);
444 crypto_unregister_alg(&des3_192_alg); 566 if (ret)
445 crypto_unregister_alg(&des3_128_alg); 567 goto ecb_des_err;
446 crypto_unregister_alg(&des_alg); 568 ret = crypto_register_alg(&cbc_des_alg);
447 return -EEXIST; 569 if (ret)
448 } 570 goto cbc_des_err;
449 return 0; 571
572 ret = crypto_register_alg(&des3_128_alg);
573 if (ret)
574 goto des3_128_err;
575 ret = crypto_register_alg(&ecb_des3_128_alg);
576 if (ret)
577 goto ecb_des3_128_err;
578 ret = crypto_register_alg(&cbc_des3_128_alg);
579 if (ret)
580 goto cbc_des3_128_err;
581
582 ret = crypto_register_alg(&des3_192_alg);
583 if (ret)
584 goto des3_192_err;
585 ret = crypto_register_alg(&ecb_des3_192_alg);
586 if (ret)
587 goto ecb_des3_192_err;
588 ret = crypto_register_alg(&cbc_des3_192_alg);
589 if (ret)
590 goto cbc_des3_192_err;
591
592out:
593 return ret;
594
595cbc_des3_192_err:
596 crypto_unregister_alg(&ecb_des3_192_alg);
597ecb_des3_192_err:
598 crypto_unregister_alg(&des3_192_alg);
599des3_192_err:
600 crypto_unregister_alg(&cbc_des3_128_alg);
601cbc_des3_128_err:
602 crypto_unregister_alg(&ecb_des3_128_alg);
603ecb_des3_128_err:
604 crypto_unregister_alg(&des3_128_alg);
605des3_128_err:
606 crypto_unregister_alg(&cbc_des_alg);
607cbc_des_err:
608 crypto_unregister_alg(&ecb_des_alg);
609ecb_des_err:
610 crypto_unregister_alg(&des_alg);
611des_err:
612 goto out;
450} 613}
451 614
452static void __exit fini(void) 615static void __exit fini(void)
453{ 616{
617 crypto_unregister_alg(&cbc_des3_192_alg);
618 crypto_unregister_alg(&ecb_des3_192_alg);
454 crypto_unregister_alg(&des3_192_alg); 619 crypto_unregister_alg(&des3_192_alg);
620 crypto_unregister_alg(&cbc_des3_128_alg);
621 crypto_unregister_alg(&ecb_des3_128_alg);
455 crypto_unregister_alg(&des3_128_alg); 622 crypto_unregister_alg(&des3_128_alg);
623 crypto_unregister_alg(&cbc_des_alg);
624 crypto_unregister_alg(&ecb_des_alg);
456 crypto_unregister_alg(&des_alg); 625 crypto_unregister_alg(&des_alg);
457} 626}
458 627
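The rewritten init() replaces the old scheme of OR-ing registration results into a bitmask with the conventional unwind ladder: register in order and, on the first failure, unregister everything already registered in reverse. The general shape (a sketch, not code from the patch):

	ret = crypto_register_alg(&first_alg);
	if (ret)
		goto first_err;
	ret = crypto_register_alg(&second_alg);
	if (ret)
		goto second_err;
	return 0;

	second_err:
		crypto_unregister_alg(&first_alg);
	first_err:
		return ret;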
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index 9d34a35b1aa5..49ca8690ee39 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -126,6 +126,8 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
126 126
127static struct crypto_alg alg = { 127static struct crypto_alg alg = {
128 .cra_name = "sha1", 128 .cra_name = "sha1",
129 .cra_driver_name = "sha1-s390",
130 .cra_priority = CRYPT_S390_PRIORITY,
129 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 131 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
130 .cra_blocksize = SHA1_BLOCK_SIZE, 132 .cra_blocksize = SHA1_BLOCK_SIZE,
131 .cra_ctxsize = sizeof(struct crypt_s390_sha1_ctx), 133 .cra_ctxsize = sizeof(struct crypt_s390_sha1_ctx),
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index f573df30f31d..8e4e67503fe7 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -127,6 +127,8 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
127 127
128static struct crypto_alg alg = { 128static struct crypto_alg alg = {
129 .cra_name = "sha256", 129 .cra_name = "sha256",
130 .cra_driver_name = "sha256-s390",
131 .cra_priority = CRYPT_S390_PRIORITY,
130 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 132 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
131 .cra_blocksize = SHA256_BLOCK_SIZE, 133 .cra_blocksize = SHA256_BLOCK_SIZE,
132 .cra_ctxsize = sizeof(struct s390_sha256_ctx), 134 .cra_ctxsize = sizeof(struct s390_sha256_ctx),
diff --git a/arch/x86_64/crypto/Makefile b/arch/x86_64/crypto/Makefile
index 426d20f4b72e..15b538a8b7f7 100644
--- a/arch/x86_64/crypto/Makefile
+++ b/arch/x86_64/crypto/Makefile
@@ -5,5 +5,8 @@
5# 5#
6 6
7obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o 7obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
8obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
8 9
9aes-x86_64-y := aes-x86_64-asm.o aes.o 10aes-x86_64-y := aes-x86_64-asm.o aes.o
11twofish-x86_64-y := twofish-x86_64-asm.o twofish.o
12
diff --git a/arch/x86_64/crypto/aes.c b/arch/x86_64/crypto/aes.c
index 68866fab37aa..5cdb13ea5cc2 100644
--- a/arch/x86_64/crypto/aes.c
+++ b/arch/x86_64/crypto/aes.c
@@ -228,13 +228,14 @@ static void __init gen_tabs(void)
228} 228}
229 229
230static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 230static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
231 unsigned int key_len, u32 *flags) 231 unsigned int key_len)
232{ 232{
233 struct aes_ctx *ctx = crypto_tfm_ctx(tfm); 233 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
234 const __le32 *key = (const __le32 *)in_key; 234 const __le32 *key = (const __le32 *)in_key;
235 u32 *flags = &tfm->crt_flags;
235 u32 i, j, t, u, v, w; 236 u32 i, j, t, u, v, w;
236 237
237 if (key_len != 16 && key_len != 24 && key_len != 32) { 238 if (key_len % 8) {
238 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 239 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
239 return -EINVAL; 240 return -EINVAL;
240 } 241 }
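The relaxed `key_len % 8` test leans on the layer above (an assumption about the cipher layer of this era, which rejects key lengths outside cia_min_keysize..cia_max_keysize before cia_setkey runs):

	/* Only 16, 24 and 32 are multiples of 8 inside
	 * [AES_MIN_KEY_SIZE, AES_MAX_KEY_SIZE] = [16, 32] -- exactly the
	 * valid AES key sizes -- so key_len % 8 suffices here. */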
diff --git a/arch/x86_64/crypto/twofish-x86_64-asm.S b/arch/x86_64/crypto/twofish-x86_64-asm.S
new file mode 100644
index 000000000000..35974a586615
--- /dev/null
+++ b/arch/x86_64/crypto/twofish-x86_64-asm.S
@@ -0,0 +1,324 @@
1/***************************************************************************
2* Copyright (C) 2006 by Joachim Fritschi, <jfritschi@freenet.de> *
3* *
4* This program is free software; you can redistribute it and/or modify *
5* it under the terms of the GNU General Public License as published by *
6* the Free Software Foundation; either version 2 of the License, or *
7* (at your option) any later version. *
8* *
9* This program is distributed in the hope that it will be useful, *
10* but WITHOUT ANY WARRANTY; without even the implied warranty of *
11* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
12* GNU General Public License for more details. *
13* *
14* You should have received a copy of the GNU General Public License *
15* along with this program; if not, write to the *
16* Free Software Foundation, Inc., *
17* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
18***************************************************************************/
19
20.file "twofish-x86_64-asm.S"
21.text
22
23#include <asm/asm-offsets.h>
24
25#define a_offset 0
26#define b_offset 4
27#define c_offset 8
28#define d_offset 12
29
30/* Structure of the crypto context struct*/
31
32#define s0 0 /* S0 Array 256 Words each */
33#define s1 1024 /* S1 Array */
34#define s2 2048 /* S2 Array */
35#define s3 3072 /* S3 Array */
36#define w 4096 /* 8 whitening keys (word) */
37#define k 4128 /* key 1-32 ( word ) */
38
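For reference, these byte offsets mirror the C-side context layout (matching struct twofish_ctx as declared in crypto/twofish.h): four 1024-byte s-box tables, eight whitening words at 4096, and 32 round-key words at 4128. A C view of the same layout (illustrative naming):

	#include <linux/types.h>

	/* byte offsets: s0=0, s1=1024, s2=2048, s3=3072, w=4096, k=4128 */
	struct twofish_ctx_layout {
		u32 s[4][256];	/* key-dependent s-box tables */
		u32 w[8];	/* input/output whitening subkeys */
		u32 k[32];	/* round subkeys */
	};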
39/* define a few register aliases to allow macro substitution */
40
41#define R0 %rax
42#define R0D %eax
43#define R0B %al
44#define R0H %ah
45
46#define R1 %rbx
47#define R1D %ebx
48#define R1B %bl
49#define R1H %bh
50
51#define R2 %rcx
52#define R2D %ecx
53#define R2B %cl
54#define R2H %ch
55
56#define R3 %rdx
57#define R3D %edx
58#define R3B %dl
59#define R3H %dh
60
61
62/* performs input whitening */
63#define input_whitening(src,context,offset)\
64 xor w+offset(context), src;
65
66/* performs output whitening */
67#define output_whitening(src,context,offset)\
68 xor w+16+offset(context), src;
69
70
71/*
72 * a: input register containing a (rotated 16)
73 * b: input register containing b
74 * c: input register containing c
75 * d: input register containing d (already rol $1)
76 * operations on a and b are interleaved to increase performance
77 */
78#define encrypt_round(a,b,c,d,round)\
79 movzx b ## B, %edi;\
80 mov s1(%r11,%rdi,4),%r8d;\
81 movzx a ## B, %edi;\
82 mov s2(%r11,%rdi,4),%r9d;\
83 movzx b ## H, %edi;\
84 ror $16, b ## D;\
85 xor s2(%r11,%rdi,4),%r8d;\
86 movzx a ## H, %edi;\
87 ror $16, a ## D;\
88 xor s3(%r11,%rdi,4),%r9d;\
89 movzx b ## B, %edi;\
90 xor s3(%r11,%rdi,4),%r8d;\
91 movzx a ## B, %edi;\
92 xor (%r11,%rdi,4), %r9d;\
93 movzx b ## H, %edi;\
94 ror $15, b ## D;\
95 xor (%r11,%rdi,4), %r8d;\
96 movzx a ## H, %edi;\
97 xor s1(%r11,%rdi,4),%r9d;\
98 add %r8d, %r9d;\
99 add %r9d, %r8d;\
100 add k+round(%r11), %r9d;\
101 xor %r9d, c ## D;\
102 rol $15, c ## D;\
103 add k+4+round(%r11),%r8d;\
104 xor %r8d, d ## D;
105
106/*
107 * a: input register containing a (rotated 16)
108 * b: input register containing b
109 * c: input register containing c
110 * d: input register containing d (already rol $1)
111 * operations on a and b are interleaved to increase performance
112 * during the round a and b are prepared for the output whitening
113 */
114#define encrypt_last_round(a,b,c,d,round)\
115 mov b ## D, %r10d;\
116 shl $32, %r10;\
117 movzx b ## B, %edi;\
118 mov s1(%r11,%rdi,4),%r8d;\
119 movzx a ## B, %edi;\
120 mov s2(%r11,%rdi,4),%r9d;\
121 movzx b ## H, %edi;\
122 ror $16, b ## D;\
123 xor s2(%r11,%rdi,4),%r8d;\
124 movzx a ## H, %edi;\
125 ror $16, a ## D;\
126 xor s3(%r11,%rdi,4),%r9d;\
127 movzx b ## B, %edi;\
128 xor s3(%r11,%rdi,4),%r8d;\
129 movzx a ## B, %edi;\
130 xor (%r11,%rdi,4), %r9d;\
131 xor a, %r10;\
132 movzx b ## H, %edi;\
133 xor (%r11,%rdi,4), %r8d;\
134 movzx a ## H, %edi;\
135 xor s1(%r11,%rdi,4),%r9d;\
136 add %r8d, %r9d;\
137 add %r9d, %r8d;\
138 add k+round(%r11), %r9d;\
139 xor %r9d, c ## D;\
140 ror $1, c ## D;\
141 add k+4+round(%r11),%r8d;\
142 xor %r8d, d ## D
143
144/*
145 * a: input register containing a
146 * b: input register containing b (rotated 16)
147 * c: input register containing c (already rol $1)
148 * d: input register containing d
149 * operations on a and b are interleaved to increase performance
150 */
151#define decrypt_round(a,b,c,d,round)\
152 movzx a ## B, %edi;\
153 mov (%r11,%rdi,4), %r9d;\
154 movzx b ## B, %edi;\
155 mov s3(%r11,%rdi,4),%r8d;\
156 movzx a ## H, %edi;\
157 ror $16, a ## D;\
158 xor s1(%r11,%rdi,4),%r9d;\
159 movzx b ## H, %edi;\
160 ror $16, b ## D;\
161 xor (%r11,%rdi,4), %r8d;\
162 movzx a ## B, %edi;\
163 xor s2(%r11,%rdi,4),%r9d;\
164 movzx b ## B, %edi;\
165 xor s1(%r11,%rdi,4),%r8d;\
166 movzx a ## H, %edi;\
167 ror $15, a ## D;\
168 xor s3(%r11,%rdi,4),%r9d;\
169 movzx b ## H, %edi;\
170 xor s2(%r11,%rdi,4),%r8d;\
171 add %r8d, %r9d;\
172 add %r9d, %r8d;\
173 add k+round(%r11), %r9d;\
174 xor %r9d, c ## D;\
175 add k+4+round(%r11),%r8d;\
176 xor %r8d, d ## D;\
177 rol $15, d ## D;
178
179/*
180 * a: input register containing a
181 * b: input register containing b
182 * c: input register containing c (already rol $1)
183 * d: input register containing d
184 * operations on a and b are interleaved to increase performance
185 * during the round a and b are prepared for the output whitening
186 */
187#define decrypt_last_round(a,b,c,d,round)\
188 movzx a ## B, %edi;\
189 mov (%r11,%rdi,4), %r9d;\
190 movzx b ## B, %edi;\
191 mov s3(%r11,%rdi,4),%r8d;\
192 movzx b ## H, %edi;\
193 ror $16, b ## D;\
194 xor (%r11,%rdi,4), %r8d;\
195 movzx a ## H, %edi;\
196 mov b ## D, %r10d;\
197 shl $32, %r10;\
198 xor a, %r10;\
199 ror $16, a ## D;\
200 xor s1(%r11,%rdi,4),%r9d;\
201 movzx b ## B, %edi;\
202 xor s1(%r11,%rdi,4),%r8d;\
203 movzx a ## B, %edi;\
204 xor s2(%r11,%rdi,4),%r9d;\
205 movzx b ## H, %edi;\
206 xor s2(%r11,%rdi,4),%r8d;\
207 movzx a ## H, %edi;\
208 xor s3(%r11,%rdi,4),%r9d;\
209 add %r8d, %r9d;\
210 add %r9d, %r8d;\
211 add k+round(%r11), %r9d;\
212 xor %r9d, c ## D;\
213 add k+4+round(%r11),%r8d;\
214 xor %r8d, d ## D;\
215 ror $1, d ## D;
216
217.align 8
218.global twofish_enc_blk
219.global twofish_dec_blk
220
221twofish_enc_blk:
222 pushq R1
223
224 /* %rdi contains the crypto tfm address */
225 /* %rsi contains the output address */
226 /* %rdx contains the input address */
227 add $crypto_tfm_ctx_offset, %rdi /* set ctx address */
228 /* ctx address is moved to free one non-REX register
229 as target for the 8-bit high operations */
230 mov %rdi, %r11
231
232 movq (R3), R1
233 movq 8(R3), R3
234 input_whitening(R1,%r11,a_offset)
235 input_whitening(R3,%r11,c_offset)
236 mov R1D, R0D
237 rol $16, R0D
238 shr $32, R1
239 mov R3D, R2D
240 shr $32, R3
241 rol $1, R3D
242
243 encrypt_round(R0,R1,R2,R3,0);
244 encrypt_round(R2,R3,R0,R1,8);
245 encrypt_round(R0,R1,R2,R3,2*8);
246 encrypt_round(R2,R3,R0,R1,3*8);
247 encrypt_round(R0,R1,R2,R3,4*8);
248 encrypt_round(R2,R3,R0,R1,5*8);
249 encrypt_round(R0,R1,R2,R3,6*8);
250 encrypt_round(R2,R3,R0,R1,7*8);
251 encrypt_round(R0,R1,R2,R3,8*8);
252 encrypt_round(R2,R3,R0,R1,9*8);
253 encrypt_round(R0,R1,R2,R3,10*8);
254 encrypt_round(R2,R3,R0,R1,11*8);
255 encrypt_round(R0,R1,R2,R3,12*8);
256 encrypt_round(R2,R3,R0,R1,13*8);
257 encrypt_round(R0,R1,R2,R3,14*8);
258 encrypt_last_round(R2,R3,R0,R1,15*8);
259
260
261 output_whitening(%r10,%r11,a_offset)
262 movq %r10, (%rsi)
263
264 shl $32, R1
265 xor R0, R1
266
267 output_whitening(R1,%r11,c_offset)
268 movq R1, 8(%rsi)
269
270 popq R1
271 movq $1,%rax
272 ret
273
274twofish_dec_blk:
275 pushq R1
276
277 /* %rdi contains the crypto tfm address */
278 /* %rsi contains the output address */
279 /* %rdx contains the input address */
280 add $crypto_tfm_ctx_offset, %rdi /* set ctx address */
281 /* ctx address is moved to free one non-REX register
282 as target for the 8-bit high operations */
283 mov %rdi, %r11
284
285 movq (R3), R1
286 movq 8(R3), R3
287 output_whitening(R1,%r11,a_offset)
288 output_whitening(R3,%r11,c_offset)
289 mov R1D, R0D
290 shr $32, R1
291 rol $16, R1D
292 mov R3D, R2D
293 shr $32, R3
294 rol $1, R2D
295
296 decrypt_round(R0,R1,R2,R3,15*8);
297 decrypt_round(R2,R3,R0,R1,14*8);
298 decrypt_round(R0,R1,R2,R3,13*8);
299 decrypt_round(R2,R3,R0,R1,12*8);
300 decrypt_round(R0,R1,R2,R3,11*8);
301 decrypt_round(R2,R3,R0,R1,10*8);
302 decrypt_round(R0,R1,R2,R3,9*8);
303 decrypt_round(R2,R3,R0,R1,8*8);
304 decrypt_round(R0,R1,R2,R3,7*8);
305 decrypt_round(R2,R3,R0,R1,6*8);
306 decrypt_round(R0,R1,R2,R3,5*8);
307 decrypt_round(R2,R3,R0,R1,4*8);
308 decrypt_round(R0,R1,R2,R3,3*8);
309 decrypt_round(R2,R3,R0,R1,2*8);
310 decrypt_round(R0,R1,R2,R3,1*8);
311 decrypt_last_round(R2,R3,R0,R1,0);
312
313 input_whitening(%r10,%r11,a_offset)
314 movq %r10, (%rsi)
315
316 shl $32, R1
317 xor R0, R1
318
319 input_whitening(R1,%r11,c_offset)
320 movq R1, 8(%rsi)
321
322 popq R1
323 movq $1,%rax
324 ret
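For readers following the macros above: aside from folding the 32-bit rotates across rounds (hence the rol $15 / ror $15 bookkeeping) and pre-rotating a by 16 to reach its high bytes with the 8-bit register forms, encrypt_round computes the canonical Twofish round. A plain C model of one round (illustration only, not part of the patch):

	#include <linux/bitops.h>
	#include <linux/types.h>

	/* One Twofish encryption round over block words a, b, c, d.
	 * s[] are the key-dependent tables and k[] the round subkeys,
	 * i.e. the s0..s3 and k regions of the context defined above. */
	static void round_model(const u32 s[4][256], const u32 *k, int r,
				u32 a, u32 b, u32 *c, u32 *d)
	{
		u32 t0 = s[0][a & 0xff] ^ s[1][(a >> 8) & 0xff] ^
			 s[2][(a >> 16) & 0xff] ^ s[3][a >> 24];
		u32 t1 = s[1][b & 0xff] ^ s[2][(b >> 8) & 0xff] ^
			 s[3][(b >> 16) & 0xff] ^ s[0][b >> 24];

		t0 += t1;			/* pseudo-Hadamard transform */
		t1 += t0;
		*c = ror32(*c ^ (t0 + k[2 * r]), 1);
		*d = rol32(*d, 1) ^ (t1 + k[2 * r + 1]);
	}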
diff --git a/arch/x86_64/crypto/twofish.c b/arch/x86_64/crypto/twofish.c
new file mode 100644
index 000000000000..182d91d5cfb9
--- /dev/null
+++ b/arch/x86_64/crypto/twofish.c
@@ -0,0 +1,97 @@
1/*
2 * Glue Code for optimized x86_64 assembler version of TWOFISH
3 *
4 * Originally Twofish for GPG
5 * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998
6 * 256-bit key length added March 20, 1999
7 * Some modifications to reduce the text size by Werner Koch, April, 1998
8 * Ported to the kerneli patch by Marc Mutz <Marc@Mutz.com>
9 * Ported to CryptoAPI by Colin Slater <hoho@tacomeat.net>
10 *
11 * The original author has disclaimed all copyright interest in this
12 * code and thus put it in the public domain. The subsequent authors
13 * have put this under the GNU General Public License.
14 *
15 * This program is free software; you can redistribute it and/or modify
16 * it under the terms of the GNU General Public License as published by
17 * the Free Software Foundation; either version 2 of the License, or
18 * (at your option) any later version.
19 *
20 * This program is distributed in the hope that it will be useful,
21 * but WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 * GNU General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, write to the Free Software
27 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
28 * USA
29 *
30 * This code is a "clean room" implementation, written from the paper
31 * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey,
32 * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available
33 * through http://www.counterpane.com/twofish.html
34 *
35 * For background information on multiplication in finite fields, used for
36 * the matrix operations in the key schedule, see the book _Contemporary
37 * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the
38 * Third Edition.
39 */
40
41#include <crypto/twofish.h>
42#include <linux/crypto.h>
43#include <linux/init.h>
44#include <linux/kernel.h>
45#include <linux/module.h>
46#include <linux/types.h>
47
48asmlinkage void twofish_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
49asmlinkage void twofish_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
50
51static void twofish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
52{
53 twofish_enc_blk(tfm, dst, src);
54}
55
56static void twofish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
57{
58 twofish_dec_blk(tfm, dst, src);
59}
60
61static struct crypto_alg alg = {
62 .cra_name = "twofish",
63 .cra_driver_name = "twofish-x86_64",
64 .cra_priority = 200,
65 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
66 .cra_blocksize = TF_BLOCK_SIZE,
67 .cra_ctxsize = sizeof(struct twofish_ctx),
68 .cra_alignmask = 3,
69 .cra_module = THIS_MODULE,
70 .cra_list = LIST_HEAD_INIT(alg.cra_list),
71 .cra_u = {
72 .cipher = {
73 .cia_min_keysize = TF_MIN_KEY_SIZE,
74 .cia_max_keysize = TF_MAX_KEY_SIZE,
75 .cia_setkey = twofish_setkey,
76 .cia_encrypt = twofish_encrypt,
77 .cia_decrypt = twofish_decrypt
78 }
79 }
80};
81
82static int __init init(void)
83{
84 return crypto_register_alg(&alg);
85}
86
87static void __exit fini(void)
88{
89 crypto_unregister_alg(&alg);
90}
91
92module_init(init);
93module_exit(fini);
94
95MODULE_LICENSE("GPL");
96MODULE_DESCRIPTION("Twofish Cipher Algorithm, x86_64 asm optimized");
97MODULE_ALIAS("twofish");
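Once loaded, the module serves requests for the generic "twofish" name (via the MODULE_ALIAS above), and its cra_priority of 200 lets it win over the portable C implementation. A hypothetical caller using the single-block cipher interface (sketch only; TF_BLOCK_SIZE is 16 bytes):

	#include <linux/crypto.h>
	#include <linux/err.h>

	static int demo_twofish_encrypt(const u8 *key, unsigned int keylen,
					u8 dst[16], const u8 src[16])
	{
		struct crypto_cipher *tfm;
		int err;

		tfm = crypto_alloc_cipher("twofish", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_cipher_setkey(tfm, key, keylen);
		if (!err)
			crypto_cipher_encrypt_one(tfm, dst, src);

		crypto_free_cipher(tfm);
		return err;
	}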