Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--  arch/s390/crypto/Makefile                                                    |   8
-rw-r--r--  arch/s390/crypto/aes_s390.c                                                  | 248
-rw-r--r--  arch/s390/crypto/crypt_s390.h (renamed from arch/s390/crypto/crypt_z990.h)   | 267
-rw-r--r--  arch/s390/crypto/crypt_s390_query.c                                          | 129
-rw-r--r--  arch/s390/crypto/crypt_z990_query.c                                          | 111
-rw-r--r--  arch/s390/crypto/des_s390.c (renamed from arch/s390/crypto/des_z990.c)       |  54
-rw-r--r--  arch/s390/crypto/sha1_s390.c (renamed from arch/s390/crypto/sha1_z990.c)     |  32
-rw-r--r--  arch/s390/crypto/sha256_s390.c                                               | 151
8 files changed, 721 insertions, 279 deletions
diff --git a/arch/s390/crypto/Makefile b/arch/s390/crypto/Makefile
index 96a05e6b51e0..bfe2541dc5cf 100644
--- a/arch/s390/crypto/Makefile
+++ b/arch/s390/crypto/Makefile
@@ -2,7 +2,9 @@
 # Cryptographic API
 #
 
-obj-$(CONFIG_CRYPTO_SHA1_Z990) += sha1_z990.o
-obj-$(CONFIG_CRYPTO_DES_Z990) += des_z990.o des_check_key.o
+obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o
+obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o
+obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o des_check_key.o
+obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o
 
-obj-$(CONFIG_CRYPTO_TEST) += crypt_z990_query.o
+obj-$(CONFIG_CRYPTO_TEST) += crypt_s390_query.o
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
new file mode 100644
index 000000000000..7a1033d8e00f
--- /dev/null
+++ b/arch/s390/crypto/aes_s390.c
@@ -0,0 +1,248 @@
1/*
2 * Cryptographic API.
3 *
4 * s390 implementation of the AES Cipher Algorithm.
5 *
6 * s390 Version:
7 * Copyright (C) 2005 IBM Deutschland GmbH, IBM Corporation
8 * Author(s): Jan Glauber (jang@de.ibm.com)
9 *
10 * Derived from "crypto/aes.c"
11 *
12 * This program is free software; you can redistribute it and/or modify it
13 * under the terms of the GNU General Public License as published by the Free
14 * Software Foundation; either version 2 of the License, or (at your option)
15 * any later version.
16 *
17 */
18
19#include <linux/module.h>
20#include <linux/init.h>
21#include <linux/crypto.h>
22#include "crypt_s390.h"
23
24#define AES_MIN_KEY_SIZE 16
25#define AES_MAX_KEY_SIZE 32
26
27/* data block size for all key lengths */
28#define AES_BLOCK_SIZE 16
29
30int has_aes_128 = 0;
31int has_aes_192 = 0;
32int has_aes_256 = 0;
33
34struct s390_aes_ctx {
35 u8 iv[AES_BLOCK_SIZE];
36 u8 key[AES_MAX_KEY_SIZE];
37 int key_len;
38};
39
40static int aes_set_key(void *ctx, const u8 *in_key, unsigned int key_len,
41 u32 *flags)
42{
43 struct s390_aes_ctx *sctx = ctx;
44
45 switch (key_len) {
46 case 16:
47 if (!has_aes_128)
48 goto fail;
49 break;
50 case 24:
51 if (!has_aes_192)
52 goto fail;
53
54 break;
55 case 32:
56 if (!has_aes_256)
57 goto fail;
58 break;
59 default:
60 /* invalid key length */
61 goto fail;
62 break;
63 }
64
65 sctx->key_len = key_len;
66 memcpy(sctx->key, in_key, key_len);
67 return 0;
68fail:
69 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
70 return -EINVAL;
71}
72
73static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
74{
75 const struct s390_aes_ctx *sctx = ctx;
76
77 switch (sctx->key_len) {
78 case 16:
79 crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
80 AES_BLOCK_SIZE);
81 break;
82 case 24:
83 crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
84 AES_BLOCK_SIZE);
85 break;
86 case 32:
87 crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
88 AES_BLOCK_SIZE);
89 break;
90 }
91}
92
93static void aes_decrypt(void *ctx, u8 *out, const u8 *in)
94{
95 const struct s390_aes_ctx *sctx = ctx;
96
97 switch (sctx->key_len) {
98 case 16:
99 crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
100 AES_BLOCK_SIZE);
101 break;
102 case 24:
103 crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
104 AES_BLOCK_SIZE);
105 break;
106 case 32:
107 crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
108 AES_BLOCK_SIZE);
109 break;
110 }
111}
112
113static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
114 const u8 *in, unsigned int nbytes)
115{
116 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
117
118 switch (sctx->key_len) {
119 case 16:
120 crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes);
121 break;
122 case 24:
123 crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, nbytes);
124 break;
125 case 32:
126 crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, nbytes);
127 break;
128 }
129 return nbytes & ~(AES_BLOCK_SIZE - 1);
130}
131
132static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out,
133 const u8 *in, unsigned int nbytes)
134{
135 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
136
137 switch (sctx->key_len) {
138 case 16:
139 crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, nbytes);
140 break;
141 case 24:
142 crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, nbytes);
143 break;
144 case 32:
145 crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, nbytes);
146 break;
147 }
148 return nbytes & ~(AES_BLOCK_SIZE - 1);
149}
150
151static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out,
152 const u8 *in, unsigned int nbytes)
153{
154 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
155
156 memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
157 switch (sctx->key_len) {
158 case 16:
159 crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in, nbytes);
160 break;
161 case 24:
162 crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in, nbytes);
163 break;
164 case 32:
165 crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in, nbytes);
166 break;
167 }
168 memcpy(desc->info, &sctx->iv, AES_BLOCK_SIZE);
169
170 return nbytes & ~(AES_BLOCK_SIZE - 1);
171}
172
173static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out,
174 const u8 *in, unsigned int nbytes)
175{
176 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
177
178 memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
179 switch (sctx->key_len) {
180 case 16:
181 crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in, nbytes);
182 break;
183 case 24:
184 crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in, nbytes);
185 break;
186 case 32:
187 crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in, nbytes);
188 break;
189 }
190 return nbytes & ~(AES_BLOCK_SIZE - 1);
191}
192
193
194static struct crypto_alg aes_alg = {
195 .cra_name = "aes",
196 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
197 .cra_blocksize = AES_BLOCK_SIZE,
198 .cra_ctxsize = sizeof(struct s390_aes_ctx),
199 .cra_module = THIS_MODULE,
200 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
201 .cra_u = {
202 .cipher = {
203 .cia_min_keysize = AES_MIN_KEY_SIZE,
204 .cia_max_keysize = AES_MAX_KEY_SIZE,
205 .cia_setkey = aes_set_key,
206 .cia_encrypt = aes_encrypt,
207 .cia_decrypt = aes_decrypt,
208 .cia_encrypt_ecb = aes_encrypt_ecb,
209 .cia_decrypt_ecb = aes_decrypt_ecb,
210 .cia_encrypt_cbc = aes_encrypt_cbc,
211 .cia_decrypt_cbc = aes_decrypt_cbc,
212 }
213 }
214};
215
216static int __init aes_init(void)
217{
218 int ret;
219
220 if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
221 has_aes_128 = 1;
222 if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
223 has_aes_192 = 1;
224 if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
225 has_aes_256 = 1;
226
227 if (!has_aes_128 && !has_aes_192 && !has_aes_256)
228 return -ENOSYS;
229
230 ret = crypto_register_alg(&aes_alg);
231 if (ret != 0)
232 printk(KERN_INFO "crypt_s390: aes_s390 couldn't be loaded.\n");
233 return ret;
234}
235
236static void __exit aes_fini(void)
237{
238 crypto_unregister_alg(&aes_alg);
239}
240
241module_init(aes_init);
242module_exit(aes_fini);
243
244MODULE_ALIAS("aes");
245
246MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
247MODULE_LICENSE("GPL");
248
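The ECB and CBC helpers above hand the whole request to a single KM/KMC invocation and then report how much of it counts as processed: nbytes & ~(AES_BLOCK_SIZE - 1) rounds the length down to whole 16-byte AES blocks, leaving any trailing partial block for the caller. A minimal user-space sketch of that arithmetic (helper name and values are illustrative only, not part of the patch):

#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Round a request length down to whole AES blocks, as the
 * aes_*_ecb/cbc helpers do in their return statement. */
static unsigned int full_blocks(unsigned int nbytes)
{
	return nbytes & ~(AES_BLOCK_SIZE - 1);
}

int main(void)
{
	printf("%u -> %u\n", 100u, full_blocks(100));	/* 96: 4 bytes left over */
	printf("%u -> %u\n", 128u, full_blocks(128));	/* exact multiple, unchanged */
	return 0;
}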
diff --git a/arch/s390/crypto/crypt_z990.h b/arch/s390/crypto/crypt_s390.h
index 4df660b99e5a..d1c259a7fe33 100644
--- a/arch/s390/crypto/crypt_z990.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -1,7 +1,7 @@
1/* 1/*
2 * Cryptographic API. 2 * Cryptographic API.
3 * 3 *
4 * Support for z990 cryptographic instructions. 4 * Support for s390 cryptographic instructions.
5 * 5 *
6 * Copyright (C) 2003 IBM Deutschland GmbH, IBM Corporation 6 * Copyright (C) 2003 IBM Deutschland GmbH, IBM Corporation
7 * Author(s): Thomas Spatzier (tspat@de.ibm.com) 7 * Author(s): Thomas Spatzier (tspat@de.ibm.com)
@@ -12,84 +12,108 @@
12 * any later version. 12 * any later version.
13 * 13 *
14 */ 14 */
15#ifndef _CRYPTO_ARCH_S390_CRYPT_Z990_H 15#ifndef _CRYPTO_ARCH_S390_CRYPT_S390_H
16#define _CRYPTO_ARCH_S390_CRYPT_Z990_H 16#define _CRYPTO_ARCH_S390_CRYPT_S390_H
17 17
18#include <asm/errno.h> 18#include <asm/errno.h>
19 19
20#define CRYPT_Z990_OP_MASK 0xFF00 20#define CRYPT_S390_OP_MASK 0xFF00
21#define CRYPT_Z990_FUNC_MASK 0x00FF 21#define CRYPT_S390_FUNC_MASK 0x00FF
22 22
23 23/* s930 cryptographic operations */
24/*z990 cryptographic operations*/ 24enum crypt_s390_operations {
25enum crypt_z990_operations { 25 CRYPT_S390_KM = 0x0100,
26 CRYPT_Z990_KM = 0x0100, 26 CRYPT_S390_KMC = 0x0200,
27 CRYPT_Z990_KMC = 0x0200, 27 CRYPT_S390_KIMD = 0x0300,
28 CRYPT_Z990_KIMD = 0x0300, 28 CRYPT_S390_KLMD = 0x0400,
29 CRYPT_Z990_KLMD = 0x0400, 29 CRYPT_S390_KMAC = 0x0500
30 CRYPT_Z990_KMAC = 0x0500
31}; 30};
32 31
33/*function codes for KM (CIPHER MESSAGE) instruction*/ 32/* function codes for KM (CIPHER MESSAGE) instruction
34enum crypt_z990_km_func { 33 * 0x80 is the decipher modifier bit
35 KM_QUERY = CRYPT_Z990_KM | 0, 34 */
36 KM_DEA_ENCRYPT = CRYPT_Z990_KM | 1, 35enum crypt_s390_km_func {
37 KM_DEA_DECRYPT = CRYPT_Z990_KM | 1 | 0x80, //modifier bit->decipher 36 KM_QUERY = CRYPT_S390_KM | 0x0,
38 KM_TDEA_128_ENCRYPT = CRYPT_Z990_KM | 2, 37 KM_DEA_ENCRYPT = CRYPT_S390_KM | 0x1,
39 KM_TDEA_128_DECRYPT = CRYPT_Z990_KM | 2 | 0x80, 38 KM_DEA_DECRYPT = CRYPT_S390_KM | 0x1 | 0x80,
40 KM_TDEA_192_ENCRYPT = CRYPT_Z990_KM | 3, 39 KM_TDEA_128_ENCRYPT = CRYPT_S390_KM | 0x2,
41 KM_TDEA_192_DECRYPT = CRYPT_Z990_KM | 3 | 0x80, 40 KM_TDEA_128_DECRYPT = CRYPT_S390_KM | 0x2 | 0x80,
41 KM_TDEA_192_ENCRYPT = CRYPT_S390_KM | 0x3,
42 KM_TDEA_192_DECRYPT = CRYPT_S390_KM | 0x3 | 0x80,
43 KM_AES_128_ENCRYPT = CRYPT_S390_KM | 0x12,
44 KM_AES_128_DECRYPT = CRYPT_S390_KM | 0x12 | 0x80,
45 KM_AES_192_ENCRYPT = CRYPT_S390_KM | 0x13,
46 KM_AES_192_DECRYPT = CRYPT_S390_KM | 0x13 | 0x80,
47 KM_AES_256_ENCRYPT = CRYPT_S390_KM | 0x14,
48 KM_AES_256_DECRYPT = CRYPT_S390_KM | 0x14 | 0x80,
42}; 49};
43 50
44/*function codes for KMC (CIPHER MESSAGE WITH CHAINING) instruction*/ 51/* function codes for KMC (CIPHER MESSAGE WITH CHAINING)
45enum crypt_z990_kmc_func { 52 * instruction
46 KMC_QUERY = CRYPT_Z990_KMC | 0, 53 */
47 KMC_DEA_ENCRYPT = CRYPT_Z990_KMC | 1, 54enum crypt_s390_kmc_func {
48 KMC_DEA_DECRYPT = CRYPT_Z990_KMC | 1 | 0x80, //modifier bit->decipher 55 KMC_QUERY = CRYPT_S390_KMC | 0x0,
49 KMC_TDEA_128_ENCRYPT = CRYPT_Z990_KMC | 2, 56 KMC_DEA_ENCRYPT = CRYPT_S390_KMC | 0x1,
50 KMC_TDEA_128_DECRYPT = CRYPT_Z990_KMC | 2 | 0x80, 57 KMC_DEA_DECRYPT = CRYPT_S390_KMC | 0x1 | 0x80,
51 KMC_TDEA_192_ENCRYPT = CRYPT_Z990_KMC | 3, 58 KMC_TDEA_128_ENCRYPT = CRYPT_S390_KMC | 0x2,
52 KMC_TDEA_192_DECRYPT = CRYPT_Z990_KMC | 3 | 0x80, 59 KMC_TDEA_128_DECRYPT = CRYPT_S390_KMC | 0x2 | 0x80,
60 KMC_TDEA_192_ENCRYPT = CRYPT_S390_KMC | 0x3,
61 KMC_TDEA_192_DECRYPT = CRYPT_S390_KMC | 0x3 | 0x80,
62 KMC_AES_128_ENCRYPT = CRYPT_S390_KMC | 0x12,
63 KMC_AES_128_DECRYPT = CRYPT_S390_KMC | 0x12 | 0x80,
64 KMC_AES_192_ENCRYPT = CRYPT_S390_KMC | 0x13,
65 KMC_AES_192_DECRYPT = CRYPT_S390_KMC | 0x13 | 0x80,
66 KMC_AES_256_ENCRYPT = CRYPT_S390_KMC | 0x14,
67 KMC_AES_256_DECRYPT = CRYPT_S390_KMC | 0x14 | 0x80,
53}; 68};
54 69
55/*function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) instruction*/ 70/* function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
56enum crypt_z990_kimd_func { 71 * instruction
57 KIMD_QUERY = CRYPT_Z990_KIMD | 0, 72 */
58 KIMD_SHA_1 = CRYPT_Z990_KIMD | 1, 73enum crypt_s390_kimd_func {
74 KIMD_QUERY = CRYPT_S390_KIMD | 0,
75 KIMD_SHA_1 = CRYPT_S390_KIMD | 1,
76 KIMD_SHA_256 = CRYPT_S390_KIMD | 2,
59}; 77};
60 78
61/*function codes for KLMD (COMPUTE LAST MESSAGE DIGEST) instruction*/ 79/* function codes for KLMD (COMPUTE LAST MESSAGE DIGEST)
62enum crypt_z990_klmd_func { 80 * instruction
63 KLMD_QUERY = CRYPT_Z990_KLMD | 0, 81 */
64 KLMD_SHA_1 = CRYPT_Z990_KLMD | 1, 82enum crypt_s390_klmd_func {
83 KLMD_QUERY = CRYPT_S390_KLMD | 0,
84 KLMD_SHA_1 = CRYPT_S390_KLMD | 1,
85 KLMD_SHA_256 = CRYPT_S390_KLMD | 2,
65}; 86};
66 87
67/*function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) instruction*/ 88/* function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
68enum crypt_z990_kmac_func { 89 * instruction
69 KMAC_QUERY = CRYPT_Z990_KMAC | 0, 90 */
70 KMAC_DEA = CRYPT_Z990_KMAC | 1, 91enum crypt_s390_kmac_func {
71 KMAC_TDEA_128 = CRYPT_Z990_KMAC | 2, 92 KMAC_QUERY = CRYPT_S390_KMAC | 0,
72 KMAC_TDEA_192 = CRYPT_Z990_KMAC | 3 93 KMAC_DEA = CRYPT_S390_KMAC | 1,
94 KMAC_TDEA_128 = CRYPT_S390_KMAC | 2,
95 KMAC_TDEA_192 = CRYPT_S390_KMAC | 3
73}; 96};
74 97
75/*status word for z990 crypto instructions' QUERY functions*/ 98/* status word for s390 crypto instructions' QUERY functions */
76struct crypt_z990_query_status { 99struct crypt_s390_query_status {
77 u64 high; 100 u64 high;
78 u64 low; 101 u64 low;
79}; 102};
80 103
81/* 104/*
82 * Standard fixup and ex_table sections for crypt_z990 inline functions. 105 * Standard fixup and ex_table sections for crypt_s390 inline functions.
83 * label 0: the z990 crypto operation 106 * label 0: the s390 crypto operation
84 * label 1: just after 1 to catch illegal operation exception on non-z990 107 * label 1: just after 1 to catch illegal operation exception
108 * (unsupported model)
85 * label 6: the return point after fixup 109 * label 6: the return point after fixup
86 * label 7: set error value if exception _in_ crypto operation 110 * label 7: set error value if exception _in_ crypto operation
87 * label 8: set error value if illegal operation exception 111 * label 8: set error value if illegal operation exception
88 * [ret] is the variable to receive the error code 112 * [ret] is the variable to receive the error code
89 * [ERR] is the error code value 113 * [ERR] is the error code value
90 */ 114 */
91#ifndef __s390x__ 115#ifndef CONFIG_64BIT
92#define __crypt_z990_fixup \ 116#define __crypt_s390_fixup \
93 ".section .fixup,\"ax\" \n" \ 117 ".section .fixup,\"ax\" \n" \
94 "7: lhi %0,%h[e1] \n" \ 118 "7: lhi %0,%h[e1] \n" \
95 " bras 1,9f \n" \ 119 " bras 1,9f \n" \
@@ -105,8 +129,8 @@ struct crypt_z990_query_status {
105 " .long 0b,7b \n" \ 129 " .long 0b,7b \n" \
106 " .long 1b,8b \n" \ 130 " .long 1b,8b \n" \
107 ".previous" 131 ".previous"
108#else /* __s390x__ */ 132#else /* CONFIG_64BIT */
109#define __crypt_z990_fixup \ 133#define __crypt_s390_fixup \
110 ".section .fixup,\"ax\" \n" \ 134 ".section .fixup,\"ax\" \n" \
111 "7: lhi %0,%h[e1] \n" \ 135 "7: lhi %0,%h[e1] \n" \
112 " jg 6b \n" \ 136 " jg 6b \n" \
@@ -118,25 +142,25 @@ struct crypt_z990_query_status {
118 " .quad 0b,7b \n" \ 142 " .quad 0b,7b \n" \
119 " .quad 1b,8b \n" \ 143 " .quad 1b,8b \n" \
120 ".previous" 144 ".previous"
121#endif /* __s390x__ */ 145#endif /* CONFIG_64BIT */
122 146
123/* 147/*
124 * Standard code for setting the result of z990 crypto instructions. 148 * Standard code for setting the result of s390 crypto instructions.
125 * %0: the register which will receive the result 149 * %0: the register which will receive the result
126 * [result]: the register containing the result (e.g. second operand length 150 * [result]: the register containing the result (e.g. second operand length
127 * to compute number of processed bytes]. 151 * to compute number of processed bytes].
128 */ 152 */
129#ifndef __s390x__ 153#ifndef CONFIG_64BIT
130#define __crypt_z990_set_result \ 154#define __crypt_s390_set_result \
131 " lr %0,%[result] \n" 155 " lr %0,%[result] \n"
132#else /* __s390x__ */ 156#else /* CONFIG_64BIT */
133#define __crypt_z990_set_result \ 157#define __crypt_s390_set_result \
134 " lgr %0,%[result] \n" 158 " lgr %0,%[result] \n"
135#endif 159#endif
136 160
137/* 161/*
138 * Executes the KM (CIPHER MESSAGE) operation of the z990 CPU. 162 * Executes the KM (CIPHER MESSAGE) operation of the CPU.
139 * @param func: the function code passed to KM; see crypt_z990_km_func 163 * @param func: the function code passed to KM; see crypt_s390_km_func
140 * @param param: address of parameter block; see POP for details on each func 164 * @param param: address of parameter block; see POP for details on each func
141 * @param dest: address of destination memory area 165 * @param dest: address of destination memory area
142 * @param src: address of source memory area 166 * @param src: address of source memory area
@@ -145,9 +169,9 @@ struct crypt_z990_query_status {
145 * for encryption/decryption funcs 169 * for encryption/decryption funcs
146 */ 170 */
147static inline int 171static inline int
148crypt_z990_km(long func, void* param, u8* dest, const u8* src, long src_len) 172crypt_s390_km(long func, void* param, u8* dest, const u8* src, long src_len)
149{ 173{
150 register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK; 174 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
151 register void* __param asm("1") = param; 175 register void* __param asm("1") = param;
152 register u8* __dest asm("4") = dest; 176 register u8* __dest asm("4") = dest;
153 register const u8* __src asm("2") = src; 177 register const u8* __src asm("2") = src;
@@ -156,26 +180,26 @@ crypt_z990_km(long func, void* param, u8* dest, const u8* src, long src_len)
156 180
157 ret = 0; 181 ret = 0;
158 __asm__ __volatile__ ( 182 __asm__ __volatile__ (
159 "0: .insn rre,0xB92E0000,%1,%2 \n" //KM opcode 183 "0: .insn rre,0xB92E0000,%1,%2 \n" /* KM opcode */
160 "1: brc 1,0b \n" //handle partial completion 184 "1: brc 1,0b \n" /* handle partial completion */
161 __crypt_z990_set_result 185 __crypt_s390_set_result
162 "6: \n" 186 "6: \n"
163 __crypt_z990_fixup 187 __crypt_s390_fixup
164 : "+d" (ret), "+a" (__dest), "+a" (__src), 188 : "+d" (ret), "+a" (__dest), "+a" (__src),
165 [result] "+d" (__src_len) 189 [result] "+d" (__src_len)
166 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func), 190 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
167 "a" (__param) 191 "a" (__param)
168 : "cc", "memory" 192 : "cc", "memory"
169 ); 193 );
170 if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){ 194 if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
171 ret = src_len - ret; 195 ret = src_len - ret;
172 } 196 }
173 return ret; 197 return ret;
174} 198}
175 199
176/* 200/*
177 * Executes the KMC (CIPHER MESSAGE WITH CHAINING) operation of the z990 CPU. 201 * Executes the KMC (CIPHER MESSAGE WITH CHAINING) operation of the CPU.
178 * @param func: the function code passed to KM; see crypt_z990_kmc_func 202 * @param func: the function code passed to KM; see crypt_s390_kmc_func
179 * @param param: address of parameter block; see POP for details on each func 203 * @param param: address of parameter block; see POP for details on each func
180 * @param dest: address of destination memory area 204 * @param dest: address of destination memory area
181 * @param src: address of source memory area 205 * @param src: address of source memory area
@@ -184,9 +208,9 @@ crypt_z990_km(long func, void* param, u8* dest, const u8* src, long src_len)
184 * for encryption/decryption funcs 208 * for encryption/decryption funcs
185 */ 209 */
186static inline int 210static inline int
187crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len) 211crypt_s390_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
188{ 212{
189 register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK; 213 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
190 register void* __param asm("1") = param; 214 register void* __param asm("1") = param;
191 register u8* __dest asm("4") = dest; 215 register u8* __dest asm("4") = dest;
192 register const u8* __src asm("2") = src; 216 register const u8* __src asm("2") = src;
@@ -195,18 +219,18 @@ crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
195 219
196 ret = 0; 220 ret = 0;
197 __asm__ __volatile__ ( 221 __asm__ __volatile__ (
198 "0: .insn rre,0xB92F0000,%1,%2 \n" //KMC opcode 222 "0: .insn rre,0xB92F0000,%1,%2 \n" /* KMC opcode */
199 "1: brc 1,0b \n" //handle partial completion 223 "1: brc 1,0b \n" /* handle partial completion */
200 __crypt_z990_set_result 224 __crypt_s390_set_result
201 "6: \n" 225 "6: \n"
202 __crypt_z990_fixup 226 __crypt_s390_fixup
203 : "+d" (ret), "+a" (__dest), "+a" (__src), 227 : "+d" (ret), "+a" (__dest), "+a" (__src),
204 [result] "+d" (__src_len) 228 [result] "+d" (__src_len)
205 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func), 229 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
206 "a" (__param) 230 "a" (__param)
207 : "cc", "memory" 231 : "cc", "memory"
208 ); 232 );
209 if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){ 233 if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
210 ret = src_len - ret; 234 ret = src_len - ret;
211 } 235 }
212 return ret; 236 return ret;
@@ -214,8 +238,8 @@ crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
214 238
215/* 239/*
216 * Executes the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) operation 240 * Executes the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) operation
217 * of the z990 CPU. 241 * of the CPU.
218 * @param func: the function code passed to KM; see crypt_z990_kimd_func 242 * @param func: the function code passed to KM; see crypt_s390_kimd_func
219 * @param param: address of parameter block; see POP for details on each func 243 * @param param: address of parameter block; see POP for details on each func
220 * @param src: address of source memory area 244 * @param src: address of source memory area
221 * @param src_len: length of src operand in bytes 245 * @param src_len: length of src operand in bytes
@@ -223,9 +247,9 @@ crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
223 * for digest funcs 247 * for digest funcs
224 */ 248 */
225static inline int 249static inline int
226crypt_z990_kimd(long func, void* param, const u8* src, long src_len) 250crypt_s390_kimd(long func, void* param, const u8* src, long src_len)
227{ 251{
228 register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK; 252 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
229 register void* __param asm("1") = param; 253 register void* __param asm("1") = param;
230 register const u8* __src asm("2") = src; 254 register const u8* __src asm("2") = src;
231 register long __src_len asm("3") = src_len; 255 register long __src_len asm("3") = src_len;
@@ -233,25 +257,25 @@ crypt_z990_kimd(long func, void* param, const u8* src, long src_len)
233 257
234 ret = 0; 258 ret = 0;
235 __asm__ __volatile__ ( 259 __asm__ __volatile__ (
236 "0: .insn rre,0xB93E0000,%1,%1 \n" //KIMD opcode 260 "0: .insn rre,0xB93E0000,%1,%1 \n" /* KIMD opcode */
237 "1: brc 1,0b \n" /*handle partical completion of kimd*/ 261 "1: brc 1,0b \n" /* handle partical completion */
238 __crypt_z990_set_result 262 __crypt_s390_set_result
239 "6: \n" 263 "6: \n"
240 __crypt_z990_fixup 264 __crypt_s390_fixup
241 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len) 265 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
242 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func), 266 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
243 "a" (__param) 267 "a" (__param)
244 : "cc", "memory" 268 : "cc", "memory"
245 ); 269 );
246 if (ret >= 0 && (func & CRYPT_Z990_FUNC_MASK)){ 270 if (ret >= 0 && (func & CRYPT_S390_FUNC_MASK)){
247 ret = src_len - ret; 271 ret = src_len - ret;
248 } 272 }
249 return ret; 273 return ret;
250} 274}
251 275
252/* 276/*
253 * Executes the KLMD (COMPUTE LAST MESSAGE DIGEST) operation of the z990 CPU. 277 * Executes the KLMD (COMPUTE LAST MESSAGE DIGEST) operation of the CPU.
254 * @param func: the function code passed to KM; see crypt_z990_klmd_func 278 * @param func: the function code passed to KM; see crypt_s390_klmd_func
255 * @param param: address of parameter block; see POP for details on each func 279 * @param param: address of parameter block; see POP for details on each func
256 * @param src: address of source memory area 280 * @param src: address of source memory area
257 * @param src_len: length of src operand in bytes 281 * @param src_len: length of src operand in bytes
@@ -259,9 +283,9 @@ crypt_z990_kimd(long func, void* param, const u8* src, long src_len)
259 * for digest funcs 283 * for digest funcs
260 */ 284 */
261static inline int 285static inline int
262crypt_z990_klmd(long func, void* param, const u8* src, long src_len) 286crypt_s390_klmd(long func, void* param, const u8* src, long src_len)
263{ 287{
264 register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK; 288 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
265 register void* __param asm("1") = param; 289 register void* __param asm("1") = param;
266 register const u8* __src asm("2") = src; 290 register const u8* __src asm("2") = src;
267 register long __src_len asm("3") = src_len; 291 register long __src_len asm("3") = src_len;
@@ -269,17 +293,17 @@ crypt_z990_klmd(long func, void* param, const u8* src, long src_len)
269 293
270 ret = 0; 294 ret = 0;
271 __asm__ __volatile__ ( 295 __asm__ __volatile__ (
272 "0: .insn rre,0xB93F0000,%1,%1 \n" //KLMD opcode 296 "0: .insn rre,0xB93F0000,%1,%1 \n" /* KLMD opcode */
273 "1: brc 1,0b \n" /*handle partical completion of klmd*/ 297 "1: brc 1,0b \n" /* handle partical completion */
274 __crypt_z990_set_result 298 __crypt_s390_set_result
275 "6: \n" 299 "6: \n"
276 __crypt_z990_fixup 300 __crypt_s390_fixup
277 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len) 301 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
278 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func), 302 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
279 "a" (__param) 303 "a" (__param)
280 : "cc", "memory" 304 : "cc", "memory"
281 ); 305 );
282 if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){ 306 if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
283 ret = src_len - ret; 307 ret = src_len - ret;
284 } 308 }
285 return ret; 309 return ret;
@@ -287,8 +311,8 @@ crypt_z990_klmd(long func, void* param, const u8* src, long src_len)
287 311
288/* 312/*
289 * Executes the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) operation 313 * Executes the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) operation
290 * of the z990 CPU. 314 * of the CPU.
291 * @param func: the function code passed to KM; see crypt_z990_klmd_func 315 * @param func: the function code passed to KM; see crypt_s390_klmd_func
292 * @param param: address of parameter block; see POP for details on each func 316 * @param param: address of parameter block; see POP for details on each func
293 * @param src: address of source memory area 317 * @param src: address of source memory area
294 * @param src_len: length of src operand in bytes 318 * @param src_len: length of src operand in bytes
@@ -296,9 +320,9 @@ crypt_z990_klmd(long func, void* param, const u8* src, long src_len)
296 * for digest funcs 320 * for digest funcs
297 */ 321 */
298static inline int 322static inline int
299crypt_z990_kmac(long func, void* param, const u8* src, long src_len) 323crypt_s390_kmac(long func, void* param, const u8* src, long src_len)
300{ 324{
301 register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK; 325 register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
302 register void* __param asm("1") = param; 326 register void* __param asm("1") = param;
303 register const u8* __src asm("2") = src; 327 register const u8* __src asm("2") = src;
304 register long __src_len asm("3") = src_len; 328 register long __src_len asm("3") = src_len;
@@ -306,58 +330,58 @@ crypt_z990_kmac(long func, void* param, const u8* src, long src_len)
306 330
307 ret = 0; 331 ret = 0;
308 __asm__ __volatile__ ( 332 __asm__ __volatile__ (
309 "0: .insn rre,0xB91E0000,%5,%5 \n" //KMAC opcode 333 "0: .insn rre,0xB91E0000,%5,%5 \n" /* KMAC opcode */
310 "1: brc 1,0b \n" /*handle partical completion of klmd*/ 334 "1: brc 1,0b \n" /* handle partical completion */
311 __crypt_z990_set_result 335 __crypt_s390_set_result
312 "6: \n" 336 "6: \n"
313 __crypt_z990_fixup 337 __crypt_s390_fixup
314 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len) 338 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
315 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func), 339 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
316 "a" (__param) 340 "a" (__param)
317 : "cc", "memory" 341 : "cc", "memory"
318 ); 342 );
319 if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){ 343 if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
320 ret = src_len - ret; 344 ret = src_len - ret;
321 } 345 }
322 return ret; 346 return ret;
323} 347}
324 348
325/** 349/**
326 * Tests if a specific z990 crypto function is implemented on the machine. 350 * Tests if a specific crypto function is implemented on the machine.
327 * @param func: the function code of the specific function; 0 if op in general 351 * @param func: the function code of the specific function; 0 if op in general
328 * @return 1 if func available; 0 if func or op in general not available 352 * @return 1 if func available; 0 if func or op in general not available
329 */ 353 */
330static inline int 354static inline int
331crypt_z990_func_available(int func) 355crypt_s390_func_available(int func)
332{ 356{
333 int ret; 357 int ret;
334 358
335 struct crypt_z990_query_status status = { 359 struct crypt_s390_query_status status = {
336 .high = 0, 360 .high = 0,
337 .low = 0 361 .low = 0
338 }; 362 };
339 switch (func & CRYPT_Z990_OP_MASK){ 363 switch (func & CRYPT_S390_OP_MASK){
340 case CRYPT_Z990_KM: 364 case CRYPT_S390_KM:
341 ret = crypt_z990_km(KM_QUERY, &status, NULL, NULL, 0); 365 ret = crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
342 break; 366 break;
343 case CRYPT_Z990_KMC: 367 case CRYPT_S390_KMC:
344 ret = crypt_z990_kmc(KMC_QUERY, &status, NULL, NULL, 0); 368 ret = crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
345 break; 369 break;
346 case CRYPT_Z990_KIMD: 370 case CRYPT_S390_KIMD:
347 ret = crypt_z990_kimd(KIMD_QUERY, &status, NULL, 0); 371 ret = crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
348 break; 372 break;
349 case CRYPT_Z990_KLMD: 373 case CRYPT_S390_KLMD:
350 ret = crypt_z990_klmd(KLMD_QUERY, &status, NULL, 0); 374 ret = crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
351 break; 375 break;
352 case CRYPT_Z990_KMAC: 376 case CRYPT_S390_KMAC:
353 ret = crypt_z990_kmac(KMAC_QUERY, &status, NULL, 0); 377 ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
354 break; 378 break;
355 default: 379 default:
356 ret = 0; 380 ret = 0;
357 return ret; 381 return ret;
358 } 382 }
359 if (ret >= 0){ 383 if (ret >= 0){
360 func &= CRYPT_Z990_FUNC_MASK; 384 func &= CRYPT_S390_FUNC_MASK;
361 func &= 0x7f; //mask modifier bit 385 func &= 0x7f; //mask modifier bit
362 if (func < 64){ 386 if (func < 64){
363 ret = (status.high >> (64 - func - 1)) & 0x1; 387 ret = (status.high >> (64 - func - 1)) & 0x1;
@@ -370,5 +394,4 @@ crypt_z990_func_available(int func)
370 return ret; 394 return ret;
371} 395}
372 396
373 397#endif // _CRYPTO_ARCH_S390_CRYPT_S390_H
374#endif // _CRYPTO_ARCH_S390_CRYPT_Z990_H
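Each constant in this header packs an operation into the high byte (CRYPT_S390_OP_MASK, e.g. 0x0100 for KM) and a function code into the low byte (CRYPT_S390_FUNC_MASK), with 0x80 acting as the decipher modifier; the new AES-128/192/256 entries use codes 0x12-0x14. crypt_s390_func_available() issues the QUERY function and then tests the bit matching the modifier-stripped code in the returned 128-bit status block. A stand-alone sketch of that bit test follows; the branch for codes >= 64 is elided from the hunk above and is assumed here to mirror the high-word case.

#include <stdint.h>
#include <stdio.h>

#define CRYPT_S390_FUNC_MASK 0x00FF

/* Test one function-code bit in the 128-bit QUERY status block,
 * following the tail of crypt_s390_func_available(). */
static int func_bit_set(uint64_t high, uint64_t low, int func)
{
	func &= CRYPT_S390_FUNC_MASK;
	func &= 0x7f;				/* mask the decipher modifier bit */
	if (func < 64)
		return (high >> (64 - func - 1)) & 0x1;
	return (low >> (128 - func - 1)) & 0x1;	/* assumed, elided in the hunk */
}

int main(void)
{
	/* made-up status block: bit 0 (QUERY) and bit 0x12 (AES-128) set */
	uint64_t high = (1ULL << 63) | (1ULL << (63 - 0x12));
	uint64_t low = 0;

	printf("KM_QUERY:    %d\n", func_bit_set(high, low, 0x00));
	printf("KM_AES_128:  %d\n", func_bit_set(high, low, 0x12));
	printf("KM_TDEA_192: %d\n", func_bit_set(high, low, 0x03));
	return 0;
}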
diff --git a/arch/s390/crypto/crypt_s390_query.c b/arch/s390/crypto/crypt_s390_query.c
new file mode 100644
index 000000000000..def02bdc44a4
--- /dev/null
+++ b/arch/s390/crypto/crypt_s390_query.c
@@ -0,0 +1,129 @@
1/*
2 * Cryptographic API.
3 *
4 * Support for s390 cryptographic instructions.
5 * Testing module for querying processor crypto capabilities.
6 *
7 * Copyright (c) 2003 IBM Deutschland Entwicklung GmbH, IBM Corporation
8 * Author(s): Thomas Spatzier (tspat@de.ibm.com)
9 *
10 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version.
14 *
15 */
16#include <linux/module.h>
17#include <linux/init.h>
18#include <linux/kernel.h>
19#include <asm/errno.h>
20#include "crypt_s390.h"
21
22static void query_available_functions(void)
23{
24 printk(KERN_INFO "#####################\n");
25
26 /* query available KM functions */
27 printk(KERN_INFO "KM_QUERY: %d\n",
28 crypt_s390_func_available(KM_QUERY));
29 printk(KERN_INFO "KM_DEA: %d\n",
30 crypt_s390_func_available(KM_DEA_ENCRYPT));
31 printk(KERN_INFO "KM_TDEA_128: %d\n",
32 crypt_s390_func_available(KM_TDEA_128_ENCRYPT));
33 printk(KERN_INFO "KM_TDEA_192: %d\n",
34 crypt_s390_func_available(KM_TDEA_192_ENCRYPT));
35 printk(KERN_INFO "KM_AES_128: %d\n",
36 crypt_s390_func_available(KM_AES_128_ENCRYPT));
37 printk(KERN_INFO "KM_AES_192: %d\n",
38 crypt_s390_func_available(KM_AES_192_ENCRYPT));
39 printk(KERN_INFO "KM_AES_256: %d\n",
40 crypt_s390_func_available(KM_AES_256_ENCRYPT));
41
42 /* query available KMC functions */
43 printk(KERN_INFO "KMC_QUERY: %d\n",
44 crypt_s390_func_available(KMC_QUERY));
45 printk(KERN_INFO "KMC_DEA: %d\n",
46 crypt_s390_func_available(KMC_DEA_ENCRYPT));
47 printk(KERN_INFO "KMC_TDEA_128: %d\n",
48 crypt_s390_func_available(KMC_TDEA_128_ENCRYPT));
49 printk(KERN_INFO "KMC_TDEA_192: %d\n",
50 crypt_s390_func_available(KMC_TDEA_192_ENCRYPT));
51 printk(KERN_INFO "KMC_AES_128: %d\n",
52 crypt_s390_func_available(KMC_AES_128_ENCRYPT));
53 printk(KERN_INFO "KMC_AES_192: %d\n",
54 crypt_s390_func_available(KMC_AES_192_ENCRYPT));
55 printk(KERN_INFO "KMC_AES_256: %d\n",
56 crypt_s390_func_available(KMC_AES_256_ENCRYPT));
57
	58	/* query available KIMD functions */
59 printk(KERN_INFO "KIMD_QUERY: %d\n",
60 crypt_s390_func_available(KIMD_QUERY));
61 printk(KERN_INFO "KIMD_SHA_1: %d\n",
62 crypt_s390_func_available(KIMD_SHA_1));
63 printk(KERN_INFO "KIMD_SHA_256: %d\n",
64 crypt_s390_func_available(KIMD_SHA_256));
65
66 /* query available KLMD functions */
67 printk(KERN_INFO "KLMD_QUERY: %d\n",
68 crypt_s390_func_available(KLMD_QUERY));
69 printk(KERN_INFO "KLMD_SHA_1: %d\n",
70 crypt_s390_func_available(KLMD_SHA_1));
71 printk(KERN_INFO "KLMD_SHA_256: %d\n",
72 crypt_s390_func_available(KLMD_SHA_256));
73
74 /* query available KMAC functions */
75 printk(KERN_INFO "KMAC_QUERY: %d\n",
76 crypt_s390_func_available(KMAC_QUERY));
77 printk(KERN_INFO "KMAC_DEA: %d\n",
78 crypt_s390_func_available(KMAC_DEA));
79 printk(KERN_INFO "KMAC_TDEA_128: %d\n",
80 crypt_s390_func_available(KMAC_TDEA_128));
81 printk(KERN_INFO "KMAC_TDEA_192: %d\n",
82 crypt_s390_func_available(KMAC_TDEA_192));
83}
84
85static int init(void)
86{
87 struct crypt_s390_query_status status = {
88 .high = 0,
89 .low = 0
90 };
91
92 printk(KERN_INFO "crypt_s390: querying available crypto functions\n");
93 crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
94 printk(KERN_INFO "KM:\t%016llx %016llx\n",
95 (unsigned long long) status.high,
96 (unsigned long long) status.low);
97 status.high = status.low = 0;
98 crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
99 printk(KERN_INFO "KMC:\t%016llx %016llx\n",
100 (unsigned long long) status.high,
101 (unsigned long long) status.low);
102 status.high = status.low = 0;
103 crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
104 printk(KERN_INFO "KIMD:\t%016llx %016llx\n",
105 (unsigned long long) status.high,
106 (unsigned long long) status.low);
107 status.high = status.low = 0;
108 crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
109 printk(KERN_INFO "KLMD:\t%016llx %016llx\n",
110 (unsigned long long) status.high,
111 (unsigned long long) status.low);
112 status.high = status.low = 0;
113 crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
114 printk(KERN_INFO "KMAC:\t%016llx %016llx\n",
115 (unsigned long long) status.high,
116 (unsigned long long) status.low);
117
118 query_available_functions();
119 return -ECANCELED;
120}
121
122static void __exit cleanup(void)
123{
124}
125
126module_init(init);
127module_exit(cleanup);
128
129MODULE_LICENSE("GPL");
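Note that this module's init() deliberately returns -ECANCELED after printing: loading it dumps the five QUERY status blocks and the per-function availability into the kernel log, but the module never stays resident. A minimal skeleton of that probe-and-bail pattern (module and symbol names are illustrative, not part of the patch):

#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/errno.h>

/* Do the probe work in the init function and then return an error,
 * so module load fails and nothing remains resident afterwards. */
static int __init probe_only_init(void)
{
	printk(KERN_INFO "probe_only: capability report would go here\n");
	return -ECANCELED;
}

static void __exit probe_only_exit(void)
{
}

module_init(probe_only_init);
module_exit(probe_only_exit);

MODULE_LICENSE("GPL");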
diff --git a/arch/s390/crypto/crypt_z990_query.c b/arch/s390/crypto/crypt_z990_query.c
deleted file mode 100644
index 7133983d1384..000000000000
--- a/arch/s390/crypto/crypt_z990_query.c
+++ /dev/null
@@ -1,111 +0,0 @@
1/*
2 * Cryptographic API.
3 *
4 * Support for z990 cryptographic instructions.
5 * Testing module for querying processor crypto capabilities.
6 *
7 * Copyright (c) 2003 IBM Deutschland Entwicklung GmbH, IBM Corporation
8 * Author(s): Thomas Spatzier (tspat@de.ibm.com)
9 *
10 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version.
14 *
15 */
16#include <linux/module.h>
17#include <linux/init.h>
18#include <linux/kernel.h>
19#include <asm/errno.h>
20#include "crypt_z990.h"
21
22static void
23query_available_functions(void)
24{
25 printk(KERN_INFO "#####################\n");
26 //query available KM functions
27 printk(KERN_INFO "KM_QUERY: %d\n",
28 crypt_z990_func_available(KM_QUERY));
29 printk(KERN_INFO "KM_DEA: %d\n",
30 crypt_z990_func_available(KM_DEA_ENCRYPT));
31 printk(KERN_INFO "KM_TDEA_128: %d\n",
32 crypt_z990_func_available(KM_TDEA_128_ENCRYPT));
33 printk(KERN_INFO "KM_TDEA_192: %d\n",
34 crypt_z990_func_available(KM_TDEA_192_ENCRYPT));
35 //query available KMC functions
36 printk(KERN_INFO "KMC_QUERY: %d\n",
37 crypt_z990_func_available(KMC_QUERY));
38 printk(KERN_INFO "KMC_DEA: %d\n",
39 crypt_z990_func_available(KMC_DEA_ENCRYPT));
40 printk(KERN_INFO "KMC_TDEA_128: %d\n",
41 crypt_z990_func_available(KMC_TDEA_128_ENCRYPT));
42 printk(KERN_INFO "KMC_TDEA_192: %d\n",
43 crypt_z990_func_available(KMC_TDEA_192_ENCRYPT));
44 //query available KIMD fucntions
45 printk(KERN_INFO "KIMD_QUERY: %d\n",
46 crypt_z990_func_available(KIMD_QUERY));
47 printk(KERN_INFO "KIMD_SHA_1: %d\n",
48 crypt_z990_func_available(KIMD_SHA_1));
49 //query available KLMD functions
50 printk(KERN_INFO "KLMD_QUERY: %d\n",
51 crypt_z990_func_available(KLMD_QUERY));
52 printk(KERN_INFO "KLMD_SHA_1: %d\n",
53 crypt_z990_func_available(KLMD_SHA_1));
54 //query available KMAC functions
55 printk(KERN_INFO "KMAC_QUERY: %d\n",
56 crypt_z990_func_available(KMAC_QUERY));
57 printk(KERN_INFO "KMAC_DEA: %d\n",
58 crypt_z990_func_available(KMAC_DEA));
59 printk(KERN_INFO "KMAC_TDEA_128: %d\n",
60 crypt_z990_func_available(KMAC_TDEA_128));
61 printk(KERN_INFO "KMAC_TDEA_192: %d\n",
62 crypt_z990_func_available(KMAC_TDEA_192));
63}
64
65static int
66init(void)
67{
68 struct crypt_z990_query_status status = {
69 .high = 0,
70 .low = 0
71 };
72
73 printk(KERN_INFO "crypt_z990: querying available crypto functions\n");
74 crypt_z990_km(KM_QUERY, &status, NULL, NULL, 0);
75 printk(KERN_INFO "KM: %016llx %016llx\n",
76 (unsigned long long) status.high,
77 (unsigned long long) status.low);
78 status.high = status.low = 0;
79 crypt_z990_kmc(KMC_QUERY, &status, NULL, NULL, 0);
80 printk(KERN_INFO "KMC: %016llx %016llx\n",
81 (unsigned long long) status.high,
82 (unsigned long long) status.low);
83 status.high = status.low = 0;
84 crypt_z990_kimd(KIMD_QUERY, &status, NULL, 0);
85 printk(KERN_INFO "KIMD: %016llx %016llx\n",
86 (unsigned long long) status.high,
87 (unsigned long long) status.low);
88 status.high = status.low = 0;
89 crypt_z990_klmd(KLMD_QUERY, &status, NULL, 0);
90 printk(KERN_INFO "KLMD: %016llx %016llx\n",
91 (unsigned long long) status.high,
92 (unsigned long long) status.low);
93 status.high = status.low = 0;
94 crypt_z990_kmac(KMAC_QUERY, &status, NULL, 0);
95 printk(KERN_INFO "KMAC: %016llx %016llx\n",
96 (unsigned long long) status.high,
97 (unsigned long long) status.low);
98
99 query_available_functions();
100 return -1;
101}
102
103static void __exit
104cleanup(void)
105{
106}
107
108module_init(init);
109module_exit(cleanup);
110
111MODULE_LICENSE("GPL");
diff --git a/arch/s390/crypto/des_z990.c b/arch/s390/crypto/des_s390.c
index 813cf37b1177..a38bb2a3eef6 100644
--- a/arch/s390/crypto/des_z990.c
+++ b/arch/s390/crypto/des_s390.c
@@ -1,7 +1,7 @@
1/* 1/*
2 * Cryptographic API. 2 * Cryptographic API.
3 * 3 *
4 * z990 implementation of the DES Cipher Algorithm. 4 * s390 implementation of the DES Cipher Algorithm.
5 * 5 *
6 * Copyright (c) 2003 IBM Deutschland Entwicklung GmbH, IBM Corporation 6 * Copyright (c) 2003 IBM Deutschland Entwicklung GmbH, IBM Corporation
7 * Author(s): Thomas Spatzier (tspat@de.ibm.com) 7 * Author(s): Thomas Spatzier (tspat@de.ibm.com)
@@ -19,7 +19,7 @@
19#include <linux/errno.h> 19#include <linux/errno.h>
20#include <asm/scatterlist.h> 20#include <asm/scatterlist.h>
21#include <linux/crypto.h> 21#include <linux/crypto.h>
22#include "crypt_z990.h" 22#include "crypt_s390.h"
23#include "crypto_des.h" 23#include "crypto_des.h"
24 24
25#define DES_BLOCK_SIZE 8 25#define DES_BLOCK_SIZE 8
@@ -31,17 +31,17 @@
31#define DES3_192_KEY_SIZE (3 * DES_KEY_SIZE) 31#define DES3_192_KEY_SIZE (3 * DES_KEY_SIZE)
32#define DES3_192_BLOCK_SIZE DES_BLOCK_SIZE 32#define DES3_192_BLOCK_SIZE DES_BLOCK_SIZE
33 33
34struct crypt_z990_des_ctx { 34struct crypt_s390_des_ctx {
35 u8 iv[DES_BLOCK_SIZE]; 35 u8 iv[DES_BLOCK_SIZE];
36 u8 key[DES_KEY_SIZE]; 36 u8 key[DES_KEY_SIZE];
37}; 37};
38 38
39struct crypt_z990_des3_128_ctx { 39struct crypt_s390_des3_128_ctx {
40 u8 iv[DES_BLOCK_SIZE]; 40 u8 iv[DES_BLOCK_SIZE];
41 u8 key[DES3_128_KEY_SIZE]; 41 u8 key[DES3_128_KEY_SIZE];
42}; 42};
43 43
44struct crypt_z990_des3_192_ctx { 44struct crypt_s390_des3_192_ctx {
45 u8 iv[DES_BLOCK_SIZE]; 45 u8 iv[DES_BLOCK_SIZE];
46 u8 key[DES3_192_KEY_SIZE]; 46 u8 key[DES3_192_KEY_SIZE];
47}; 47};
@@ -49,7 +49,7 @@ struct crypt_z990_des3_192_ctx {
49static int 49static int
50des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) 50des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
51{ 51{
52 struct crypt_z990_des_ctx *dctx; 52 struct crypt_s390_des_ctx *dctx;
53 int ret; 53 int ret;
54 54
55 dctx = ctx; 55 dctx = ctx;
@@ -65,26 +65,26 @@ des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
65static void 65static void
66des_encrypt(void *ctx, u8 *dst, const u8 *src) 66des_encrypt(void *ctx, u8 *dst, const u8 *src)
67{ 67{
68 struct crypt_z990_des_ctx *dctx; 68 struct crypt_s390_des_ctx *dctx;
69 69
70 dctx = ctx; 70 dctx = ctx;
71 crypt_z990_km(KM_DEA_ENCRYPT, dctx->key, dst, src, DES_BLOCK_SIZE); 71 crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
72} 72}
73 73
74static void 74static void
75des_decrypt(void *ctx, u8 *dst, const u8 *src) 75des_decrypt(void *ctx, u8 *dst, const u8 *src)
76{ 76{
77 struct crypt_z990_des_ctx *dctx; 77 struct crypt_s390_des_ctx *dctx;
78 78
79 dctx = ctx; 79 dctx = ctx;
80 crypt_z990_km(KM_DEA_DECRYPT, dctx->key, dst, src, DES_BLOCK_SIZE); 80 crypt_s390_km(KM_DEA_DECRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
81} 81}
82 82
83static struct crypto_alg des_alg = { 83static struct crypto_alg des_alg = {
84 .cra_name = "des", 84 .cra_name = "des",
85 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 85 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
86 .cra_blocksize = DES_BLOCK_SIZE, 86 .cra_blocksize = DES_BLOCK_SIZE,
87 .cra_ctxsize = sizeof(struct crypt_z990_des_ctx), 87 .cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
88 .cra_module = THIS_MODULE, 88 .cra_module = THIS_MODULE,
89 .cra_list = LIST_HEAD_INIT(des_alg.cra_list), 89 .cra_list = LIST_HEAD_INIT(des_alg.cra_list),
90 .cra_u = { .cipher = { 90 .cra_u = { .cipher = {
@@ -111,7 +111,7 @@ static int
111des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) 111des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
112{ 112{
113 int i, ret; 113 int i, ret;
114 struct crypt_z990_des3_128_ctx *dctx; 114 struct crypt_s390_des3_128_ctx *dctx;
115 const u8* temp_key = key; 115 const u8* temp_key = key;
116 116
117 dctx = ctx; 117 dctx = ctx;
@@ -132,20 +132,20 @@ des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
132static void 132static void
133des3_128_encrypt(void *ctx, u8 *dst, const u8 *src) 133des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
134{ 134{
135 struct crypt_z990_des3_128_ctx *dctx; 135 struct crypt_s390_des3_128_ctx *dctx;
136 136
137 dctx = ctx; 137 dctx = ctx;
138 crypt_z990_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src, 138 crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
139 DES3_128_BLOCK_SIZE); 139 DES3_128_BLOCK_SIZE);
140} 140}
141 141
142static void 142static void
143des3_128_decrypt(void *ctx, u8 *dst, const u8 *src) 143des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
144{ 144{
145 struct crypt_z990_des3_128_ctx *dctx; 145 struct crypt_s390_des3_128_ctx *dctx;
146 146
147 dctx = ctx; 147 dctx = ctx;
148 crypt_z990_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src, 148 crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
149 DES3_128_BLOCK_SIZE); 149 DES3_128_BLOCK_SIZE);
150} 150}
151 151
@@ -153,7 +153,7 @@ static struct crypto_alg des3_128_alg = {
153 .cra_name = "des3_ede128", 153 .cra_name = "des3_ede128",
154 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 154 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
155 .cra_blocksize = DES3_128_BLOCK_SIZE, 155 .cra_blocksize = DES3_128_BLOCK_SIZE,
156 .cra_ctxsize = sizeof(struct crypt_z990_des3_128_ctx), 156 .cra_ctxsize = sizeof(struct crypt_s390_des3_128_ctx),
157 .cra_module = THIS_MODULE, 157 .cra_module = THIS_MODULE,
158 .cra_list = LIST_HEAD_INIT(des3_128_alg.cra_list), 158 .cra_list = LIST_HEAD_INIT(des3_128_alg.cra_list),
159 .cra_u = { .cipher = { 159 .cra_u = { .cipher = {
@@ -181,7 +181,7 @@ static int
181des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) 181des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
182{ 182{
183 int i, ret; 183 int i, ret;
184 struct crypt_z990_des3_192_ctx *dctx; 184 struct crypt_s390_des3_192_ctx *dctx;
185 const u8* temp_key; 185 const u8* temp_key;
186 186
187 dctx = ctx; 187 dctx = ctx;
@@ -206,20 +206,20 @@ des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
206static void 206static void
207des3_192_encrypt(void *ctx, u8 *dst, const u8 *src) 207des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
208{ 208{
209 struct crypt_z990_des3_192_ctx *dctx; 209 struct crypt_s390_des3_192_ctx *dctx;
210 210
211 dctx = ctx; 211 dctx = ctx;
212 crypt_z990_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src, 212 crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
213 DES3_192_BLOCK_SIZE); 213 DES3_192_BLOCK_SIZE);
214} 214}
215 215
216static void 216static void
217des3_192_decrypt(void *ctx, u8 *dst, const u8 *src) 217des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
218{ 218{
219 struct crypt_z990_des3_192_ctx *dctx; 219 struct crypt_s390_des3_192_ctx *dctx;
220 220
221 dctx = ctx; 221 dctx = ctx;
222 crypt_z990_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src, 222 crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
223 DES3_192_BLOCK_SIZE); 223 DES3_192_BLOCK_SIZE);
224} 224}
225 225
@@ -227,7 +227,7 @@ static struct crypto_alg des3_192_alg = {
227 .cra_name = "des3_ede", 227 .cra_name = "des3_ede",
228 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 228 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
229 .cra_blocksize = DES3_192_BLOCK_SIZE, 229 .cra_blocksize = DES3_192_BLOCK_SIZE,
230 .cra_ctxsize = sizeof(struct crypt_z990_des3_192_ctx), 230 .cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
231 .cra_module = THIS_MODULE, 231 .cra_module = THIS_MODULE,
232 .cra_list = LIST_HEAD_INIT(des3_192_alg.cra_list), 232 .cra_list = LIST_HEAD_INIT(des3_192_alg.cra_list),
233 .cra_u = { .cipher = { 233 .cra_u = { .cipher = {
@@ -245,9 +245,9 @@ init(void)
245{ 245{
246 int ret; 246 int ret;
247 247
248 if (!crypt_z990_func_available(KM_DEA_ENCRYPT) || 248 if (!crypt_s390_func_available(KM_DEA_ENCRYPT) ||
249 !crypt_z990_func_available(KM_TDEA_128_ENCRYPT) || 249 !crypt_s390_func_available(KM_TDEA_128_ENCRYPT) ||
250 !crypt_z990_func_available(KM_TDEA_192_ENCRYPT)){ 250 !crypt_s390_func_available(KM_TDEA_192_ENCRYPT)){
251 return -ENOSYS; 251 return -ENOSYS;
252 } 252 }
253 253
@@ -262,7 +262,7 @@ init(void)
262 return -EEXIST; 262 return -EEXIST;
263 } 263 }
264 264
265 printk(KERN_INFO "crypt_z990: des_z990 loaded.\n"); 265 printk(KERN_INFO "crypt_s390: des_s390 loaded.\n");
266 return 0; 266 return 0;
267} 267}
268 268
diff --git a/arch/s390/crypto/sha1_z990.c b/arch/s390/crypto/sha1_s390.c
index 298174ddf5b1..98c896b86dcd 100644
--- a/arch/s390/crypto/sha1_z990.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -1,7 +1,7 @@
1/* 1/*
2 * Cryptographic API. 2 * Cryptographic API.
3 * 3 *
4 * z990 implementation of the SHA1 Secure Hash Algorithm. 4 * s390 implementation of the SHA1 Secure Hash Algorithm.
5 * 5 *
6 * Derived from cryptoapi implementation, adapted for in-place 6 * Derived from cryptoapi implementation, adapted for in-place
7 * scatterlist interface. Originally based on the public domain 7 * scatterlist interface. Originally based on the public domain
@@ -28,22 +28,22 @@
28#include <linux/crypto.h> 28#include <linux/crypto.h>
29#include <asm/scatterlist.h> 29#include <asm/scatterlist.h>
30#include <asm/byteorder.h> 30#include <asm/byteorder.h>
31#include "crypt_z990.h" 31#include "crypt_s390.h"
32 32
33#define SHA1_DIGEST_SIZE 20 33#define SHA1_DIGEST_SIZE 20
34#define SHA1_BLOCK_SIZE 64 34#define SHA1_BLOCK_SIZE 64
35 35
36struct crypt_z990_sha1_ctx { 36struct crypt_s390_sha1_ctx {
37 u64 count; 37 u64 count;
38 u32 state[5]; 38 u32 state[5];
39 u32 buf_len; 39 u32 buf_len;
40 u8 buffer[2 * SHA1_BLOCK_SIZE]; 40 u8 buffer[2 * SHA1_BLOCK_SIZE];
41}; 41};
42 42
43static void 43static void
44sha1_init(void *ctx) 44sha1_init(void *ctx)
45{ 45{
46 static const struct crypt_z990_sha1_ctx initstate = { 46 static const struct crypt_s390_sha1_ctx initstate = {
47 .state = { 47 .state = {
48 0x67452301, 48 0x67452301,
49 0xEFCDAB89, 49 0xEFCDAB89,
@@ -58,7 +58,7 @@ sha1_init(void *ctx)
58static void 58static void
59sha1_update(void *ctx, const u8 *data, unsigned int len) 59sha1_update(void *ctx, const u8 *data, unsigned int len)
60{ 60{
61 struct crypt_z990_sha1_ctx *sctx; 61 struct crypt_s390_sha1_ctx *sctx;
62 long imd_len; 62 long imd_len;
63 63
64 sctx = ctx; 64 sctx = ctx;
@@ -69,7 +69,7 @@ sha1_update(void *ctx, const u8 *data, unsigned int len)
69 //complete full block and hash 69 //complete full block and hash
70 memcpy(sctx->buffer + sctx->buf_len, data, 70 memcpy(sctx->buffer + sctx->buf_len, data,
71 SHA1_BLOCK_SIZE - sctx->buf_len); 71 SHA1_BLOCK_SIZE - sctx->buf_len);
72 crypt_z990_kimd(KIMD_SHA_1, sctx->state, sctx->buffer, 72 crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buffer,
73 SHA1_BLOCK_SIZE); 73 SHA1_BLOCK_SIZE);
74 data += SHA1_BLOCK_SIZE - sctx->buf_len; 74 data += SHA1_BLOCK_SIZE - sctx->buf_len;
75 len -= SHA1_BLOCK_SIZE - sctx->buf_len; 75 len -= SHA1_BLOCK_SIZE - sctx->buf_len;
@@ -79,7 +79,7 @@ sha1_update(void *ctx, const u8 *data, unsigned int len)
79 //rest of data contains full blocks? 79 //rest of data contains full blocks?
80 imd_len = len & ~0x3ful; 80 imd_len = len & ~0x3ful;
81 if (imd_len){ 81 if (imd_len){
82 crypt_z990_kimd(KIMD_SHA_1, sctx->state, data, imd_len); 82 crypt_s390_kimd(KIMD_SHA_1, sctx->state, data, imd_len);
83 data += imd_len; 83 data += imd_len;
84 len -= imd_len; 84 len -= imd_len;
85 } 85 }
@@ -92,7 +92,7 @@ sha1_update(void *ctx, const u8 *data, unsigned int len)
92 92
93 93
94static void 94static void
95pad_message(struct crypt_z990_sha1_ctx* sctx) 95pad_message(struct crypt_s390_sha1_ctx* sctx)
96{ 96{
97 int index; 97 int index;
98 98
@@ -113,11 +113,11 @@ pad_message(struct crypt_z990_sha1_ctx* sctx)
113static void 113static void
114sha1_final(void* ctx, u8 *out) 114sha1_final(void* ctx, u8 *out)
115{ 115{
116 struct crypt_z990_sha1_ctx *sctx = ctx; 116 struct crypt_s390_sha1_ctx *sctx = ctx;
117 117
118 //must perform manual padding 118 //must perform manual padding
119 pad_message(sctx); 119 pad_message(sctx);
120 crypt_z990_kimd(KIMD_SHA_1, sctx->state, sctx->buffer, sctx->buf_len); 120 crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buffer, sctx->buf_len);
121 //copy digest to out 121 //copy digest to out
122 memcpy(out, sctx->state, SHA1_DIGEST_SIZE); 122 memcpy(out, sctx->state, SHA1_DIGEST_SIZE);
123 /* Wipe context */ 123 /* Wipe context */
@@ -128,7 +128,7 @@ static struct crypto_alg alg = {
128 .cra_name = "sha1", 128 .cra_name = "sha1",
129 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 129 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
130 .cra_blocksize = SHA1_BLOCK_SIZE, 130 .cra_blocksize = SHA1_BLOCK_SIZE,
131 .cra_ctxsize = sizeof(struct crypt_z990_sha1_ctx), 131 .cra_ctxsize = sizeof(struct crypt_s390_sha1_ctx),
132 .cra_module = THIS_MODULE, 132 .cra_module = THIS_MODULE,
133 .cra_list = LIST_HEAD_INIT(alg.cra_list), 133 .cra_list = LIST_HEAD_INIT(alg.cra_list),
134 .cra_u = { .digest = { 134 .cra_u = { .digest = {
@@ -143,10 +143,10 @@ init(void)
143{ 143{
144 int ret = -ENOSYS; 144 int ret = -ENOSYS;
145 145
146 if (crypt_z990_func_available(KIMD_SHA_1)){ 146 if (crypt_s390_func_available(KIMD_SHA_1)){
147 ret = crypto_register_alg(&alg); 147 ret = crypto_register_alg(&alg);
148 if (ret == 0){ 148 if (ret == 0){
149 printk(KERN_INFO "crypt_z990: sha1_z990 loaded.\n"); 149 printk(KERN_INFO "crypt_s390: sha1_s390 loaded.\n");
150 } 150 }
151 } 151 }
152 return ret; 152 return ret;
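In sha1_update() above, data is fed to KIMD only in multiples of the 64-byte SHA-1 block: a partially filled context buffer is topped up and hashed first, then imd_len = len & ~0x3ful selects the full blocks that can go straight to the instruction, and whatever remains is carried over in sctx->buffer for the next update or the final call. A small sketch of that split (the length is chosen only as an example):

#include <stdio.h>

#define SHA1_BLOCK_SIZE 64

int main(void)
{
	unsigned long len = 150;		/* bytes passed to sha1_update() */
	unsigned long imd_len = len & ~0x3ful;	/* full 64-byte blocks: 128 */

	printf("hash via KIMD now: %lu bytes\n", imd_len);
	printf("buffer for later:  %lu bytes\n", len - imd_len);	/* 22 */
	return 0;
}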
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
new file mode 100644
index 000000000000..b75bdbd476c7
--- /dev/null
+++ b/arch/s390/crypto/sha256_s390.c
@@ -0,0 +1,151 @@
1/*
2 * Cryptographic API.
3 *
4 * s390 implementation of the SHA256 Secure Hash Algorithm.
5 *
6 * s390 Version:
7 * Copyright (C) 2005 IBM Deutschland GmbH, IBM Corporation
8 * Author(s): Jan Glauber (jang@de.ibm.com)
9 *
10 * Derived from "crypto/sha256.c"
11 * and "arch/s390/crypto/sha1_s390.c"
12 *
13 * This program is free software; you can redistribute it and/or modify it
14 * under the terms of the GNU General Public License as published by the Free
15 * Software Foundation; either version 2 of the License, or (at your option)
16 * any later version.
17 *
18 */
19#include <linux/init.h>
20#include <linux/module.h>
21#include <linux/crypto.h>
22
23#include "crypt_s390.h"
24
25#define SHA256_DIGEST_SIZE 32
26#define SHA256_BLOCK_SIZE 64
27
28struct s390_sha256_ctx {
29 u64 count;
30 u32 state[8];
31 u8 buf[2 * SHA256_BLOCK_SIZE];
32};
33
34static void sha256_init(void *ctx)
35{
36 struct s390_sha256_ctx *sctx = ctx;
37
38 sctx->state[0] = 0x6a09e667;
39 sctx->state[1] = 0xbb67ae85;
40 sctx->state[2] = 0x3c6ef372;
41 sctx->state[3] = 0xa54ff53a;
42 sctx->state[4] = 0x510e527f;
43 sctx->state[5] = 0x9b05688c;
44 sctx->state[6] = 0x1f83d9ab;
45 sctx->state[7] = 0x5be0cd19;
46 sctx->count = 0;
47 memset(sctx->buf, 0, sizeof(sctx->buf));
48}
49
50static void sha256_update(void *ctx, const u8 *data, unsigned int len)
51{
52 struct s390_sha256_ctx *sctx = ctx;
53 unsigned int index;
54
55 /* how much is already in the buffer? */
56 index = sctx->count / 8 & 0x3f;
57
58 /* update message bit length */
59 sctx->count += len * 8;
60
61 /* process one block */
62 if ((index + len) >= SHA256_BLOCK_SIZE) {
63 memcpy(sctx->buf + index, data, SHA256_BLOCK_SIZE - index);
64 crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
65 SHA256_BLOCK_SIZE);
66 data += SHA256_BLOCK_SIZE - index;
67 len -= SHA256_BLOCK_SIZE - index;
68 }
69
70 /* anything left? */
71 if (len)
72 memcpy(sctx->buf + index , data, len);
73}
74
75static void pad_message(struct s390_sha256_ctx* sctx)
76{
77 int index, end;
78
79 index = sctx->count / 8 & 0x3f;
80 end = index < 56 ? SHA256_BLOCK_SIZE : 2 * SHA256_BLOCK_SIZE;
81
82 /* start pad with 1 */
83 sctx->buf[index] = 0x80;
84
85 /* pad with zeros */
86 index++;
87 memset(sctx->buf + index, 0x00, end - index - 8);
88
89 /* append message length */
90 memcpy(sctx->buf + end - 8, &sctx->count, sizeof sctx->count);
91
92 sctx->count = end * 8;
93}
94
95/* Add padding and return the message digest */
96static void sha256_final(void* ctx, u8 *out)
97{
98 struct s390_sha256_ctx *sctx = ctx;
99
100 /* must perform manual padding */
101 pad_message(sctx);
102
103 crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
104 sctx->count / 8);
105
106 /* copy digest to out */
107 memcpy(out, sctx->state, SHA256_DIGEST_SIZE);
108
109 /* wipe context */
110 memset(sctx, 0, sizeof *sctx);
111}
112
113static struct crypto_alg alg = {
114 .cra_name = "sha256",
115 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
116 .cra_blocksize = SHA256_BLOCK_SIZE,
117 .cra_ctxsize = sizeof(struct s390_sha256_ctx),
118 .cra_module = THIS_MODULE,
119 .cra_list = LIST_HEAD_INIT(alg.cra_list),
120 .cra_u = { .digest = {
121 .dia_digestsize = SHA256_DIGEST_SIZE,
122 .dia_init = sha256_init,
123 .dia_update = sha256_update,
124 .dia_final = sha256_final } }
125};
126
127static int init(void)
128{
129 int ret;
130
131 if (!crypt_s390_func_available(KIMD_SHA_256))
132 return -ENOSYS;
133
134 ret = crypto_register_alg(&alg);
135 if (ret != 0)
136 printk(KERN_INFO "crypt_s390: sha256_s390 couldn't be loaded.");
137 return ret;
138}
139
140static void __exit fini(void)
141{
142 crypto_unregister_alg(&alg);
143}
144
145module_init(init);
146module_exit(fini);
147
148MODULE_ALIAS("sha256");
149
150MODULE_LICENSE("GPL");
151MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm");
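pad_message() above implements the SHA-256 padding by hand because KIMD only digests complete blocks: a 0x80 byte, a zero fill, and the 64-bit bit count are appended so the final buffer ends on a 64-byte boundary (one block if fewer than 56 bytes are pending, two otherwise); copying sctx->count with memcpy() yields the big-endian layout SHA-256 requires, since s390 is big-endian. A small worked example of the length calculation (stand-alone, values illustrative):

#include <stdint.h>
#include <stdio.h>

#define SHA256_BLOCK_SIZE 64

/* Size of the padded final buffer for a message of `count` bits,
 * matching the index/end computation in pad_message(). */
static int padded_len(uint64_t count)
{
	int index = count / 8 & 0x3f;	/* bytes pending in the last block */

	return index < 56 ? SHA256_BLOCK_SIZE : 2 * SHA256_BLOCK_SIZE;
}

int main(void)
{
	printf("40-byte message -> %d-byte final buffer\n", padded_len(40 * 8));
	printf("60-byte message -> %d-byte final buffer\n", padded_len(60 * 8));
	return 0;
}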