author    Jan Glauber <jan.glauber@de.ibm.com>    2006-01-06 03:19:17 -0500
committer Linus Torvalds <torvalds@g5.osdl.org>   2006-01-06 11:33:50 -0500
commit    c1e26e1ef7ab50f30e5fbf004fe96ed44321ca78 (patch)
tree      d4319a9441da5b776637945f9413e702296f5ad3 /arch/s390
parent    d0f4c16febf258ba8c0f917ac3ba935fc5459566 (diff)
[PATCH] s390: in-kernel crypto rename
Replace all references to z990 by s390 in the in-kernel crypto files in
arch/s390/crypto. The code is not specific to a particular machine (z990)
but to the s390 platform. Big diff, does nothing..

Signed-off-by: Jan Glauber <jan.glauber@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
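For orientation only (not part of the commit): a minimal sketch of how code in arch/s390/crypto probes for a CPU crypto function after this rename, following the pattern the patch itself uses in des_s390.c's init(). The example_init() wrapper is hypothetical; the header, helper, and function code are the renamed ones introduced by this patch.

/* Illustrative sketch, not part of this patch. crypt_s390.h and
 * crypt_s390_func_available() replace the former crypt_z990.h and
 * crypt_z990_func_available().
 */
#include <linux/errno.h>
#include "crypt_s390.h"

static int example_init(void)
{
        /* bail out if the CPU does not implement the KM-DEA function */
        if (!crypt_s390_func_available(KM_DEA_ENCRYPT))
                return -ENOSYS;
        return 0;
}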
Diffstat (limited to 'arch/s390')
-rw-r--r--  arch/s390/crypto/Makefile                                                     |   6
-rw-r--r--  arch/s390/crypto/crypt_s390.h (renamed from arch/s390/crypto/crypt_z990.h)    | 243
-rw-r--r--  arch/s390/crypto/crypt_s390_query.c (renamed from arch/s390/crypto/crypt_z990_query.c) |  86
-rw-r--r--  arch/s390/crypto/des_s390.c (renamed from arch/s390/crypto/des_z990.c)        |  54
-rw-r--r--  arch/s390/crypto/sha1_s390.c (renamed from arch/s390/crypto/sha1_z990.c)      |  32
-rw-r--r--  arch/s390/defconfig                                                           |   4
6 files changed, 218 insertions(+), 207 deletions(-)
diff --git a/arch/s390/crypto/Makefile b/arch/s390/crypto/Makefile
index 96a05e6b51e0..50843f8d0951 100644
--- a/arch/s390/crypto/Makefile
+++ b/arch/s390/crypto/Makefile
@@ -2,7 +2,7 @@
 # Cryptographic API
 #
 
-obj-$(CONFIG_CRYPTO_SHA1_Z990) += sha1_z990.o
-obj-$(CONFIG_CRYPTO_DES_Z990) += des_z990.o des_check_key.o
+obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o
+obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o des_check_key.o
 
-obj-$(CONFIG_CRYPTO_TEST) += crypt_z990_query.o
+obj-$(CONFIG_CRYPTO_TEST) += crypt_s390_query.o
diff --git a/arch/s390/crypto/crypt_z990.h b/arch/s390/crypto/crypt_s390.h
index 4df660b99e5a..4d24f6689755 100644
--- a/arch/s390/crypto/crypt_z990.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -1,7 +1,7 @@
 /*
  * Cryptographic API.
  *
- * Support for z990 cryptographic instructions.
+ * Support for s390 cryptographic instructions.
  *
  * Copyright (C) 2003 IBM Deutschland GmbH, IBM Corporation
  * Author(s): Thomas Spatzier (tspat@de.ibm.com)
@@ -12,76 +12,86 @@
  * any later version.
  *
  */
-#ifndef _CRYPTO_ARCH_S390_CRYPT_Z990_H
-#define _CRYPTO_ARCH_S390_CRYPT_Z990_H
+#ifndef _CRYPTO_ARCH_S390_CRYPT_S390_H
+#define _CRYPTO_ARCH_S390_CRYPT_S390_H
 
 #include <asm/errno.h>
 
-#define CRYPT_Z990_OP_MASK 0xFF00
-#define CRYPT_Z990_FUNC_MASK 0x00FF
+#define CRYPT_S390_OP_MASK 0xFF00
+#define CRYPT_S390_FUNC_MASK 0x00FF
 
-
-/*z990 cryptographic operations*/
-enum crypt_z990_operations {
-        CRYPT_Z990_KM = 0x0100,
-        CRYPT_Z990_KMC = 0x0200,
-        CRYPT_Z990_KIMD = 0x0300,
-        CRYPT_Z990_KLMD = 0x0400,
-        CRYPT_Z990_KMAC = 0x0500
+/* s930 cryptographic operations */
+enum crypt_s390_operations {
+        CRYPT_S390_KM = 0x0100,
+        CRYPT_S390_KMC = 0x0200,
+        CRYPT_S390_KIMD = 0x0300,
+        CRYPT_S390_KLMD = 0x0400,
+        CRYPT_S390_KMAC = 0x0500
 };
 
-/*function codes for KM (CIPHER MESSAGE) instruction*/
-enum crypt_z990_km_func {
-        KM_QUERY = CRYPT_Z990_KM | 0,
-        KM_DEA_ENCRYPT = CRYPT_Z990_KM | 1,
-        KM_DEA_DECRYPT = CRYPT_Z990_KM | 1 | 0x80, //modifier bit->decipher
-        KM_TDEA_128_ENCRYPT = CRYPT_Z990_KM | 2,
-        KM_TDEA_128_DECRYPT = CRYPT_Z990_KM | 2 | 0x80,
-        KM_TDEA_192_ENCRYPT = CRYPT_Z990_KM | 3,
-        KM_TDEA_192_DECRYPT = CRYPT_Z990_KM | 3 | 0x80,
+/* function codes for KM (CIPHER MESSAGE) instruction
+ * 0x80 is the decipher modifier bit
+ */
+enum crypt_s390_km_func {
+        KM_QUERY = CRYPT_S390_KM | 0,
+        KM_DEA_ENCRYPT = CRYPT_S390_KM | 1,
+        KM_DEA_DECRYPT = CRYPT_S390_KM | 1 | 0x80,
+        KM_TDEA_128_ENCRYPT = CRYPT_S390_KM | 2,
+        KM_TDEA_128_DECRYPT = CRYPT_S390_KM | 2 | 0x80,
+        KM_TDEA_192_ENCRYPT = CRYPT_S390_KM | 3,
+        KM_TDEA_192_DECRYPT = CRYPT_S390_KM | 3 | 0x80,
 };
 
-/*function codes for KMC (CIPHER MESSAGE WITH CHAINING) instruction*/
-enum crypt_z990_kmc_func {
-        KMC_QUERY = CRYPT_Z990_KMC | 0,
-        KMC_DEA_ENCRYPT = CRYPT_Z990_KMC | 1,
-        KMC_DEA_DECRYPT = CRYPT_Z990_KMC | 1 | 0x80, //modifier bit->decipher
-        KMC_TDEA_128_ENCRYPT = CRYPT_Z990_KMC | 2,
-        KMC_TDEA_128_DECRYPT = CRYPT_Z990_KMC | 2 | 0x80,
-        KMC_TDEA_192_ENCRYPT = CRYPT_Z990_KMC | 3,
-        KMC_TDEA_192_DECRYPT = CRYPT_Z990_KMC | 3 | 0x80,
+/* function codes for KMC (CIPHER MESSAGE WITH CHAINING)
+ * instruction
+ */
+enum crypt_s390_kmc_func {
+        KMC_QUERY = CRYPT_S390_KMC | 0,
+        KMC_DEA_ENCRYPT = CRYPT_S390_KMC | 1,
+        KMC_DEA_DECRYPT = CRYPT_S390_KMC | 1 | 0x80,
+        KMC_TDEA_128_ENCRYPT = CRYPT_S390_KMC | 2,
+        KMC_TDEA_128_DECRYPT = CRYPT_S390_KMC | 2 | 0x80,
+        KMC_TDEA_192_ENCRYPT = CRYPT_S390_KMC | 3,
+        KMC_TDEA_192_DECRYPT = CRYPT_S390_KMC | 3 | 0x80,
 };
 
-/*function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) instruction*/
-enum crypt_z990_kimd_func {
-        KIMD_QUERY = CRYPT_Z990_KIMD | 0,
-        KIMD_SHA_1 = CRYPT_Z990_KIMD | 1,
+/* function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
+ * instruction
+ */
+enum crypt_s390_kimd_func {
+        KIMD_QUERY = CRYPT_S390_KIMD | 0,
+        KIMD_SHA_1 = CRYPT_S390_KIMD | 1,
 };
 
-/*function codes for KLMD (COMPUTE LAST MESSAGE DIGEST) instruction*/
-enum crypt_z990_klmd_func {
-        KLMD_QUERY = CRYPT_Z990_KLMD | 0,
-        KLMD_SHA_1 = CRYPT_Z990_KLMD | 1,
+/* function codes for KLMD (COMPUTE LAST MESSAGE DIGEST)
+ * instruction
+ */
+enum crypt_s390_klmd_func {
+        KLMD_QUERY = CRYPT_S390_KLMD | 0,
+        KLMD_SHA_1 = CRYPT_S390_KLMD | 1,
 };
 
-/*function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) instruction*/
-enum crypt_z990_kmac_func {
-        KMAC_QUERY = CRYPT_Z990_KMAC | 0,
-        KMAC_DEA = CRYPT_Z990_KMAC | 1,
-        KMAC_TDEA_128 = CRYPT_Z990_KMAC | 2,
-        KMAC_TDEA_192 = CRYPT_Z990_KMAC | 3
+/* function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
+ * instruction
+ */
+enum crypt_s390_kmac_func {
+        KMAC_QUERY = CRYPT_S390_KMAC | 0,
+        KMAC_DEA = CRYPT_S390_KMAC | 1,
+        KMAC_TDEA_128 = CRYPT_S390_KMAC | 2,
+        KMAC_TDEA_192 = CRYPT_S390_KMAC | 3
 };
 
-/*status word for z990 crypto instructions' QUERY functions*/
-struct crypt_z990_query_status {
+/* status word for s390 crypto instructions' QUERY functions */
+struct crypt_s390_query_status {
         u64 high;
         u64 low;
 };
 
 /*
- * Standard fixup and ex_table sections for crypt_z990 inline functions.
- * label 0: the z990 crypto operation
- * label 1: just after 1 to catch illegal operation exception on non-z990
+ * Standard fixup and ex_table sections for crypt_s390 inline functions.
+ * label 0: the s390 crypto operation
+ * label 1: just after 1 to catch illegal operation exception
+ * (unsupported model)
  * label 6: the return point after fixup
  * label 7: set error value if exception _in_ crypto operation
  * label 8: set error value if illegal operation exception
@@ -89,7 +99,7 @@ struct crypt_z990_query_status {
  * [ERR] is the error code value
  */
 #ifndef __s390x__
-#define __crypt_z990_fixup \
+#define __crypt_s390_fixup \
         ".section .fixup,\"ax\" \n" \
         "7: lhi %0,%h[e1] \n" \
         " bras 1,9f \n" \
@@ -106,7 +116,7 @@ struct crypt_z990_query_status {
         " .long 1b,8b \n" \
         ".previous"
 #else /* __s390x__ */
-#define __crypt_z990_fixup \
+#define __crypt_s390_fixup \
         ".section .fixup,\"ax\" \n" \
         "7: lhi %0,%h[e1] \n" \
         " jg 6b \n" \
@@ -121,22 +131,22 @@ struct crypt_z990_query_status {
 #endif /* __s390x__ */
 
 /*
- * Standard code for setting the result of z990 crypto instructions.
+ * Standard code for setting the result of s390 crypto instructions.
  * %0: the register which will receive the result
  * [result]: the register containing the result (e.g. second operand length
  * to compute number of processed bytes].
  */
 #ifndef __s390x__
-#define __crypt_z990_set_result \
+#define __crypt_s390_set_result \
         " lr %0,%[result] \n"
 #else /* __s390x__ */
-#define __crypt_z990_set_result \
+#define __crypt_s390_set_result \
         " lgr %0,%[result] \n"
 #endif
 
 /*
- * Executes the KM (CIPHER MESSAGE) operation of the z990 CPU.
- * @param func: the function code passed to KM; see crypt_z990_km_func
+ * Executes the KM (CIPHER MESSAGE) operation of the CPU.
+ * @param func: the function code passed to KM; see crypt_s390_km_func
  * @param param: address of parameter block; see POP for details on each func
  * @param dest: address of destination memory area
  * @param src: address of source memory area
@@ -145,9 +155,9 @@ struct crypt_z990_query_status {
  * for encryption/decryption funcs
  */
 static inline int
-crypt_z990_km(long func, void* param, u8* dest, const u8* src, long src_len)
+crypt_s390_km(long func, void* param, u8* dest, const u8* src, long src_len)
 {
-        register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK;
+        register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
         register void* __param asm("1") = param;
         register u8* __dest asm("4") = dest;
         register const u8* __src asm("2") = src;
@@ -156,26 +166,26 @@ crypt_z990_km(long func, void* param, u8* dest, const u8* src, long src_len)
 
         ret = 0;
         __asm__ __volatile__ (
-                "0: .insn rre,0xB92E0000,%1,%2 \n" //KM opcode
-                "1: brc 1,0b \n" //handle partial completion
-                __crypt_z990_set_result
+                "0: .insn rre,0xB92E0000,%1,%2 \n" /* KM opcode */
+                "1: brc 1,0b \n" /* handle partial completion */
+                __crypt_s390_set_result
                 "6: \n"
-                __crypt_z990_fixup
+                __crypt_s390_fixup
                 : "+d" (ret), "+a" (__dest), "+a" (__src),
                 [result] "+d" (__src_len)
                 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
                 "a" (__param)
                 : "cc", "memory"
         );
-        if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){
+        if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
                 ret = src_len - ret;
         }
         return ret;
 }
 
 /*
- * Executes the KMC (CIPHER MESSAGE WITH CHAINING) operation of the z990 CPU.
- * @param func: the function code passed to KM; see crypt_z990_kmc_func
+ * Executes the KMC (CIPHER MESSAGE WITH CHAINING) operation of the CPU.
+ * @param func: the function code passed to KM; see crypt_s390_kmc_func
  * @param param: address of parameter block; see POP for details on each func
  * @param dest: address of destination memory area
  * @param src: address of source memory area
@@ -184,9 +194,9 @@ crypt_z990_km(long func, void* param, u8* dest, const u8* src, long src_len)
  * for encryption/decryption funcs
  */
 static inline int
-crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
+crypt_s390_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
 {
-        register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK;
+        register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
         register void* __param asm("1") = param;
         register u8* __dest asm("4") = dest;
         register const u8* __src asm("2") = src;
@@ -195,18 +205,18 @@ crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
 
         ret = 0;
         __asm__ __volatile__ (
-                "0: .insn rre,0xB92F0000,%1,%2 \n" //KMC opcode
-                "1: brc 1,0b \n" //handle partial completion
-                __crypt_z990_set_result
+                "0: .insn rre,0xB92F0000,%1,%2 \n" /* KMC opcode */
+                "1: brc 1,0b \n" /* handle partial completion */
+                __crypt_s390_set_result
                 "6: \n"
-                __crypt_z990_fixup
+                __crypt_s390_fixup
                 : "+d" (ret), "+a" (__dest), "+a" (__src),
                 [result] "+d" (__src_len)
                 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
                 "a" (__param)
                 : "cc", "memory"
         );
-        if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){
+        if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
                 ret = src_len - ret;
         }
         return ret;
@@ -214,8 +224,8 @@ crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
 
 /*
  * Executes the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) operation
- * of the z990 CPU.
- * @param func: the function code passed to KM; see crypt_z990_kimd_func
+ * of the CPU.
+ * @param func: the function code passed to KM; see crypt_s390_kimd_func
  * @param param: address of parameter block; see POP for details on each func
  * @param src: address of source memory area
  * @param src_len: length of src operand in bytes
@@ -223,9 +233,9 @@ crypt_z990_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
  * for digest funcs
  */
 static inline int
-crypt_z990_kimd(long func, void* param, const u8* src, long src_len)
+crypt_s390_kimd(long func, void* param, const u8* src, long src_len)
 {
-        register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK;
+        register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
         register void* __param asm("1") = param;
         register const u8* __src asm("2") = src;
         register long __src_len asm("3") = src_len;
@@ -233,25 +243,25 @@ crypt_z990_kimd(long func, void* param, const u8* src, long src_len)
 
         ret = 0;
         __asm__ __volatile__ (
-                "0: .insn rre,0xB93E0000,%1,%1 \n" //KIMD opcode
-                "1: brc 1,0b \n" /*handle partical completion of kimd*/
-                __crypt_z990_set_result
+                "0: .insn rre,0xB93E0000,%1,%1 \n" /* KIMD opcode */
+                "1: brc 1,0b \n" /* handle partical completion */
+                __crypt_s390_set_result
                 "6: \n"
-                __crypt_z990_fixup
+                __crypt_s390_fixup
                 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
                 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
                 "a" (__param)
                 : "cc", "memory"
         );
-        if (ret >= 0 && (func & CRYPT_Z990_FUNC_MASK)){
+        if (ret >= 0 && (func & CRYPT_S390_FUNC_MASK)){
                 ret = src_len - ret;
         }
         return ret;
 }
 
 /*
- * Executes the KLMD (COMPUTE LAST MESSAGE DIGEST) operation of the z990 CPU.
- * @param func: the function code passed to KM; see crypt_z990_klmd_func
+ * Executes the KLMD (COMPUTE LAST MESSAGE DIGEST) operation of the CPU.
+ * @param func: the function code passed to KM; see crypt_s390_klmd_func
  * @param param: address of parameter block; see POP for details on each func
  * @param src: address of source memory area
  * @param src_len: length of src operand in bytes
@@ -259,9 +269,9 @@ crypt_z990_kimd(long func, void* param, const u8* src, long src_len)
  * for digest funcs
  */
 static inline int
-crypt_z990_klmd(long func, void* param, const u8* src, long src_len)
+crypt_s390_klmd(long func, void* param, const u8* src, long src_len)
 {
-        register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK;
+        register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
         register void* __param asm("1") = param;
         register const u8* __src asm("2") = src;
         register long __src_len asm("3") = src_len;
@@ -269,17 +279,17 @@ crypt_z990_klmd(long func, void* param, const u8* src, long src_len)
 
         ret = 0;
         __asm__ __volatile__ (
-                "0: .insn rre,0xB93F0000,%1,%1 \n" //KLMD opcode
-                "1: brc 1,0b \n" /*handle partical completion of klmd*/
-                __crypt_z990_set_result
+                "0: .insn rre,0xB93F0000,%1,%1 \n" /* KLMD opcode */
+                "1: brc 1,0b \n" /* handle partical completion */
+                __crypt_s390_set_result
                 "6: \n"
-                __crypt_z990_fixup
+                __crypt_s390_fixup
                 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
                 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
                 "a" (__param)
                 : "cc", "memory"
         );
-        if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){
+        if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
                 ret = src_len - ret;
         }
         return ret;
@@ -287,8 +297,8 @@ crypt_z990_klmd(long func, void* param, const u8* src, long src_len)
 
 /*
  * Executes the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) operation
- * of the z990 CPU.
- * @param func: the function code passed to KM; see crypt_z990_klmd_func
+ * of the CPU.
+ * @param func: the function code passed to KM; see crypt_s390_klmd_func
  * @param param: address of parameter block; see POP for details on each func
  * @param src: address of source memory area
  * @param src_len: length of src operand in bytes
@@ -296,9 +306,9 @@ crypt_z990_klmd(long func, void* param, const u8* src, long src_len)
  * for digest funcs
  */
 static inline int
-crypt_z990_kmac(long func, void* param, const u8* src, long src_len)
+crypt_s390_kmac(long func, void* param, const u8* src, long src_len)
 {
-        register long __func asm("0") = func & CRYPT_Z990_FUNC_MASK;
+        register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
         register void* __param asm("1") = param;
         register const u8* __src asm("2") = src;
         register long __src_len asm("3") = src_len;
@@ -306,58 +316,58 @@ crypt_z990_kmac(long func, void* param, const u8* src, long src_len)
 
         ret = 0;
         __asm__ __volatile__ (
-                "0: .insn rre,0xB91E0000,%5,%5 \n" //KMAC opcode
-                "1: brc 1,0b \n" /*handle partical completion of klmd*/
-                __crypt_z990_set_result
+                "0: .insn rre,0xB91E0000,%5,%5 \n" /* KMAC opcode */
+                "1: brc 1,0b \n" /* handle partical completion */
+                __crypt_s390_set_result
                 "6: \n"
-                __crypt_z990_fixup
+                __crypt_s390_fixup
                 : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
                 : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
                 "a" (__param)
                 : "cc", "memory"
         );
-        if (ret >= 0 && func & CRYPT_Z990_FUNC_MASK){
+        if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
                 ret = src_len - ret;
         }
         return ret;
 }
 
 /**
- * Tests if a specific z990 crypto function is implemented on the machine.
+ * Tests if a specific crypto function is implemented on the machine.
  * @param func: the function code of the specific function; 0 if op in general
  * @return 1 if func available; 0 if func or op in general not available
  */
 static inline int
-crypt_z990_func_available(int func)
+crypt_s390_func_available(int func)
 {
         int ret;
 
-        struct crypt_z990_query_status status = {
+        struct crypt_s390_query_status status = {
                 .high = 0,
                 .low = 0
         };
-        switch (func & CRYPT_Z990_OP_MASK){
-        case CRYPT_Z990_KM:
-                ret = crypt_z990_km(KM_QUERY, &status, NULL, NULL, 0);
+        switch (func & CRYPT_S390_OP_MASK){
+        case CRYPT_S390_KM:
+                ret = crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
                 break;
-        case CRYPT_Z990_KMC:
-                ret = crypt_z990_kmc(KMC_QUERY, &status, NULL, NULL, 0);
+        case CRYPT_S390_KMC:
+                ret = crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
                 break;
-        case CRYPT_Z990_KIMD:
-                ret = crypt_z990_kimd(KIMD_QUERY, &status, NULL, 0);
+        case CRYPT_S390_KIMD:
+                ret = crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
                 break;
-        case CRYPT_Z990_KLMD:
-                ret = crypt_z990_klmd(KLMD_QUERY, &status, NULL, 0);
+        case CRYPT_S390_KLMD:
+                ret = crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
                 break;
-        case CRYPT_Z990_KMAC:
-                ret = crypt_z990_kmac(KMAC_QUERY, &status, NULL, 0);
+        case CRYPT_S390_KMAC:
+                ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
                 break;
         default:
                 ret = 0;
                 return ret;
         }
         if (ret >= 0){
-                func &= CRYPT_Z990_FUNC_MASK;
+                func &= CRYPT_S390_FUNC_MASK;
                 func &= 0x7f; //mask modifier bit
                 if (func < 64){
                         ret = (status.high >> (64 - func - 1)) & 0x1;
@@ -370,5 +380,4 @@ crypt_z990_func_available(int func)
         return ret;
 }
 
-
-#endif // _CRYPTO_ARCH_S390_CRYPT_Z990_H
+#endif // _CRYPTO_ARCH_S390_CRYPT_S390_H
diff --git a/arch/s390/crypto/crypt_z990_query.c b/arch/s390/crypto/crypt_s390_query.c
index 7133983d1384..0fa6bdf197cf 100644
--- a/arch/s390/crypto/crypt_z990_query.c
+++ b/arch/s390/crypto/crypt_s390_query.c
@@ -1,7 +1,7 @@
 /*
  * Cryptographic API.
  *
- * Support for z990 cryptographic instructions.
+ * Support for s390 cryptographic instructions.
  * Testing module for querying processor crypto capabilities.
  *
  * Copyright (c) 2003 IBM Deutschland Entwicklung GmbH, IBM Corporation
@@ -17,91 +17,93 @@
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <asm/errno.h>
-#include "crypt_z990.h"
+#include "crypt_s390.h"
 
-static void
-query_available_functions(void)
+static void query_available_functions(void)
 {
         printk(KERN_INFO "#####################\n");
-        //query available KM functions
+
+        /* query available KM functions */
         printk(KERN_INFO "KM_QUERY: %d\n",
-                crypt_z990_func_available(KM_QUERY));
+                crypt_s390_func_available(KM_QUERY));
         printk(KERN_INFO "KM_DEA: %d\n",
-                crypt_z990_func_available(KM_DEA_ENCRYPT));
+                crypt_s390_func_available(KM_DEA_ENCRYPT));
         printk(KERN_INFO "KM_TDEA_128: %d\n",
-                crypt_z990_func_available(KM_TDEA_128_ENCRYPT));
+                crypt_s390_func_available(KM_TDEA_128_ENCRYPT));
         printk(KERN_INFO "KM_TDEA_192: %d\n",
-                crypt_z990_func_available(KM_TDEA_192_ENCRYPT));
-        //query available KMC functions
+                crypt_s390_func_available(KM_TDEA_192_ENCRYPT));
+
+        /* query available KMC functions */
         printk(KERN_INFO "KMC_QUERY: %d\n",
-                crypt_z990_func_available(KMC_QUERY));
+                crypt_s390_func_available(KMC_QUERY));
         printk(KERN_INFO "KMC_DEA: %d\n",
-                crypt_z990_func_available(KMC_DEA_ENCRYPT));
+                crypt_s390_func_available(KMC_DEA_ENCRYPT));
         printk(KERN_INFO "KMC_TDEA_128: %d\n",
-                crypt_z990_func_available(KMC_TDEA_128_ENCRYPT));
+                crypt_s390_func_available(KMC_TDEA_128_ENCRYPT));
         printk(KERN_INFO "KMC_TDEA_192: %d\n",
-                crypt_z990_func_available(KMC_TDEA_192_ENCRYPT));
-        //query available KIMD fucntions
+                crypt_s390_func_available(KMC_TDEA_192_ENCRYPT));
+
+        /* query available KIMD fucntions */
         printk(KERN_INFO "KIMD_QUERY: %d\n",
-                crypt_z990_func_available(KIMD_QUERY));
+                crypt_s390_func_available(KIMD_QUERY));
         printk(KERN_INFO "KIMD_SHA_1: %d\n",
-                crypt_z990_func_available(KIMD_SHA_1));
-        //query available KLMD functions
+                crypt_s390_func_available(KIMD_SHA_1));
+
+        /* query available KLMD functions */
         printk(KERN_INFO "KLMD_QUERY: %d\n",
-                crypt_z990_func_available(KLMD_QUERY));
+                crypt_s390_func_available(KLMD_QUERY));
         printk(KERN_INFO "KLMD_SHA_1: %d\n",
-                crypt_z990_func_available(KLMD_SHA_1));
-        //query available KMAC functions
+                crypt_s390_func_available(KLMD_SHA_1));
+
+        /* query available KMAC functions */
         printk(KERN_INFO "KMAC_QUERY: %d\n",
-                crypt_z990_func_available(KMAC_QUERY));
+                crypt_s390_func_available(KMAC_QUERY));
         printk(KERN_INFO "KMAC_DEA: %d\n",
-                crypt_z990_func_available(KMAC_DEA));
+                crypt_s390_func_available(KMAC_DEA));
         printk(KERN_INFO "KMAC_TDEA_128: %d\n",
-                crypt_z990_func_available(KMAC_TDEA_128));
+                crypt_s390_func_available(KMAC_TDEA_128));
         printk(KERN_INFO "KMAC_TDEA_192: %d\n",
-                crypt_z990_func_available(KMAC_TDEA_192));
+                crypt_s390_func_available(KMAC_TDEA_192));
 }
 
-static int
-init(void)
+static int init(void)
 {
-        struct crypt_z990_query_status status = {
+        struct crypt_s390_query_status status = {
                 .high = 0,
                 .low = 0
         };
 
-        printk(KERN_INFO "crypt_z990: querying available crypto functions\n");
-        crypt_z990_km(KM_QUERY, &status, NULL, NULL, 0);
-        printk(KERN_INFO "KM: %016llx %016llx\n",
+        printk(KERN_INFO "crypt_s390: querying available crypto functions\n");
+        crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
+        printk(KERN_INFO "KM:\t%016llx %016llx\n",
                 (unsigned long long) status.high,
                 (unsigned long long) status.low);
         status.high = status.low = 0;
-        crypt_z990_kmc(KMC_QUERY, &status, NULL, NULL, 0);
-        printk(KERN_INFO "KMC: %016llx %016llx\n",
+        crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
+        printk(KERN_INFO "KMC:\t%016llx %016llx\n",
                 (unsigned long long) status.high,
                 (unsigned long long) status.low);
         status.high = status.low = 0;
-        crypt_z990_kimd(KIMD_QUERY, &status, NULL, 0);
-        printk(KERN_INFO "KIMD: %016llx %016llx\n",
+        crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
+        printk(KERN_INFO "KIMD:\t%016llx %016llx\n",
                 (unsigned long long) status.high,
                 (unsigned long long) status.low);
         status.high = status.low = 0;
-        crypt_z990_klmd(KLMD_QUERY, &status, NULL, 0);
-        printk(KERN_INFO "KLMD: %016llx %016llx\n",
+        crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
+        printk(KERN_INFO "KLMD:\t%016llx %016llx\n",
                 (unsigned long long) status.high,
                 (unsigned long long) status.low);
         status.high = status.low = 0;
-        crypt_z990_kmac(KMAC_QUERY, &status, NULL, 0);
-        printk(KERN_INFO "KMAC: %016llx %016llx\n",
+        crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
+        printk(KERN_INFO "KMAC:\t%016llx %016llx\n",
                 (unsigned long long) status.high,
                 (unsigned long long) status.low);
 
         query_available_functions();
-        return -1;
+        return -ECANCELED;
 }
 
-static void __exit
-cleanup(void)
+static void __exit cleanup(void)
 {
 }
 
diff --git a/arch/s390/crypto/des_z990.c b/arch/s390/crypto/des_s390.c
index 813cf37b1177..a38bb2a3eef6 100644
--- a/arch/s390/crypto/des_z990.c
+++ b/arch/s390/crypto/des_s390.c
@@ -1,7 +1,7 @@
 /*
  * Cryptographic API.
  *
- * z990 implementation of the DES Cipher Algorithm.
+ * s390 implementation of the DES Cipher Algorithm.
  *
  * Copyright (c) 2003 IBM Deutschland Entwicklung GmbH, IBM Corporation
  * Author(s): Thomas Spatzier (tspat@de.ibm.com)
@@ -19,7 +19,7 @@
 #include <linux/errno.h>
 #include <asm/scatterlist.h>
 #include <linux/crypto.h>
-#include "crypt_z990.h"
+#include "crypt_s390.h"
 #include "crypto_des.h"
 
 #define DES_BLOCK_SIZE 8
@@ -31,17 +31,17 @@
 #define DES3_192_KEY_SIZE (3 * DES_KEY_SIZE)
 #define DES3_192_BLOCK_SIZE DES_BLOCK_SIZE
 
-struct crypt_z990_des_ctx {
+struct crypt_s390_des_ctx {
         u8 iv[DES_BLOCK_SIZE];
         u8 key[DES_KEY_SIZE];
 };
 
-struct crypt_z990_des3_128_ctx {
+struct crypt_s390_des3_128_ctx {
         u8 iv[DES_BLOCK_SIZE];
         u8 key[DES3_128_KEY_SIZE];
 };
 
-struct crypt_z990_des3_192_ctx {
+struct crypt_s390_des3_192_ctx {
         u8 iv[DES_BLOCK_SIZE];
         u8 key[DES3_192_KEY_SIZE];
 };
@@ -49,7 +49,7 @@ struct crypt_z990_des3_192_ctx {
 static int
 des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
 {
-        struct crypt_z990_des_ctx *dctx;
+        struct crypt_s390_des_ctx *dctx;
         int ret;
 
         dctx = ctx;
@@ -65,26 +65,26 @@ des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
 static void
 des_encrypt(void *ctx, u8 *dst, const u8 *src)
 {
-        struct crypt_z990_des_ctx *dctx;
+        struct crypt_s390_des_ctx *dctx;
 
         dctx = ctx;
-        crypt_z990_km(KM_DEA_ENCRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
+        crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
 }
 
 static void
 des_decrypt(void *ctx, u8 *dst, const u8 *src)
 {
-        struct crypt_z990_des_ctx *dctx;
+        struct crypt_s390_des_ctx *dctx;
 
         dctx = ctx;
-        crypt_z990_km(KM_DEA_DECRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
+        crypt_s390_km(KM_DEA_DECRYPT, dctx->key, dst, src, DES_BLOCK_SIZE);
 }
 
 static struct crypto_alg des_alg = {
         .cra_name = "des",
         .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
         .cra_blocksize = DES_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct crypt_z990_des_ctx),
+        .cra_ctxsize = sizeof(struct crypt_s390_des_ctx),
         .cra_module = THIS_MODULE,
         .cra_list = LIST_HEAD_INIT(des_alg.cra_list),
         .cra_u = { .cipher = {
@@ -111,7 +111,7 @@ static int
 des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
 {
         int i, ret;
-        struct crypt_z990_des3_128_ctx *dctx;
+        struct crypt_s390_des3_128_ctx *dctx;
         const u8* temp_key = key;
 
         dctx = ctx;
@@ -132,20 +132,20 @@ des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
 static void
 des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
 {
-        struct crypt_z990_des3_128_ctx *dctx;
+        struct crypt_s390_des3_128_ctx *dctx;
 
         dctx = ctx;
-        crypt_z990_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
+        crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
                 DES3_128_BLOCK_SIZE);
 }
 
 static void
 des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
 {
-        struct crypt_z990_des3_128_ctx *dctx;
+        struct crypt_s390_des3_128_ctx *dctx;
 
         dctx = ctx;
-        crypt_z990_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
+        crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
                 DES3_128_BLOCK_SIZE);
 }
 
@@ -153,7 +153,7 @@ static struct crypto_alg des3_128_alg = {
         .cra_name = "des3_ede128",
         .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
         .cra_blocksize = DES3_128_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct crypt_z990_des3_128_ctx),
+        .cra_ctxsize = sizeof(struct crypt_s390_des3_128_ctx),
         .cra_module = THIS_MODULE,
         .cra_list = LIST_HEAD_INIT(des3_128_alg.cra_list),
         .cra_u = { .cipher = {
@@ -181,7 +181,7 @@ static int
 des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
 {
         int i, ret;
-        struct crypt_z990_des3_192_ctx *dctx;
+        struct crypt_s390_des3_192_ctx *dctx;
         const u8* temp_key;
 
         dctx = ctx;
@@ -206,20 +206,20 @@ des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
 static void
 des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
 {
-        struct crypt_z990_des3_192_ctx *dctx;
+        struct crypt_s390_des3_192_ctx *dctx;
 
         dctx = ctx;
-        crypt_z990_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
+        crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
                 DES3_192_BLOCK_SIZE);
 }
 
 static void
 des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
 {
-        struct crypt_z990_des3_192_ctx *dctx;
+        struct crypt_s390_des3_192_ctx *dctx;
 
         dctx = ctx;
-        crypt_z990_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
+        crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
                 DES3_192_BLOCK_SIZE);
 }
 
@@ -227,7 +227,7 @@ static struct crypto_alg des3_192_alg = {
         .cra_name = "des3_ede",
         .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
         .cra_blocksize = DES3_192_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct crypt_z990_des3_192_ctx),
+        .cra_ctxsize = sizeof(struct crypt_s390_des3_192_ctx),
         .cra_module = THIS_MODULE,
         .cra_list = LIST_HEAD_INIT(des3_192_alg.cra_list),
         .cra_u = { .cipher = {
@@ -245,9 +245,9 @@ init(void)
 {
         int ret;
 
-        if (!crypt_z990_func_available(KM_DEA_ENCRYPT) ||
-            !crypt_z990_func_available(KM_TDEA_128_ENCRYPT) ||
-            !crypt_z990_func_available(KM_TDEA_192_ENCRYPT)){
+        if (!crypt_s390_func_available(KM_DEA_ENCRYPT) ||
+            !crypt_s390_func_available(KM_TDEA_128_ENCRYPT) ||
+            !crypt_s390_func_available(KM_TDEA_192_ENCRYPT)){
                 return -ENOSYS;
         }
 
@@ -262,7 +262,7 @@ init(void)
                 return -EEXIST;
         }
 
-        printk(KERN_INFO "crypt_z990: des_z990 loaded.\n");
+        printk(KERN_INFO "crypt_s390: des_s390 loaded.\n");
         return 0;
 }
 
diff --git a/arch/s390/crypto/sha1_z990.c b/arch/s390/crypto/sha1_s390.c
index 298174ddf5b1..98c896b86dcd 100644
--- a/arch/s390/crypto/sha1_z990.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -1,7 +1,7 @@
 /*
  * Cryptographic API.
  *
- * z990 implementation of the SHA1 Secure Hash Algorithm.
+ * s390 implementation of the SHA1 Secure Hash Algorithm.
  *
  * Derived from cryptoapi implementation, adapted for in-place
  * scatterlist interface. Originally based on the public domain
@@ -28,22 +28,22 @@
 #include <linux/crypto.h>
 #include <asm/scatterlist.h>
 #include <asm/byteorder.h>
-#include "crypt_z990.h"
+#include "crypt_s390.h"
 
 #define SHA1_DIGEST_SIZE 20
 #define SHA1_BLOCK_SIZE 64
 
-struct crypt_z990_sha1_ctx {
+struct crypt_s390_sha1_ctx {
         u64 count;
         u32 state[5];
         u32 buf_len;
         u8 buffer[2 * SHA1_BLOCK_SIZE];
 };
 
 static void
 sha1_init(void *ctx)
 {
-        static const struct crypt_z990_sha1_ctx initstate = {
+        static const struct crypt_s390_sha1_ctx initstate = {
                 .state = {
                         0x67452301,
                         0xEFCDAB89,
@@ -58,7 +58,7 @@ sha1_init(void *ctx)
 static void
 sha1_update(void *ctx, const u8 *data, unsigned int len)
 {
-        struct crypt_z990_sha1_ctx *sctx;
+        struct crypt_s390_sha1_ctx *sctx;
         long imd_len;
 
         sctx = ctx;
@@ -69,7 +69,7 @@ sha1_update(void *ctx, const u8 *data, unsigned int len)
                 //complete full block and hash
                 memcpy(sctx->buffer + sctx->buf_len, data,
                         SHA1_BLOCK_SIZE - sctx->buf_len);
-                crypt_z990_kimd(KIMD_SHA_1, sctx->state, sctx->buffer,
+                crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buffer,
                         SHA1_BLOCK_SIZE);
                 data += SHA1_BLOCK_SIZE - sctx->buf_len;
                 len -= SHA1_BLOCK_SIZE - sctx->buf_len;
@@ -79,7 +79,7 @@ sha1_update(void *ctx, const u8 *data, unsigned int len)
         //rest of data contains full blocks?
         imd_len = len & ~0x3ful;
         if (imd_len){
-                crypt_z990_kimd(KIMD_SHA_1, sctx->state, data, imd_len);
+                crypt_s390_kimd(KIMD_SHA_1, sctx->state, data, imd_len);
                 data += imd_len;
                 len -= imd_len;
         }
@@ -92,7 +92,7 @@ sha1_update(void *ctx, const u8 *data, unsigned int len)
 
 
 static void
-pad_message(struct crypt_z990_sha1_ctx* sctx)
+pad_message(struct crypt_s390_sha1_ctx* sctx)
 {
         int index;
 
@@ -113,11 +113,11 @@ pad_message(struct crypt_z990_sha1_ctx* sctx)
 static void
 sha1_final(void* ctx, u8 *out)
 {
-        struct crypt_z990_sha1_ctx *sctx = ctx;
+        struct crypt_s390_sha1_ctx *sctx = ctx;
 
         //must perform manual padding
         pad_message(sctx);
-        crypt_z990_kimd(KIMD_SHA_1, sctx->state, sctx->buffer, sctx->buf_len);
+        crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buffer, sctx->buf_len);
         //copy digest to out
         memcpy(out, sctx->state, SHA1_DIGEST_SIZE);
         /* Wipe context */
@@ -128,7 +128,7 @@ static struct crypto_alg alg = {
         .cra_name = "sha1",
         .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
         .cra_blocksize = SHA1_BLOCK_SIZE,
-        .cra_ctxsize = sizeof(struct crypt_z990_sha1_ctx),
+        .cra_ctxsize = sizeof(struct crypt_s390_sha1_ctx),
         .cra_module = THIS_MODULE,
         .cra_list = LIST_HEAD_INIT(alg.cra_list),
         .cra_u = { .digest = {
@@ -143,10 +143,10 @@ init(void)
 {
         int ret = -ENOSYS;
 
-        if (crypt_z990_func_available(KIMD_SHA_1)){
+        if (crypt_s390_func_available(KIMD_SHA_1)){
                 ret = crypto_register_alg(&alg);
                 if (ret == 0){
-                        printk(KERN_INFO "crypt_z990: sha1_z990 loaded.\n");
+                        printk(KERN_INFO "crypt_s390: sha1_s390 loaded.\n");
                 }
         }
         return ret;
diff --git a/arch/s390/defconfig b/arch/s390/defconfig
index 0c495fe83112..0cb2995bf21c 100644
--- a/arch/s390/defconfig
+++ b/arch/s390/defconfig
@@ -632,13 +632,13 @@ CONFIG_CRYPTO=y
 # CONFIG_CRYPTO_MD4 is not set
 # CONFIG_CRYPTO_MD5 is not set
 # CONFIG_CRYPTO_SHA1 is not set
-# CONFIG_CRYPTO_SHA1_Z990 is not set
+# CONFIG_CRYPTO_SHA1_S390 is not set
 # CONFIG_CRYPTO_SHA256 is not set
 # CONFIG_CRYPTO_SHA512 is not set
 # CONFIG_CRYPTO_WP512 is not set
 # CONFIG_CRYPTO_TGR192 is not set
 # CONFIG_CRYPTO_DES is not set
-# CONFIG_CRYPTO_DES_Z990 is not set
+# CONFIG_CRYPTO_DES_S390 is not set
 # CONFIG_CRYPTO_BLOWFISH is not set
 # CONFIG_CRYPTO_TWOFISH is not set
 # CONFIG_CRYPTO_SERPENT is not set