aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--arch/sparc/Kbuild1
-rw-r--r--arch/sparc/crypto/Makefile7
-rw-r--r--arch/sparc/crypto/sha1_asm.S72
-rw-r--r--arch/sparc/crypto/sha1_glue.c181
-rw-r--r--arch/sparc/lib/ksyms.c4
-rw-r--r--crypto/Kconfig9
6 files changed, 274 insertions, 0 deletions
diff --git a/arch/sparc/Kbuild b/arch/sparc/Kbuild
index 5cd01161fd00..675afa285ddb 100644
--- a/arch/sparc/Kbuild
+++ b/arch/sparc/Kbuild
@@ -6,3 +6,4 @@ obj-y += kernel/
6obj-y += mm/ 6obj-y += mm/
7obj-y += math-emu/ 7obj-y += math-emu/
8obj-y += net/ 8obj-y += net/
9obj-y += crypto/
diff --git a/arch/sparc/crypto/Makefile b/arch/sparc/crypto/Makefile
new file mode 100644
index 000000000000..9760472fe32b
--- /dev/null
+++ b/arch/sparc/crypto/Makefile
@@ -0,0 +1,7 @@
1#
2# Arch-specific CryptoAPI modules.
3#
4
5obj-$(CONFIG_CRYPTO_SHA1_SPARC64) += sha1-sparc64.o
6
7sha1-sparc64-y := sha1_asm.o sha1_glue.o
diff --git a/arch/sparc/crypto/sha1_asm.S b/arch/sparc/crypto/sha1_asm.S
new file mode 100644
index 000000000000..d2147eb054c1
--- /dev/null
+++ b/arch/sparc/crypto/sha1_asm.S
@@ -0,0 +1,72 @@
1#include <linux/linkage.h>
2#include <asm/visasm.h>
3
ENTRY(sha1_sparc64_transform)
	/* %o0 = digest (5 x 32-bit state words),
	 * %o1 = data   (input blocks),
	 * %o2 = rounds (number of 64-byte blocks to process)
	 */
	VISEntryHalf
	/* Load the five SHA-1 state words into %f0-%f4. */
	ld	[%o0 + 0x00], %f0
	ld	[%o0 + 0x04], %f1
	ld	[%o0 + 0x08], %f2
	andcc	%o1, 0x7, %g0		/* test 8-byte alignment of the data */
	ld	[%o0 + 0x0c], %f3
	bne,pn	%xcc, 10f		/* unaligned: take faligndata path */
	 ld	[%o0 + 0x10], %f4	/* (delay slot) */

1:
	/* Aligned path: load one 64-byte block into %f8-%f22. */
	ldd	[%o1 + 0x00], %f8
	ldd	[%o1 + 0x08], %f10
	ldd	[%o1 + 0x10], %f12
	ldd	[%o1 + 0x18], %f14
	ldd	[%o1 + 0x20], %f16
	ldd	[%o1 + 0x28], %f18
	ldd	[%o1 + 0x30], %f20
	ldd	[%o1 + 0x38], %f22

	/* sha1 opcode — hand-encoded because the assembler may not know it */
	.word	0x81b02820

	subcc	%o2, 1, %o2
	bne,pt	%xcc, 1b
	 add	%o1, 0x40, %o1		/* (delay slot) advance to next block */

5:
	/* Store the updated digest back and return. */
	st	%f0, [%o0 + 0x00]
	st	%f1, [%o0 + 0x04]
	st	%f2, [%o0 + 0x08]
	st	%f3, [%o0 + 0x0c]
	st	%f4, [%o0 + 0x10]
	retl
	 VISExitHalf
10:
	/* Unaligned path: round %o1 down to an 8-byte boundary and set
	 * %gsr so faligndata re-aligns each pair of doublewords.
	 */
	alignaddr %o1, %g0, %o1

	ldd	[%o1 + 0x00], %f10
1:
	/* Load 64 bytes plus one extra doubleword for the alignment window. */
	ldd	[%o1 + 0x08], %f12
	ldd	[%o1 + 0x10], %f14
	ldd	[%o1 + 0x18], %f16
	ldd	[%o1 + 0x20], %f18
	ldd	[%o1 + 0x28], %f20
	ldd	[%o1 + 0x30], %f22
	ldd	[%o1 + 0x38], %f24
	ldd	[%o1 + 0x40], %f26

	/* Shift each adjacent pair into properly aligned %f8-%f22. */
	faligndata %f10, %f12, %f8
	faligndata %f12, %f14, %f10
	faligndata %f14, %f16, %f12
	faligndata %f16, %f18, %f14
	faligndata %f18, %f20, %f16
	faligndata %f20, %f22, %f18
	faligndata %f22, %f24, %f20
	faligndata %f24, %f26, %f22

	/* sha1 opcode (see aligned path above) */
	.word	0x81b02820

	subcc	%o2, 1, %o2
	fsrc1	%f26, %f10		/* carry last doubleword into next pass */
	bne,pt	%xcc, 1b
	 add	%o1, 0x40, %o1		/* (delay slot) */

	ba,a,pt	%xcc, 5b		/* rejoin common store-and-return code */
ENDPROC(sha1_sparc64_transform)
diff --git a/arch/sparc/crypto/sha1_glue.c b/arch/sparc/crypto/sha1_glue.c
new file mode 100644
index 000000000000..6bd1abc5489d
--- /dev/null
+++ b/arch/sparc/crypto/sha1_glue.c
@@ -0,0 +1,181 @@
1/* Glue code for SHA1 hashing optimized for sparc64 crypto opcodes.
2 *
3 * This is based largely upon arch/x86/crypto/sha1_ssse3_glue.c
4 *
5 * Copyright (c) Alan Smithee.
6 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
7 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
8 * Copyright (c) Mathias Krause <minipli@googlemail.com>
9 */
10
11#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
12
13#include <crypto/internal/hash.h>
14#include <linux/init.h>
15#include <linux/module.h>
16#include <linux/mm.h>
17#include <linux/cryptohash.h>
18#include <linux/types.h>
19#include <crypto/sha.h>
20
21#include <asm/pstate.h>
22#include <asm/elf.h>
23
24asmlinkage void sha1_sparc64_transform(u32 *digest, const char *data,
25 unsigned int rounds);
26
27static int sha1_sparc64_init(struct shash_desc *desc)
28{
29 struct sha1_state *sctx = shash_desc_ctx(desc);
30
31 *sctx = (struct sha1_state){
32 .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
33 };
34
35 return 0;
36}
37
38static void __sha1_sparc64_update(struct sha1_state *sctx, const u8 *data,
39 unsigned int len, unsigned int partial)
40{
41 unsigned int done = 0;
42
43 sctx->count += len;
44 if (partial) {
45 done = SHA1_BLOCK_SIZE - partial;
46 memcpy(sctx->buffer + partial, data, done);
47 sha1_sparc64_transform(sctx->state, sctx->buffer, 1);
48 }
49 if (len - done >= SHA1_BLOCK_SIZE) {
50 const unsigned int rounds = (len - done) / SHA1_BLOCK_SIZE;
51
52 sha1_sparc64_transform(sctx->state, data + done, rounds);
53 done += rounds * SHA1_BLOCK_SIZE;
54 }
55
56 memcpy(sctx->buffer, data + done, len - done);
57}
58
59static int sha1_sparc64_update(struct shash_desc *desc, const u8 *data,
60 unsigned int len)
61{
62 struct sha1_state *sctx = shash_desc_ctx(desc);
63 unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
64
65 /* Handle the fast case right here */
66 if (partial + len < SHA1_BLOCK_SIZE) {
67 sctx->count += len;
68 memcpy(sctx->buffer + partial, data, len);
69 } else
70 __sha1_sparc64_update(sctx, data, len, partial);
71
72 return 0;
73}
74
/* Add padding and return the message digest. */
static int sha1_sparc64_final(struct shash_desc *desc, u8 *out)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int i, index, padlen;
	__be32 *dst = (__be32 *)out;
	__be64 bits;
	static const u8 padding[SHA1_BLOCK_SIZE] = { 0x80, };

	/* Message length in *bits*, captured before padding inflates count. */
	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64 and append length */
	index = sctx->count % SHA1_BLOCK_SIZE;
	padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index);

	/* We need to fill a whole block for __sha1_sparc64_update() */
	if (padlen <= 56) {
		/* Padding fits inside the current block: just buffer it. */
		sctx->count += padlen;
		memcpy(sctx->buffer + index, padding, padlen);
	} else {
		/* Padding spills into a second block; let the core routine
		 * flush the first block and buffer the rest.
		 */
		__sha1_sparc64_update(sctx, padding, padlen, index);
	}
	/* The buffer now holds exactly 56 bytes; appending the 8-byte
	 * length flushes the final block.
	 */
	__sha1_sparc64_update(sctx, (const u8 *)&bits, sizeof(bits), 56);

	/* Store state in digest */
	for (i = 0; i < 5; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Wipe context */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}
108
109static int sha1_sparc64_export(struct shash_desc *desc, void *out)
110{
111 struct sha1_state *sctx = shash_desc_ctx(desc);
112
113 memcpy(out, sctx, sizeof(*sctx));
114
115 return 0;
116}
117
118static int sha1_sparc64_import(struct shash_desc *desc, const void *in)
119{
120 struct sha1_state *sctx = shash_desc_ctx(desc);
121
122 memcpy(sctx, in, sizeof(*sctx));
123
124 return 0;
125}
126
/* shash algorithm descriptor for the hardware-accelerated SHA-1.
 * cra_priority 150 makes the crypto core prefer this driver over
 * lower-priority "sha1" providers when it is registered.
 */
static struct shash_alg alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_sparc64_init,
	.update		=	sha1_sparc64_update,
	.final		=	sha1_sparc64_final,
	.export		=	sha1_sparc64_export,
	.import		=	sha1_sparc64_import,
	.descsize	=	sizeof(struct sha1_state),
	/* export/import blobs are the raw sha1_state, hence statesize. */
	.statesize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name=	"sha1-sparc64",
		.cra_priority	=	150,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
145
146static bool __init sparc64_has_sha1_opcode(void)
147{
148 unsigned long cfr;
149
150 if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
151 return false;
152
153 __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
154 if (!(cfr & CFR_SHA1))
155 return false;
156
157 return true;
158}
159
160static int __init sha1_sparc64_mod_init(void)
161{
162 if (sparc64_has_sha1_opcode()) {
163 pr_info("Using sparc64 sha1 opcode optimized SHA-1 implementation\n");
164 return crypto_register_shash(&alg);
165 }
166 pr_info("sparc64 sha1 opcode not available.\n");
167 return -ENODEV;
168}
169
/* Module unload: drop our shash registration. */
static void __exit sha1_sparc64_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}
174
175module_init(sha1_sparc64_mod_init);
176module_exit(sha1_sparc64_mod_fini);
177
178MODULE_LICENSE("GPL");
179MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, sparc64 sha1 opcode accelerated");
180
181MODULE_ALIAS("sha1");
diff --git a/arch/sparc/lib/ksyms.c b/arch/sparc/lib/ksyms.c
index 3b31218cafc6..ee31b884c61b 100644
--- a/arch/sparc/lib/ksyms.c
+++ b/arch/sparc/lib/ksyms.c
@@ -134,6 +134,10 @@ EXPORT_SYMBOL(copy_user_page);
134void VISenter(void); 134void VISenter(void);
135EXPORT_SYMBOL(VISenter); 135EXPORT_SYMBOL(VISenter);
136 136
137/* CRYPTO code needs this */
138void VISenterhalf(void);
139EXPORT_SYMBOL(VISenterhalf);
140
137extern void xor_vis_2(unsigned long, unsigned long *, unsigned long *); 141extern void xor_vis_2(unsigned long, unsigned long *, unsigned long *);
138extern void xor_vis_3(unsigned long, unsigned long *, unsigned long *, 142extern void xor_vis_3(unsigned long, unsigned long *, unsigned long *,
139 unsigned long *); 143 unsigned long *);
diff --git a/crypto/Kconfig b/crypto/Kconfig
index a3238051b03e..167c856f906d 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -433,6 +433,15 @@ config CRYPTO_SHA1_SSSE3
433 using Supplemental SSE3 (SSSE3) instructions or Advanced Vector 433 using Supplemental SSE3 (SSSE3) instructions or Advanced Vector
434 Extensions (AVX), when available. 434 Extensions (AVX), when available.
435 435
436config CRYPTO_SHA1_SPARC64
437 tristate "SHA1 digest algorithm (SPARC64)"
438 depends on SPARC64
439 select CRYPTO_SHA1
440 select CRYPTO_HASH
441 help
442 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
443 using sparc64 crypto instructions, when available.
444
436config CRYPTO_SHA256 445config CRYPTO_SHA256
437 tristate "SHA224 and SHA256 digest algorithm" 446 tristate "SHA224 and SHA256 digest algorithm"
438 select CRYPTO_HASH 447 select CRYPTO_HASH