author	Ard Biesheuvel <ard.biesheuvel@linaro.org>	2015-04-09 06:55:42 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2015-04-10 09:39:44 -0400
commit	b59e2ae3690c8ef5f8ddeeb0b6b3313521b915e6 (patch)
tree	edeb434373ecda30560cbe32e9a51e61c905e1dd
parent	dde00981e64b3c6621cafe3eea2eef6a4055208c (diff)

crypto: arm/sha256 - move SHA-224/256 ASM/NEON implementation to base layer

This removes all the boilerplate from the existing implementation, and
replaces it with calls into the base layer.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--	arch/arm/crypto/sha256_glue.c      | 170
-rw-r--r--	arch/arm/crypto/sha256_glue.h      |  17
-rw-r--r--	arch/arm/crypto/sha256_neon_glue.c | 143
3 files changed, 66 insertions(+), 264 deletions(-)
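The base layer this patch adopts lives in <crypto/sha256_base.h>: it owns the partial-block buffering, padding, and digest write-out that each glue file used to open-code, and a driver now only supplies its block transform as a callback. Below is a condensed sketch of the update helper's shape, not the verbatim header; the same header also provides sha256_base_init()/sha224_base_init(), sha256_base_do_finalize() for padding, and sha256_base_finish() for emitting the digest and wiping the state.

typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
                               int blocks);

static inline int sha256_base_do_update(struct shash_desc *desc,
                                        const u8 *data, unsigned int len,
                                        sha256_block_fn *block_fn)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

        /* top up a previously buffered partial block first */
        if (partial) {
                unsigned int p = min(len, SHA256_BLOCK_SIZE - partial);

                memcpy(sctx->buf + partial, data, p);
                data += p;
                len -= p;
                if (partial + p == SHA256_BLOCK_SIZE)
                        block_fn(sctx, sctx->buf, 1);
        }

        /* hand all remaining whole blocks to the transform in one call */
        if (len >= SHA256_BLOCK_SIZE) {
                block_fn(sctx, data, len / SHA256_BLOCK_SIZE);
                data += len - (len % SHA256_BLOCK_SIZE);
                len %= SHA256_BLOCK_SIZE;
        }

        /* stash any tail until the next update or finalize */
        memcpy(sctx->buf, data, len);
        return 0;
}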
diff --git a/arch/arm/crypto/sha256_glue.c b/arch/arm/crypto/sha256_glue.c
index ccef5e25bbcb..a84e869ef900 100644
--- a/arch/arm/crypto/sha256_glue.c
+++ b/arch/arm/crypto/sha256_glue.c
@@ -24,165 +24,49 @@
 #include <linux/types.h>
 #include <linux/string.h>
 #include <crypto/sha.h>
-#include <asm/byteorder.h>
+#include <crypto/sha256_base.h>
 #include <asm/simd.h>
 #include <asm/neon.h>
+
 #include "sha256_glue.h"
 
 asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
                                        unsigned int num_blks);
 
-
-int sha256_init(struct shash_desc *desc)
-{
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-
-        sctx->state[0] = SHA256_H0;
-        sctx->state[1] = SHA256_H1;
-        sctx->state[2] = SHA256_H2;
-        sctx->state[3] = SHA256_H3;
-        sctx->state[4] = SHA256_H4;
-        sctx->state[5] = SHA256_H5;
-        sctx->state[6] = SHA256_H6;
-        sctx->state[7] = SHA256_H7;
-        sctx->count = 0;
-
-        return 0;
-}
-
-int sha224_init(struct shash_desc *desc)
-{
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-
-        sctx->state[0] = SHA224_H0;
-        sctx->state[1] = SHA224_H1;
-        sctx->state[2] = SHA224_H2;
-        sctx->state[3] = SHA224_H3;
-        sctx->state[4] = SHA224_H4;
-        sctx->state[5] = SHA224_H5;
-        sctx->state[6] = SHA224_H6;
-        sctx->state[7] = SHA224_H7;
-        sctx->count = 0;
-
-        return 0;
-}
-
-int __sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len,
-                    unsigned int partial)
+int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
+                             unsigned int len)
 {
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-        unsigned int done = 0;
-
-        sctx->count += len;
-
-        if (partial) {
-                done = SHA256_BLOCK_SIZE - partial;
-                memcpy(sctx->buf + partial, data, done);
-                sha256_block_data_order(sctx->state, sctx->buf, 1);
-        }
-
-        if (len - done >= SHA256_BLOCK_SIZE) {
-                const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;
-
-                sha256_block_data_order(sctx->state, data + done, rounds);
-                done += rounds * SHA256_BLOCK_SIZE;
-        }
-
-        memcpy(sctx->buf, data + done, len - done);
-
-        return 0;
-}
-
-int sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len)
-{
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
-
-        /* Handle the fast case right here */
-        if (partial + len < SHA256_BLOCK_SIZE) {
-                sctx->count += len;
-                memcpy(sctx->buf + partial, data, len);
+        /* make sure casting to sha256_block_fn() is safe */
+        BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
 
-                return 0;
-        }
-
-        return __sha256_update(desc, data, len, partial);
+        return sha256_base_do_update(desc, data, len,
+                                (sha256_block_fn *)sha256_block_data_order);
 }
+EXPORT_SYMBOL(crypto_sha256_arm_update);
 
-/* Add padding and return the message digest. */
 static int sha256_final(struct shash_desc *desc, u8 *out)
 {
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-        unsigned int i, index, padlen;
-        __be32 *dst = (__be32 *)out;
-        __be64 bits;
-        static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };
-
-        /* save number of bits */
-        bits = cpu_to_be64(sctx->count << 3);
-
-        /* Pad out to 56 mod 64 and append length */
-        index = sctx->count % SHA256_BLOCK_SIZE;
-        padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56)-index);
-
-        /* We need to fill a whole block for __sha256_update */
-        if (padlen <= 56) {
-                sctx->count += padlen;
-                memcpy(sctx->buf + index, padding, padlen);
-        } else {
-                __sha256_update(desc, padding, padlen, index);
-        }
-        __sha256_update(desc, (const u8 *)&bits, sizeof(bits), 56);
-
-        /* Store state in digest */
-        for (i = 0; i < 8; i++)
-                dst[i] = cpu_to_be32(sctx->state[i]);
-
-        /* Wipe context */
-        memset(sctx, 0, sizeof(*sctx));
-
-        return 0;
-}
-
-static int sha224_final(struct shash_desc *desc, u8 *out)
-{
-        u8 D[SHA256_DIGEST_SIZE];
-
-        sha256_final(desc, D);
-
-        memcpy(out, D, SHA224_DIGEST_SIZE);
-        memzero_explicit(D, SHA256_DIGEST_SIZE);
-
-        return 0;
-}
-
-int sha256_export(struct shash_desc *desc, void *out)
-{
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-
-        memcpy(out, sctx, sizeof(*sctx));
-
-        return 0;
+        sha256_base_do_finalize(desc,
+                                (sha256_block_fn *)sha256_block_data_order);
+        return sha256_base_finish(desc, out);
 }
 
-int sha256_import(struct shash_desc *desc, const void *in)
+int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
+                            unsigned int len, u8 *out)
 {
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-
-        memcpy(sctx, in, sizeof(*sctx));
-
-        return 0;
+        sha256_base_do_update(desc, data, len,
+                              (sha256_block_fn *)sha256_block_data_order);
+        return sha256_final(desc, out);
 }
+EXPORT_SYMBOL(crypto_sha256_arm_finup);
 
 static struct shash_alg algs[] = { {
         .digestsize = SHA256_DIGEST_SIZE,
-        .init = sha256_init,
-        .update = sha256_update,
+        .init = sha256_base_init,
+        .update = crypto_sha256_arm_update,
         .final = sha256_final,
-        .export = sha256_export,
-        .import = sha256_import,
+        .finup = crypto_sha256_arm_finup,
         .descsize = sizeof(struct sha256_state),
-        .statesize = sizeof(struct sha256_state),
         .base = {
                 .cra_name = "sha256",
                 .cra_driver_name = "sha256-asm",
@@ -193,13 +77,11 @@ static struct shash_alg algs[] = { {
         }
 }, {
         .digestsize = SHA224_DIGEST_SIZE,
-        .init = sha224_init,
-        .update = sha256_update,
-        .final = sha224_final,
-        .export = sha256_export,
-        .import = sha256_import,
+        .init = sha224_base_init,
+        .update = crypto_sha256_arm_update,
+        .final = sha256_final,
+        .finup = crypto_sha256_arm_finup,
         .descsize = sizeof(struct sha256_state),
-        .statesize = sizeof(struct sha256_state),
         .base = {
                 .cra_name = "sha224",
                 .cra_driver_name = "sha224-asm",
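One detail worth calling out in the sha256_glue.c hunk above: the base layer invokes the block function with a struct sha256_state *, while the assembly routine takes u32 *digest as its first argument, so crypto_sha256_arm_update() casts one function type to the other. That is only sound because state[] sits at offset zero of the state structure, which is precisely what the new BUILD_BUG_ON() pins down at compile time. A sketch of the assumption, with the field layout as declared in <crypto/sha.h> of this era:

/* Layout the cast relies on; offsetof(struct sha256_state, state) == 0
 * is what the BUILD_BUG_ON() in crypto_sha256_arm_update() enforces. */
struct sha256_state {
        u32 state[SHA256_DIGEST_SIZE / 4];  /* the u32 *digest the asm sees */
        u64 count;
        u8 buf[SHA256_BLOCK_SIZE];
};

/* The helper calls:    block_fn(sctx, src, blocks);
 * The asm implements:  sha256_block_data_order(u32 *digest, const void *data,
 *                                              unsigned int num_blks);
 * With state[] at offset 0, (void *)sctx == (void *)sctx->state, so the
 * function-pointer cast is benign on this ABI. */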
diff --git a/arch/arm/crypto/sha256_glue.h b/arch/arm/crypto/sha256_glue.h
index 0312f4ffe8cc..7cf0bf786ada 100644
--- a/arch/arm/crypto/sha256_glue.h
+++ b/arch/arm/crypto/sha256_glue.h
@@ -2,22 +2,13 @@
 #define _CRYPTO_SHA256_GLUE_H
 
 #include <linux/crypto.h>
-#include <crypto/sha.h>
 
 extern struct shash_alg sha256_neon_algs[2];
 
-extern int sha256_init(struct shash_desc *desc);
-
-extern int sha224_init(struct shash_desc *desc);
-
-extern int __sha256_update(struct shash_desc *desc, const u8 *data,
-                           unsigned int len, unsigned int partial);
+int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
+                             unsigned int len);
 
-extern int sha256_update(struct shash_desc *desc, const u8 *data,
-                         unsigned int len);
-
-extern int sha256_export(struct shash_desc *desc, void *out);
-
-extern int sha256_import(struct shash_desc *desc, const void *in);
+int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
+                            unsigned int len, u8 *hash);
 
 #endif /* _CRYPTO_SHA256_GLUE_H */
diff --git a/arch/arm/crypto/sha256_neon_glue.c b/arch/arm/crypto/sha256_neon_glue.c
index c4da10090eee..39ccd658817e 100644
--- a/arch/arm/crypto/sha256_neon_glue.c
+++ b/arch/arm/crypto/sha256_neon_glue.c
@@ -19,131 +19,62 @@
 #include <linux/types.h>
 #include <linux/string.h>
 #include <crypto/sha.h>
+#include <crypto/sha256_base.h>
 #include <asm/byteorder.h>
 #include <asm/simd.h>
 #include <asm/neon.h>
+
 #include "sha256_glue.h"
 
 asmlinkage void sha256_block_data_order_neon(u32 *digest, const void *data,
                                              unsigned int num_blks);
 
-
-static int __sha256_neon_update(struct shash_desc *desc, const u8 *data,
-                                unsigned int len, unsigned int partial)
+static int sha256_update(struct shash_desc *desc, const u8 *data,
+                         unsigned int len)
 {
         struct sha256_state *sctx = shash_desc_ctx(desc);
-        unsigned int done = 0;
-
-        sctx->count += len;
-
-        if (partial) {
-                done = SHA256_BLOCK_SIZE - partial;
-                memcpy(sctx->buf + partial, data, done);
-                sha256_block_data_order_neon(sctx->state, sctx->buf, 1);
-        }
-
-        if (len - done >= SHA256_BLOCK_SIZE) {
-                const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;
 
-                sha256_block_data_order_neon(sctx->state, data + done, rounds);
-                done += rounds * SHA256_BLOCK_SIZE;
-        }
-
-        memcpy(sctx->buf, data + done, len - done);
+        if (!may_use_simd() ||
+            (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
+                return crypto_sha256_arm_update(desc, data, len);
 
+        kernel_neon_begin();
+        sha256_base_do_update(desc, data, len,
+                        (sha256_block_fn *)sha256_block_data_order_neon);
+        kernel_neon_end();
 
         return 0;
 }
 
-static int sha256_neon_update(struct shash_desc *desc, const u8 *data,
-                              unsigned int len)
-{
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
-        int res;
-
-        /* Handle the fast case right here */
-        if (partial + len < SHA256_BLOCK_SIZE) {
-                sctx->count += len;
-                memcpy(sctx->buf + partial, data, len);
-
-                return 0;
-        }
-
-        if (!may_use_simd()) {
-                res = __sha256_update(desc, data, len, partial);
-        } else {
-                kernel_neon_begin();
-                res = __sha256_neon_update(desc, data, len, partial);
-                kernel_neon_end();
-        }
-
-        return res;
-}
-
-/* Add padding and return the message digest. */
-static int sha256_neon_final(struct shash_desc *desc, u8 *out)
+static int sha256_finup(struct shash_desc *desc, const u8 *data,
+                        unsigned int len, u8 *out)
 {
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-        unsigned int i, index, padlen;
-        __be32 *dst = (__be32 *)out;
-        __be64 bits;
-        static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };
-
-        /* save number of bits */
-        bits = cpu_to_be64(sctx->count << 3);
-
-        /* Pad out to 56 mod 64 and append length */
-        index = sctx->count % SHA256_BLOCK_SIZE;
-        padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56)-index);
-
-        if (!may_use_simd()) {
-                sha256_update(desc, padding, padlen);
-                sha256_update(desc, (const u8 *)&bits, sizeof(bits));
-        } else {
-                kernel_neon_begin();
-                /* We need to fill a whole block for __sha256_neon_update() */
-                if (padlen <= 56) {
-                        sctx->count += padlen;
-                        memcpy(sctx->buf + index, padding, padlen);
-                } else {
-                        __sha256_neon_update(desc, padding, padlen, index);
-                }
-                __sha256_neon_update(desc, (const u8 *)&bits,
-                                     sizeof(bits), 56);
-                kernel_neon_end();
-        }
-
-        /* Store state in digest */
-        for (i = 0; i < 8; i++)
-                dst[i] = cpu_to_be32(sctx->state[i]);
-
-        /* Wipe context */
-        memzero_explicit(sctx, sizeof(*sctx));
-
-        return 0;
-}
+        if (!may_use_simd())
+                return crypto_sha256_arm_finup(desc, data, len, out);
+
+        kernel_neon_begin();
+        if (len)
+                sha256_base_do_update(desc, data, len,
+                        (sha256_block_fn *)sha256_block_data_order_neon);
+        sha256_base_do_finalize(desc,
+                        (sha256_block_fn *)sha256_block_data_order_neon);
+        kernel_neon_end();
+
+        return sha256_base_finish(desc, out);
+}
 
-static int sha224_neon_final(struct shash_desc *desc, u8 *out)
+static int sha256_final(struct shash_desc *desc, u8 *out)
 {
-        u8 D[SHA256_DIGEST_SIZE];
-
-        sha256_neon_final(desc, D);
-
-        memcpy(out, D, SHA224_DIGEST_SIZE);
-        memzero_explicit(D, SHA256_DIGEST_SIZE);
-
-        return 0;
+        return sha256_finup(desc, NULL, 0, out);
 }
 
 struct shash_alg sha256_neon_algs[] = { {
         .digestsize = SHA256_DIGEST_SIZE,
-        .init = sha256_init,
-        .update = sha256_neon_update,
-        .final = sha256_neon_final,
-        .export = sha256_export,
-        .import = sha256_import,
+        .init = sha256_base_init,
+        .update = sha256_update,
+        .final = sha256_final,
+        .finup = sha256_finup,
         .descsize = sizeof(struct sha256_state),
-        .statesize = sizeof(struct sha256_state),
         .base = {
                 .cra_name = "sha256",
                 .cra_driver_name = "sha256-neon",
@@ -154,13 +85,11 @@ struct shash_alg sha256_neon_algs[] = { {
         }
 }, {
         .digestsize = SHA224_DIGEST_SIZE,
-        .init = sha224_init,
-        .update = sha256_neon_update,
-        .final = sha224_neon_final,
-        .export = sha256_export,
-        .import = sha256_import,
+        .init = sha224_base_init,
+        .update = sha256_update,
+        .final = sha256_final,
+        .finup = sha256_finup,
         .descsize = sizeof(struct sha256_state),
-        .statesize = sizeof(struct sha256_state),
         .base = {
                 .cra_name = "sha224",
                 .cra_driver_name = "sha224-neon",
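For completeness, this is how kernel code would exercise whichever of the registered implementations ("sha256-neon" when NEON is available and usable, otherwise "sha256-asm" or the generic fallback) wins priority-based algorithm selection. A minimal usage sketch through the stock shash API; sha256_digest_example() is a hypothetical caller, not part of this patch:

#include <crypto/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>

static int sha256_digest_example(const u8 *data, unsigned int len,
                                 u8 out[SHA256_DIGEST_SIZE])
{
        struct crypto_shash *tfm;
        int err;

        /* picks the highest-priority "sha256" implementation */
        tfm = crypto_alloc_shash("sha256", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        {
                SHASH_DESC_ON_STACK(desc, tfm);

                desc->tfm = tfm;
                desc->flags = 0;        /* no CRYPTO_TFM_REQ_MAY_SLEEP */
                err = crypto_shash_digest(desc, data, len, out);
        }

        crypto_free_shash(tfm);
        return err;
}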