path: root/drivers/crypto/vmx/ghash.c
author		Linus Torvalds <torvalds@linux-foundation.org>	2015-06-23 00:04:48 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2015-06-23 00:04:48 -0400
commit		44d21c3f3a2ef2f58b18bda64c52c99e723f3f4a (patch)
tree		5146cf96cb0dbd7121176d484417ab942c92dcd4 /drivers/crypto/vmx/ghash.c
parent		efdfce2b7ff3205ba0fba10270b92b80bbc6187d (diff)
parent		fe55dfdcdfabf160ab0c14617725c57c7a1facfc (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
 "Here is the crypto update for 4.2:

  API:
   - Convert RNG interface to new style.
   - New AEAD interface with one SG list for AD and plain/cipher text.
     All external AEAD users have been converted.
   - New asymmetric key interface (akcipher).

  Algorithms:
   - Chacha20, Poly1305 and RFC7539 support.
   - New RSA implementation.
   - Jitter RNG.
   - DRBG is now seeded with both /dev/random and Jitter RNG.  If kernel
     pool isn't ready then DRBG will be reseeded when it is.
   - DRBG is now the default crypto API RNG, replacing krng.
   - 842 compression (previously part of powerpc nx driver).

  Drivers:
   - Accelerated SHA-512 for arm64.
   - New Marvell CESA driver that supports DMA and more algorithms.
   - Updated powerpc nx 842 support.
   - Added support for SEC1 hardware to talitos"

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (292 commits)
  crypto: marvell/cesa - remove COMPILE_TEST dependency
  crypto: algif_aead - Temporarily disable all AEAD algorithms
  crypto: af_alg - Forbid the use internal algorithms
  crypto: echainiv - Only hold RNG during initialisation
  crypto: seqiv - Add compatibility support without RNG
  crypto: eseqiv - Offer normal cipher functionality without RNG
  crypto: chainiv - Offer normal cipher functionality without RNG
  crypto: user - Add CRYPTO_MSG_DELRNG
  crypto: user - Move cryptouser.h to uapi
  crypto: rng - Do not free default RNG when it becomes unused
  crypto: skcipher - Allow givencrypt to be NULL
  crypto: sahara - propagate the error on clk_disable_unprepare() failure
  crypto: rsa - fix invalid select for AKCIPHER
  crypto: picoxcell - Update to the current clk API
  crypto: nx - Check for bogus firmware properties
  crypto: marvell/cesa - add DT bindings documentation
  crypto: marvell/cesa - add support for Kirkwood and Dove SoCs
  crypto: marvell/cesa - add support for Orion SoCs
  crypto: marvell/cesa - add allhwsupport module parameter
  crypto: marvell/cesa - add support for all armada SoCs
  ...
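Of the API changes above, the new AEAD interface is the most visible to callers: the associated data and the plain/cipher text now travel in a single scatterlist, with aead_request_set_ad() marking how many leading bytes are AD. The following is a minimal sketch of a caller under that convention; the function name, the in-place buffer layout and the assumption of a synchronous implementation are illustrative, not code from this pull:

/*
 * Sketch only: encrypt with "gcm(aes)" through the new AEAD interface.
 * buf holds AD || plaintext and has 16 spare bytes for the tag; the
 * name and layout are assumptions made for illustration.
 */
#include <crypto/aead.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int example_gcm_encrypt(const u8 *key, unsigned int keylen,
			       u8 *iv, u8 *buf,
			       unsigned int assoclen, unsigned int ptlen)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;
	err = crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* One SG list covering AD, text and room for the 16-byte tag. */
	sg_init_one(&sg, buf, assoclen + ptlen + 16);

	aead_request_set_callback(req, 0, NULL, NULL);
	aead_request_set_ad(req, assoclen);
	/* src == dst: in-place encryption, tag appended after the text. */
	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);

	/* A real caller must also handle -EINPROGRESS/-EBUSY from async
	 * implementations; this sketch assumes a synchronous one. */
	err = crypto_aead_encrypt(req);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}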
Diffstat (limited to 'drivers/crypto/vmx/ghash.c')
-rw-r--r--	drivers/crypto/vmx/ghash.c	298
1 file changed, 151 insertions, 147 deletions
diff --git a/drivers/crypto/vmx/ghash.c b/drivers/crypto/vmx/ghash.c
index f255ec4a04d4..b5e29002b666 100644
--- a/drivers/crypto/vmx/ghash.c
+++ b/drivers/crypto/vmx/ghash.c
@@ -39,184 +39,188 @@
void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
		  const u8 *in, size_t len);

struct p8_ghash_ctx {
	u128 htable[16];
	struct crypto_shash *fallback;
};

struct p8_ghash_desc_ctx {
	u64 shash[2];
	u8 buffer[GHASH_DIGEST_SIZE];
	int bytes;
	struct shash_desc fallback_desc;
};

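/*
 * Allocate the same algorithm by name with CRYPTO_ALG_NEED_FALLBACK to
 * obtain a software implementation for contexts where VMX is unusable,
 * and grow descsize so each descriptor also carries the fallback state.
 */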
static int p8_ghash_init_tfm(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_shash *fallback;
	struct crypto_shash *shash_tfm = __crypto_shash_cast(tfm);
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_tfm_alg_driver_name(crypto_shash_tfm(fallback)));

	crypto_shash_set_flags(fallback,
			       crypto_shash_get_flags((struct crypto_shash
						       *) tfm));
	ctx->fallback = fallback;

	shash_tfm->descsize = sizeof(struct p8_ghash_desc_ctx)
	    + crypto_shash_descsize(fallback);

	return 0;
}

static void p8_ghash_exit_tfm(struct crypto_tfm *tfm)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_shash(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_ghash_init(struct shash_desc *desc)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	dctx->bytes = 0;
	memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
	dctx->fallback_desc.tfm = ctx->fallback;
	dctx->fallback_desc.flags = desc->flags;
	return crypto_shash_init(&dctx->fallback_desc);
}

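/*
 * Key setup borrows the vector unit: preemption and pagefaults are
 * disabled around enable_kernel_altivec()/enable_kernel_fp() so the
 * interrupted thread's FP/AltiVec state is not clobbered.
 */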
static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));

	if (keylen != GHASH_KEY_LEN)
		return -EINVAL;

	preempt_disable();
	pagefault_disable();
	enable_kernel_altivec();
	enable_kernel_fp();
	gcm_init_p8(ctx->htable, (const u64 *) key);
	pagefault_enable();
	preempt_enable();
	return crypto_shash_setkey(ctx->fallback, key, keylen);
}

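/*
 * Update buffers partial input until a full GHASH_DIGEST_SIZE block is
 * available, hashes runs of whole blocks with the P8 routine and stashes
 * the tail for the next call.  In interrupt context the vector unit must
 * not be touched, so the fallback processes the data instead.
 */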
static int p8_ghash_update(struct shash_desc *desc,
			   const u8 *src, unsigned int srclen)
{
	unsigned int len;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_update(&dctx->fallback_desc, src,
					   srclen);
	} else {
		if (dctx->bytes) {
			if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
				memcpy(dctx->buffer + dctx->bytes, src,
				       srclen);
				dctx->bytes += srclen;
				return 0;
			}
			memcpy(dctx->buffer + dctx->bytes, src,
			       GHASH_DIGEST_SIZE - dctx->bytes);
			preempt_disable();
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable,
				     dctx->buffer, GHASH_DIGEST_SIZE);
			pagefault_enable();
			preempt_enable();
			src += GHASH_DIGEST_SIZE - dctx->bytes;
			srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
			dctx->bytes = 0;
		}
		len = srclen & ~(GHASH_DIGEST_SIZE - 1);
		if (len) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
			pagefault_enable();
			preempt_enable();
			src += len;
			srclen -= len;
		}
		if (srclen) {
			memcpy(dctx->buffer, src, srclen);
			dctx->bytes = srclen;
		}
		return 0;
	}
}

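/*
 * A buffered tail is zero-padded to a full block before the final
 * multiplication, matching GCM's zero-padding of the last block.
 */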
static int p8_ghash_final(struct shash_desc *desc, u8 *out)
{
	int i;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_final(&dctx->fallback_desc, out);
	} else {
		if (dctx->bytes) {
			for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
				dctx->buffer[i] = 0;
			preempt_disable();
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable,
				     dctx->buffer, GHASH_DIGEST_SIZE);
			pagefault_enable();
			preempt_enable();
			dctx->bytes = 0;
		}
		memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
		return 0;
	}
}

struct shash_alg p8_ghash_alg = {
	.digestsize = GHASH_DIGEST_SIZE,
	.init = p8_ghash_init,
	.update = p8_ghash_update,
	.final = p8_ghash_final,
	.setkey = p8_ghash_setkey,
	.descsize = sizeof(struct p8_ghash_desc_ctx),
	.base = {
		.cra_name = "ghash",
		.cra_driver_name = "p8_ghash",
		.cra_priority = 1000,
		.cra_flags = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = GHASH_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct p8_ghash_ctx),
		.cra_module = THIS_MODULE,
		.cra_init = p8_ghash_init_tfm,
		.cra_exit = p8_ghash_exit_tfm,
	},
};
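For context on how this driver gets used: it registers "ghash" at cra_priority 1000, so a caller allocating the algorithm by name gets p8_ghash ahead of ghash-generic where this module is loaded, and the fallback machinery above keeps that safe in contexts where VMX cannot run. Below is a minimal sketch of such a caller; the function name and one-shot digest flow are illustrative assumptions, not part of this commit:

/* Sketch only: one-shot GHASH digest through the shash API. */
#include <crypto/hash.h>
#include <linux/err.h>

static int example_ghash_digest(const u8 key[16], const u8 *data,
				unsigned int len, u8 out[16])
{
	struct crypto_shash *tfm;
	int err;

	/* Picks the highest-priority "ghash", e.g. p8_ghash on POWER8. */
	tfm = crypto_alloc_shash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, 16);
	if (!err) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;
		err = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return err;
}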