author     Ard Biesheuvel <ard.biesheuvel@linaro.org>   2017-07-24 06:28:04 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>     2017-08-03 21:27:15 -0400
commit     45fe93dff2fb58b22de04c729f8447ba0f773d93 (patch)
tree       6f6039696f88fb5461831d4bfbc4e62f059092d5
parent     a7c391f04fe3259fb0417d71fec78ae28f25780e (diff)
crypto: algapi - make crypto_xor() take separate dst and src arguments
There are quite a number of occurrences in the kernel of the pattern

  if (dst != src)
          memcpy(dst, src, walk.total % AES_BLOCK_SIZE);
  crypto_xor(dst, final, walk.total % AES_BLOCK_SIZE);

or

  crypto_xor(keystream, src, nbytes);
  memcpy(dst, keystream, nbytes);

where crypto_xor() is preceded or followed by a memcpy() invocation
that is only there because crypto_xor() uses its output parameter as
one of the inputs. To avoid having to add new instances of this pattern
in the arm64 code, which will be refactored to implement non-SIMD
fallbacks, add an alternative implementation called crypto_xor_cpy(),
taking separate input and output arguments. This removes the need for
the separate memcpy().

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
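
For illustration, the transformation this enables in a typical CTR-mode
final-block path looks as follows (a sketch only; the names keystream,
src, dst and nbytes follow the patterns quoted above):

  /* Before: crypto_xor() reuses its output buffer as one input, so the
   * result has to be copied to the real destination afterwards. */
  crypto_xor(keystream, src, nbytes);           /* keystream ^= src */
  memcpy(dst, keystream, nbytes);               /* dst = keystream  */

  /* After: separate output and input arguments, no memcpy() needed. */
  crypto_xor_cpy(dst, keystream, src, nbytes);  /* dst = keystream ^ src */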
-rw-r--r--  arch/arm/crypto/aes-ce-glue.c        |  4
-rw-r--r--  arch/arm/crypto/aes-neonbs-glue.c    |  5
-rw-r--r--  arch/arm64/crypto/aes-glue.c         |  4
-rw-r--r--  arch/arm64/crypto/aes-neonbs-glue.c  |  5
-rw-r--r--  arch/sparc/crypto/aes_glue.c         |  3
-rw-r--r--  arch/x86/crypto/aesni-intel_glue.c   |  4
-rw-r--r--  arch/x86/crypto/blowfish_glue.c      |  3
-rw-r--r--  arch/x86/crypto/cast5_avx_glue.c     |  3
-rw-r--r--  arch/x86/crypto/des3_ede_glue.c      |  3
-rw-r--r--  crypto/ctr.c                         |  3
-rw-r--r--  crypto/pcbc.c                        | 12
-rw-r--r--  drivers/crypto/vmx/aes_ctr.c         |  3
-rw-r--r--  drivers/md/dm-crypt.c                | 11
-rw-r--r--  include/crypto/algapi.h              | 19
14 files changed, 42 insertions, 40 deletions
diff --git a/arch/arm/crypto/aes-ce-glue.c b/arch/arm/crypto/aes-ce-glue.c
index 0f966a8ca1ce..d0a9cec73707 100644
--- a/arch/arm/crypto/aes-ce-glue.c
+++ b/arch/arm/crypto/aes-ce-glue.c
@@ -285,9 +285,7 @@ static int ctr_encrypt(struct skcipher_request *req)
 
         ce_aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc,
                            num_rounds(ctx), blocks, walk.iv);
-        if (tdst != tsrc)
-                memcpy(tdst, tsrc, nbytes);
-        crypto_xor(tdst, tail, nbytes);
+        crypto_xor_cpy(tdst, tsrc, tail, nbytes);
         err = skcipher_walk_done(&walk, 0);
 }
 kernel_neon_end();
diff --git a/arch/arm/crypto/aes-neonbs-glue.c b/arch/arm/crypto/aes-neonbs-glue.c
index c76377961444..18768f330449 100644
--- a/arch/arm/crypto/aes-neonbs-glue.c
+++ b/arch/arm/crypto/aes-neonbs-glue.c
@@ -221,9 +221,8 @@ static int ctr_encrypt(struct skcipher_request *req)
         u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
         u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
 
-        if (dst != src)
-                memcpy(dst, src, walk.total % AES_BLOCK_SIZE);
-        crypto_xor(dst, final, walk.total % AES_BLOCK_SIZE);
+        crypto_xor_cpy(dst, src, final,
+                       walk.total % AES_BLOCK_SIZE);
 
         err = skcipher_walk_done(&walk, 0);
         break;
diff --git a/arch/arm64/crypto/aes-glue.c b/arch/arm64/crypto/aes-glue.c
index bcf596b0197e..0da30e3b0e4b 100644
--- a/arch/arm64/crypto/aes-glue.c
+++ b/arch/arm64/crypto/aes-glue.c
@@ -241,9 +241,7 @@ static int ctr_encrypt(struct skcipher_request *req)
 
         aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
                         blocks, walk.iv, first);
-        if (tdst != tsrc)
-                memcpy(tdst, tsrc, nbytes);
-        crypto_xor(tdst, tail, nbytes);
+        crypto_xor_cpy(tdst, tsrc, tail, nbytes);
         err = skcipher_walk_done(&walk, 0);
 }
 kernel_neon_end();
diff --git a/arch/arm64/crypto/aes-neonbs-glue.c b/arch/arm64/crypto/aes-neonbs-glue.c
index db2501d93550..9001aec16007 100644
--- a/arch/arm64/crypto/aes-neonbs-glue.c
+++ b/arch/arm64/crypto/aes-neonbs-glue.c
@@ -224,9 +224,8 @@ static int ctr_encrypt(struct skcipher_request *req)
         u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
         u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
 
-        if (dst != src)
-                memcpy(dst, src, walk.total % AES_BLOCK_SIZE);
-        crypto_xor(dst, final, walk.total % AES_BLOCK_SIZE);
+        crypto_xor_cpy(dst, src, final,
+                       walk.total % AES_BLOCK_SIZE);
 
         err = skcipher_walk_done(&walk, 0);
         break;
diff --git a/arch/sparc/crypto/aes_glue.c b/arch/sparc/crypto/aes_glue.c
index c90930de76ba..3cd4f6b198b6 100644
--- a/arch/sparc/crypto/aes_glue.c
+++ b/arch/sparc/crypto/aes_glue.c
@@ -344,8 +344,7 @@ static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
 
         ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
                               keystream, AES_BLOCK_SIZE);
-        crypto_xor((u8 *) keystream, src, nbytes);
-        memcpy(dst, keystream, nbytes);
+        crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
         crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 4a55cdcdc008..5c15d6b57329 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -475,8 +475,8 @@ static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
         unsigned int nbytes = walk->nbytes;
 
         aesni_enc(ctx, keystream, ctrblk);
-        crypto_xor(keystream, src, nbytes);
-        memcpy(dst, keystream, nbytes);
+        crypto_xor_cpy(dst, keystream, src, nbytes);
+
         crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
diff --git a/arch/x86/crypto/blowfish_glue.c b/arch/x86/crypto/blowfish_glue.c
index 17c05531dfd1..f9eca34301e2 100644
--- a/arch/x86/crypto/blowfish_glue.c
+++ b/arch/x86/crypto/blowfish_glue.c
@@ -271,8 +271,7 @@ static void ctr_crypt_final(struct bf_ctx *ctx, struct blkcipher_walk *walk)
         unsigned int nbytes = walk->nbytes;
 
         blowfish_enc_blk(ctx, keystream, ctrblk);
-        crypto_xor(keystream, src, nbytes);
-        memcpy(dst, keystream, nbytes);
+        crypto_xor_cpy(dst, keystream, src, nbytes);
 
         crypto_inc(ctrblk, BF_BLOCK_SIZE);
 }
diff --git a/arch/x86/crypto/cast5_avx_glue.c b/arch/x86/crypto/cast5_avx_glue.c
index 8648158f3916..dbea6020ffe7 100644
--- a/arch/x86/crypto/cast5_avx_glue.c
+++ b/arch/x86/crypto/cast5_avx_glue.c
@@ -256,8 +256,7 @@ static void ctr_crypt_final(struct blkcipher_desc *desc,
         unsigned int nbytes = walk->nbytes;
 
         __cast5_encrypt(ctx, keystream, ctrblk);
-        crypto_xor(keystream, src, nbytes);
-        memcpy(dst, keystream, nbytes);
+        crypto_xor_cpy(dst, keystream, src, nbytes);
 
         crypto_inc(ctrblk, CAST5_BLOCK_SIZE);
 }
diff --git a/arch/x86/crypto/des3_ede_glue.c b/arch/x86/crypto/des3_ede_glue.c
index d6fc59aaaadf..30c0a37f4882 100644
--- a/arch/x86/crypto/des3_ede_glue.c
+++ b/arch/x86/crypto/des3_ede_glue.c
@@ -277,8 +277,7 @@ static void ctr_crypt_final(struct des3_ede_x86_ctx *ctx,
         unsigned int nbytes = walk->nbytes;
 
         des3_ede_enc_blk(ctx, keystream, ctrblk);
-        crypto_xor(keystream, src, nbytes);
-        memcpy(dst, keystream, nbytes);
+        crypto_xor_cpy(dst, keystream, src, nbytes);
 
         crypto_inc(ctrblk, DES3_EDE_BLOCK_SIZE);
 }
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 477d9226ccaa..854d924f9d8e 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -65,8 +65,7 @@ static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
         unsigned int nbytes = walk->nbytes;
 
         crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
-        crypto_xor(keystream, src, nbytes);
-        memcpy(dst, keystream, nbytes);
+        crypto_xor_cpy(dst, keystream, src, nbytes);
 
         crypto_inc(ctrblk, bsize);
 }
diff --git a/crypto/pcbc.c b/crypto/pcbc.c
index 29dd2b4a3b85..d9e45a958720 100644
--- a/crypto/pcbc.c
+++ b/crypto/pcbc.c
@@ -55,8 +55,7 @@ static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
         do {
                 crypto_xor(iv, src, bsize);
                 crypto_cipher_encrypt_one(tfm, dst, iv);
-                memcpy(iv, dst, bsize);
-                crypto_xor(iv, src, bsize);
+                crypto_xor_cpy(iv, dst, src, bsize);
 
                 src += bsize;
                 dst += bsize;
@@ -79,8 +78,7 @@ static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
                 memcpy(tmpbuf, src, bsize);
                 crypto_xor(iv, src, bsize);
                 crypto_cipher_encrypt_one(tfm, src, iv);
-                memcpy(iv, tmpbuf, bsize);
-                crypto_xor(iv, src, bsize);
+                crypto_xor_cpy(iv, tmpbuf, src, bsize);
 
                 src += bsize;
         } while ((nbytes -= bsize) >= bsize);
@@ -127,8 +125,7 @@ static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
         do {
                 crypto_cipher_decrypt_one(tfm, dst, src);
                 crypto_xor(dst, iv, bsize);
-                memcpy(iv, src, bsize);
-                crypto_xor(iv, dst, bsize);
+                crypto_xor_cpy(iv, dst, src, bsize);
 
                 src += bsize;
                 dst += bsize;
@@ -153,8 +150,7 @@ static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
                 memcpy(tmpbuf, src, bsize);
                 crypto_cipher_decrypt_one(tfm, src, src);
                 crypto_xor(src, iv, bsize);
-                memcpy(iv, tmpbuf, bsize);
-                crypto_xor(iv, src, bsize);
+                crypto_xor_cpy(iv, src, tmpbuf, bsize);
 
                 src += bsize;
         } while ((nbytes -= bsize) >= bsize);
diff --git a/drivers/crypto/vmx/aes_ctr.c b/drivers/crypto/vmx/aes_ctr.c
index 9c26d9e8dbea..17d84217dd76 100644
--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -104,8 +104,7 @@ static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
         pagefault_enable();
         preempt_enable();
 
-        crypto_xor(keystream, src, nbytes);
-        memcpy(dst, keystream, nbytes);
+        crypto_xor_cpy(dst, keystream, src, nbytes);
         crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
index cdf6b1e12460..fa17e5452796 100644
--- a/drivers/md/dm-crypt.c
+++ b/drivers/md/dm-crypt.c
@@ -758,9 +758,8 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
         int i, r;
 
         /* xor whitening with sector number */
-        memcpy(buf, tcw->whitening, TCW_WHITENING_SIZE);
-        crypto_xor(buf, (u8 *)&sector, 8);
-        crypto_xor(&buf[8], (u8 *)&sector, 8);
+        crypto_xor_cpy(buf, tcw->whitening, (u8 *)&sector, 8);
+        crypto_xor_cpy(&buf[8], tcw->whitening + 8, (u8 *)&sector, 8);
 
         /* calculate crc32 for every 32bit part and xor it */
         desc->tfm = tcw->crc32_tfm;
@@ -805,10 +804,10 @@ static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
         }
 
         /* Calculate IV */
-        memcpy(iv, tcw->iv_seed, cc->iv_size);
-        crypto_xor(iv, (u8 *)&sector, 8);
+        crypto_xor_cpy(iv, tcw->iv_seed, (u8 *)&sector, 8);
         if (cc->iv_size > 8)
-                crypto_xor(&iv[8], (u8 *)&sector, cc->iv_size - 8);
+                crypto_xor_cpy(&iv[8], tcw->iv_seed + 8, (u8 *)&sector,
+                               cc->iv_size - 8);
 
         return r;
 }
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index fd547f946bf8..e3cebf640c00 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -211,6 +211,25 @@ static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
         }
 }
 
+static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
+                                  unsigned int size)
+{
+        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
+            __builtin_constant_p(size) &&
+            (size % sizeof(unsigned long)) == 0) {
+                unsigned long *d = (unsigned long *)dst;
+                unsigned long *s1 = (unsigned long *)src1;
+                unsigned long *s2 = (unsigned long *)src2;
+
+                while (size > 0) {
+                        *d++ = *s1++ ^ *s2++;
+                        size -= sizeof(unsigned long);
+                }
+        } else {
+                __crypto_xor(dst, src1, src2, size);
+        }
+}
+
 int blkcipher_walk_done(struct blkcipher_desc *desc,
                         struct blkcipher_walk *walk, int err);
 int blkcipher_walk_virt(struct blkcipher_desc *desc,
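
A note on the helper added above: the inline word-at-a-time loop is only
taken when size is a compile-time constant that is a multiple of
sizeof(unsigned long), and only on architectures that select
CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS; every other call falls back to
__crypto_xor(). A sketch of both cases (illustrative only; AES_BLOCK_SIZE
is 16 and thus a constant multiple of the word size on both 32-bit and
64-bit builds):

  u8 out[AES_BLOCK_SIZE];

  /* Constant size: eligible for the inline word-at-a-time fast path. */
  crypto_xor_cpy(out, keystream, src, AES_BLOCK_SIZE);

  /* Runtime size: compiles to the __crypto_xor() fallback. */
  crypto_xor_cpy(out, keystream, src, nbytes);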