author		Linus Torvalds <torvalds@linux-foundation.org>	2014-07-28 14:35:30 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2014-07-28 14:35:30 -0400
commit		31dab719fa50cf56d56d3dc25980fecd336f6ca8 (patch)
tree		20254d20758d2410d9caa68fa1786b71a4225875
parent		e8a91e0e872c52d2af62ae2cf30b6aef1da1ae52 (diff)
parent		f3c400ef473e00c680ea713a66196b05870b3710 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull ARM AES crypto fixes from Herbert Xu:
"This push fixes a regression on ARM where odd-sized blocks supplied to
AES may cause crashes"
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
crypto: arm-aes - fix encryption of unaligned data
crypto: arm64-aes - fix encryption of unaligned data
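For orientation, the contract being fixed: the last argument of blkcipher_walk_done() is the number of bytes of the current walk chunk left unprocessed, and the walk layer re-presents exactly those bytes on the next iteration. Hard-coding 0 claims the whole chunk was consumed, so when an unaligned request left a tail shorter than AES_BLOCK_SIZE, that tail was silently dropped. A minimal sketch of the corrected loop shape against the 3.16-era blkcipher API; my_cipher_block() is a hypothetical stand-in for the real per-block primitive, everything else is the actual walk interface:

static int walk_and_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while (walk.nbytes) {
		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *src_p = walk.src.virt.addr;
		u8 *dst_p = walk.dst.virt.addr;

		while (blocks--) {
			my_cipher_block(dst_p, src_p);	/* hypothetical */
			src_p += AES_BLOCK_SIZE;
			dst_p += AES_BLOCK_SIZE;
		}
		/* The fix below: report the sub-block tail instead of 0
		 * so the walk re-presents it on the next iteration. */
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}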
-rw-r--r--	arch/arm/crypto/aesbs-glue.c	10
-rw-r--r--	arch/arm64/crypto/aes-glue.c	12
2 files changed, 11 insertions, 11 deletions
diff --git a/arch/arm/crypto/aesbs-glue.c b/arch/arm/crypto/aesbs-glue.c
index 4522366da759..15468fbbdea3 100644
--- a/arch/arm/crypto/aesbs-glue.c
+++ b/arch/arm/crypto/aesbs-glue.c
@@ -137,7 +137,7 @@ static int aesbs_cbc_encrypt(struct blkcipher_desc *desc,
 				dst += AES_BLOCK_SIZE;
 			} while (--blocks);
 		}
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
@@ -158,7 +158,7 @@ static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
 		bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
 				  walk.nbytes, &ctx->dec, walk.iv);
 		kernel_neon_end();
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	while (walk.nbytes) {
 		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
@@ -182,7 +182,7 @@ static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
 			dst += AES_BLOCK_SIZE;
 			src += AES_BLOCK_SIZE;
 		} while (--blocks);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
@@ -268,7 +268,7 @@ static int aesbs_xts_encrypt(struct blkcipher_desc *desc,
 		bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
 				  walk.nbytes, &ctx->enc, walk.iv);
 		kernel_neon_end();
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
@@ -292,7 +292,7 @@ static int aesbs_xts_decrypt(struct blkcipher_desc *desc,
 		bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
 				  walk.nbytes, &ctx->dec, walk.iv);
 		kernel_neon_end();
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	return err;
 }
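The two aesbs_cbc_decrypt() hunks above are call sites in one two-phase walk: a bulk NEON phase for runs of eight or more blocks, then a per-block scalar fallback. A condensed paraphrase of that shape; the guard condition and the elided per-block body are reconstructed from context rather than quoted, so details may differ from the file:

	/* Phase 1: hand runs of 8+ blocks to the bit-sliced NEON code. */
	while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) {
		kernel_neon_begin();
		bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				  walk.nbytes, &ctx->dec, walk.iv);
		kernel_neon_end();
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE); /* was 0 */
	}
	/* Phase 2: per-block scalar fallback for whatever remains. */
	while (walk.nbytes) {
		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* ... AES_decrypt() loop as in the hunk above ... */

		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE); /* was 0 */
	}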
diff --git a/arch/arm64/crypto/aes-glue.c b/arch/arm64/crypto/aes-glue.c
index 60f2f4c12256..79cd911ef88c 100644
--- a/arch/arm64/crypto/aes-glue.c
+++ b/arch/arm64/crypto/aes-glue.c
@@ -106,7 +106,7 @@ static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
 		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_enc, rounds, blocks, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -128,7 +128,7 @@ static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
 		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_dec, rounds, blocks, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -151,7 +151,7 @@ static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
 				first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -174,7 +174,7 @@ static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
 				first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
@@ -243,7 +243,7 @@ static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key1.key_enc, rounds, blocks,
 				(u8 *)ctx->key2.key_enc, walk.iv, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 
@@ -267,7 +267,7 @@ static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key1.key_dec, rounds, blocks,
 				(u8 *)ctx->key2.key_enc, walk.iv, first);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 
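All eleven changed lines share the same remainder bookkeeping. A stand-alone user-space demonstration of that arithmetic (ordinary C, no kernel API; the chunk sizes are made up for illustration):

#include <stdio.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
	/* Hypothetical chunk sizes a blkcipher walk might yield for a
	 * request whose buffers straddle page boundaries unaligned. */
	unsigned int steps[] = { 4064, 120, 200, 16 };
	unsigned int i;

	for (i = 0; i < sizeof(steps) / sizeof(steps[0]); i++) {
		unsigned int nbytes = steps[i];
		unsigned int done = (nbytes / AES_BLOCK_SIZE) * AES_BLOCK_SIZE;

		/* "done" bytes are ciphered; the remainder must be passed
		 * to blkcipher_walk_done() so it is not silently lost. */
		printf("chunk %u: nbytes=%4u ciphered=%4u handed back=%2u\n",
		       i, nbytes, done, nbytes % AES_BLOCK_SIZE);
	}
	return 0;
}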