author		Linus Torvalds <torvalds@linux-foundation.org>	2012-12-19 23:31:02 -0500
committer	Linus Torvalds <torvalds@linux-foundation.org>	2012-12-19 23:31:02 -0500
commit		f01af9f85855e38fbd601e033a8eac204cc4cc1c (patch)
tree		106b1b24dd17a0c6cb9fdda35a249f429310b025
parent		9eb127cc04c4005c8c0708ce92146d91da862b42 (diff)
parent		62ba63dc892cf836ecb9ce4fdb7644d45c95070b (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/davem/sparc
Pull sparc fixes from David Miller:
 "Please pull to get these sparc AES/DES/CAMELLIA crypto bug fixes as
  well as an addition of a pte_accessible() define for sparc64 and a
  hugetlb fix from Dave Kleikamp."

* git://git.kernel.org/pub/scm/linux/kernel/git/davem/sparc:
  sparc64: Set CRYPTO_TFM_REQ_MAY_SLEEP consistently in CAMELLIA code.
  sparc64: Set CRYPTO_TFM_REQ_MAY_SLEEP consistently in DES code.
  sparc64: Fix ECB looping constructs in AES code.
  sparc64: Set CRYPTO_TFM_REQ_MAY_SLEEP consistently in AES code.
  sparc64: Fix AES ctr mode block size.
  sparc64: Fix unrolled AES 256-bit key loops.
  sparc64: Define pte_accessible()
  sparc: huge_ptep_set_* functions need to call set_huge_pte_at()
-rw-r--r--	arch/sparc/crypto/aes_asm.S		20
-rw-r--r--	arch/sparc/crypto/aes_glue.c		31
-rw-r--r--	arch/sparc/crypto/camellia_glue.c	3
-rw-r--r--	arch/sparc/crypto/des_asm.S		1
-rw-r--r--	arch/sparc/crypto/des_glue.c		6
-rw-r--r--	arch/sparc/include/asm/hugetlb.h	10
-rw-r--r--	arch/sparc/include/asm/pgtable_64.h	8
7 files changed, 67 insertions(+), 12 deletions(-)
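Of the crypto fixes above, the CTR mode change is the most involved: CTR turns the block cipher into a stream cipher, which is why the diff below drops the algorithm's .cra_blocksize to 1 and adds a ctr_crypt_final() helper that encrypts the counter block once and XORs only the bytes left in a final partial block. The following is an illustrative standalone sketch of that pattern, not the kernel code; aes_encrypt_block() is an assumed single-block primitive standing in for the sparc64 AES opcodes.

/*
 * Sketch only: handling the final partial block in CTR mode.
 * aes_encrypt_block() is a hypothetical single-block AES primitive.
 */
#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* assumed: encrypts one 16-byte block with an already-expanded key */
void aes_encrypt_block(const void *key, const uint8_t in[AES_BLOCK_SIZE],
		       uint8_t out[AES_BLOCK_SIZE]);

static void ctr_final_partial(const void *key, uint8_t ctr[AES_BLOCK_SIZE],
			      const uint8_t *src, uint8_t *dst, size_t nbytes)
{
	uint8_t keystream[AES_BLOCK_SIZE];
	size_t i;

	/* E_k(counter) yields one block of keystream */
	aes_encrypt_block(key, ctr, keystream);

	/* XOR only the bytes that remain; nbytes < AES_BLOCK_SIZE here */
	for (i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ keystream[i];

	/* big-endian increment of the counter block (what crypto_inc() does) */
	for (i = AES_BLOCK_SIZE; i-- > 0; )
		if (++ctr[i] != 0)
			break;
}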
diff --git a/arch/sparc/crypto/aes_asm.S b/arch/sparc/crypto/aes_asm.S
index 23f6cbb910d..1cda8aa7cb8 100644
--- a/arch/sparc/crypto/aes_asm.S
+++ b/arch/sparc/crypto/aes_asm.S
@@ -1024,7 +1024,11 @@ ENTRY(aes_sparc64_ecb_encrypt_256)
 	add	%o2, 0x20, %o2
 	brlz,pt	%o3, 11f
 	 nop
-10:	ldx	[%o1 + 0x00], %g3
+10:	ldd	[%o0 + 0xd0], %f56
+	ldd	[%o0 + 0xd8], %f58
+	ldd	[%o0 + 0xe0], %f60
+	ldd	[%o0 + 0xe8], %f62
+	ldx	[%o1 + 0x00], %g3
 	ldx	[%o1 + 0x08], %g7
 	xor	%g1, %g3, %g3
 	xor	%g2, %g7, %g7
@@ -1128,9 +1132,9 @@ ENTRY(aes_sparc64_ecb_decrypt_256)
 	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
 	ldx	[%o0 - 0x10], %g1
 	subcc	%o3, 0x10, %o3
+	ldx	[%o0 - 0x08], %g2
 	be	10f
-	 ldx	[%o0 - 0x08], %g2
-	sub	%o0, 0xf0, %o0
+	 sub	%o0, 0xf0, %o0
 1:	ldx	[%o1 + 0x00], %g3
 	ldx	[%o1 + 0x08], %g7
 	ldx	[%o1 + 0x10], %o4
@@ -1154,7 +1158,11 @@ ENTRY(aes_sparc64_ecb_decrypt_256)
 	add	%o2, 0x20, %o2
 	brlz,pt	%o3, 11f
 	 nop
-10:	ldx	[%o1 + 0x00], %g3
+10:	ldd	[%o0 + 0x18], %f56
+	ldd	[%o0 + 0x10], %f58
+	ldd	[%o0 + 0x08], %f60
+	ldd	[%o0 + 0x00], %f62
+	ldx	[%o1 + 0x00], %g3
 	ldx	[%o1 + 0x08], %g7
 	xor	%g1, %g3, %g3
 	xor	%g2, %g7, %g7
@@ -1511,11 +1519,11 @@ ENTRY(aes_sparc64_ctr_crypt_256)
 	add	%o2, 0x20, %o2
 	brlz,pt	%o3, 11f
 	 nop
-	ldd	[%o0 + 0xd0], %f56
+10:	ldd	[%o0 + 0xd0], %f56
 	ldd	[%o0 + 0xd8], %f58
 	ldd	[%o0 + 0xe0], %f60
 	ldd	[%o0 + 0xe8], %f62
-10:	xor	%g1, %g3, %o5
+	xor	%g1, %g3, %o5
 	MOVXTOD_O5_F0
 	xor	%g2, %g7, %o5
 	MOVXTOD_O5_F2
diff --git a/arch/sparc/crypto/aes_glue.c b/arch/sparc/crypto/aes_glue.c
index 3965d1d36df..503e6d96ad4 100644
--- a/arch/sparc/crypto/aes_glue.c
+++ b/arch/sparc/crypto/aes_glue.c
@@ -222,6 +222,7 @@ static int ecb_encrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
 	while ((nbytes = walk.nbytes)) {
@@ -251,6 +252,7 @@ static int ecb_decrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
@@ -280,6 +282,7 @@ static int cbc_encrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
 	while ((nbytes = walk.nbytes)) {
@@ -309,6 +312,7 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
@@ -329,6 +333,22 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 	return err;
 }
 
+static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
+			    struct blkcipher_walk *walk)
+{
+	u8 *ctrblk = walk->iv;
+	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	unsigned int nbytes = walk->nbytes;
+
+	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
+			      keystream, AES_BLOCK_SIZE);
+	crypto_xor((u8 *) keystream, src, nbytes);
+	memcpy(dst, keystream, nbytes);
+	crypto_inc(ctrblk, AES_BLOCK_SIZE);
+}
+
 static int ctr_crypt(struct blkcipher_desc *desc,
 		     struct scatterlist *dst, struct scatterlist *src,
 		     unsigned int nbytes)
@@ -338,10 +358,11 @@ static int ctr_crypt(struct blkcipher_desc *desc,
 	int err;
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
-	while ((nbytes = walk.nbytes)) {
+	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
 		unsigned int block_len = nbytes & AES_BLOCK_MASK;
 
 		if (likely(block_len)) {
@@ -353,6 +374,10 @@ static int ctr_crypt(struct blkcipher_desc *desc,
 		nbytes &= AES_BLOCK_SIZE - 1;
 		err = blkcipher_walk_done(desc, &walk, nbytes);
 	}
+	if (walk.nbytes) {
+		ctr_crypt_final(ctx, &walk);
+		err = blkcipher_walk_done(desc, &walk, 0);
+	}
 	fprs_write(0);
 	return err;
 }
@@ -418,7 +443,7 @@ static struct crypto_alg algs[] = { {
 	.cra_driver_name	= "ctr-aes-sparc64",
 	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
 	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
-	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
 	.cra_alignmask		= 7,
 	.cra_type		= &crypto_blkcipher_type,
diff --git a/arch/sparc/crypto/camellia_glue.c b/arch/sparc/crypto/camellia_glue.c
index 62c89af3fd3..888f6260b4e 100644
--- a/arch/sparc/crypto/camellia_glue.c
+++ b/arch/sparc/crypto/camellia_glue.c
@@ -98,6 +98,7 @@ static int __ecb_crypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	if (encrypt)
 		key = &ctx->encrypt_key[0];
@@ -160,6 +161,7 @@ static int cbc_encrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	key = &ctx->encrypt_key[0];
 	camellia_sparc64_load_keys(key, ctx->key_len);
@@ -198,6 +200,7 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	key = &ctx->decrypt_key[0];
 	camellia_sparc64_load_keys(key, ctx->key_len);
diff --git a/arch/sparc/crypto/des_asm.S b/arch/sparc/crypto/des_asm.S
index 30b6e90b28b..b5c8fc269b5 100644
--- a/arch/sparc/crypto/des_asm.S
+++ b/arch/sparc/crypto/des_asm.S
@@ -376,6 +376,7 @@ ENTRY(des3_ede_sparc64_ecb_crypt)
 1:	ldd	[%o1 + 0x00], %f60
 	DES3_LOOP_BODY(60)
 	std	%f60, [%o2 + 0x00]
+	add	%o1, 0x08, %o1
 	subcc	%o3, 0x08, %o3
 	bne,pt	%icc, 1b
 	 add	%o2, 0x08, %o2
diff --git a/arch/sparc/crypto/des_glue.c b/arch/sparc/crypto/des_glue.c
index 41524cebcc4..3065bc61f9d 100644
--- a/arch/sparc/crypto/des_glue.c
+++ b/arch/sparc/crypto/des_glue.c
@@ -100,6 +100,7 @@ static int __ecb_crypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	if (encrypt)
 		des_sparc64_load_keys(&ctx->encrypt_expkey[0]);
@@ -147,6 +148,7 @@ static int cbc_encrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	des_sparc64_load_keys(&ctx->encrypt_expkey[0]);
 	while ((nbytes = walk.nbytes)) {
@@ -177,6 +179,7 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	des_sparc64_load_keys(&ctx->decrypt_expkey[0]);
 	while ((nbytes = walk.nbytes)) {
@@ -266,6 +269,7 @@ static int __ecb3_crypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	if (encrypt)
 		K = &ctx->encrypt_expkey[0];
@@ -317,6 +321,7 @@ static int cbc3_encrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	K = &ctx->encrypt_expkey[0];
 	des3_ede_sparc64_load_keys(K);
@@ -352,6 +357,7 @@ static int cbc3_decrypt(struct blkcipher_desc *desc,
 
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	err = blkcipher_walk_virt(desc, &walk);
+	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
 
 	K = &ctx->decrypt_expkey[0];
 	des3_ede_sparc64_load_keys(K);
diff --git a/arch/sparc/include/asm/hugetlb.h b/arch/sparc/include/asm/hugetlb.h
index 8c5eed6d267..9661e9bc7bb 100644
--- a/arch/sparc/include/asm/hugetlb.h
+++ b/arch/sparc/include/asm/hugetlb.h
@@ -61,14 +61,20 @@ static inline pte_t huge_pte_wrprotect(pte_t pte)
 static inline void huge_ptep_set_wrprotect(struct mm_struct *mm,
 					   unsigned long addr, pte_t *ptep)
 {
-	ptep_set_wrprotect(mm, addr, ptep);
+	pte_t old_pte = *ptep;
+	set_huge_pte_at(mm, addr, ptep, pte_wrprotect(old_pte));
 }
 
 static inline int huge_ptep_set_access_flags(struct vm_area_struct *vma,
 					     unsigned long addr, pte_t *ptep,
 					     pte_t pte, int dirty)
 {
-	return ptep_set_access_flags(vma, addr, ptep, pte, dirty);
+	int changed = !pte_same(*ptep, pte);
+	if (changed) {
+		set_huge_pte_at(vma->vm_mm, addr, ptep, pte);
+		flush_tlb_page(vma, addr);
+	}
+	return changed;
 }
 
 static inline pte_t huge_ptep_get(pte_t *ptep)
diff --git a/arch/sparc/include/asm/pgtable_64.h b/arch/sparc/include/asm/pgtable_64.h
index 95515f1e7ce..7870be0f5ad 100644
--- a/arch/sparc/include/asm/pgtable_64.h
+++ b/arch/sparc/include/asm/pgtable_64.h
@@ -617,6 +617,12 @@ static inline unsigned long pte_present(pte_t pte)
 	return val;
 }
 
+#define pte_accessible pte_accessible
+static inline unsigned long pte_accessible(pte_t a)
+{
+	return pte_val(a) & _PAGE_VALID;
+}
+
 static inline unsigned long pte_special(pte_t pte)
 {
 	return pte_val(pte) & _PAGE_SPECIAL;
@@ -802,7 +808,7 @@ static inline void __set_pte_at(struct mm_struct *mm, unsigned long addr,
 	 * SUN4V NOTE: _PAGE_VALID is the same value in both the SUN4U
 	 * and SUN4V pte layout, so this inline test is fine.
 	 */
-	if (likely(mm != &init_mm) && (pte_val(orig) & _PAGE_VALID))
+	if (likely(mm != &init_mm) && pte_accessible(orig))
 		tlb_batch_add(mm, addr, ptep, orig, fullmm);
 }
 