about summary refs log tree commit diff stats
path: root/drivers/crypto
diff options
context:
space:
mode:
authorDavid Hildenbrand <dahi@linux.vnet.ibm.com>2015-05-11 11:52:18 -0400
committerIngo Molnar <mingo@kernel.org>2015-05-19 02:39:17 -0400
commit5f76eea88dcbe75506d98e0207b9e3bd47941f2d (patch)
treefd90846667602705b9665aa9d8b93d5a5d49ae94 /drivers/crypto
parent2f09b227eeed4b3a072fe818c82a4c773b778cde (diff)
sched/preempt, powerpc: Disable preemption in enable_kernel_altivec() explicitly
enable_kernel_altivec() has to be called with preemption disabled. Let's make this explicit, to prepare for pagefault_disable() not touching preemption anymore. Reviewed-and-tested-by: Thomas Gleixner <tglx@linutronix.de> Signed-off-by: David Hildenbrand <dahi@linux.vnet.ibm.com> Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Acked-by: Benjamin Herrenschmidt <benh@kernel.crashing.org> Cc: David.Laight@ACULAB.COM Cc: Linus Torvalds <torvalds@linux-foundation.org> Cc: Peter Zijlstra <peterz@infradead.org> Cc: Thomas Gleixner <tglx@linutronix.de> Cc: airlied@linux.ie Cc: akpm@linux-foundation.org Cc: bigeasy@linutronix.de Cc: borntraeger@de.ibm.com Cc: daniel.vetter@intel.com Cc: heiko.carstens@de.ibm.com Cc: herbert@gondor.apana.org.au Cc: hocko@suse.cz Cc: hughd@google.com Cc: mst@redhat.com Cc: paulus@samba.org Cc: ralf@linux-mips.org Cc: schwidefsky@de.ibm.com Cc: yang.shi@windriver.com Link: http://lkml.kernel.org/r/1431359540-32227-14-git-send-email-dahi@linux.vnet.ibm.com Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'drivers/crypto')
-rw-r--r--drivers/crypto/vmx/aes.c8
-rw-r--r--drivers/crypto/vmx/aes_cbc.c6
-rw-r--r--drivers/crypto/vmx/ghash.c8
3 files changed, 21 insertions, 1 deletion
diff --git a/drivers/crypto/vmx/aes.c b/drivers/crypto/vmx/aes.c
index ab300ea19434..a9064e36e7b5 100644
--- a/drivers/crypto/vmx/aes.c
+++ b/drivers/crypto/vmx/aes.c
@@ -78,12 +78,14 @@ static int p8_aes_setkey(struct crypto_tfm *tfm, const u8 *key,
78 int ret; 78 int ret;
79 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); 79 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
80 80
81 preempt_disable();
81 pagefault_disable(); 82 pagefault_disable();
82 enable_kernel_altivec(); 83 enable_kernel_altivec();
83 ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key); 84 ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
84 ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key); 85 ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
85 pagefault_enable(); 86 pagefault_enable();
86 87 preempt_enable();
88
87 ret += crypto_cipher_setkey(ctx->fallback, key, keylen); 89 ret += crypto_cipher_setkey(ctx->fallback, key, keylen);
88 return ret; 90 return ret;
89} 91}
@@ -95,10 +97,12 @@ static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
95 if (in_interrupt()) { 97 if (in_interrupt()) {
96 crypto_cipher_encrypt_one(ctx->fallback, dst, src); 98 crypto_cipher_encrypt_one(ctx->fallback, dst, src);
97 } else { 99 } else {
100 preempt_disable();
98 pagefault_disable(); 101 pagefault_disable();
99 enable_kernel_altivec(); 102 enable_kernel_altivec();
100 aes_p8_encrypt(src, dst, &ctx->enc_key); 103 aes_p8_encrypt(src, dst, &ctx->enc_key);
101 pagefault_enable(); 104 pagefault_enable();
105 preempt_enable();
102 } 106 }
103} 107}
104 108
@@ -109,10 +113,12 @@ static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
109 if (in_interrupt()) { 113 if (in_interrupt()) {
110 crypto_cipher_decrypt_one(ctx->fallback, dst, src); 114 crypto_cipher_decrypt_one(ctx->fallback, dst, src);
111 } else { 115 } else {
116 preempt_disable();
112 pagefault_disable(); 117 pagefault_disable();
113 enable_kernel_altivec(); 118 enable_kernel_altivec();
114 aes_p8_decrypt(src, dst, &ctx->dec_key); 119 aes_p8_decrypt(src, dst, &ctx->dec_key);
115 pagefault_enable(); 120 pagefault_enable();
121 preempt_enable();
116 } 122 }
117} 123}
118 124
diff --git a/drivers/crypto/vmx/aes_cbc.c b/drivers/crypto/vmx/aes_cbc.c
index 1a559b7dddb5..477284abdd11 100644
--- a/drivers/crypto/vmx/aes_cbc.c
+++ b/drivers/crypto/vmx/aes_cbc.c
@@ -79,11 +79,13 @@ static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
79 int ret; 79 int ret;
80 struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm); 80 struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
81 81
82 preempt_disable();
82 pagefault_disable(); 83 pagefault_disable();
83 enable_kernel_altivec(); 84 enable_kernel_altivec();
84 ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key); 85 ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
85 ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key); 86 ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
86 pagefault_enable(); 87 pagefault_enable();
88 preempt_enable();
87 89
88 ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen); 90 ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
89 return ret; 91 return ret;
@@ -106,6 +108,7 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
106 if (in_interrupt()) { 108 if (in_interrupt()) {
107 ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes); 109 ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
108 } else { 110 } else {
111 preempt_disable();
109 pagefault_disable(); 112 pagefault_disable();
110 enable_kernel_altivec(); 113 enable_kernel_altivec();
111 114
@@ -119,6 +122,7 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
119 } 122 }
120 123
121 pagefault_enable(); 124 pagefault_enable();
125 preempt_enable();
122 } 126 }
123 127
124 return ret; 128 return ret;
@@ -141,6 +145,7 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
141 if (in_interrupt()) { 145 if (in_interrupt()) {
142 ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes); 146 ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes);
143 } else { 147 } else {
148 preempt_disable();
144 pagefault_disable(); 149 pagefault_disable();
145 enable_kernel_altivec(); 150 enable_kernel_altivec();
146 151
@@ -154,6 +159,7 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
154 } 159 }
155 160
156 pagefault_enable(); 161 pagefault_enable();
162 preempt_enable();
157 } 163 }
158 164
159 return ret; 165 return ret;
diff --git a/drivers/crypto/vmx/ghash.c b/drivers/crypto/vmx/ghash.c
index d0ffe277af5c..f255ec4a04d4 100644
--- a/drivers/crypto/vmx/ghash.c
+++ b/drivers/crypto/vmx/ghash.c
@@ -114,11 +114,13 @@ static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
114 if (keylen != GHASH_KEY_LEN) 114 if (keylen != GHASH_KEY_LEN)
115 return -EINVAL; 115 return -EINVAL;
116 116
117 preempt_disable();
117 pagefault_disable(); 118 pagefault_disable();
118 enable_kernel_altivec(); 119 enable_kernel_altivec();
119 enable_kernel_fp(); 120 enable_kernel_fp();
120 gcm_init_p8(ctx->htable, (const u64 *) key); 121 gcm_init_p8(ctx->htable, (const u64 *) key);
121 pagefault_enable(); 122 pagefault_enable();
123 preempt_enable();
122 return crypto_shash_setkey(ctx->fallback, key, keylen); 124 return crypto_shash_setkey(ctx->fallback, key, keylen);
123} 125}
124 126
@@ -140,23 +142,27 @@ static int p8_ghash_update(struct shash_desc *desc,
140 } 142 }
141 memcpy(dctx->buffer + dctx->bytes, src, 143 memcpy(dctx->buffer + dctx->bytes, src,
142 GHASH_DIGEST_SIZE - dctx->bytes); 144 GHASH_DIGEST_SIZE - dctx->bytes);
145 preempt_disable();
143 pagefault_disable(); 146 pagefault_disable();
144 enable_kernel_altivec(); 147 enable_kernel_altivec();
145 enable_kernel_fp(); 148 enable_kernel_fp();
146 gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer, 149 gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
147 GHASH_DIGEST_SIZE); 150 GHASH_DIGEST_SIZE);
148 pagefault_enable(); 151 pagefault_enable();
152 preempt_enable();
149 src += GHASH_DIGEST_SIZE - dctx->bytes; 153 src += GHASH_DIGEST_SIZE - dctx->bytes;
150 srclen -= GHASH_DIGEST_SIZE - dctx->bytes; 154 srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
151 dctx->bytes = 0; 155 dctx->bytes = 0;
152 } 156 }
153 len = srclen & ~(GHASH_DIGEST_SIZE - 1); 157 len = srclen & ~(GHASH_DIGEST_SIZE - 1);
154 if (len) { 158 if (len) {
159 preempt_disable();
155 pagefault_disable(); 160 pagefault_disable();
156 enable_kernel_altivec(); 161 enable_kernel_altivec();
157 enable_kernel_fp(); 162 enable_kernel_fp();
158 gcm_ghash_p8(dctx->shash, ctx->htable, src, len); 163 gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
159 pagefault_enable(); 164 pagefault_enable();
165 preempt_enable();
160 src += len; 166 src += len;
161 srclen -= len; 167 srclen -= len;
162 } 168 }
@@ -180,12 +186,14 @@ static int p8_ghash_final(struct shash_desc *desc, u8 *out)
180 if (dctx->bytes) { 186 if (dctx->bytes) {
181 for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++) 187 for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
182 dctx->buffer[i] = 0; 188 dctx->buffer[i] = 0;
189 preempt_disable();
183 pagefault_disable(); 190 pagefault_disable();
184 enable_kernel_altivec(); 191 enable_kernel_altivec();
185 enable_kernel_fp(); 192 enable_kernel_fp();
186 gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer, 193 gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
187 GHASH_DIGEST_SIZE); 194 GHASH_DIGEST_SIZE);
188 pagefault_enable(); 195 pagefault_enable();
196 preempt_enable();
189 dctx->bytes = 0; 197 dctx->bytes = 0;
190 } 198 }
191 memcpy(out, dctx->shash, GHASH_DIGEST_SIZE); 199 memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);