path: root/drivers/crypto/vmx
author		Li Zhong <zhong@linux.vnet.ibm.com>	2017-01-20 03:35:33 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2017-01-23 09:50:34 -0500
commit		7dede913fc2ab9c0d3bff3a49e26fa9e858b0c13 (patch)
tree		de8b83b15328d461e7a4c921728786dffd803b38 /drivers/crypto/vmx
parent		d03f7b0d58ac1bed9d98960dfe831f69a6f15aab (diff)
crypto: vmx - disable preemption to enable vsx in aes_ctr.c
Some preemptible check warnings were reported from enable_kernel_vsx(). This patch disables preemption in aes_ctr.c before enabling VSX, making it consistent with the other files in the same directory.

Signed-off-by: Li Zhong <zhong@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
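For readers unfamiliar with the pattern, below is a minimal sketch of the critical section this patch establishes around each VSX-using region. It assumes a powerpc kernel context; do_vsx_work() is a hypothetical placeholder for the aes_p8_*() assembly routines, while the enable/disable helpers are the real kernel APIs that appear in the diff.

    #include <linux/preempt.h>   /* preempt_disable()/preempt_enable() */
    #include <linux/uaccess.h>   /* pagefault_disable()/pagefault_enable() */
    #include <asm/switch_to.h>   /* enable_kernel_vsx()/disable_kernel_vsx() */

    /* Hypothetical stand-in for the aes_p8_*() assembly calls. */
    static void do_vsx_work(void)
    {
    }

    static void vsx_critical_section(void)
    {
            preempt_disable();      /* must come first: enable_kernel_vsx()
                                       warns if the caller is preemptible */
            pagefault_disable();    /* no page faults while VSX state is live */
            enable_kernel_vsx();    /* grant this kernel context VSX access */

            do_vsx_work();

            disable_kernel_vsx();
            pagefault_enable();
            preempt_enable();
    }

Ordering is the point of the fix: preemption must already be off when enable_kernel_vsx() runs, otherwise the preemptible check it performs fires (the warnings the commit message refers to), and the task could in principle migrate to another CPU while its VSX register state is in use.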
Diffstat (limited to 'drivers/crypto/vmx')
-rw-r--r--	drivers/crypto/vmx/aes_ctr.c | 6 ++++++
1 file changed, 6 insertions(+), 0 deletions(-)
diff --git a/drivers/crypto/vmx/aes_ctr.c b/drivers/crypto/vmx/aes_ctr.c
index 38ed10d761d0..7cf6d31c1123 100644
--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -80,11 +80,13 @@ static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
 	int ret;
 	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
+	preempt_disable();
 	pagefault_disable();
 	enable_kernel_vsx();
 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
 	disable_kernel_vsx();
 	pagefault_enable();
+	preempt_enable();
 
 	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
 	return ret;
@@ -99,11 +101,13 @@ static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
 	u8 *dst = walk->dst.virt.addr;
 	unsigned int nbytes = walk->nbytes;
 
+	preempt_disable();
 	pagefault_disable();
 	enable_kernel_vsx();
 	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
 	disable_kernel_vsx();
 	pagefault_enable();
+	preempt_enable();
 
 	crypto_xor(keystream, src, nbytes);
 	memcpy(dst, keystream, nbytes);
@@ -132,6 +136,7 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
 		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+			preempt_disable();
 			pagefault_disable();
 			enable_kernel_vsx();
 			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
@@ -143,6 +148,7 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
 						    walk.iv);
 			disable_kernel_vsx();
 			pagefault_enable();
+			preempt_enable();
 
 			/* We need to update IV mostly for last bytes/round */
 			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
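One design note on the hunks above: in p8_aes_ctr_crypt() the preempt_disable()/preempt_enable() pair is scoped to each iteration of the blkcipher walk rather than wrapped around the whole loop. Presumably this keeps the preemption-off window bounded to one chunk of blocks, since the walk machinery between chunks may need to fault pages in and sleep.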