diff options
author | Herbert Xu <herbert@gondor.apana.org.au> | 2015-01-05 15:54:41 -0500 |
---|---|---|
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2015-01-08 05:46:19 -0500 |
commit | ad511e260a27b8e35d273cc0ecfe5a8ff9543181 (patch) | |
tree | 15a7d801cbeb7d33584b8e2a55eff0df73041fd4 /drivers/crypto/qat | |
parent | 15acabfd02e35e270360fbe0def898e48754b3d6 (diff) |
crypto: qat - Fix incorrect uses of memzero_explicit
memzero_explicit should only be used on stack variables that get
zapped just before they go out of scope.
This patch replaces all unnecessary uses of memzero_explicit with
memset, removes two memzero_explicit calls altogether as the tfm
context comes pre-zeroed, and adds a missing memzero_explicit of
the stack variable buff in qat_alg_do_precomputes. The memzeros
on ipad/opad + digest_size/auth_keylen are also removed as the
entire auth_state is already zeroed on entry.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Acked-by: Tadeusz Struk <tadeusz.struk@intel.com>
Diffstat (limited to 'drivers/crypto/qat')
-rw-r--r-- | drivers/crypto/qat/qat_common/qat_algs.c | 45 |
1 file changed, 20 insertions(+), 25 deletions(-)
diff --git a/drivers/crypto/qat/qat_common/qat_algs.c b/drivers/crypto/qat/qat_common/qat_algs.c index f32d0a58bcc0..a0d95f329094 100644 --- a/drivers/crypto/qat/qat_common/qat_algs.c +++ b/drivers/crypto/qat/qat_common/qat_algs.c | |||
@@ -173,7 +173,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash, | |||
173 | __be64 *hash512_state_out; | 173 | __be64 *hash512_state_out; |
174 | int i, offset; | 174 | int i, offset; |
175 | 175 | ||
176 | memzero_explicit(auth_state.data, MAX_AUTH_STATE_SIZE + 64); | 176 | memset(auth_state.data, 0, sizeof(auth_state.data)); |
177 | shash->tfm = ctx->hash_tfm; | 177 | shash->tfm = ctx->hash_tfm; |
178 | shash->flags = 0x0; | 178 | shash->flags = 0x0; |
179 | 179 | ||
@@ -186,13 +186,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash, | |||
186 | 186 | ||
187 | memcpy(ipad, buff, digest_size); | 187 | memcpy(ipad, buff, digest_size); |
188 | memcpy(opad, buff, digest_size); | 188 | memcpy(opad, buff, digest_size); |
189 | memzero_explicit(ipad + digest_size, block_size - digest_size); | 189 | memzero_explicit(buff, sizeof(buff)); |
190 | memzero_explicit(opad + digest_size, block_size - digest_size); | ||
191 | } else { | 190 | } else { |
192 | memcpy(ipad, auth_key, auth_keylen); | 191 | memcpy(ipad, auth_key, auth_keylen); |
193 | memcpy(opad, auth_key, auth_keylen); | 192 | memcpy(opad, auth_key, auth_keylen); |
194 | memzero_explicit(ipad + auth_keylen, block_size - auth_keylen); | ||
195 | memzero_explicit(opad + auth_keylen, block_size - auth_keylen); | ||
196 | } | 193 | } |
197 | 194 | ||
198 | for (i = 0; i < block_size; i++) { | 195 | for (i = 0; i < block_size; i++) { |
@@ -582,10 +579,10 @@ static int qat_alg_aead_setkey(struct crypto_aead *tfm, const uint8_t *key, | |||
582 | if (ctx->enc_cd) { | 579 | if (ctx->enc_cd) { |
583 | /* rekeying */ | 580 | /* rekeying */ |
584 | dev = &GET_DEV(ctx->inst->accel_dev); | 581 | dev = &GET_DEV(ctx->inst->accel_dev); |
585 | memzero_explicit(ctx->enc_cd, sizeof(*ctx->enc_cd)); | 582 | memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd)); |
586 | memzero_explicit(ctx->dec_cd, sizeof(*ctx->dec_cd)); | 583 | memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd)); |
587 | memzero_explicit(&ctx->enc_fw_req, sizeof(ctx->enc_fw_req)); | 584 | memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req)); |
588 | memzero_explicit(&ctx->dec_fw_req, sizeof(ctx->dec_fw_req)); | 585 | memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req)); |
589 | } else { | 586 | } else { |
590 | /* new key */ | 587 | /* new key */ |
591 | int node = get_current_node(); | 588 | int node = get_current_node(); |
@@ -620,12 +617,12 @@ static int qat_alg_aead_setkey(struct crypto_aead *tfm, const uint8_t *key, | |||
620 | return 0; | 617 | return 0; |
621 | 618 | ||
622 | out_free_all: | 619 | out_free_all: |
623 | memzero_explicit(ctx->dec_cd, sizeof(struct qat_alg_cd)); | 620 | memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd)); |
624 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), | 621 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), |
625 | ctx->dec_cd, ctx->dec_cd_paddr); | 622 | ctx->dec_cd, ctx->dec_cd_paddr); |
626 | ctx->dec_cd = NULL; | 623 | ctx->dec_cd = NULL; |
627 | out_free_enc: | 624 | out_free_enc: |
628 | memzero_explicit(ctx->enc_cd, sizeof(struct qat_alg_cd)); | 625 | memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd)); |
629 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), | 626 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), |
630 | ctx->enc_cd, ctx->enc_cd_paddr); | 627 | ctx->enc_cd, ctx->enc_cd_paddr); |
631 | ctx->enc_cd = NULL; | 628 | ctx->enc_cd = NULL; |
@@ -969,10 +966,10 @@ static int qat_alg_ablkcipher_setkey(struct crypto_ablkcipher *tfm, | |||
969 | if (ctx->enc_cd) { | 966 | if (ctx->enc_cd) { |
970 | /* rekeying */ | 967 | /* rekeying */ |
971 | dev = &GET_DEV(ctx->inst->accel_dev); | 968 | dev = &GET_DEV(ctx->inst->accel_dev); |
972 | memzero_explicit(ctx->enc_cd, sizeof(*ctx->enc_cd)); | 969 | memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd)); |
973 | memzero_explicit(ctx->dec_cd, sizeof(*ctx->dec_cd)); | 970 | memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd)); |
974 | memzero_explicit(&ctx->enc_fw_req, sizeof(ctx->enc_fw_req)); | 971 | memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req)); |
975 | memzero_explicit(&ctx->dec_fw_req, sizeof(ctx->dec_fw_req)); | 972 | memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req)); |
976 | } else { | 973 | } else { |
977 | /* new key */ | 974 | /* new key */ |
978 | int node = get_current_node(); | 975 | int node = get_current_node(); |
@@ -1007,12 +1004,12 @@ static int qat_alg_ablkcipher_setkey(struct crypto_ablkcipher *tfm, | |||
1007 | return 0; | 1004 | return 0; |
1008 | 1005 | ||
1009 | out_free_all: | 1006 | out_free_all: |
1010 | memzero_explicit(ctx->dec_cd, sizeof(*ctx->enc_cd)); | 1007 | memset(ctx->dec_cd, 0, sizeof(*ctx->enc_cd)); |
1011 | dma_free_coherent(dev, sizeof(*ctx->enc_cd), | 1008 | dma_free_coherent(dev, sizeof(*ctx->enc_cd), |
1012 | ctx->dec_cd, ctx->dec_cd_paddr); | 1009 | ctx->dec_cd, ctx->dec_cd_paddr); |
1013 | ctx->dec_cd = NULL; | 1010 | ctx->dec_cd = NULL; |
1014 | out_free_enc: | 1011 | out_free_enc: |
1015 | memzero_explicit(ctx->enc_cd, sizeof(*ctx->dec_cd)); | 1012 | memset(ctx->enc_cd, 0, sizeof(*ctx->dec_cd)); |
1016 | dma_free_coherent(dev, sizeof(*ctx->dec_cd), | 1013 | dma_free_coherent(dev, sizeof(*ctx->dec_cd), |
1017 | ctx->enc_cd, ctx->enc_cd_paddr); | 1014 | ctx->enc_cd, ctx->enc_cd_paddr); |
1018 | ctx->enc_cd = NULL; | 1015 | ctx->enc_cd = NULL; |
@@ -1101,7 +1098,6 @@ static int qat_alg_aead_init(struct crypto_tfm *tfm, | |||
1101 | { | 1098 | { |
1102 | struct qat_alg_aead_ctx *ctx = crypto_tfm_ctx(tfm); | 1099 | struct qat_alg_aead_ctx *ctx = crypto_tfm_ctx(tfm); |
1103 | 1100 | ||
1104 | memzero_explicit(ctx, sizeof(*ctx)); | ||
1105 | ctx->hash_tfm = crypto_alloc_shash(hash_name, 0, 0); | 1101 | ctx->hash_tfm = crypto_alloc_shash(hash_name, 0, 0); |
1106 | if (IS_ERR(ctx->hash_tfm)) | 1102 | if (IS_ERR(ctx->hash_tfm)) |
1107 | return -EFAULT; | 1103 | return -EFAULT; |
@@ -1142,12 +1138,12 @@ static void qat_alg_aead_exit(struct crypto_tfm *tfm) | |||
1142 | 1138 | ||
1143 | dev = &GET_DEV(inst->accel_dev); | 1139 | dev = &GET_DEV(inst->accel_dev); |
1144 | if (ctx->enc_cd) { | 1140 | if (ctx->enc_cd) { |
1145 | memzero_explicit(ctx->enc_cd, sizeof(struct qat_alg_cd)); | 1141 | memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd)); |
1146 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), | 1142 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), |
1147 | ctx->enc_cd, ctx->enc_cd_paddr); | 1143 | ctx->enc_cd, ctx->enc_cd_paddr); |
1148 | } | 1144 | } |
1149 | if (ctx->dec_cd) { | 1145 | if (ctx->dec_cd) { |
1150 | memzero_explicit(ctx->dec_cd, sizeof(struct qat_alg_cd)); | 1146 | memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd)); |
1151 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), | 1147 | dma_free_coherent(dev, sizeof(struct qat_alg_cd), |
1152 | ctx->dec_cd, ctx->dec_cd_paddr); | 1148 | ctx->dec_cd, ctx->dec_cd_paddr); |
1153 | } | 1149 | } |
@@ -1158,7 +1154,6 @@ static int qat_alg_ablkcipher_init(struct crypto_tfm *tfm) | |||
1158 | { | 1154 | { |
1159 | struct qat_alg_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm); | 1155 | struct qat_alg_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm); |
1160 | 1156 | ||
1161 | memzero_explicit(ctx, sizeof(*ctx)); | ||
1162 | spin_lock_init(&ctx->lock); | 1157 | spin_lock_init(&ctx->lock); |
1163 | tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) + | 1158 | tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) + |
1164 | sizeof(struct qat_crypto_request); | 1159 | sizeof(struct qat_crypto_request); |
@@ -1177,15 +1172,15 @@ static void qat_alg_ablkcipher_exit(struct crypto_tfm *tfm) | |||
1177 | 1172 | ||
1178 | dev = &GET_DEV(inst->accel_dev); | 1173 | dev = &GET_DEV(inst->accel_dev); |
1179 | if (ctx->enc_cd) { | 1174 | if (ctx->enc_cd) { |
1180 | memzero_explicit(ctx->enc_cd, | 1175 | memset(ctx->enc_cd, 0, |
1181 | sizeof(struct icp_qat_hw_cipher_algo_blk)); | 1176 | sizeof(struct icp_qat_hw_cipher_algo_blk)); |
1182 | dma_free_coherent(dev, | 1177 | dma_free_coherent(dev, |
1183 | sizeof(struct icp_qat_hw_cipher_algo_blk), | 1178 | sizeof(struct icp_qat_hw_cipher_algo_blk), |
1184 | ctx->enc_cd, ctx->enc_cd_paddr); | 1179 | ctx->enc_cd, ctx->enc_cd_paddr); |
1185 | } | 1180 | } |
1186 | if (ctx->dec_cd) { | 1181 | if (ctx->dec_cd) { |
1187 | memzero_explicit(ctx->dec_cd, | 1182 | memset(ctx->dec_cd, 0, |
1188 | sizeof(struct icp_qat_hw_cipher_algo_blk)); | 1183 | sizeof(struct icp_qat_hw_cipher_algo_blk)); |
1189 | dma_free_coherent(dev, | 1184 | dma_free_coherent(dev, |
1190 | sizeof(struct icp_qat_hw_cipher_algo_blk), | 1185 | sizeof(struct icp_qat_hw_cipher_algo_blk), |
1191 | ctx->dec_cd, ctx->dec_cd_paddr); | 1186 | ctx->dec_cd, ctx->dec_cd_paddr); |