-rw-r--r--  crypto/ahash.c              |  17
-rw-r--r--  crypto/algapi.c             | 233
-rw-r--r--  crypto/rng.c                |   4
-rw-r--r--  include/crypto/acompress.h  |  38
-rw-r--r--  include/crypto/aead.h       |  38
-rw-r--r--  include/crypto/akcipher.h   |  74
-rw-r--r--  include/crypto/hash.h       |  32
-rw-r--r--  include/crypto/kpp.h        |  48
-rw-r--r--  include/crypto/rng.h        |  27
-rw-r--r--  include/crypto/skcipher.h   |  36
-rw-r--r--  include/linux/crypto.h      | 105
11 files changed, 376 insertions, 276 deletions
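
Every hunk in this patch applies the same conversion: the crypto_stat_*() inline helpers are dropped from the public headers, exported crypto_stats_*() functions take their place in crypto/algapi.c, and each operation wrapper now calls crypto_stats_get() to pin the algorithm before the operation, saves the request length up front, and hands the saved length and return code to the matching crypto_stats_*() helper afterwards. A minimal sketch of the pattern the wrappers converge on; the "foo" operation type, its request fields and crypto_stats_foo_encrypt() are placeholders used for illustration only, not symbols added by this patch:

  /* Sketch of the common wrapper shape; "foo" is a hypothetical op type. */
  static inline int crypto_foo_encrypt(struct foo_request *req)
  {
          struct crypto_foo *tfm = crypto_foo_reqtfm(req);
          struct crypto_alg *alg = tfm->base.__crt_alg;
          unsigned int len = req->len;            /* captured before the op runs */
          int ret;

          crypto_stats_get(alg);                  /* hold a reference across the op */
          ret = crypto_foo_alg(tfm)->encrypt(req);
          crypto_stats_foo_encrypt(len, ret, alg);        /* accounts, drops the ref */
          return ret;
  }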
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 3a348fbcf8f9..5d320a811f75 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -364,20 +364,28 @@ static int crypto_ahash_op(struct ahash_request *req,
 
 int crypto_ahash_final(struct ahash_request *req)
 {
+        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int nbytes = req->nbytes;
         int ret;
 
+        crypto_stats_get(alg);
         ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
-        crypto_stat_ahash_final(req, ret);
+        crypto_stats_ahash_final(nbytes, ret, alg);
         return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_ahash_final);
 
 int crypto_ahash_finup(struct ahash_request *req)
 {
+        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int nbytes = req->nbytes;
         int ret;
 
+        crypto_stats_get(alg);
         ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
-        crypto_stat_ahash_final(req, ret);
+        crypto_stats_ahash_final(nbytes, ret, alg);
         return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_ahash_finup);
@@ -385,13 +393,16 @@ EXPORT_SYMBOL_GPL(crypto_ahash_finup);
 int crypto_ahash_digest(struct ahash_request *req)
 {
         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int nbytes = req->nbytes;
         int ret;
 
+        crypto_stats_get(alg);
         if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                 ret = -ENOKEY;
         else
                 ret = crypto_ahash_op(req, tfm->digest);
-        crypto_stat_ahash_final(req, ret);
+        crypto_stats_ahash_final(nbytes, ret, alg);
         return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_ahash_digest);
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 42fe316f80ee..4c1e6079d271 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -1078,6 +1078,239 @@ int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
 }
 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
 
+#ifdef CONFIG_CRYPTO_STATS
+void crypto_stats_get(struct crypto_alg *alg)
+{
+        crypto_alg_get(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_get);
+
+void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret,
+                                     struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->cipher_err_cnt);
+        } else {
+                atomic64_inc(&alg->encrypt_cnt);
+                atomic64_add(nbytes, &alg->encrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_encrypt);
+
+void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret,
+                                     struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->cipher_err_cnt);
+        } else {
+                atomic64_inc(&alg->decrypt_cnt);
+                atomic64_add(nbytes, &alg->decrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_decrypt);
+
+void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
+                               int ret)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->aead_err_cnt);
+        } else {
+                atomic64_inc(&alg->encrypt_cnt);
+                atomic64_add(cryptlen, &alg->encrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
+
+void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
+                               int ret)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->aead_err_cnt);
+        } else {
+                atomic64_inc(&alg->decrypt_cnt);
+                atomic64_add(cryptlen, &alg->decrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
+
+void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
+                                   struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->akcipher_err_cnt);
+        } else {
+                atomic64_inc(&alg->encrypt_cnt);
+                atomic64_add(src_len, &alg->encrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
+
+void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
+                                   struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->akcipher_err_cnt);
+        } else {
+                atomic64_inc(&alg->decrypt_cnt);
+                atomic64_add(src_len, &alg->decrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
+
+void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+                atomic64_inc(&alg->akcipher_err_cnt);
+        else
+                atomic64_inc(&alg->sign_cnt);
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
+
+void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+                atomic64_inc(&alg->akcipher_err_cnt);
+        else
+                atomic64_inc(&alg->verify_cnt);
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
+
+void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->compress_err_cnt);
+        } else {
+                atomic64_inc(&alg->compress_cnt);
+                atomic64_add(slen, &alg->compress_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_compress);
+
+void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->compress_err_cnt);
+        } else {
+                atomic64_inc(&alg->decompress_cnt);
+                atomic64_add(slen, &alg->decompress_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_decompress);
+
+void crypto_stats_ahash_update(unsigned int nbytes, int ret,
+                               struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+                atomic64_inc(&alg->hash_err_cnt);
+        else
+                atomic64_add(nbytes, &alg->hash_tlen);
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
+
+void crypto_stats_ahash_final(unsigned int nbytes, int ret,
+                              struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->hash_err_cnt);
+        } else {
+                atomic64_inc(&alg->hash_cnt);
+                atomic64_add(nbytes, &alg->hash_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
+
+void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
+{
+        if (ret)
+                atomic64_inc(&alg->kpp_err_cnt);
+        else
+                atomic64_inc(&alg->setsecret_cnt);
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
+
+void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
+{
+        if (ret)
+                atomic64_inc(&alg->kpp_err_cnt);
+        else
+                atomic64_inc(&alg->generate_public_key_cnt);
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
+
+void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
+{
+        if (ret)
+                atomic64_inc(&alg->kpp_err_cnt);
+        else
+                atomic64_inc(&alg->compute_shared_secret_cnt);
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
+
+void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+                atomic64_inc(&alg->rng_err_cnt);
+        else
+                atomic64_inc(&alg->seed_cnt);
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
+
+void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
+                               int ret)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->rng_err_cnt);
+        } else {
+                atomic64_inc(&alg->generate_cnt);
+                atomic64_add(dlen, &alg->generate_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
+
+void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
+                                   struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->cipher_err_cnt);
+        } else {
+                atomic64_inc(&alg->encrypt_cnt);
+                atomic64_add(cryptlen, &alg->encrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
+
+void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
+                                   struct crypto_alg *alg)
+{
+        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
+                atomic64_inc(&alg->cipher_err_cnt);
+        } else {
+                atomic64_inc(&alg->decrypt_cnt);
+                atomic64_add(cryptlen, &alg->decrypt_tlen);
+        }
+        crypto_alg_put(alg);
+}
+EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
+#endif
+
 static int __init crypto_algapi_init(void)
 {
         crypto_init_proc();
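
The helpers added to crypto/algapi.c above all follow one accounting template: a return value other than 0, -EINPROGRESS or -EBUSY bumps the type's *_err_cnt counter, anything else bumps the operation counter and, where a length is supplied, the matching *_tlen total; every helper then drops the reference taken by crypto_stats_get() in the caller. (The kpp helpers are the exception in that any non-zero return counts as an error.) A condensed sketch of that template; "xxx" stands in for the operation name and is not a real symbol:

  void crypto_stats_xxx(unsigned int tlen, int ret, struct crypto_alg *alg)
  {
          if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
                  atomic64_inc(&alg->xxx_err_cnt);        /* hard failure */
          } else {
                  atomic64_inc(&alg->xxx_cnt);            /* completed or queued */
                  atomic64_add(tlen, &alg->xxx_tlen);
          }
          crypto_alg_put(alg);            /* pairs with crypto_stats_get() */
  }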
diff --git a/crypto/rng.c b/crypto/rng.c
index 2406501b90b7..33c38a72bff5 100644
--- a/crypto/rng.c
+++ b/crypto/rng.c
@@ -35,9 +35,11 @@ static int crypto_default_rng_refcnt;
 
 int crypto_rng_reset(struct crypto_rng *tfm, const u8 *seed, unsigned int slen)
 {
+        struct crypto_alg *alg = tfm->base.__crt_alg;
         u8 *buf = NULL;
         int err;
 
+        crypto_stats_get(alg);
         if (!seed && slen) {
                 buf = kmalloc(slen, GFP_KERNEL);
                 if (!buf)
@@ -50,7 +52,7 @@ int crypto_rng_reset(struct crypto_rng *tfm, const u8 *seed, unsigned int slen)
         }
 
         err = crypto_rng_alg(tfm)->seed(tfm, seed, slen);
-        crypto_stat_rng_seed(tfm, err);
+        crypto_stats_rng_seed(alg, err);
 out:
         kzfree(buf);
         return err;
diff --git a/include/crypto/acompress.h b/include/crypto/acompress.h
index f79918196811..a3e766dff917 100644
--- a/include/crypto/acompress.h
+++ b/include/crypto/acompress.h
@@ -234,34 +234,6 @@ static inline void acomp_request_set_params(struct acomp_req *req,
                 req->flags |= CRYPTO_ACOMP_ALLOC_OUTPUT;
 }
 
-static inline void crypto_stat_compress(struct acomp_req *req, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->compress_cnt);
-                atomic64_add(req->slen, &tfm->base.__crt_alg->compress_tlen);
-        }
-#endif
-}
-
-static inline void crypto_stat_decompress(struct acomp_req *req, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->decompress_cnt);
-                atomic64_add(req->slen, &tfm->base.__crt_alg->decompress_tlen);
-        }
-#endif
-}
-
 /**
  * crypto_acomp_compress() -- Invoke asynchronous compress operation
  *
@@ -274,10 +246,13 @@ static inline void crypto_stat_decompress(struct acomp_req *req, int ret)
 static inline int crypto_acomp_compress(struct acomp_req *req)
 {
         struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int slen = req->slen;
         int ret;
 
+        crypto_stats_get(alg);
         ret = tfm->compress(req);
-        crypto_stat_compress(req, ret);
+        crypto_stats_compress(slen, ret, alg);
         return ret;
 }
 
@@ -293,10 +268,13 @@ static inline int crypto_acomp_compress(struct acomp_req *req)
 static inline int crypto_acomp_decompress(struct acomp_req *req)
 {
         struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int slen = req->slen;
         int ret;
 
+        crypto_stats_get(alg);
         ret = tfm->decompress(req);
-        crypto_stat_decompress(req, ret);
+        crypto_stats_decompress(slen, ret, alg);
         return ret;
 }
 
diff --git a/include/crypto/aead.h b/include/crypto/aead.h
index 99afd78c665d..b7b8d24cf765 100644
--- a/include/crypto/aead.h
+++ b/include/crypto/aead.h
@@ -306,34 +306,6 @@ static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
         return __crypto_aead_cast(req->base.tfm);
 }
 
-static inline void crypto_stat_aead_encrypt(struct aead_request *req, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
-                atomic64_add(req->cryptlen, &tfm->base.__crt_alg->encrypt_tlen);
-        }
-#endif
-}
-
-static inline void crypto_stat_aead_decrypt(struct aead_request *req, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
-                atomic64_add(req->cryptlen, &tfm->base.__crt_alg->decrypt_tlen);
-        }
-#endif
-}
-
 /**
  * crypto_aead_encrypt() - encrypt plaintext
  * @req: reference to the aead_request handle that holds all information
@@ -356,13 +328,16 @@ static inline void crypto_stat_aead_decrypt(struct aead_request *req, int ret)
 static inline int crypto_aead_encrypt(struct aead_request *req)
 {
         struct crypto_aead *aead = crypto_aead_reqtfm(req);
+        struct crypto_alg *alg = aead->base.__crt_alg;
+        unsigned int cryptlen = req->cryptlen;
         int ret;
 
+        crypto_stats_get(alg);
         if (crypto_aead_get_flags(aead) & CRYPTO_TFM_NEED_KEY)
                 ret = -ENOKEY;
         else
                 ret = crypto_aead_alg(aead)->encrypt(req);
-        crypto_stat_aead_encrypt(req, ret);
+        crypto_stats_aead_encrypt(cryptlen, alg, ret);
         return ret;
 }
 
@@ -391,15 +366,18 @@ static inline int crypto_aead_encrypt(struct aead_request *req)
 static inline int crypto_aead_decrypt(struct aead_request *req)
 {
         struct crypto_aead *aead = crypto_aead_reqtfm(req);
+        struct crypto_alg *alg = aead->base.__crt_alg;
+        unsigned int cryptlen = req->cryptlen;
         int ret;
 
+        crypto_stats_get(alg);
         if (crypto_aead_get_flags(aead) & CRYPTO_TFM_NEED_KEY)
                 ret = -ENOKEY;
         else if (req->cryptlen < crypto_aead_authsize(aead))
                 ret = -EINVAL;
         else
                 ret = crypto_aead_alg(aead)->decrypt(req);
-        crypto_stat_aead_decrypt(req, ret);
+        crypto_stats_aead_decrypt(cryptlen, alg, ret);
         return ret;
 }
 
diff --git a/include/crypto/akcipher.h b/include/crypto/akcipher.h
index 3dc05cf7e0a9..2d690494568c 100644
--- a/include/crypto/akcipher.h
+++ b/include/crypto/akcipher.h
@@ -271,62 +271,6 @@ static inline unsigned int crypto_akcipher_maxsize(struct crypto_akcipher *tfm)
         return alg->max_size(tfm);
 }
 
-static inline void crypto_stat_akcipher_encrypt(struct akcipher_request *req,
-                                                int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
-                atomic64_add(req->src_len, &tfm->base.__crt_alg->encrypt_tlen);
-        }
-#endif
-}
-
-static inline void crypto_stat_akcipher_decrypt(struct akcipher_request *req,
-                                                int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
-                atomic64_add(req->src_len, &tfm->base.__crt_alg->decrypt_tlen);
-        }
-#endif
-}
-
-static inline void crypto_stat_akcipher_sign(struct akcipher_request *req,
-                                             int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
-        else
-                atomic64_inc(&tfm->base.__crt_alg->sign_cnt);
-#endif
-}
-
-static inline void crypto_stat_akcipher_verify(struct akcipher_request *req,
-                                               int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
-        else
-                atomic64_inc(&tfm->base.__crt_alg->verify_cnt);
-#endif
-}
-
 /**
  * crypto_akcipher_encrypt() - Invoke public key encrypt operation
  *
@@ -341,10 +285,13 @@ static inline int crypto_akcipher_encrypt(struct akcipher_request *req)
 {
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
         struct akcipher_alg *alg = crypto_akcipher_alg(tfm);
+        struct crypto_alg *calg = tfm->base.__crt_alg;
+        unsigned int src_len = req->src_len;
         int ret;
 
+        crypto_stats_get(calg);
         ret = alg->encrypt(req);
-        crypto_stat_akcipher_encrypt(req, ret);
+        crypto_stats_akcipher_encrypt(src_len, ret, calg);
         return ret;
 }
 
@@ -362,10 +309,13 @@ static inline int crypto_akcipher_decrypt(struct akcipher_request *req)
 {
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
         struct akcipher_alg *alg = crypto_akcipher_alg(tfm);
+        struct crypto_alg *calg = tfm->base.__crt_alg;
+        unsigned int src_len = req->src_len;
         int ret;
 
+        crypto_stats_get(calg);
         ret = alg->decrypt(req);
-        crypto_stat_akcipher_decrypt(req, ret);
+        crypto_stats_akcipher_decrypt(src_len, ret, calg);
         return ret;
 }
 
@@ -383,10 +333,12 @@ static inline int crypto_akcipher_sign(struct akcipher_request *req)
 {
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
         struct akcipher_alg *alg = crypto_akcipher_alg(tfm);
+        struct crypto_alg *calg = tfm->base.__crt_alg;
         int ret;
 
+        crypto_stats_get(calg);
         ret = alg->sign(req);
-        crypto_stat_akcipher_sign(req, ret);
+        crypto_stats_akcipher_sign(ret, calg);
         return ret;
 }
 
@@ -404,10 +356,12 @@ static inline int crypto_akcipher_verify(struct akcipher_request *req)
 {
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
         struct akcipher_alg *alg = crypto_akcipher_alg(tfm);
+        struct crypto_alg *calg = tfm->base.__crt_alg;
         int ret;
 
+        crypto_stats_get(calg);
         ret = alg->verify(req);
-        crypto_stat_akcipher_verify(req, ret);
+        crypto_stats_akcipher_verify(ret, calg);
         return ret;
 }
 
diff --git a/include/crypto/hash.h b/include/crypto/hash.h
index 52920bed05ba..3b31c1b349ae 100644
--- a/include/crypto/hash.h
+++ b/include/crypto/hash.h
@@ -412,32 +412,6 @@ static inline void *ahash_request_ctx(struct ahash_request *req)
 int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
                         unsigned int keylen);
 
-static inline void crypto_stat_ahash_update(struct ahash_request *req, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
-        else
-                atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
-#endif
-}
-
-static inline void crypto_stat_ahash_final(struct ahash_request *req, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->hash_cnt);
-                atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
-        }
-#endif
-}
-
 /**
  * crypto_ahash_finup() - update and finalize message digest
  * @req: reference to the ahash_request handle that holds all information
@@ -552,10 +526,14 @@ static inline int crypto_ahash_init(struct ahash_request *req)
  */
 static inline int crypto_ahash_update(struct ahash_request *req)
 {
+        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int nbytes = req->nbytes;
         int ret;
 
+        crypto_stats_get(alg);
         ret = crypto_ahash_reqtfm(req)->update(req);
-        crypto_stat_ahash_update(req, ret);
+        crypto_stats_ahash_update(nbytes, ret, alg);
         return ret;
 }
 
diff --git a/include/crypto/kpp.h b/include/crypto/kpp.h
index bd5103a80919..1a97e1601422 100644
--- a/include/crypto/kpp.h
+++ b/include/crypto/kpp.h
@@ -268,42 +268,6 @@ struct kpp_secret {
         unsigned short len;
 };
 
-static inline void crypto_stat_kpp_set_secret(struct crypto_kpp *tfm, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        if (ret)
-                atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
-        else
-                atomic64_inc(&tfm->base.__crt_alg->setsecret_cnt);
-#endif
-}
-
-static inline void crypto_stat_kpp_generate_public_key(struct kpp_request *req,
-                                                       int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
-
-        if (ret)
-                atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
-        else
-                atomic64_inc(&tfm->base.__crt_alg->generate_public_key_cnt);
-#endif
-}
-
-static inline void crypto_stat_kpp_compute_shared_secret(struct kpp_request *req,
-                                                         int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
-
-        if (ret)
-                atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
-        else
-                atomic64_inc(&tfm->base.__crt_alg->compute_shared_secret_cnt);
-#endif
-}
-
 /**
  * crypto_kpp_set_secret() - Invoke kpp operation
  *
@@ -323,10 +287,12 @@ static inline int crypto_kpp_set_secret(struct crypto_kpp *tfm,
                                 const void *buffer, unsigned int len)
 {
         struct kpp_alg *alg = crypto_kpp_alg(tfm);
+        struct crypto_alg *calg = tfm->base.__crt_alg;
         int ret;
 
+        crypto_stats_get(calg);
         ret = alg->set_secret(tfm, buffer, len);
-        crypto_stat_kpp_set_secret(tfm, ret);
+        crypto_stats_kpp_set_secret(calg, ret);
         return ret;
 }
 
@@ -347,10 +313,12 @@ static inline int crypto_kpp_generate_public_key(struct kpp_request *req)
 {
         struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
         struct kpp_alg *alg = crypto_kpp_alg(tfm);
+        struct crypto_alg *calg = tfm->base.__crt_alg;
         int ret;
 
+        crypto_stats_get(calg);
         ret = alg->generate_public_key(req);
-        crypto_stat_kpp_generate_public_key(req, ret);
+        crypto_stats_kpp_generate_public_key(calg, ret);
         return ret;
 }
 
@@ -368,10 +336,12 @@ static inline int crypto_kpp_compute_shared_secret(struct kpp_request *req)
 {
         struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
         struct kpp_alg *alg = crypto_kpp_alg(tfm);
+        struct crypto_alg *calg = tfm->base.__crt_alg;
         int ret;
 
+        crypto_stats_get(calg);
         ret = alg->compute_shared_secret(req);
-        crypto_stat_kpp_compute_shared_secret(req, ret);
+        crypto_stats_kpp_compute_shared_secret(calg, ret);
         return ret;
 }
 
diff --git a/include/crypto/rng.h b/include/crypto/rng.h
index 966615bba45e..022a1b896b47 100644
--- a/include/crypto/rng.h
+++ b/include/crypto/rng.h
@@ -122,29 +122,6 @@ static inline void crypto_free_rng(struct crypto_rng *tfm)
         crypto_destroy_tfm(tfm, crypto_rng_tfm(tfm));
 }
 
-static inline void crypto_stat_rng_seed(struct crypto_rng *tfm, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
-        else
-                atomic64_inc(&tfm->base.__crt_alg->seed_cnt);
-#endif
-}
-
-static inline void crypto_stat_rng_generate(struct crypto_rng *tfm,
-                                            unsigned int dlen, int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
-        } else {
-                atomic64_inc(&tfm->base.__crt_alg->generate_cnt);
-                atomic64_add(dlen, &tfm->base.__crt_alg->generate_tlen);
-        }
-#endif
-}
-
 /**
  * crypto_rng_generate() - get random number
  * @tfm: cipher handle
@@ -163,10 +140,12 @@ static inline int crypto_rng_generate(struct crypto_rng *tfm,
                                       const u8 *src, unsigned int slen,
                                       u8 *dst, unsigned int dlen)
 {
+        struct crypto_alg *alg = tfm->base.__crt_alg;
         int ret;
 
+        crypto_stats_get(alg);
         ret = crypto_rng_alg(tfm)->generate(tfm, src, slen, dst, dlen);
-        crypto_stat_rng_generate(tfm, dlen, ret);
+        crypto_stats_rng_generate(alg, dlen, ret);
         return ret;
 }
 
diff --git a/include/crypto/skcipher.h b/include/crypto/skcipher.h
index dff54731ddf4..480f8301a47d 100644
--- a/include/crypto/skcipher.h
+++ b/include/crypto/skcipher.h
@@ -486,32 +486,6 @@ static inline struct crypto_sync_skcipher *crypto_sync_skcipher_reqtfm(
         return container_of(tfm, struct crypto_sync_skcipher, base);
 }
 
-static inline void crypto_stat_skcipher_encrypt(struct skcipher_request *req,
-                                                int ret, struct crypto_alg *alg)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&alg->cipher_err_cnt);
-        } else {
-                atomic64_inc(&alg->encrypt_cnt);
-                atomic64_add(req->cryptlen, &alg->encrypt_tlen);
-        }
-#endif
-}
-
-static inline void crypto_stat_skcipher_decrypt(struct skcipher_request *req,
-                                                int ret, struct crypto_alg *alg)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&alg->cipher_err_cnt);
-        } else {
-                atomic64_inc(&alg->decrypt_cnt);
-                atomic64_add(req->cryptlen, &alg->decrypt_tlen);
-        }
-#endif
-}
-
 /**
  * crypto_skcipher_encrypt() - encrypt plaintext
  * @req: reference to the skcipher_request handle that holds all information
@@ -526,13 +500,16 @@ static inline void crypto_stat_skcipher_decrypt(struct skcipher_request *req,
 static inline int crypto_skcipher_encrypt(struct skcipher_request *req)
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int cryptlen = req->cryptlen;
         int ret;
 
+        crypto_stats_get(alg);
         if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                 ret = -ENOKEY;
         else
                 ret = tfm->encrypt(req);
-        crypto_stat_skcipher_encrypt(req, ret, tfm->base.__crt_alg);
+        crypto_stats_skcipher_encrypt(cryptlen, ret, alg);
         return ret;
 }
 
@@ -550,13 +527,16 @@ static inline int crypto_skcipher_encrypt(struct skcipher_request *req)
 static inline int crypto_skcipher_decrypt(struct skcipher_request *req)
 {
         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct crypto_alg *alg = tfm->base.__crt_alg;
+        unsigned int cryptlen = req->cryptlen;
         int ret;
 
+        crypto_stats_get(alg);
         if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                 ret = -ENOKEY;
         else
                 ret = tfm->decrypt(req);
-        crypto_stat_skcipher_decrypt(req, ret, tfm->base.__crt_alg);
+        crypto_stats_skcipher_decrypt(cryptlen, ret, alg);
         return ret;
 }
 
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index b109b50906e7..e2fd24714e00 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -557,6 +557,69 @@ struct crypto_alg {
 
 } CRYPTO_MINALIGN_ATTR;
 
+#ifdef CONFIG_CRYPTO_STATS
+void crypto_stats_get(struct crypto_alg *alg);
+void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret, struct crypto_alg *alg);
+void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret, struct crypto_alg *alg);
+void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
+void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
+void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
+void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
+void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
+void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
+void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
+void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
+void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
+void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
+void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
+void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
+void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
+void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
+void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
+void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
+void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
+#else
+static inline void crypto_stats_get(struct crypto_alg *alg)
+{}
+static inline void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
+{}
+static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
+{}
+static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
+{}
+static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
+{}
+static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
+{}
+static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
+{}
+static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
+{}
+static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
+{}
+static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
+{}
+#endif
 /*
  * A helper struct for waiting for completion of async crypto ops
  */
@@ -975,38 +1038,6 @@ static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
         return __crypto_ablkcipher_cast(req->base.tfm);
 }
 
-static inline void crypto_stat_ablkcipher_encrypt(struct ablkcipher_request *req,
-                                                  int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct ablkcipher_tfm *crt =
-                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
-        } else {
-                atomic64_inc(&crt->base->base.__crt_alg->encrypt_cnt);
-                atomic64_add(req->nbytes, &crt->base->base.__crt_alg->encrypt_tlen);
-        }
-#endif
-}
-
-static inline void crypto_stat_ablkcipher_decrypt(struct ablkcipher_request *req,
-                                                  int ret)
-{
-#ifdef CONFIG_CRYPTO_STATS
-        struct ablkcipher_tfm *crt =
-                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
-
-        if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
-        } else {
-                atomic64_inc(&crt->base->base.__crt_alg->decrypt_cnt);
-                atomic64_add(req->nbytes, &crt->base->base.__crt_alg->decrypt_tlen);
-        }
-#endif
-}
-
 /**
  * crypto_ablkcipher_encrypt() - encrypt plaintext
  * @req: reference to the ablkcipher_request handle that holds all information
@@ -1022,10 +1053,13 @@ static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
 {
         struct ablkcipher_tfm *crt =
                 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
+        struct crypto_alg *alg = crt->base->base.__crt_alg;
+        unsigned int nbytes = req->nbytes;
         int ret;
 
+        crypto_stats_get(alg);
         ret = crt->encrypt(req);
-        crypto_stat_ablkcipher_encrypt(req, ret);
+        crypto_stats_ablkcipher_encrypt(nbytes, ret, alg);
         return ret;
 }
 
@@ -1044,10 +1078,13 @@ static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
 {
         struct ablkcipher_tfm *crt =
                 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
+        struct crypto_alg *alg = crt->base->base.__crt_alg;
+        unsigned int nbytes = req->nbytes;
         int ret;
 
+        crypto_stats_get(alg);
         ret = crt->decrypt(req);
-        crypto_stat_ablkcipher_decrypt(req, ret);
+        crypto_stats_ablkcipher_decrypt(nbytes, ret, alg);
         return ret;
 }
 
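
With the prototypes and the empty !CONFIG_CRYPTO_STATS stubs now living in include/linux/crypto.h, users of the wrappers need no changes of their own: when the option is disabled the crypto_stats_*() calls are empty static inlines and should compile away. A rough illustration of an unchanged caller; the example function is hypothetical, and only crypto_skcipher_encrypt() and crypto_wait_req() come from the existing kernel API:

  /* Hypothetical caller: statistics accounting happens inside the wrapper. */
  static int example_encrypt(struct skcipher_request *req, struct crypto_wait *wait)
  {
          int ret;

          ret = crypto_skcipher_encrypt(req);     /* updates stats if enabled */
          return crypto_wait_req(ret, wait);      /* usual -EINPROGRESS/-EBUSY handling */
  }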