author     Corentin Labbe <clabbe@baylibre.com>        2018-11-29 09:42:18 -0500
committer  Herbert Xu <herbert@gondor.apana.org.au>    2018-12-07 01:15:00 -0500
commit     6e8e72cd206e2ba68801e4f2490f639d41808c8d (patch)
tree       6c89421ccb76ab0abe12e0d132c4c9a6a9de914b
parent     a6a31385364ca0f7b98ace0bad93d793f07f97f3 (diff)
crypto: user - convert all stats from u32 to u64
All the 32-bit fields need to be 64-bit. In some cases, UINT32_MAX
crypto operations can be done in seconds.

Reported-by: Eric Biggers <ebiggers@kernel.org>
Signed-off-by: Corentin Labbe <clabbe@baylibre.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
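As a rough sanity check of the rationale above, a minimal userspace sketch of the wrap-around arithmetic; the 10 million operations per second throughput is an assumed figure for illustration, not a number taken from this patch:

/*
 * Hypothetical illustration only: how long a 32-bit counter lasts
 * versus a 64-bit one at an assumed request rate.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        const double ops_per_sec = 10e6; /* assumed throughput, not measured */
        double secs32 = (double)UINT32_MAX / ops_per_sec;
        double secs64 = (double)UINT64_MAX / ops_per_sec;

        printf("u32 counter wraps after ~%.0f seconds (~%.1f minutes)\n",
               secs32, secs32 / 60.0);
        printf("u64 counter wraps after ~%.0f years\n",
               secs64 / (3600.0 * 24.0 * 365.0));
        return 0;
}

At that assumed rate a 32-bit counter wraps in roughly seven minutes, while a 64-bit counter effectively never wraps, which is why every per-algorithm counter below moves from atomic_t/__u32 to atomic64_t/__u64.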
-rw-r--r--  crypto/algapi.c                    10
-rw-r--r--  crypto/crypto_user_stat.c         114
-rw-r--r--  include/crypto/acompress.h          8
-rw-r--r--  include/crypto/aead.h               8
-rw-r--r--  include/crypto/akcipher.h          16
-rw-r--r--  include/crypto/hash.h               6
-rw-r--r--  include/crypto/kpp.h               12
-rw-r--r--  include/crypto/rng.h                8
-rw-r--r--  include/crypto/skcipher.h           8
-rw-r--r--  include/linux/crypto.h             46
-rw-r--r--  include/uapi/linux/cryptouser.h    38
11 files changed, 133 insertions(+), 141 deletions(-)
diff --git a/crypto/algapi.c b/crypto/algapi.c
index f5396c88e8cd..42fe316f80ee 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -259,13 +259,13 @@ static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
         list_add(&larval->alg.cra_list, &crypto_alg_list);
 
 #ifdef CONFIG_CRYPTO_STATS
-        atomic_set(&alg->encrypt_cnt, 0);
-        atomic_set(&alg->decrypt_cnt, 0);
+        atomic64_set(&alg->encrypt_cnt, 0);
+        atomic64_set(&alg->decrypt_cnt, 0);
         atomic64_set(&alg->encrypt_tlen, 0);
         atomic64_set(&alg->decrypt_tlen, 0);
-        atomic_set(&alg->verify_cnt, 0);
-        atomic_set(&alg->cipher_err_cnt, 0);
-        atomic_set(&alg->sign_cnt, 0);
+        atomic64_set(&alg->verify_cnt, 0);
+        atomic64_set(&alg->cipher_err_cnt, 0);
+        atomic64_set(&alg->sign_cnt, 0);
 #endif
 
 out:
diff --git a/crypto/crypto_user_stat.c b/crypto/crypto_user_stat.c
index a6fb2e6f618d..352569f378a0 100644
--- a/crypto/crypto_user_stat.c
+++ b/crypto/crypto_user_stat.c
@@ -35,22 +35,21 @@ static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat raead;
         u64 v64;
-        u32 v32;
 
         memset(&raead, 0, sizeof(raead));
 
         strscpy(raead.type, "aead", sizeof(raead.type));
 
-        v32 = atomic_read(&alg->encrypt_cnt);
-        raead.stat_encrypt_cnt = v32;
+        v64 = atomic64_read(&alg->encrypt_cnt);
+        raead.stat_encrypt_cnt = v64;
         v64 = atomic64_read(&alg->encrypt_tlen);
         raead.stat_encrypt_tlen = v64;
-        v32 = atomic_read(&alg->decrypt_cnt);
-        raead.stat_decrypt_cnt = v32;
+        v64 = atomic64_read(&alg->decrypt_cnt);
+        raead.stat_decrypt_cnt = v64;
         v64 = atomic64_read(&alg->decrypt_tlen);
         raead.stat_decrypt_tlen = v64;
-        v32 = atomic_read(&alg->aead_err_cnt);
-        raead.stat_aead_err_cnt = v32;
+        v64 = atomic64_read(&alg->aead_err_cnt);
+        raead.stat_aead_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
 }
@@ -59,22 +58,21 @@ static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat rcipher;
         u64 v64;
-        u32 v32;
 
         memset(&rcipher, 0, sizeof(rcipher));
 
         strscpy(rcipher.type, "cipher", sizeof(rcipher.type));
 
-        v32 = atomic_read(&alg->encrypt_cnt);
-        rcipher.stat_encrypt_cnt = v32;
+        v64 = atomic64_read(&alg->encrypt_cnt);
+        rcipher.stat_encrypt_cnt = v64;
         v64 = atomic64_read(&alg->encrypt_tlen);
         rcipher.stat_encrypt_tlen = v64;
-        v32 = atomic_read(&alg->decrypt_cnt);
-        rcipher.stat_decrypt_cnt = v32;
+        v64 = atomic64_read(&alg->decrypt_cnt);
+        rcipher.stat_decrypt_cnt = v64;
         v64 = atomic64_read(&alg->decrypt_tlen);
         rcipher.stat_decrypt_tlen = v64;
-        v32 = atomic_read(&alg->cipher_err_cnt);
-        rcipher.stat_cipher_err_cnt = v32;
+        v64 = atomic64_read(&alg->cipher_err_cnt);
+        rcipher.stat_cipher_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
 }
@@ -83,21 +81,20 @@ static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat rcomp;
         u64 v64;
-        u32 v32;
 
         memset(&rcomp, 0, sizeof(rcomp));
 
         strscpy(rcomp.type, "compression", sizeof(rcomp.type));
-        v32 = atomic_read(&alg->compress_cnt);
-        rcomp.stat_compress_cnt = v32;
+        v64 = atomic64_read(&alg->compress_cnt);
+        rcomp.stat_compress_cnt = v64;
         v64 = atomic64_read(&alg->compress_tlen);
         rcomp.stat_compress_tlen = v64;
-        v32 = atomic_read(&alg->decompress_cnt);
-        rcomp.stat_decompress_cnt = v32;
+        v64 = atomic64_read(&alg->decompress_cnt);
+        rcomp.stat_decompress_cnt = v64;
         v64 = atomic64_read(&alg->decompress_tlen);
         rcomp.stat_decompress_tlen = v64;
-        v32 = atomic_read(&alg->cipher_err_cnt);
-        rcomp.stat_compress_err_cnt = v32;
+        v64 = atomic64_read(&alg->cipher_err_cnt);
+        rcomp.stat_compress_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp);
 }
@@ -106,21 +103,20 @@ static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat racomp;
         u64 v64;
-        u32 v32;
 
         memset(&racomp, 0, sizeof(racomp));
 
         strscpy(racomp.type, "acomp", sizeof(racomp.type));
-        v32 = atomic_read(&alg->compress_cnt);
-        racomp.stat_compress_cnt = v32;
+        v64 = atomic64_read(&alg->compress_cnt);
+        racomp.stat_compress_cnt = v64;
         v64 = atomic64_read(&alg->compress_tlen);
         racomp.stat_compress_tlen = v64;
-        v32 = atomic_read(&alg->decompress_cnt);
-        racomp.stat_decompress_cnt = v32;
+        v64 = atomic64_read(&alg->decompress_cnt);
+        racomp.stat_decompress_cnt = v64;
         v64 = atomic64_read(&alg->decompress_tlen);
         racomp.stat_decompress_tlen = v64;
-        v32 = atomic_read(&alg->cipher_err_cnt);
-        racomp.stat_compress_err_cnt = v32;
+        v64 = atomic64_read(&alg->cipher_err_cnt);
+        racomp.stat_compress_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp);
 }
@@ -129,25 +125,24 @@ static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat rakcipher;
         u64 v64;
-        u32 v32;
 
         memset(&rakcipher, 0, sizeof(rakcipher));
 
         strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
-        v32 = atomic_read(&alg->encrypt_cnt);
-        rakcipher.stat_encrypt_cnt = v32;
+        v64 = atomic64_read(&alg->encrypt_cnt);
+        rakcipher.stat_encrypt_cnt = v64;
         v64 = atomic64_read(&alg->encrypt_tlen);
         rakcipher.stat_encrypt_tlen = v64;
-        v32 = atomic_read(&alg->decrypt_cnt);
-        rakcipher.stat_decrypt_cnt = v32;
+        v64 = atomic64_read(&alg->decrypt_cnt);
+        rakcipher.stat_decrypt_cnt = v64;
         v64 = atomic64_read(&alg->decrypt_tlen);
         rakcipher.stat_decrypt_tlen = v64;
-        v32 = atomic_read(&alg->sign_cnt);
-        rakcipher.stat_sign_cnt = v32;
-        v32 = atomic_read(&alg->verify_cnt);
-        rakcipher.stat_verify_cnt = v32;
-        v32 = atomic_read(&alg->akcipher_err_cnt);
-        rakcipher.stat_akcipher_err_cnt = v32;
+        v64 = atomic64_read(&alg->sign_cnt);
+        rakcipher.stat_sign_cnt = v64;
+        v64 = atomic64_read(&alg->verify_cnt);
+        rakcipher.stat_verify_cnt = v64;
+        v64 = atomic64_read(&alg->akcipher_err_cnt);
+        rakcipher.stat_akcipher_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
                        sizeof(rakcipher), &rakcipher);
@@ -156,19 +151,19 @@ static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
 static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat rkpp;
-        u32 v;
+        u64 v;
 
         memset(&rkpp, 0, sizeof(rkpp));
 
         strscpy(rkpp.type, "kpp", sizeof(rkpp.type));
 
-        v = atomic_read(&alg->setsecret_cnt);
+        v = atomic64_read(&alg->setsecret_cnt);
         rkpp.stat_setsecret_cnt = v;
-        v = atomic_read(&alg->generate_public_key_cnt);
+        v = atomic64_read(&alg->generate_public_key_cnt);
         rkpp.stat_generate_public_key_cnt = v;
-        v = atomic_read(&alg->compute_shared_secret_cnt);
+        v = atomic64_read(&alg->compute_shared_secret_cnt);
         rkpp.stat_compute_shared_secret_cnt = v;
-        v = atomic_read(&alg->kpp_err_cnt);
+        v = atomic64_read(&alg->kpp_err_cnt);
         rkpp.stat_kpp_err_cnt = v;
 
         return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp);
@@ -178,18 +173,17 @@ static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat rhash;
         u64 v64;
-        u32 v32;
 
         memset(&rhash, 0, sizeof(rhash));
 
         strscpy(rhash.type, "ahash", sizeof(rhash.type));
 
-        v32 = atomic_read(&alg->hash_cnt);
-        rhash.stat_hash_cnt = v32;
+        v64 = atomic64_read(&alg->hash_cnt);
+        rhash.stat_hash_cnt = v64;
         v64 = atomic64_read(&alg->hash_tlen);
         rhash.stat_hash_tlen = v64;
-        v32 = atomic_read(&alg->hash_err_cnt);
-        rhash.stat_hash_err_cnt = v32;
+        v64 = atomic64_read(&alg->hash_err_cnt);
+        rhash.stat_hash_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
 }
@@ -198,18 +192,17 @@ static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat rhash;
         u64 v64;
-        u32 v32;
 
         memset(&rhash, 0, sizeof(rhash));
 
         strscpy(rhash.type, "shash", sizeof(rhash.type));
 
-        v32 = atomic_read(&alg->hash_cnt);
-        rhash.stat_hash_cnt = v32;
+        v64 = atomic64_read(&alg->hash_cnt);
+        rhash.stat_hash_cnt = v64;
         v64 = atomic64_read(&alg->hash_tlen);
         rhash.stat_hash_tlen = v64;
-        v32 = atomic_read(&alg->hash_err_cnt);
-        rhash.stat_hash_err_cnt = v32;
+        v64 = atomic64_read(&alg->hash_err_cnt);
+        rhash.stat_hash_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
 }
@@ -218,20 +211,19 @@ static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
 {
         struct crypto_stat rrng;
         u64 v64;
-        u32 v32;
 
         memset(&rrng, 0, sizeof(rrng));
 
         strscpy(rrng.type, "rng", sizeof(rrng.type));
 
-        v32 = atomic_read(&alg->generate_cnt);
-        rrng.stat_generate_cnt = v32;
+        v64 = atomic64_read(&alg->generate_cnt);
+        rrng.stat_generate_cnt = v64;
         v64 = atomic64_read(&alg->generate_tlen);
         rrng.stat_generate_tlen = v64;
-        v32 = atomic_read(&alg->seed_cnt);
-        rrng.stat_seed_cnt = v32;
-        v32 = atomic_read(&alg->hash_err_cnt);
-        rrng.stat_rng_err_cnt = v32;
+        v64 = atomic64_read(&alg->seed_cnt);
+        rrng.stat_seed_cnt = v64;
+        v64 = atomic64_read(&alg->hash_err_cnt);
+        rrng.stat_rng_err_cnt = v64;
 
         return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng);
 }
diff --git a/include/crypto/acompress.h b/include/crypto/acompress.h
index 22e6f412c595..f79918196811 100644
--- a/include/crypto/acompress.h
+++ b/include/crypto/acompress.h
@@ -240,9 +240,9 @@ static inline void crypto_stat_compress(struct acomp_req *req, int ret)
         struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->compress_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->compress_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->compress_cnt);
                 atomic64_add(req->slen, &tfm->base.__crt_alg->compress_tlen);
         }
 #endif
@@ -254,9 +254,9 @@ static inline void crypto_stat_decompress(struct acomp_req *req, int ret)
         struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->compress_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->decompress_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->decompress_cnt);
                 atomic64_add(req->slen, &tfm->base.__crt_alg->decompress_tlen);
         }
 #endif
diff --git a/include/crypto/aead.h b/include/crypto/aead.h
index 0d765d7bfb82..99afd78c665d 100644
--- a/include/crypto/aead.h
+++ b/include/crypto/aead.h
@@ -312,9 +312,9 @@ static inline void crypto_stat_aead_encrypt(struct aead_request *req, int ret)
         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->aead_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->encrypt_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
                 atomic64_add(req->cryptlen, &tfm->base.__crt_alg->encrypt_tlen);
         }
 #endif
@@ -326,9 +326,9 @@ static inline void crypto_stat_aead_decrypt(struct aead_request *req, int ret)
         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->aead_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->decrypt_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
                 atomic64_add(req->cryptlen, &tfm->base.__crt_alg->decrypt_tlen);
         }
 #endif
diff --git a/include/crypto/akcipher.h b/include/crypto/akcipher.h
index afac71119396..3dc05cf7e0a9 100644
--- a/include/crypto/akcipher.h
+++ b/include/crypto/akcipher.h
@@ -278,9 +278,9 @@ static inline void crypto_stat_akcipher_encrypt(struct akcipher_request *req,
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->encrypt_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
                 atomic64_add(req->src_len, &tfm->base.__crt_alg->encrypt_tlen);
         }
 #endif
@@ -293,9 +293,9 @@ static inline void crypto_stat_akcipher_decrypt(struct akcipher_request *req,
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->decrypt_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
                 atomic64_add(req->src_len, &tfm->base.__crt_alg->decrypt_tlen);
         }
 #endif
@@ -308,9 +308,9 @@ static inline void crypto_stat_akcipher_sign(struct akcipher_request *req,
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
         else
-                atomic_inc(&tfm->base.__crt_alg->sign_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->sign_cnt);
 #endif
 }
 
@@ -321,9 +321,9 @@ static inline void crypto_stat_akcipher_verify(struct akcipher_request *req,
         struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
         else
-                atomic_inc(&tfm->base.__crt_alg->verify_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->verify_cnt);
 #endif
 }
 
diff --git a/include/crypto/hash.h b/include/crypto/hash.h
index bc7796600338..52920bed05ba 100644
--- a/include/crypto/hash.h
+++ b/include/crypto/hash.h
@@ -418,7 +418,7 @@ static inline void crypto_stat_ahash_update(struct ahash_request *req, int ret)
         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic_inc(&tfm->base.__crt_alg->hash_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
         else
                 atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
 #endif
@@ -430,9 +430,9 @@ static inline void crypto_stat_ahash_final(struct ahash_request *req, int ret)
         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->hash_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->hash_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->hash_cnt);
                 atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
         }
 #endif
diff --git a/include/crypto/kpp.h b/include/crypto/kpp.h
index f517ba6d3a27..bd5103a80919 100644
--- a/include/crypto/kpp.h
+++ b/include/crypto/kpp.h
@@ -272,9 +272,9 @@ static inline void crypto_stat_kpp_set_secret(struct crypto_kpp *tfm, int ret)
 {
 #ifdef CONFIG_CRYPTO_STATS
         if (ret)
-                atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
         else
-                atomic_inc(&tfm->base.__crt_alg->setsecret_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->setsecret_cnt);
 #endif
 }
 
@@ -285,9 +285,9 @@ static inline void crypto_stat_kpp_generate_public_key(struct kpp_request *req,
         struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
 
         if (ret)
-                atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
         else
-                atomic_inc(&tfm->base.__crt_alg->generate_public_key_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->generate_public_key_cnt);
 #endif
 }
 
@@ -298,9 +298,9 @@ static inline void crypto_stat_kpp_compute_shared_secret(struct kpp_request *req
         struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
 
         if (ret)
-                atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
         else
-                atomic_inc(&tfm->base.__crt_alg->compute_shared_secret_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->compute_shared_secret_cnt);
 #endif
 }
 
diff --git a/include/crypto/rng.h b/include/crypto/rng.h
index 6d258f5b68f1..966615bba45e 100644
--- a/include/crypto/rng.h
+++ b/include/crypto/rng.h
@@ -126,9 +126,9 @@ static inline void crypto_stat_rng_seed(struct crypto_rng *tfm, int ret)
 {
 #ifdef CONFIG_CRYPTO_STATS
         if (ret && ret != -EINPROGRESS && ret != -EBUSY)
-                atomic_inc(&tfm->base.__crt_alg->rng_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
         else
-                atomic_inc(&tfm->base.__crt_alg->seed_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->seed_cnt);
 #endif
 }
 
@@ -137,9 +137,9 @@ static inline void crypto_stat_rng_generate(struct crypto_rng *tfm,
 {
 #ifdef CONFIG_CRYPTO_STATS
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&tfm->base.__crt_alg->rng_err_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
         } else {
-                atomic_inc(&tfm->base.__crt_alg->generate_cnt);
+                atomic64_inc(&tfm->base.__crt_alg->generate_cnt);
                 atomic64_add(dlen, &tfm->base.__crt_alg->generate_tlen);
         }
 #endif
diff --git a/include/crypto/skcipher.h b/include/crypto/skcipher.h
index 925f547cdcfa..dff54731ddf4 100644
--- a/include/crypto/skcipher.h
+++ b/include/crypto/skcipher.h
@@ -491,9 +491,9 @@ static inline void crypto_stat_skcipher_encrypt(struct skcipher_request *req,
 {
 #ifdef CONFIG_CRYPTO_STATS
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&alg->cipher_err_cnt);
+                atomic64_inc(&alg->cipher_err_cnt);
         } else {
-                atomic_inc(&alg->encrypt_cnt);
+                atomic64_inc(&alg->encrypt_cnt);
                 atomic64_add(req->cryptlen, &alg->encrypt_tlen);
         }
 #endif
@@ -504,9 +504,9 @@ static inline void crypto_stat_skcipher_decrypt(struct skcipher_request *req,
 {
 #ifdef CONFIG_CRYPTO_STATS
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&alg->cipher_err_cnt);
+                atomic64_inc(&alg->cipher_err_cnt);
         } else {
-                atomic_inc(&alg->decrypt_cnt);
+                atomic64_inc(&alg->decrypt_cnt);
                 atomic64_add(req->cryptlen, &alg->decrypt_tlen);
         }
 #endif
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 3e05053b8d57..b109b50906e7 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -517,11 +517,11 @@ struct crypto_alg {
 
 #ifdef CONFIG_CRYPTO_STATS
         union {
-                atomic_t encrypt_cnt;
-                atomic_t compress_cnt;
-                atomic_t generate_cnt;
-                atomic_t hash_cnt;
-                atomic_t setsecret_cnt;
+                atomic64_t encrypt_cnt;
+                atomic64_t compress_cnt;
+                atomic64_t generate_cnt;
+                atomic64_t hash_cnt;
+                atomic64_t setsecret_cnt;
         };
         union {
                 atomic64_t encrypt_tlen;
@@ -530,29 +530,29 @@ struct crypto_alg {
                 atomic64_t hash_tlen;
         };
         union {
-                atomic_t akcipher_err_cnt;
-                atomic_t cipher_err_cnt;
-                atomic_t compress_err_cnt;
-                atomic_t aead_err_cnt;
-                atomic_t hash_err_cnt;
-                atomic_t rng_err_cnt;
-                atomic_t kpp_err_cnt;
+                atomic64_t akcipher_err_cnt;
+                atomic64_t cipher_err_cnt;
+                atomic64_t compress_err_cnt;
+                atomic64_t aead_err_cnt;
+                atomic64_t hash_err_cnt;
+                atomic64_t rng_err_cnt;
+                atomic64_t kpp_err_cnt;
         };
         union {
-                atomic_t decrypt_cnt;
-                atomic_t decompress_cnt;
-                atomic_t seed_cnt;
-                atomic_t generate_public_key_cnt;
+                atomic64_t decrypt_cnt;
+                atomic64_t decompress_cnt;
+                atomic64_t seed_cnt;
+                atomic64_t generate_public_key_cnt;
         };
         union {
                 atomic64_t decrypt_tlen;
                 atomic64_t decompress_tlen;
         };
         union {
-                atomic_t verify_cnt;
-                atomic_t compute_shared_secret_cnt;
+                atomic64_t verify_cnt;
+                atomic64_t compute_shared_secret_cnt;
         };
-        atomic_t sign_cnt;
+        atomic64_t sign_cnt;
 #endif /* CONFIG_CRYPTO_STATS */
 
 } CRYPTO_MINALIGN_ATTR;
@@ -983,9 +983,9 @@ static inline void crypto_stat_ablkcipher_encrypt(struct ablkcipher_request *req
                 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
+                atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
         } else {
-                atomic_inc(&crt->base->base.__crt_alg->encrypt_cnt);
+                atomic64_inc(&crt->base->base.__crt_alg->encrypt_cnt);
                 atomic64_add(req->nbytes, &crt->base->base.__crt_alg->encrypt_tlen);
         }
 #endif
@@ -999,9 +999,9 @@ static inline void crypto_stat_ablkcipher_decrypt(struct ablkcipher_request *req
                 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
 
         if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-                atomic_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
+                atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
         } else {
-                atomic_inc(&crt->base->base.__crt_alg->decrypt_cnt);
+                atomic64_inc(&crt->base->base.__crt_alg->decrypt_cnt);
                 atomic64_add(req->nbytes, &crt->base->base.__crt_alg->decrypt_tlen);
         }
 #endif
diff --git a/include/uapi/linux/cryptouser.h b/include/uapi/linux/cryptouser.h
index 6dafbc3e4414..9f8187077ce4 100644
--- a/include/uapi/linux/cryptouser.h
+++ b/include/uapi/linux/cryptouser.h
@@ -79,11 +79,11 @@ struct crypto_user_alg {
 struct crypto_stat {
         char type[CRYPTO_MAX_NAME];
         union {
-                __u32 stat_encrypt_cnt;
-                __u32 stat_compress_cnt;
-                __u32 stat_generate_cnt;
-                __u32 stat_hash_cnt;
-                __u32 stat_setsecret_cnt;
+                __u64 stat_encrypt_cnt;
+                __u64 stat_compress_cnt;
+                __u64 stat_generate_cnt;
+                __u64 stat_hash_cnt;
+                __u64 stat_setsecret_cnt;
         };
         union {
                 __u64 stat_encrypt_tlen;
@@ -92,29 +92,29 @@ struct crypto_stat {
                 __u64 stat_hash_tlen;
         };
         union {
-                __u32 stat_akcipher_err_cnt;
-                __u32 stat_cipher_err_cnt;
-                __u32 stat_compress_err_cnt;
-                __u32 stat_aead_err_cnt;
-                __u32 stat_hash_err_cnt;
-                __u32 stat_rng_err_cnt;
-                __u32 stat_kpp_err_cnt;
+                __u64 stat_akcipher_err_cnt;
+                __u64 stat_cipher_err_cnt;
+                __u64 stat_compress_err_cnt;
+                __u64 stat_aead_err_cnt;
+                __u64 stat_hash_err_cnt;
+                __u64 stat_rng_err_cnt;
+                __u64 stat_kpp_err_cnt;
         };
         union {
-                __u32 stat_decrypt_cnt;
-                __u32 stat_decompress_cnt;
-                __u32 stat_seed_cnt;
-                __u32 stat_generate_public_key_cnt;
+                __u64 stat_decrypt_cnt;
+                __u64 stat_decompress_cnt;
+                __u64 stat_seed_cnt;
+                __u64 stat_generate_public_key_cnt;
         };
         union {
                 __u64 stat_decrypt_tlen;
                 __u64 stat_decompress_tlen;
         };
         union {
-                __u32 stat_verify_cnt;
-                __u32 stat_compute_shared_secret_cnt;
+                __u64 stat_verify_cnt;
+                __u64 stat_compute_shared_secret_cnt;
         };
-        __u32 stat_sign_cnt;
+        __u64 stat_sign_cnt;
 };
 
 struct crypto_report_larval {