-rw-r--r--  drivers/crypto/nx/nx-aes-ccm.c  |  6
-rw-r--r--  drivers/crypto/nx/nx-aes-ctr.c  |  7
-rw-r--r--  drivers/crypto/nx/nx-aes-gcm.c  | 17
-rw-r--r--  drivers/crypto/nx/nx-aes-xcbc.c | 70
-rw-r--r--  drivers/crypto/nx/nx-sha256.c   | 43
-rw-r--r--  drivers/crypto/nx/nx-sha512.c   | 44
-rw-r--r--  drivers/crypto/nx/nx.c          |  3
-rw-r--r--  drivers/crypto/nx/nx.h          | 14
-rw-r--r--  drivers/crypto/omap-des.c       |  3

9 files changed, 125 insertions, 82 deletions
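
The nx hunks below all apply one fix pattern: per-request state (the IVs) moves out of the shared nx_crypto_ctx priv structs and into a request context, reserved with crypto_aead_set_reqsize() at tfm init time and fetched with aead_request_ctx() on each request, so two requests running on the same tfm no longer overwrite a single IV buffer. A minimal sketch of that pattern follows; it is illustrative only, and the names my_rctx, my_aead_init and my_aead_encrypt are invented stand-ins for the driver's nx_gcm_rctx/nx_ccm_rctx and its init/encrypt entry points.

    /* Illustrative sketch only; hypothetical names, not code from this patch. */
    #include <crypto/internal/aead.h>
    #include <linux/string.h>

    struct my_rctx {
            u8 iv[16];              /* per-request IV, cf. nx_gcm_rctx/nx_ccm_rctx */
    };

    static int my_aead_init(struct crypto_aead *tfm)
    {
            /* reserve room for a struct my_rctx inside every aead_request */
            crypto_aead_set_reqsize(tfm, sizeof(struct my_rctx));
            return 0;
    }

    static int my_aead_encrypt(struct aead_request *req)
    {
            struct my_rctx *rctx = aead_request_ctx(req);   /* private to this request */

            memcpy(rctx->iv, req->iv, 12);  /* no longer clobbers other requests */
            /* ... submit the operation using rctx->iv ... */
            return 0;
    }
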
diff --git a/drivers/crypto/nx/nx-aes-ccm.c b/drivers/crypto/nx/nx-aes-ccm.c
index 67f80813a06f..e4311ce0cd78 100644
--- a/drivers/crypto/nx/nx-aes-ccm.c
+++ b/drivers/crypto/nx/nx-aes-ccm.c
@@ -494,8 +494,9 @@ out:
 static int ccm4309_aes_nx_encrypt(struct aead_request *req)
 {
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
+        struct nx_gcm_rctx *rctx = aead_request_ctx(req);
         struct blkcipher_desc desc;
-        u8 *iv = nx_ctx->priv.ccm.iv;
+        u8 *iv = rctx->iv;
 
         iv[0] = 3;
         memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3);
@@ -525,8 +526,9 @@ static int ccm_aes_nx_encrypt(struct aead_request *req)
 static int ccm4309_aes_nx_decrypt(struct aead_request *req)
 {
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
+        struct nx_gcm_rctx *rctx = aead_request_ctx(req);
         struct blkcipher_desc desc;
-        u8 *iv = nx_ctx->priv.ccm.iv;
+        u8 *iv = rctx->iv;
 
         iv[0] = 3;
         memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3);
diff --git a/drivers/crypto/nx/nx-aes-ctr.c b/drivers/crypto/nx/nx-aes-ctr.c
index 2617cd4d54dd..dd7e9f3f5b6b 100644
--- a/drivers/crypto/nx/nx-aes-ctr.c
+++ b/drivers/crypto/nx/nx-aes-ctr.c
@@ -72,7 +72,7 @@ static int ctr3686_aes_nx_set_key(struct crypto_tfm *tfm,
         if (key_len < CTR_RFC3686_NONCE_SIZE)
                 return -EINVAL;
 
-        memcpy(nx_ctx->priv.ctr.iv,
+        memcpy(nx_ctx->priv.ctr.nonce,
                in_key + key_len - CTR_RFC3686_NONCE_SIZE,
                CTR_RFC3686_NONCE_SIZE);
 
@@ -131,14 +131,15 @@ static int ctr3686_aes_nx_crypt(struct blkcipher_desc *desc,
                                 unsigned int nbytes)
 {
         struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm);
-        u8 *iv = nx_ctx->priv.ctr.iv;
+        u8 iv[16];
 
+        memcpy(iv, nx_ctx->priv.ctr.nonce, CTR_RFC3686_IV_SIZE);
         memcpy(iv + CTR_RFC3686_NONCE_SIZE,
                desc->info, CTR_RFC3686_IV_SIZE);
         iv[12] = iv[13] = iv[14] = 0;
         iv[15] = 1;
 
-        desc->info = nx_ctx->priv.ctr.iv;
+        desc->info = iv;
 
         return ctr_aes_nx_crypt(desc, dst, src, nbytes);
 }
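
With the change above, the RFC 3686 tfm context keeps only the nonce (see the nx_ctr_priv hunk in nx.h later in this diff), and the 16-byte counter block is now assembled on the stack for every request. A rough sketch of that layout under the standard RFC 3686 sizes (4-byte nonce, 8-byte per-request IV, 32-bit block counter starting at 1); build_rfc3686_ctrblk() and the MY_* macros are invented names, not code from this patch.

    /* Hypothetical helper: RFC 3686 counter block = nonce || IV || counter(1). */
    #include <linux/string.h>
    #include <linux/types.h>

    #define MY_RFC3686_NONCE_SIZE 4         /* cf. CTR_RFC3686_NONCE_SIZE */
    #define MY_RFC3686_IV_SIZE    8         /* cf. CTR_RFC3686_IV_SIZE */

    static void build_rfc3686_ctrblk(u8 ctrblk[16], const u8 *nonce, const u8 *iv)
    {
            memcpy(ctrblk, nonce, MY_RFC3686_NONCE_SIZE);                   /* bytes 0..3  */
            memcpy(ctrblk + MY_RFC3686_NONCE_SIZE, iv, MY_RFC3686_IV_SIZE); /* bytes 4..11 */
            ctrblk[12] = ctrblk[13] = ctrblk[14] = 0;                       /* counter high bytes */
            ctrblk[15] = 1;                                                 /* counter starts at 1 */
    }
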
diff --git a/drivers/crypto/nx/nx-aes-gcm.c b/drivers/crypto/nx/nx-aes-gcm.c
index 08ac6d48688c..92c993f08213 100644
--- a/drivers/crypto/nx/nx-aes-gcm.c
+++ b/drivers/crypto/nx/nx-aes-gcm.c
@@ -317,6 +317,7 @@ out:
 static int gcm_aes_nx_crypt(struct aead_request *req, int enc)
 {
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
+        struct nx_gcm_rctx *rctx = aead_request_ctx(req);
         struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
         struct blkcipher_desc desc;
         unsigned int nbytes = req->cryptlen;
@@ -326,7 +327,7 @@ static int gcm_aes_nx_crypt(struct aead_request *req, int enc)
 
         spin_lock_irqsave(&nx_ctx->lock, irq_flags);
 
-        desc.info = nx_ctx->priv.gcm.iv;
+        desc.info = rctx->iv;
         /* initialize the counter */
         *(u32 *)(desc.info + NX_GCM_CTR_OFFSET) = 1;
 
@@ -424,8 +425,8 @@ out:
 
 static int gcm_aes_nx_encrypt(struct aead_request *req)
 {
-        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
-        char *iv = nx_ctx->priv.gcm.iv;
+        struct nx_gcm_rctx *rctx = aead_request_ctx(req);
+        char *iv = rctx->iv;
 
         memcpy(iv, req->iv, 12);
 
@@ -434,8 +435,8 @@ static int gcm_aes_nx_encrypt(struct aead_request *req)
 
 static int gcm_aes_nx_decrypt(struct aead_request *req)
 {
-        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
-        char *iv = nx_ctx->priv.gcm.iv;
+        struct nx_gcm_rctx *rctx = aead_request_ctx(req);
+        char *iv = rctx->iv;
 
         memcpy(iv, req->iv, 12);
 
@@ -445,7 +446,8 @@ static int gcm_aes_nx_decrypt(struct aead_request *req)
 static int gcm4106_aes_nx_encrypt(struct aead_request *req)
 {
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
-        char *iv = nx_ctx->priv.gcm.iv;
+        struct nx_gcm_rctx *rctx = aead_request_ctx(req);
+        char *iv = rctx->iv;
         char *nonce = nx_ctx->priv.gcm.nonce;
 
         memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
@@ -457,7 +459,8 @@ static int gcm4106_aes_nx_encrypt(struct aead_request *req)
 static int gcm4106_aes_nx_decrypt(struct aead_request *req)
 {
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm);
-        char *iv = nx_ctx->priv.gcm.iv;
+        struct nx_gcm_rctx *rctx = aead_request_ctx(req);
+        char *iv = rctx->iv;
         char *nonce = nx_ctx->priv.gcm.nonce;
 
         memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
diff --git a/drivers/crypto/nx/nx-aes-xcbc.c b/drivers/crypto/nx/nx-aes-xcbc.c
index 8c2faffab4a3..c2f7d4befb55 100644
--- a/drivers/crypto/nx/nx-aes-xcbc.c
+++ b/drivers/crypto/nx/nx-aes-xcbc.c
@@ -42,6 +42,7 @@ static int nx_xcbc_set_key(struct crypto_shash *desc,
                            unsigned int key_len)
 {
         struct nx_crypto_ctx *nx_ctx = crypto_shash_ctx(desc);
+        struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
 
         switch (key_len) {
         case AES_KEYSIZE_128:
@@ -51,7 +52,7 @@ static int nx_xcbc_set_key(struct crypto_shash *desc,
                 return -EINVAL;
         }
 
-        memcpy(nx_ctx->priv.xcbc.key, in_key, key_len);
+        memcpy(csbcpb->cpb.aes_xcbc.key, in_key, key_len);
 
         return 0;
 }
@@ -148,32 +149,29 @@ out:
         return rc;
 }
 
-static int nx_xcbc_init(struct shash_desc *desc)
+static int nx_crypto_ctx_aes_xcbc_init2(struct crypto_tfm *tfm)
 {
-        struct xcbc_state *sctx = shash_desc_ctx(desc);
-        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
+        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
         struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
-        struct nx_sg *out_sg;
-        int len;
+        int err;
 
-        nx_ctx_init(nx_ctx, HCOP_FC_AES);
+        err = nx_crypto_ctx_aes_xcbc_init(tfm);
+        if (err)
+                return err;
 
-        memset(sctx, 0, sizeof *sctx);
+        nx_ctx_init(nx_ctx, HCOP_FC_AES);
 
         NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
         csbcpb->cpb.hdr.mode = NX_MODE_AES_XCBC_MAC;
 
-        memcpy(csbcpb->cpb.aes_xcbc.key, nx_ctx->priv.xcbc.key, AES_BLOCK_SIZE);
-        memset(nx_ctx->priv.xcbc.key, 0, sizeof *nx_ctx->priv.xcbc.key);
-
-        len = AES_BLOCK_SIZE;
-        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
-                                  &len, nx_ctx->ap->sglen);
+        return 0;
+}
 
-        if (len != AES_BLOCK_SIZE)
-                return -EINVAL;
+static int nx_xcbc_init(struct shash_desc *desc)
+{
+        struct xcbc_state *sctx = shash_desc_ctx(desc);
 
-        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
+        memset(sctx, 0, sizeof *sctx);
 
         return 0;
 }
@@ -186,6 +184,7 @@ static int nx_xcbc_update(struct shash_desc *desc,
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
         struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
         struct nx_sg *in_sg;
+        struct nx_sg *out_sg;
         u32 to_process = 0, leftover, total;
         unsigned int max_sg_len;
         unsigned long irq_flags;
@@ -213,6 +212,17 @@ static int nx_xcbc_update(struct shash_desc *desc,
         max_sg_len = min_t(u64, max_sg_len,
                            nx_ctx->ap->databytelen/NX_PAGE_SIZE);
 
+        data_len = AES_BLOCK_SIZE;
+        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
+                                  &len, nx_ctx->ap->sglen);
+
+        if (data_len != AES_BLOCK_SIZE) {
+                rc = -EINVAL;
+                goto out;
+        }
+
+        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
+
         do {
                 to_process = total - to_process;
                 to_process = to_process & ~(AES_BLOCK_SIZE - 1);
@@ -235,8 +245,10 @@ static int nx_xcbc_update(struct shash_desc *desc,
                                              (u8 *) sctx->buffer,
                                              &data_len,
                                              max_sg_len);
-                if (data_len != sctx->count)
-                        return -EINVAL;
+                if (data_len != sctx->count) {
+                        rc = -EINVAL;
+                        goto out;
+                }
         }
 
         data_len = to_process - sctx->count;
@@ -245,8 +257,10 @@ static int nx_xcbc_update(struct shash_desc *desc,
                                 &data_len,
                                 max_sg_len);
 
-        if (data_len != to_process - sctx->count)
-                return -EINVAL;
+        if (data_len != to_process - sctx->count) {
+                rc = -EINVAL;
+                goto out;
+        }
 
         nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) *
                                 sizeof(struct nx_sg);
@@ -325,15 +339,19 @@ static int nx_xcbc_final(struct shash_desc *desc, u8 *out)
         in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer,
                                  &len, nx_ctx->ap->sglen);
 
-        if (len != sctx->count)
-                return -EINVAL;
+        if (len != sctx->count) {
+                rc = -EINVAL;
+                goto out;
+        }
 
         len = AES_BLOCK_SIZE;
         out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,
                                   nx_ctx->ap->sglen);
 
-        if (len != AES_BLOCK_SIZE)
-                return -EINVAL;
+        if (len != AES_BLOCK_SIZE) {
+                rc = -EINVAL;
+                goto out;
+        }
 
         nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
         nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
@@ -372,7 +390,7 @@ struct shash_alg nx_shash_aes_xcbc_alg = {
                 .cra_blocksize = AES_BLOCK_SIZE,
                 .cra_module = THIS_MODULE,
                 .cra_ctxsize = sizeof(struct nx_crypto_ctx),
-                .cra_init = nx_crypto_ctx_aes_xcbc_init,
+                .cra_init = nx_crypto_ctx_aes_xcbc_init2,
                 .cra_exit = nx_crypto_ctx_exit,
         }
 };
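
The XCBC rework above, and the SHA-256/SHA-512 hunks that follow, split the work the same way: one-time coprocessor setup moves into a cra_init wrapper (the new *_init2 and *_sha*_init functions) that first runs the generic context init and then programs the CSB/CPB, the shash init() only clears the software state, and the output scatter/gather list is rebuilt inside update(), where the per-call lengths are known. A schematic of the wrapper shape, with invented names (my_ctx_init2, my_base_ctx_init, my_hw_setup), assuming the same structure as the nx code:

    /* Schematic only; the my_* helpers are invented stand-ins. */
    #include <linux/crypto.h>

    static int my_base_ctx_init(struct crypto_tfm *tfm) { return 0; }  /* stand-in for the generic ctx init */
    static void my_hw_setup(void *ctx) { }                             /* stand-in for CSB/CPB mode setup    */

    static int my_ctx_init2(struct crypto_tfm *tfm)
    {
            int err;

            err = my_base_ctx_init(tfm);            /* e.g. nx_crypto_ctx_aes_xcbc_init */
            if (err)
                    return err;

            my_hw_setup(crypto_tfm_ctx(tfm));       /* nx_ctx_init() plus mode/key-size fields */
            return 0;
    }
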
diff --git a/drivers/crypto/nx/nx-sha256.c b/drivers/crypto/nx/nx-sha256.c
index 4e91bdb83c59..08f8d5cd6334 100644
--- a/drivers/crypto/nx/nx-sha256.c
+++ b/drivers/crypto/nx/nx-sha256.c
@@ -29,34 +29,28 @@
 #include "nx.h"
 
 
-static int nx_sha256_init(struct shash_desc *desc)
+static int nx_crypto_ctx_sha256_init(struct crypto_tfm *tfm)
 {
-        struct sha256_state *sctx = shash_desc_ctx(desc);
-        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
-        struct nx_sg *out_sg;
-        int len;
-        u32 max_sg_len;
+        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
+        int err;
 
-        nx_ctx_init(nx_ctx, HCOP_FC_SHA);
+        err = nx_crypto_ctx_sha_init(tfm);
+        if (err)
+                return err;
 
-        memset(sctx, 0, sizeof *sctx);
+        nx_ctx_init(nx_ctx, HCOP_FC_SHA);
 
         nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256];
 
         NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256);
 
-        max_sg_len = min_t(u64, nx_ctx->ap->sglen,
-                           nx_driver.of.max_sg_len/sizeof(struct nx_sg));
-        max_sg_len = min_t(u64, max_sg_len,
-                           nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+        return 0;
+}
 
-        len = SHA256_DIGEST_SIZE;
-        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
-                                  &len, max_sg_len);
-        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
+static int nx_sha256_init(struct shash_desc *desc) {
+        struct sha256_state *sctx = shash_desc_ctx(desc);
 
-        if (len != SHA256_DIGEST_SIZE)
-                return -EINVAL;
+        memset(sctx, 0, sizeof *sctx);
 
         sctx->state[0] = __cpu_to_be32(SHA256_H0);
         sctx->state[1] = __cpu_to_be32(SHA256_H1);
@@ -78,6 +72,7 @@ static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
         struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
         struct nx_sg *in_sg;
+        struct nx_sg *out_sg;
         u64 to_process = 0, leftover, total;
         unsigned long irq_flags;
         int rc = 0;
@@ -108,6 +103,16 @@ static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
         max_sg_len = min_t(u64, max_sg_len,
                            nx_ctx->ap->databytelen/NX_PAGE_SIZE);
 
+        data_len = SHA256_DIGEST_SIZE;
+        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
+                                  &data_len, max_sg_len);
+        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
+
+        if (data_len != SHA256_DIGEST_SIZE) {
+                rc = -EINVAL;
+                goto out;
+        }
+
         do {
                 /*
                  * to_process: the SHA256_BLOCK_SIZE data chunk to process in
@@ -282,7 +287,7 @@ struct shash_alg nx_shash_sha256_alg = {
                 .cra_blocksize = SHA256_BLOCK_SIZE,
                 .cra_module = THIS_MODULE,
                 .cra_ctxsize = sizeof(struct nx_crypto_ctx),
-                .cra_init = nx_crypto_ctx_sha_init,
+                .cra_init = nx_crypto_ctx_sha256_init,
                 .cra_exit = nx_crypto_ctx_exit,
         }
 };
diff --git a/drivers/crypto/nx/nx-sha512.c b/drivers/crypto/nx/nx-sha512.c
index e6a58d2ee628..aff0fe58eac0 100644
--- a/drivers/crypto/nx/nx-sha512.c
+++ b/drivers/crypto/nx/nx-sha512.c
@@ -28,34 +28,29 @@
 #include "nx.h"
 
 
-static int nx_sha512_init(struct shash_desc *desc)
+static int nx_crypto_ctx_sha512_init(struct crypto_tfm *tfm)
 {
-        struct sha512_state *sctx = shash_desc_ctx(desc);
-        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
-        struct nx_sg *out_sg;
-        int len;
-        u32 max_sg_len;
+        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
+        int err;
 
-        nx_ctx_init(nx_ctx, HCOP_FC_SHA);
+        err = nx_crypto_ctx_sha_init(tfm);
+        if (err)
+                return err;
 
-        memset(sctx, 0, sizeof *sctx);
+        nx_ctx_init(nx_ctx, HCOP_FC_SHA);
 
         nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA512];
 
         NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA512);
 
-        max_sg_len = min_t(u64, nx_ctx->ap->sglen,
-                           nx_driver.of.max_sg_len/sizeof(struct nx_sg));
-        max_sg_len = min_t(u64, max_sg_len,
-                           nx_ctx->ap->databytelen/NX_PAGE_SIZE);
+        return 0;
+}
 
-        len = SHA512_DIGEST_SIZE;
-        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
-                                  &len, max_sg_len);
-        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
+static int nx_sha512_init(struct shash_desc *desc)
+{
+        struct sha512_state *sctx = shash_desc_ctx(desc);
 
-        if (len != SHA512_DIGEST_SIZE)
-                return -EINVAL;
+        memset(sctx, 0, sizeof *sctx);
 
         sctx->state[0] = __cpu_to_be64(SHA512_H0);
         sctx->state[1] = __cpu_to_be64(SHA512_H1);
@@ -77,6 +72,7 @@ static int nx_sha512_update(struct shash_desc *desc, const u8 *data,
         struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
         struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
         struct nx_sg *in_sg;
+        struct nx_sg *out_sg;
         u64 to_process, leftover = 0, total;
         unsigned long irq_flags;
         int rc = 0;
@@ -107,6 +103,16 @@ static int nx_sha512_update(struct shash_desc *desc, const u8 *data,
         max_sg_len = min_t(u64, max_sg_len,
                            nx_ctx->ap->databytelen/NX_PAGE_SIZE);
 
+        data_len = SHA512_DIGEST_SIZE;
+        out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
+                                  &data_len, max_sg_len);
+        nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
+
+        if (data_len != SHA512_DIGEST_SIZE) {
+                rc = -EINVAL;
+                goto out;
+        }
+
         do {
                 /*
                  * to_process: the SHA512_BLOCK_SIZE data chunk to process in
@@ -288,7 +294,7 @@ struct shash_alg nx_shash_sha512_alg = {
                 .cra_blocksize = SHA512_BLOCK_SIZE,
                 .cra_module = THIS_MODULE,
                 .cra_ctxsize = sizeof(struct nx_crypto_ctx),
-                .cra_init = nx_crypto_ctx_sha_init,
+                .cra_init = nx_crypto_ctx_sha512_init,
                 .cra_exit = nx_crypto_ctx_exit,
         }
 };
diff --git a/drivers/crypto/nx/nx.c b/drivers/crypto/nx/nx.c
index f6198f29a4a8..436971343ff7 100644
--- a/drivers/crypto/nx/nx.c
+++ b/drivers/crypto/nx/nx.c
@@ -713,12 +713,15 @@ static int nx_crypto_ctx_init(struct nx_crypto_ctx *nx_ctx, u32 fc, u32 mode)
 /* entry points from the crypto tfm initializers */
 int nx_crypto_ctx_aes_ccm_init(struct crypto_tfm *tfm)
 {
+        crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
+                                sizeof(struct nx_ccm_rctx));
         return nx_crypto_ctx_init(crypto_tfm_ctx(tfm), NX_FC_AES,
                                   NX_MODE_AES_CCM);
 }
 
 int nx_crypto_ctx_aes_gcm_init(struct crypto_aead *tfm)
 {
+        crypto_aead_set_reqsize(tfm, sizeof(struct nx_gcm_rctx));
         return nx_crypto_ctx_init(crypto_aead_ctx(tfm), NX_FC_AES,
                                   NX_MODE_AES_GCM);
 }
diff --git a/drivers/crypto/nx/nx.h b/drivers/crypto/nx/nx.h
index de3ea8738146..cdff03a42ae7 100644
--- a/drivers/crypto/nx/nx.h
+++ b/drivers/crypto/nx/nx.h
@@ -2,6 +2,8 @@
 #ifndef __NX_H__
 #define __NX_H__
 
+#include <crypto/ctr.h>
+
 #define NX_NAME "nx-crypto"
 #define NX_STRING "IBM Power7+ Nest Accelerator Crypto Driver"
 #define NX_VERSION "1.0"
@@ -91,8 +93,11 @@ struct nx_crypto_driver {
 
 #define NX_GCM4106_NONCE_LEN (4)
 #define NX_GCM_CTR_OFFSET (12)
-struct nx_gcm_priv {
+struct nx_gcm_rctx {
         u8 iv[16];
+};
+
+struct nx_gcm_priv {
         u8 iauth_tag[16];
         u8 nonce[NX_GCM4106_NONCE_LEN];
 };
@@ -100,8 +105,11 @@ struct nx_gcm_priv {
 #define NX_CCM_AES_KEY_LEN (16)
 #define NX_CCM4309_AES_KEY_LEN (19)
 #define NX_CCM4309_NONCE_LEN (3)
-struct nx_ccm_priv {
+struct nx_ccm_rctx {
         u8 iv[16];
+};
+
+struct nx_ccm_priv {
         u8 b0[16];
         u8 iauth_tag[16];
         u8 oauth_tag[16];
@@ -113,7 +121,7 @@ struct nx_xcbc_priv {
 };
 
 struct nx_ctr_priv {
-        u8 iv[16];
+        u8 nonce[CTR_RFC3686_NONCE_SIZE];
 };
 
 struct nx_crypto_ctx {
diff --git a/drivers/crypto/omap-des.c b/drivers/crypto/omap-des.c
index 46307098f8ba..0a70e46d5416 100644
--- a/drivers/crypto/omap-des.c
+++ b/drivers/crypto/omap-des.c
@@ -536,9 +536,6 @@ static int omap_des_crypt_dma_stop(struct omap_des_dev *dd)
         dmaengine_terminate_all(dd->dma_lch_in);
         dmaengine_terminate_all(dd->dma_lch_out);
 
-        dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
-        dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, DMA_FROM_DEVICE);
-
         return err;
 }
 