about summary refs log tree commit diff stats
path: root/drivers/crypto
diff options
context:
space:
mode:
authorHoria Geanta <horia.geanta@freescale.com>2014-03-14 11:46:49 -0400
committerHerbert Xu <herbert@gondor.apana.org.au>2014-03-21 09:54:24 -0400
commit4464a7d4f53d756101291da26563f37f7fce40f3 (patch)
tree38ab31ed4f28cf70c8358e6b1a7059770f8c0fe2 /drivers/crypto
parentd4a7a0fbe959e12bdd071b79b50ed34853a6db8f (diff)
crypto: caam - remove error propagation handling
Commit 61bb86bba169507a5f223b94b9176c32c84b4721 ("crypto: caam - set descriptor sharing type to SERIAL") changed the descriptor sharing mode from SHARE_WAIT to SHARE_SERIAL. All descriptor commands that handle the "ok to share" and "error propagation" settings should also go away, since they have no meaning for SHARE_SERIAL. Signed-off-by: Horia Geanta <horia.geanta@freescale.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto')
-rw-r--r--drivers/crypto/caam/caamalg.c54
1 file changed, 7 insertions, 47 deletions
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index b71f2fd749df..5016e63b6c25 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -66,8 +66,8 @@
66 66
67/* length of descriptors text */ 67/* length of descriptors text */
68#define DESC_AEAD_BASE (4 * CAAM_CMD_SZ) 68#define DESC_AEAD_BASE (4 * CAAM_CMD_SZ)
69#define DESC_AEAD_ENC_LEN (DESC_AEAD_BASE + 16 * CAAM_CMD_SZ) 69#define DESC_AEAD_ENC_LEN (DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
70#define DESC_AEAD_DEC_LEN (DESC_AEAD_BASE + 21 * CAAM_CMD_SZ) 70#define DESC_AEAD_DEC_LEN (DESC_AEAD_BASE + 18 * CAAM_CMD_SZ)
71#define DESC_AEAD_GIVENC_LEN (DESC_AEAD_ENC_LEN + 7 * CAAM_CMD_SZ) 71#define DESC_AEAD_GIVENC_LEN (DESC_AEAD_ENC_LEN + 7 * CAAM_CMD_SZ)
72 72
73#define DESC_ABLKCIPHER_BASE (3 * CAAM_CMD_SZ) 73#define DESC_ABLKCIPHER_BASE (3 * CAAM_CMD_SZ)
@@ -104,19 +104,6 @@ static inline void append_dec_op1(u32 *desc, u32 type)
104} 104}
105 105
106/* 106/*
107 * Wait for completion of class 1 key loading before allowing
108 * error propagation
109 */
110static inline void append_dec_shr_done(u32 *desc)
111{
112 u32 *jump_cmd;
113
114 jump_cmd = append_jump(desc, JUMP_CLASS_CLASS1 | JUMP_TEST_ALL);
115 set_jump_tgt_here(desc, jump_cmd);
116 append_cmd(desc, SET_OK_NO_PROP_ERRORS | CMD_LOAD);
117}
118
119/*
120 * For aead functions, read payload and write payload, 107 * For aead functions, read payload and write payload,
121 * both of which are specified in req->src and req->dst 108 * both of which are specified in req->src and req->dst
122 */ 109 */
@@ -211,9 +198,6 @@ static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
211 append_key_aead(desc, ctx, keys_fit_inline); 198 append_key_aead(desc, ctx, keys_fit_inline);
212 199
213 set_jump_tgt_here(desc, key_jump_cmd); 200 set_jump_tgt_here(desc, key_jump_cmd);
214
215 /* Propagate errors from shared to job descriptor */
216 append_cmd(desc, SET_OK_NO_PROP_ERRORS | CMD_LOAD);
217} 201}
218 202
219static int aead_set_sh_desc(struct crypto_aead *aead) 203static int aead_set_sh_desc(struct crypto_aead *aead)
@@ -222,7 +206,6 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
222 struct caam_ctx *ctx = crypto_aead_ctx(aead); 206 struct caam_ctx *ctx = crypto_aead_ctx(aead);
223 struct device *jrdev = ctx->jrdev; 207 struct device *jrdev = ctx->jrdev;
224 bool keys_fit_inline = false; 208 bool keys_fit_inline = false;
225 u32 *key_jump_cmd, *jump_cmd;
226 u32 geniv, moveiv; 209 u32 geniv, moveiv;
227 u32 *desc; 210 u32 *desc;
228 211
@@ -253,7 +236,7 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
253 /* assoclen + cryptlen = seqinlen - ivsize */ 236 /* assoclen + cryptlen = seqinlen - ivsize */
254 append_math_sub_imm_u32(desc, REG2, SEQINLEN, IMM, tfm->ivsize); 237 append_math_sub_imm_u32(desc, REG2, SEQINLEN, IMM, tfm->ivsize);
255 238
256 /* assoclen + cryptlen = (assoclen + cryptlen) - cryptlen */ 239 /* assoclen = (assoclen + cryptlen) - cryptlen */
257 append_math_sub(desc, VARSEQINLEN, REG2, REG3, CAAM_CMD_SZ); 240 append_math_sub(desc, VARSEQINLEN, REG2, REG3, CAAM_CMD_SZ);
258 241
259 /* read assoc before reading payload */ 242 /* read assoc before reading payload */
@@ -296,28 +279,16 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
296 CAAM_DESC_BYTES_MAX) 279 CAAM_DESC_BYTES_MAX)
297 keys_fit_inline = true; 280 keys_fit_inline = true;
298 281
299 desc = ctx->sh_desc_dec;
300
301 /* aead_decrypt shared descriptor */ 282 /* aead_decrypt shared descriptor */
302 init_sh_desc(desc, HDR_SHARE_SERIAL); 283 desc = ctx->sh_desc_dec;
303
304 /* Skip if already shared */
305 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
306 JUMP_COND_SHRD);
307
308 append_key_aead(desc, ctx, keys_fit_inline);
309 284
310 /* Only propagate error immediately if shared */ 285 init_sh_desc_key_aead(desc, ctx, keys_fit_inline);
311 jump_cmd = append_jump(desc, JUMP_TEST_ALL);
312 set_jump_tgt_here(desc, key_jump_cmd);
313 append_cmd(desc, SET_OK_NO_PROP_ERRORS | CMD_LOAD);
314 set_jump_tgt_here(desc, jump_cmd);
315 286
316 /* Class 2 operation */ 287 /* Class 2 operation */
317 append_operation(desc, ctx->class2_alg_type | 288 append_operation(desc, ctx->class2_alg_type |
318 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON); 289 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
319 290
320 /* assoclen + cryptlen = seqinlen - ivsize */ 291 /* assoclen + cryptlen = seqinlen - ivsize - authsize */
321 append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM, 292 append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM,
322 ctx->authsize + tfm->ivsize) 293 ctx->authsize + tfm->ivsize)
323 /* assoclen = (assoclen + cryptlen) - cryptlen */ 294 /* assoclen = (assoclen + cryptlen) - cryptlen */
@@ -340,7 +311,6 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
340 /* Load ICV */ 311 /* Load ICV */
341 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 | 312 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
342 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV); 313 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
343 append_dec_shr_done(desc);
344 314
345 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, 315 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
346 desc_bytes(desc), 316 desc_bytes(desc),
@@ -532,7 +502,7 @@ static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
532 struct ablkcipher_tfm *tfm = &ablkcipher->base.crt_ablkcipher; 502 struct ablkcipher_tfm *tfm = &ablkcipher->base.crt_ablkcipher;
533 struct device *jrdev = ctx->jrdev; 503 struct device *jrdev = ctx->jrdev;
534 int ret = 0; 504 int ret = 0;
535 u32 *key_jump_cmd, *jump_cmd; 505 u32 *key_jump_cmd;
536 u32 *desc; 506 u32 *desc;
537 507
538#ifdef DEBUG 508#ifdef DEBUG
@@ -563,9 +533,6 @@ static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
563 533
564 set_jump_tgt_here(desc, key_jump_cmd); 534 set_jump_tgt_here(desc, key_jump_cmd);
565 535
566 /* Propagate errors from shared to job descriptor */
567 append_cmd(desc, SET_OK_NO_PROP_ERRORS | CMD_LOAD);
568
569 /* Load iv */ 536 /* Load iv */
570 append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT | 537 append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
571 LDST_CLASS_1_CCB | tfm->ivsize); 538 LDST_CLASS_1_CCB | tfm->ivsize);
@@ -603,11 +570,7 @@ static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
603 ctx->enckeylen, CLASS_1 | 570 ctx->enckeylen, CLASS_1 |
604 KEY_DEST_CLASS_REG); 571 KEY_DEST_CLASS_REG);
605 572
606 /* For aead, only propagate error immediately if shared */
607 jump_cmd = append_jump(desc, JUMP_TEST_ALL);
608 set_jump_tgt_here(desc, key_jump_cmd); 573 set_jump_tgt_here(desc, key_jump_cmd);
609 append_cmd(desc, SET_OK_NO_PROP_ERRORS | CMD_LOAD);
610 set_jump_tgt_here(desc, jump_cmd);
611 574
612 /* load IV */ 575 /* load IV */
613 append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT | 576 append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
@@ -619,9 +582,6 @@ static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
619 /* Perform operation */ 582 /* Perform operation */
620 ablkcipher_append_src_dst(desc); 583 ablkcipher_append_src_dst(desc);
621 584
622 /* Wait for key to load before allowing propagating error */
623 append_dec_shr_done(desc);
624
625 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, 585 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
626 desc_bytes(desc), 586 desc_bytes(desc),
627 DMA_TO_DEVICE); 587 DMA_TO_DEVICE);