-rw-r--r--  crypto/tcrypt.c                 4
-rw-r--r--  drivers/crypto/caam/caamalg.c  51
2 files changed, 33 insertions, 22 deletions
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 1ab8258fcf56..001f07cdb828 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -1242,6 +1242,10 @@ static int do_test(int m)
 		ret += tcrypt_test("cmac(des3_ede)");
 		break;
 
+	case 155:
+		ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
+		break;
+
 	case 200:
 		test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
 				speed_template_16_24_32);
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index 4f44b71b9e24..4cf5dec826e1 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -818,7 +818,7 @@ static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
 		       ivsize, 1);
 	print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
 		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->dst),
-		       req->cryptlen, 1);
+		       req->cryptlen - ctx->authsize, 1);
 #endif
 
 	if (err) {
@@ -972,12 +972,9 @@ static void init_aead_job(u32 *sh_desc, dma_addr_t ptr,
 			 (edesc->src_nents ? : 1);
 		in_options = LDST_SGF;
 	}
-	if (encrypt)
-		append_seq_in_ptr(desc, src_dma, req->assoclen + ivsize +
-				  req->cryptlen - authsize, in_options);
-	else
-		append_seq_in_ptr(desc, src_dma, req->assoclen + ivsize +
-				  req->cryptlen, in_options);
+
+	append_seq_in_ptr(desc, src_dma, req->assoclen + ivsize + req->cryptlen,
+			  in_options);
 
 	if (likely(req->src == req->dst)) {
 		if (all_contig) {
@@ -998,7 +995,8 @@ static void init_aead_job(u32 *sh_desc, dma_addr_t ptr,
 		}
 	}
 	if (encrypt)
-		append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
+		append_seq_out_ptr(desc, dst_dma, req->cryptlen + authsize,
+				   out_options);
 	else
 		append_seq_out_ptr(desc, dst_dma, req->cryptlen - authsize,
 				   out_options);
@@ -1048,8 +1046,8 @@ static void init_aead_giv_job(u32 *sh_desc, dma_addr_t ptr,
 		sec4_sg_index += edesc->assoc_nents + 1 + edesc->src_nents;
 		in_options = LDST_SGF;
 	}
-	append_seq_in_ptr(desc, src_dma, req->assoclen + ivsize +
-			  req->cryptlen - authsize, in_options);
+	append_seq_in_ptr(desc, src_dma, req->assoclen + ivsize + req->cryptlen,
+			  in_options);
 
 	if (contig & GIV_DST_CONTIG) {
 		dst_dma = edesc->iv_dma;
@@ -1066,7 +1064,8 @@ static void init_aead_giv_job(u32 *sh_desc, dma_addr_t ptr,
 		}
 	}
 
-	append_seq_out_ptr(desc, dst_dma, ivsize + req->cryptlen, out_options);
+	append_seq_out_ptr(desc, dst_dma, ivsize + req->cryptlen + authsize,
+			   out_options);
 }
 
 /*
@@ -1130,7 +1129,8 @@ static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
  * allocate and map the aead extended descriptor
  */
 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
-					   int desc_bytes, bool *all_contig_ptr)
+					   int desc_bytes, bool *all_contig_ptr,
+					   bool encrypt)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
@@ -1145,12 +1145,22 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
 	bool assoc_chained = false, src_chained = false, dst_chained = false;
 	int ivsize = crypto_aead_ivsize(aead);
 	int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
+	unsigned int authsize = ctx->authsize;
 
 	assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained);
-	src_nents = sg_count(req->src, req->cryptlen, &src_chained);
 
-	if (unlikely(req->dst != req->src))
-		dst_nents = sg_count(req->dst, req->cryptlen, &dst_chained);
+	if (unlikely(req->dst != req->src)) {
+		src_nents = sg_count(req->src, req->cryptlen, &src_chained);
+		dst_nents = sg_count(req->dst,
+				     req->cryptlen +
+					(encrypt ? authsize : (-authsize)),
+				     &dst_chained);
+	} else {
+		src_nents = sg_count(req->src,
+				     req->cryptlen +
+					(encrypt ? authsize : 0),
+				     &src_chained);
+	}
 
 	sgc = dma_map_sg_chained(jrdev, req->assoc, assoc_nents ? : 1,
 				 DMA_TO_DEVICE, assoc_chained);
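The hunk above encodes where the ICV ends up when counting scatterlist entries: with distinct buffers the destination is walked over cryptlen + authsize bytes on encryption (ciphertext plus appended tag) and cryptlen - authsize bytes on decryption (plaintext only), while an in-place request must have its single buffer cover cryptlen + authsize on encryption because the tag is written back into it. A standalone user-space sketch of that accounting, with a hypothetical helper name that is not part of the driver:

	/*
	 * Illustrative only: mirrors the spans the reworked aead_edesc_alloc()
	 * passes to sg_count(); 'in_place' corresponds to req->src == req->dst.
	 */
	#include <assert.h>

	static unsigned int aead_buf_span(unsigned int cryptlen,
					  unsigned int authsize,
					  int encrypt, int in_place, int is_dst)
	{
		if (in_place)
			/* single buffer: must also hold the ICV when encrypting */
			return cryptlen + (encrypt ? authsize : 0);
		if (is_dst)
			/* separate dst: gains the ICV on encrypt, loses it on decrypt */
			return encrypt ? cryptlen + authsize : cryptlen - authsize;
		/* a separate src is walked over exactly cryptlen bytes */
		return cryptlen;
	}

	int main(void)
	{
		/* e.g. 512-byte payload, 20-byte HMAC-SHA1 ICV */
		assert(aead_buf_span(512, 20, 1, 0, 1) == 532);	/* encrypt, dst  */
		assert(aead_buf_span(532, 20, 0, 0, 1) == 512);	/* decrypt, dst  */
		assert(aead_buf_span(512, 20, 1, 1, 0) == 532);	/* encrypt, in place */
		return 0;
	}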
@@ -1234,11 +1244,9 @@ static int aead_encrypt(struct aead_request *req)
 	u32 *desc;
 	int ret = 0;
 
-	req->cryptlen += ctx->authsize;
-
 	/* allocate extended descriptor */
 	edesc = aead_edesc_alloc(req, DESC_JOB_IO_LEN *
-				 CAAM_CMD_SZ, &all_contig);
+				 CAAM_CMD_SZ, &all_contig, true);
 	if (IS_ERR(edesc))
 		return PTR_ERR(edesc);
 
@@ -1275,7 +1283,7 @@ static int aead_decrypt(struct aead_request *req)
 
 	/* allocate extended descriptor */
 	edesc = aead_edesc_alloc(req, DESC_JOB_IO_LEN *
-				 CAAM_CMD_SZ, &all_contig);
+				 CAAM_CMD_SZ, &all_contig, false);
 	if (IS_ERR(edesc))
 		return PTR_ERR(edesc);
 
@@ -1332,7 +1340,8 @@ static struct aead_edesc *aead_giv_edesc_alloc(struct aead_givcrypt_request
 	src_nents = sg_count(req->src, req->cryptlen, &src_chained);
 
 	if (unlikely(req->dst != req->src))
-		dst_nents = sg_count(req->dst, req->cryptlen, &dst_chained);
+		dst_nents = sg_count(req->dst, req->cryptlen + ctx->authsize,
+				     &dst_chained);
 
 	sgc = dma_map_sg_chained(jrdev, req->assoc, assoc_nents ? : 1,
 				 DMA_TO_DEVICE, assoc_chained);
@@ -1426,8 +1435,6 @@ static int aead_givencrypt(struct aead_givcrypt_request *areq)
 	u32 *desc;
 	int ret = 0;
 
-	req->cryptlen += ctx->authsize;
-
 	/* allocate extended descriptor */
 	edesc = aead_giv_edesc_alloc(areq, DESC_JOB_IO_LEN *
 				     CAAM_CMD_SZ, &contig);
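Taken together, the caamalg.c changes stop aead_encrypt() and aead_givencrypt() from temporarily inflating req->cryptlen by ctx->authsize; the descriptor setup now owns all ICV accounting. The input sequence pointer always spans assoclen + ivsize + cryptlen, while the output sequence grows by authsize when encrypting and shrinks by it when decrypting. A minimal user-space sketch of that arithmetic (illustrative struct and function names, not driver code):

	#include <stdio.h>

	struct lengths {
		unsigned int seq_in;	/* bytes the CAAM job reads  */
		unsigned int seq_out;	/* bytes the CAAM job writes */
	};

	static struct lengths aead_job_lengths(unsigned int assoclen,
					       unsigned int ivsize,
					       unsigned int cryptlen,
					       unsigned int authsize,
					       int encrypt)
	{
		struct lengths l;

		/* input is always assoc data + IV + payload, as in init_aead_job() */
		l.seq_in = assoclen + ivsize + cryptlen;
		/* encryption appends the ICV; decryption strips it */
		l.seq_out = encrypt ? cryptlen + authsize : cryptlen - authsize;
		return l;
	}

	int main(void)
	{
		/* example: 24 bytes of assoc data, 16-byte IV, 20-byte ICV */
		struct lengths enc = aead_job_lengths(24, 16, 512, 20, 1);
		struct lengths dec = aead_job_lengths(24, 16, 512 + 20, 20, 0);

		printf("encrypt: in=%u out=%u\n", enc.seq_in, enc.seq_out);
		printf("decrypt: in=%u out=%u\n", dec.seq_in, dec.seq_out);
		return 0;
	}

On decryption req->cryptlen as submitted by the caller already includes the ICV, which is why the destination side of the job (and the dst scatterlist counting above) subtracts authsize rather than adding it.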