path: root/drivers/crypto
author	Horia Geanta <horia.geanta@freescale.com>	2014-03-14 11:46:52 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2014-03-21 09:54:27 -0400
commit	ae4a825ffdd6fd769af2667e03070940af1b8368 (patch)
tree	8bfcd6d60ccaa448498af537a134910513c9946c /drivers/crypto
parent	bca4feb0d4fe2d5da1a0f31ef89f63709aba4906 (diff)
crypto: caam - add support for aead null encryption
Add support for the following combinations:
- encryption: null
- authentication: md5, sha* (1, 224, 256, 384, 512)

Signed-off-by: Tudor Ambarus <tudor.ambarus@freescale.com>
Signed-off-by: Horia Geanta <horia.geanta@freescale.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto')
-rw-r--r--	drivers/crypto/caam/caamalg.c	327
-rw-r--r--	drivers/crypto/caam/compat.h	1
-rw-r--r--	drivers/crypto/caam/desc_constr.h	27
3 files changed, 342 insertions(+), 13 deletions(-)
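The algorithms registered by this patch become reachable through the regular kernel AEAD API once the caam driver binds. Below is a minimal, hypothetical sketch of a caller; it uses the current AEAD interface (aead_request_set_ad() and crypto_wait_req(), which post-date this 3.14-era patch) and assumes the key blob is already packed in the authenc format (zero-length encryption key plus the HMAC key), so treat it as illustrative rather than code from the patch.

#include <crypto/aead.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Hypothetical consumer: authenticate-only "encryption" with cipher_null. */
static int caam_null_authenc_demo(struct scatterlist *sg, unsigned int assoclen,
				  unsigned int cryptlen, const u8 *authenc_key,
				  unsigned int keylen)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_aead("authenc(hmac(sha256),ecb(cipher_null))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* authenc_key: rtattr-encoded enckeylen (0 for cipher_null) + HMAC key */
	err = crypto_aead_setkey(tfm, authenc_key, keylen);
	if (!err)
		err = crypto_aead_setauthsize(tfm, 16);	/* truncated ICV, as IPsec uses */
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/*
	 * sg: assoclen bytes of associated data, cryptlen bytes of payload,
	 * then room for the ICV.  iv is NULL because NULL_IV_SIZE is 0.
	 */
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP, crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, sg, sg, cryptlen, NULL);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}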
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index 5016e63b6c25..a9ba8b159636 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -70,6 +70,10 @@
 #define DESC_AEAD_DEC_LEN		(DESC_AEAD_BASE + 18 * CAAM_CMD_SZ)
 #define DESC_AEAD_GIVENC_LEN		(DESC_AEAD_ENC_LEN + 7 * CAAM_CMD_SZ)
 
+#define DESC_AEAD_NULL_BASE		(3 * CAAM_CMD_SZ)
+#define DESC_AEAD_NULL_ENC_LEN		(DESC_AEAD_NULL_BASE + 14 * CAAM_CMD_SZ)
+#define DESC_AEAD_NULL_DEC_LEN		(DESC_AEAD_NULL_BASE + 17 * CAAM_CMD_SZ)
+
 #define DESC_ABLKCIPHER_BASE		(3 * CAAM_CMD_SZ)
 #define DESC_ABLKCIPHER_ENC_LEN	(DESC_ABLKCIPHER_BASE + \
 					 20 * CAAM_CMD_SZ)
@@ -109,9 +113,9 @@ static inline void append_dec_op1(u32 *desc, u32 type)
  */
 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
 {
+	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
 			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
-	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
 }
 
 /*
@@ -200,6 +204,196 @@ static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
 	set_jump_tgt_here(desc, key_jump_cmd);
 }
 
+static int aead_null_set_sh_desc(struct crypto_aead *aead)
+{
+	struct aead_tfm *tfm = &aead->base.crt_aead;
+	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct device *jrdev = ctx->jrdev;
+	bool keys_fit_inline = false;
+	u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
+	u32 *desc;
+
+	/*
+	 * Job Descriptor and Shared Descriptors
+	 * must all fit into the 64-word Descriptor h/w Buffer
+	 */
+	if (DESC_AEAD_NULL_ENC_LEN + DESC_JOB_IO_LEN +
+	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
+		keys_fit_inline = true;
+
+	/* aead_encrypt shared descriptor */
+	desc = ctx->sh_desc_enc;
+
+	init_sh_desc(desc, HDR_SHARE_SERIAL);
+
+	/* Skip if already shared */
+	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+				   JUMP_COND_SHRD);
+	if (keys_fit_inline)
+		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
+				  ctx->split_key_len, CLASS_2 |
+				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	else
+		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
+			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	set_jump_tgt_here(desc, key_jump_cmd);
+
+	/* cryptlen = seqoutlen - authsize */
+	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);
+
+	/*
+	 * NULL encryption; IV is zero
+	 * assoclen = (assoclen + cryptlen) - cryptlen
+	 */
+	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
+
+	/* read assoc before reading payload */
+	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
+			     KEY_VLF);
+
+	/* Prepare to read and write cryptlen bytes */
+	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
+	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
+
+	/*
+	 * MOVE_LEN opcode is not available in all SEC HW revisions,
+	 * thus need to do some magic, i.e. self-patch the descriptor
+	 * buffer.
+	 */
+	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
+				    MOVE_DEST_MATH3 |
+				    (0x6 << MOVE_LEN_SHIFT));
+	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
+				     MOVE_DEST_DESCBUF |
+				     MOVE_WAITCOMP |
+				     (0x8 << MOVE_LEN_SHIFT));
+
+	/* Class 2 operation */
+	append_operation(desc, ctx->class2_alg_type |
+			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
+
+	/* Read and write cryptlen bytes */
+	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
+
+	set_move_tgt_here(desc, read_move_cmd);
+	set_move_tgt_here(desc, write_move_cmd);
+	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
+	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
+		    MOVE_AUX_LS);
+
+	/* Write ICV */
+	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
+			 LDST_SRCDST_BYTE_CONTEXT);
+
+	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
+					      desc_bytes(desc),
+					      DMA_TO_DEVICE);
+	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
+		dev_err(jrdev, "unable to map shared descriptor\n");
+		return -ENOMEM;
+	}
+#ifdef DEBUG
+	print_hex_dump(KERN_ERR,
+		       "aead null enc shdesc@"__stringify(__LINE__)": ",
+		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
+		       desc_bytes(desc), 1);
+#endif
+
+	/*
+	 * Job Descriptor and Shared Descriptors
+	 * must all fit into the 64-word Descriptor h/w Buffer
+	 */
+	if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
+	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
+		keys_fit_inline = true;
+
+	desc = ctx->sh_desc_dec;
+
+	/* aead_decrypt shared descriptor */
+	init_sh_desc(desc, HDR_SHARE_SERIAL);
+
+	/* Skip if already shared */
+	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+				   JUMP_COND_SHRD);
+	if (keys_fit_inline)
+		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
+				  ctx->split_key_len, CLASS_2 |
+				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	else
+		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
+			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	set_jump_tgt_here(desc, key_jump_cmd);
+
+	/* Class 2 operation */
+	append_operation(desc, ctx->class2_alg_type |
+			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
+
+	/* assoclen + cryptlen = seqinlen - ivsize - authsize */
+	append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM,
+				ctx->authsize + tfm->ivsize);
+	/* assoclen = (assoclen + cryptlen) - cryptlen */
+	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
+	append_math_sub(desc, VARSEQINLEN, REG3, REG2, CAAM_CMD_SZ);
+
+	/* read assoc before reading payload */
+	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
+			     KEY_VLF);
+
+	/* Prepare to read and write cryptlen bytes */
+	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
+	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
+
+	/*
+	 * MOVE_LEN opcode is not available in all SEC HW revisions,
+	 * thus need to do some magic, i.e. self-patch the descriptor
+	 * buffer.
+	 */
+	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
+				    MOVE_DEST_MATH2 |
+				    (0x6 << MOVE_LEN_SHIFT));
+	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
+				     MOVE_DEST_DESCBUF |
+				     MOVE_WAITCOMP |
+				     (0x8 << MOVE_LEN_SHIFT));
+
+	/* Read and write cryptlen bytes */
+	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
+
+	/*
+	 * Insert a NOP here, since we need at least 4 instructions between
+	 * code patching the descriptor buffer and the location being patched.
+	 */
+	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
+	set_jump_tgt_here(desc, jump_cmd);
+
+	set_move_tgt_here(desc, read_move_cmd);
+	set_move_tgt_here(desc, write_move_cmd);
+	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
+	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
+		    MOVE_AUX_LS);
+	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
+
+	/* Load ICV */
+	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
+			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
+
+	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
+					      desc_bytes(desc),
+					      DMA_TO_DEVICE);
+	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
+		dev_err(jrdev, "unable to map shared descriptor\n");
+		return -ENOMEM;
+	}
+#ifdef DEBUG
+	print_hex_dump(KERN_ERR,
+		       "aead null dec shdesc@"__stringify(__LINE__)": ",
+		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
+		       desc_bytes(desc), 1);
+#endif
+
+	return 0;
+}
+
 static int aead_set_sh_desc(struct crypto_aead *aead)
 {
 	struct aead_tfm *tfm = &aead->base.crt_aead;
@@ -209,9 +403,13 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
 	u32 geniv, moveiv;
 	u32 *desc;
 
-	if (!ctx->enckeylen || !ctx->authsize)
+	if (!ctx->authsize)
 		return 0;
 
+	/* NULL encryption / decryption */
+	if (!ctx->enckeylen)
+		return aead_null_set_sh_desc(aead);
+
 	/*
 	 * Job Descriptor and Shared Descriptors
 	 * must all fit into the 64-word Descriptor h/w Buffer
@@ -290,7 +488,7 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
 
 	/* assoclen + cryptlen = seqinlen - ivsize - authsize */
 	append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM,
-				ctx->authsize + tfm->ivsize)
+				ctx->authsize + tfm->ivsize);
 	/* assoclen = (assoclen + cryptlen) - cryptlen */
 	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
 	append_math_sub(desc, VARSEQINLEN, REG3, REG2, CAAM_CMD_SZ);
@@ -1419,6 +1617,11 @@ static int aead_givencrypt(struct aead_givcrypt_request *areq)
 	return ret;
 }
 
+static int aead_null_givencrypt(struct aead_givcrypt_request *areq)
+{
+	return aead_encrypt(&areq->areq);
+}
+
 /*
  * allocate and map the ablkcipher extended descriptor for ablkcipher
  */
@@ -1608,6 +1811,124 @@ struct caam_alg_template {
 static struct caam_alg_template driver_algs[] = {
 	/* single-pass ipsec_esp descriptor */
 	{
+		.name = "authenc(hmac(md5),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-md5-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = MD5_DIGEST_SIZE,
+			},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha1),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha1-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA1_DIGEST_SIZE,
+			},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha224),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha224-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA224_DIGEST_SIZE,
+			},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha256),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha256-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA256_DIGEST_SIZE,
+			},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha384),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha384-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA384_DIGEST_SIZE,
+			},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha512),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha512-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA512_DIGEST_SIZE,
+			},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
+	},
+	{
 		.name = "authenc(hmac(md5),cbc(aes))",
 		.driver_name = "authenc-hmac-md5-cbc-aes-caam",
 		.blocksize = AES_BLOCK_SIZE,
diff --git a/drivers/crypto/caam/compat.h b/drivers/crypto/caam/compat.h
index 762aeff626ac..f227922cea38 100644
--- a/drivers/crypto/caam/compat.h
+++ b/drivers/crypto/caam/compat.h
@@ -26,6 +26,7 @@
 #include <net/xfrm.h>
 
 #include <crypto/algapi.h>
+#include <crypto/null.h>
 #include <crypto/aes.h>
 #include <crypto/des.h>
 #include <crypto/sha.h>
diff --git a/drivers/crypto/caam/desc_constr.h b/drivers/crypto/caam/desc_constr.h
index cd5f678847ce..7eec20bb3849 100644
--- a/drivers/crypto/caam/desc_constr.h
+++ b/drivers/crypto/caam/desc_constr.h
@@ -155,21 +155,29 @@ static inline void append_cmd_data(u32 *desc, void *data, int len,
 	append_data(desc, data, len);
 }
 
-static inline u32 *append_jump(u32 *desc, u32 options)
-{
-	u32 *cmd = desc_end(desc);
-
-	PRINT_POS;
-	append_cmd(desc, CMD_JUMP | options);
-
-	return cmd;
+#define APPEND_CMD_RET(cmd, op) \
+static inline u32 *append_##cmd(u32 *desc, u32 options) \
+{ \
+	u32 *cmd = desc_end(desc); \
+	PRINT_POS; \
+	append_cmd(desc, CMD_##op | options); \
+	return cmd; \
 }
+APPEND_CMD_RET(jump, JUMP)
+APPEND_CMD_RET(move, MOVE)
 
 static inline void set_jump_tgt_here(u32 *desc, u32 *jump_cmd)
 {
 	*jump_cmd = *jump_cmd | (desc_len(desc) - (jump_cmd - desc));
 }
 
+static inline void set_move_tgt_here(u32 *desc, u32 *move_cmd)
+{
+	*move_cmd &= ~MOVE_OFFSET_MASK;
+	*move_cmd = *move_cmd | ((desc_len(desc) << (MOVE_OFFSET_SHIFT + 2)) &
+				 MOVE_OFFSET_MASK);
+}
+
 #define APPEND_CMD(cmd, op) \
 static inline void append_##cmd(u32 *desc, u32 options) \
 { \
@@ -177,7 +185,6 @@ static inline void append_##cmd(u32 *desc, u32 options) \
 	append_cmd(desc, CMD_##op | options); \
 }
 APPEND_CMD(operation, OPERATION)
-APPEND_CMD(move, MOVE)
 
 #define APPEND_CMD_LEN(cmd, op) \
 static inline void append_##cmd(u32 *desc, unsigned int len, u32 options) \
@@ -328,7 +335,7 @@ append_cmd(desc, CMD_MATH | MATH_FUN_##op | MATH_DEST_##dest | \
 do { \
 	APPEND_MATH(op, desc, dest, src_0, src_1, CAAM_CMD_SZ); \
 	append_cmd(desc, data); \
-} while (0);
+} while (0)
 
 #define append_math_add_imm_u32(desc, dest, src0, src1, data) \
 	APPEND_MATH_IMM_u32(ADD, desc, dest, src0, src1, data)
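
For readers following the desc_constr.h change above: a sketch of what the new APPEND_CMD_RET() macro expands to for MOVE, and how caamalg.c pairs the returned pointer with set_move_tgt_here() to self-patch the descriptor. Everything here is restated from the diff rather than new driver code.

/* APPEND_CMD_RET(move, MOVE) generates roughly this inline helper: */
static inline u32 *append_move(u32 *desc, u32 options)
{
	u32 *cmd = desc_end(desc);	/* descriptor word the MOVE will occupy */

	PRINT_POS;
	append_cmd(desc, CMD_MOVE | options);
	return cmd;			/* saved so its offset field can be patched later */
}

/*
 * Usage pattern in aead_null_set_sh_desc(): emit the MOVE first, keep the
 * returned pointer, then call set_move_tgt_here() once the target position
 * in the descriptor is known:
 *
 *	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
 *				    (0x6 << MOVE_LEN_SHIFT));
 *	...
 *	set_move_tgt_here(desc, read_move_cmd);
 *
 * set_move_tgt_here() clears MOVE_OFFSET_MASK and writes the current
 * descriptor length, converted to bytes and shifted into the offset field,
 * which is how the driver works around SEC revisions that lack the MOVE_LEN
 * opcode.
 */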