Diffstat (limited to 'drivers/crypto/inside-secure/safexcel_cipher.c')
-rw-r--r--  drivers/crypto/inside-secure/safexcel_cipher.c | 85
1 file changed, 65 insertions(+), 20 deletions(-)
diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index 5438552bc6d7..fcc0a606d748 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -14,6 +14,7 @@
 
 #include <crypto/aes.h>
 #include <crypto/skcipher.h>
+#include <crypto/internal/skcipher.h>
 
 #include "safexcel.h"
 
@@ -33,6 +34,10 @@ struct safexcel_cipher_ctx {
 	unsigned int key_len;
 };
 
+struct safexcel_cipher_req {
+	bool needs_inv;
+};
+
 static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx,
 				  struct crypto_async_request *async,
 				  struct safexcel_command_desc *cdesc,
@@ -126,9 +131,9 @@ static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 	return 0;
 }
 
-static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
-				  struct crypto_async_request *async,
-				  bool *should_complete, int *ret)
+static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
+				      struct crypto_async_request *async,
+				      bool *should_complete, int *ret)
 {
 	struct skcipher_request *req = skcipher_request_cast(async);
 	struct safexcel_result_desc *rdesc;
@@ -265,7 +270,6 @@ static int safexcel_aes_send(struct crypto_async_request *async,
 	spin_unlock_bh(&priv->ring[ring].egress_lock);
 
 	request->req = &req->base;
-	ctx->base.handle_result = safexcel_handle_result;
 
 	*commands = n_cdesc;
 	*results = n_rdesc;
@@ -341,8 +345,6 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 
 	ring = safexcel_select_ring(priv);
 	ctx->base.ring = ring;
-	ctx->base.needs_inv = false;
-	ctx->base.send = safexcel_aes_send;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
@@ -359,6 +361,26 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 	return ndesc;
 }
 
+static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
+				  struct crypto_async_request *async,
+				  bool *should_complete, int *ret)
+{
+	struct skcipher_request *req = skcipher_request_cast(async);
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
+	int err;
+
+	if (sreq->needs_inv) {
+		sreq->needs_inv = false;
+		err = safexcel_handle_inv_result(priv, ring, async,
+						 should_complete, ret);
+	} else {
+		err = safexcel_handle_req_result(priv, ring, async,
+						 should_complete, ret);
+	}
+
+	return err;
+}
+
 static int safexcel_cipher_send_inv(struct crypto_async_request *async,
 				    int ring, struct safexcel_request *request,
 				    int *commands, int *results)
@@ -368,8 +390,6 @@ static int safexcel_cipher_send_inv(struct crypto_async_request *async,
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret;
 
-	ctx->base.handle_result = safexcel_handle_inv_result;
-
 	ret = safexcel_invalidate_cache(async, &ctx->base, priv,
 					ctx->base.ctxr_dma, ring, request);
 	if (unlikely(ret))
@@ -381,28 +401,46 @@ static int safexcel_cipher_send_inv(struct crypto_async_request *async,
 	return 0;
 }
 
+static int safexcel_send(struct crypto_async_request *async,
+			 int ring, struct safexcel_request *request,
+			 int *commands, int *results)
+{
+	struct skcipher_request *req = skcipher_request_cast(async);
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
+	int ret;
+
+	if (sreq->needs_inv)
+		ret = safexcel_cipher_send_inv(async, ring, request,
+					       commands, results);
+	else
+		ret = safexcel_aes_send(async, ring, request,
+					commands, results);
+	return ret;
+}
+
 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
-	struct skcipher_request req;
+	SKCIPHER_REQUEST_ON_STACK(req, __crypto_skcipher_cast(tfm));
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 	struct safexcel_inv_result result = {};
 	int ring = ctx->base.ring;
 
-	memset(&req, 0, sizeof(struct skcipher_request));
+	memset(req, 0, sizeof(struct skcipher_request));
 
 	/* create invalidation request */
 	init_completion(&result.completion);
-	skcipher_request_set_callback(&req, CRYPTO_TFM_REQ_MAY_BACKLOG,
+	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 				      safexcel_inv_complete, &result);
 
-	skcipher_request_set_tfm(&req, __crypto_skcipher_cast(tfm));
-	ctx = crypto_tfm_ctx(req.base.tfm);
+	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
+	ctx = crypto_tfm_ctx(req->base.tfm);
 	ctx->base.exit_inv = true;
-	ctx->base.send = safexcel_cipher_send_inv;
+	sreq->needs_inv = true;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
-	crypto_enqueue_request(&priv->ring[ring].queue, &req.base);
+	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
 	spin_unlock_bh(&priv->ring[ring].queue_lock);
 
 	if (!priv->ring[ring].need_dequeue)
@@ -424,19 +462,21 @@ static int safexcel_aes(struct skcipher_request *req,
 			enum safexcel_cipher_direction dir, u32 mode)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret, ring;
 
+	sreq->needs_inv = false;
 	ctx->direction = dir;
 	ctx->mode = mode;
 
 	if (ctx->base.ctxr) {
-		if (ctx->base.needs_inv)
-			ctx->base.send = safexcel_cipher_send_inv;
+		if (ctx->base.needs_inv) {
+			sreq->needs_inv = true;
+			ctx->base.needs_inv = false;
+		}
 	} else {
 		ctx->base.ring = safexcel_select_ring(priv);
-		ctx->base.send = safexcel_aes_send;
-
 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
 						 EIP197_GFP_FLAGS(req->base),
 						 &ctx->base.ctxr_dma);
@@ -476,6 +516,11 @@ static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
 				     alg.skcipher.base);
 
 	ctx->priv = tmpl->priv;
+	ctx->base.send = safexcel_send;
+	ctx->base.handle_result = safexcel_handle_result;
+
+	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
+				    sizeof(struct safexcel_cipher_req));
 
 	return 0;
 }