 drivers/crypto/ux500/cryp/cryp.c      |  4
 drivers/crypto/ux500/cryp/cryp_core.c | 26
 drivers/crypto/ux500/hash/hash_core.c | 33
 drivers/dma/ste_dma40.c               | 18
 4 files changed, 40 insertions(+), 41 deletions(-)
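
This set converts the ux500 cryp and hash drivers from the raw dmaengine device hooks to the inline wrapper API (dmaengine_prep_slave_sg(), dmaengine_submit(), dmaengine_device_control()), adds the missing __iomem annotations and readl() accessors for register access, makes file-local symbols static, and cleans up the d40_probe() error handling to use a single return variable and propagate PTR_ERR().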
diff --git a/drivers/crypto/ux500/cryp/cryp.c b/drivers/crypto/ux500/cryp/cryp.c
index 3eafa903ebcd..43a0c8a26ab0 100644
--- a/drivers/crypto/ux500/cryp/cryp.c
+++ b/drivers/crypto/ux500/cryp/cryp.c
@@ -291,7 +291,7 @@ void cryp_save_device_context(struct cryp_device_data *device_data,
 			     int cryp_mode)
 {
 	enum cryp_algo_mode algomode;
-	struct cryp_register *src_reg = device_data->base;
+	struct cryp_register __iomem *src_reg = device_data->base;
 	struct cryp_config *config =
 		(struct cryp_config *)device_data->current_ctx;
 
@@ -349,7 +349,7 @@ void cryp_save_device_context(struct cryp_device_data *device_data,
 void cryp_restore_device_context(struct cryp_device_data *device_data,
 				 struct cryp_device_context *ctx)
 {
-	struct cryp_register *reg = device_data->base;
+	struct cryp_register __iomem *reg = device_data->base;
 	struct cryp_config *config =
 		(struct cryp_config *)device_data->current_ctx;
 
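
The cryp.c change is purely an annotation fix: device_data->base points at memory-mapped registers, so the pointer must carry __iomem for sparse to check that every access goes through the MMIO accessors. A minimal sketch of the pattern, using a hypothetical register block (example_regs is illustrative, not from the driver):

#include <linux/types.h>
#include <linux/io.h>

struct example_regs {		/* hypothetical register layout */
	u32 cr;			/* control register */
	u32 sr;			/* status register */
};

static u32 example_read_status(struct example_regs __iomem *regs)
{
	/* A plain regs->sr dereference would be flagged by sparse;
	 * readl() performs the access through the MMIO accessors. */
	return readl(&regs->sr);
}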
diff --git a/drivers/crypto/ux500/cryp/cryp_core.c b/drivers/crypto/ux500/cryp/cryp_core.c
index 4f8b11af29a6..0257f6b32642 100644
--- a/drivers/crypto/ux500/cryp/cryp_core.c
+++ b/drivers/crypto/ux500/cryp/cryp_core.c
@@ -553,10 +553,10 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
 		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
 			"(TO_DEVICE)", __func__);
 
-		desc = channel->device->device_prep_slave_sg(channel,
+		desc = dmaengine_prep_slave_sg(channel,
 				ctx->device->dma.sg_src,
 				ctx->device->dma.sg_src_len,
-				direction, DMA_CTRL_ACK, NULL);
+				direction, DMA_CTRL_ACK);
 		break;
 
 	case DMA_FROM_DEVICE:
@@ -577,12 +577,12 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
 		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
 			"(FROM_DEVICE)", __func__);
 
-		desc = channel->device->device_prep_slave_sg(channel,
+		desc = dmaengine_prep_slave_sg(channel,
 				ctx->device->dma.sg_dst,
 				ctx->device->dma.sg_dst_len,
 				direction,
 				DMA_CTRL_ACK |
-				DMA_PREP_INTERRUPT, NULL);
+				DMA_PREP_INTERRUPT);
 
 		desc->callback = cryp_dma_out_callback;
 		desc->callback_param = ctx;
@@ -594,7 +594,7 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
 		return -EFAULT;
 	}
 
-	cookie = desc->tx_submit(desc);
+	cookie = dmaengine_submit(desc);
 	dma_async_issue_pending(channel);
 
 	return 0;
@@ -607,12 +607,12 @@ static void cryp_dma_done(struct cryp_ctx *ctx)
 	dev_dbg(ctx->device->dev, "[%s]: ", __func__);
 
 	chan = ctx->device->dma.chan_mem2cryp;
-	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+	dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
 		     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);
 
 	chan = ctx->device->dma.chan_cryp2mem;
-	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+	dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
 		     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
 }
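
The cryp_core.c hunks replace open-coded calls through channel->device with the standard wrappers from linux/dmaengine.h; note that dmaengine_prep_slave_sg() drops the trailing context argument that the raw device_prep_slave_sg hook took. A condensed sketch of the resulting submit sequence, with the channel and scatterlist assumed to be set up elsewhere (example_start_tx is illustrative):

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int example_start_tx(struct dma_chan *chan, struct scatterlist *sg,
			    unsigned int sg_len)
{
	struct dma_async_tx_descriptor *desc;

	/* Prepare a mem-to-device slave transfer. */
	desc = dmaengine_prep_slave_sg(chan, sg, sg_len, DMA_MEM_TO_DEV,
				       DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
	if (!desc)
		return -EFAULT;	/* matches the driver's own error value */

	dmaengine_submit(desc);		/* queue the descriptor */
	dma_async_issue_pending(chan);	/* start the engine */

	return 0;
}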
diff --git a/drivers/crypto/ux500/hash/hash_core.c b/drivers/crypto/ux500/hash/hash_core.c
index 9ca6fbb5e30d..95490f14ddb8 100644
--- a/drivers/crypto/ux500/hash/hash_core.c
+++ b/drivers/crypto/ux500/hash/hash_core.c
@@ -180,9 +180,9 @@ static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg,
 
 	dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
 		"(TO_DEVICE)", __func__);
-	desc = channel->device->device_prep_slave_sg(channel,
+	desc = dmaengine_prep_slave_sg(channel,
 			ctx->device->dma.sg, ctx->device->dma.sg_len,
-			direction, DMA_CTRL_ACK | DMA_PREP_INTERRUPT, NULL);
+			direction, DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
 	if (!desc) {
 		dev_err(ctx->device->dev,
 			"[%s]: device_prep_slave_sg() failed!", __func__);
@@ -192,7 +192,7 @@ static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg,
 	desc->callback = hash_dma_callback;
 	desc->callback_param = ctx;
 
-	cookie = desc->tx_submit(desc);
+	cookie = dmaengine_submit(desc);
 	dma_async_issue_pending(channel);
 
 	return 0;
@@ -203,7 +203,7 @@ static void hash_dma_done(struct hash_ctx *ctx)
 	struct dma_chan *chan;
 
 	chan = ctx->device->dma.chan_mem2hash;
-	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+	dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg,
 		     ctx->device->dma.sg_len, DMA_TO_DEVICE);
 
@@ -473,12 +473,12 @@ static void hash_hw_write_key(struct hash_device_data *device_data,
 		HASH_SET_DIN(&word, nwords);
 	}
 
-	while (device_data->base->str & HASH_STR_DCAL_MASK)
+	while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 		cpu_relax();
 
 	HASH_SET_DCAL;
 
-	while (device_data->base->str & HASH_STR_DCAL_MASK)
+	while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 		cpu_relax();
 }
 
@@ -661,7 +661,7 @@ static void hash_messagepad(struct hash_device_data *device_data,
 	if (index_bytes)
 		HASH_SET_DIN(message, nwords);
 
-	while (device_data->base->str & HASH_STR_DCAL_MASK)
+	while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 		cpu_relax();
 
 	/* num_of_bytes == 0 => NBLW <- 0 (32 bits valid in DATAIN) */
@@ -676,7 +676,7 @@ static void hash_messagepad(struct hash_device_data *device_data,
 		(int)(readl_relaxed(&device_data->base->str) &
 		      HASH_STR_NBLW_MASK));
 
-	while (device_data->base->str & HASH_STR_DCAL_MASK)
+	while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 		cpu_relax();
 }
 
@@ -776,7 +776,7 @@ void hash_begin(struct hash_device_data *device_data, struct hash_ctx *ctx)
 	/* HW and SW initializations */
 	/* Note: there is no need to initialize buffer and digest members */
 
-	while (device_data->base->str & HASH_STR_DCAL_MASK)
+	while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 		cpu_relax();
 
 	/*
@@ -792,8 +792,7 @@ void hash_begin(struct hash_device_data *device_data, struct hash_ctx *ctx)
 	HASH_CLEAR_BITS(&device_data->base->str, HASH_STR_NBLW_MASK);
 }
 
-int hash_process_data(
-		struct hash_device_data *device_data,
+static int hash_process_data(struct hash_device_data *device_data,
 		struct hash_ctx *ctx, struct hash_req_ctx *req_ctx,
 		int msg_length, u8 *data_buffer, u8 *buffer, u8 *index)
 {
@@ -962,7 +961,7 @@ static int hash_dma_final(struct ahash_request *req)
 	wait_for_completion(&ctx->device->dma.complete);
 	hash_dma_done(ctx);
 
-	while (device_data->base->str & HASH_STR_DCAL_MASK)
+	while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 		cpu_relax();
 
 	if (ctx->config.oper_mode == HASH_OPER_MODE_HMAC && ctx->key) {
@@ -992,7 +991,7 @@ out:
  * hash_hw_final - The final hash calculation function
  * @req: The hash request for the job.
  */
-int hash_hw_final(struct ahash_request *req)
+static int hash_hw_final(struct ahash_request *req)
 {
 	int ret = 0;
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
@@ -1060,7 +1059,7 @@ int hash_hw_final(struct ahash_request *req)
 			req_ctx->state.index);
 	} else {
 		HASH_SET_DCAL;
-		while (device_data->base->str & HASH_STR_DCAL_MASK)
+		while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 			cpu_relax();
 	}
 
@@ -1189,7 +1188,7 @@ int hash_resume_state(struct hash_device_data *device_data,
 	temp_cr = device_state->temp_cr;
 	writel_relaxed(temp_cr & HASH_CR_RESUME_MASK, &device_data->base->cr);
 
-	if (device_data->base->cr & HASH_CR_MODE_MASK)
+	if (readl(&device_data->base->cr) & HASH_CR_MODE_MASK)
 		hash_mode = HASH_OPER_MODE_HMAC;
 	else
 		hash_mode = HASH_OPER_MODE_HASH;
@@ -1233,7 +1232,7 @@ int hash_save_state(struct hash_device_data *device_data,
 	 * actually makes sure that there isn't any ongoing calculation in the
 	 * hardware.
 	 */
-	while (device_data->base->str & HASH_STR_DCAL_MASK)
+	while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
 		cpu_relax();
 
 	temp_cr = readl_relaxed(&device_data->base->cr);
@@ -1242,7 +1241,7 @@ int hash_save_state(struct hash_device_data *device_data,
 
 	device_state->din_reg = readl_relaxed(&device_data->base->din);
 
-	if (device_data->base->cr & HASH_CR_MODE_MASK)
+	if (readl(&device_data->base->cr) & HASH_CR_MODE_MASK)
 		hash_mode = HASH_OPER_MODE_HMAC;
 	else
 		hash_mode = HASH_OPER_MODE_HASH;
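
Most of the hash_core.c hunks are the same fix repeated: the DCAL busy-wait loops read the str register through a plain structure dereference, which sparse rejects once base is __iomem and which gives the compiler no ordering guarantees; each loop now goes through readl(). The recurring pattern, reduced to a sketch (example_wait_idle and its parameters are illustrative):

#include <linux/types.h>
#include <linux/io.h>
#include <asm/processor.h>	/* cpu_relax() */

static void example_wait_idle(void __iomem *status_reg, u32 busy_mask)
{
	/* Each iteration is a real, ordered MMIO read. */
	while (readl(status_reg) & busy_mask)
		cpu_relax();
}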
diff --git a/drivers/dma/ste_dma40.c b/drivers/dma/ste_dma40.c
index fa4f9a33a74d..5ab5880d5c90 100644
--- a/drivers/dma/ste_dma40.c
+++ b/drivers/dma/ste_dma40.c
@@ -78,7 +78,7 @@ static int dma40_memcpy_channels[] = {
 };
 
 /* Default configuration for physcial memcpy */
-struct stedma40_chan_cfg dma40_memcpy_conf_phy = {
+static struct stedma40_chan_cfg dma40_memcpy_conf_phy = {
 	.mode = STEDMA40_MODE_PHYSICAL,
 	.dir = DMA_MEM_TO_MEM,
 
@@ -92,7 +92,7 @@ struct stedma40_chan_cfg dma40_memcpy_conf_phy = {
 };
 
 /* Default configuration for logical memcpy */
-struct stedma40_chan_cfg dma40_memcpy_conf_log = {
+static struct stedma40_chan_cfg dma40_memcpy_conf_log = {
 	.mode = STEDMA40_MODE_LOGICAL,
 	.dir = DMA_MEM_TO_MEM,
 
@@ -3537,7 +3537,6 @@ static int __init d40_probe(struct platform_device *pdev)
 {
 	struct stedma40_platform_data *plat_data = pdev->dev.platform_data;
 	struct device_node *np = pdev->dev.of_node;
-	int err;
 	int ret = -ENOENT;
 	struct d40_base *base = NULL;
 	struct resource *res = NULL;
@@ -3649,6 +3648,7 @@ static int __init d40_probe(struct platform_device *pdev)
 	base->lcpa_regulator = regulator_get(base->dev, "lcla_esram");
 	if (IS_ERR(base->lcpa_regulator)) {
 		d40_err(&pdev->dev, "Failed to get lcpa_regulator\n");
+		ret = PTR_ERR(base->lcpa_regulator);
 		base->lcpa_regulator = NULL;
 		goto failure;
 	}
@@ -3664,13 +3664,13 @@ static int __init d40_probe(struct platform_device *pdev)
 	}
 
 	base->initialized = true;
-	err = d40_dmaengine_init(base, num_reserved_chans);
-	if (err)
+	ret = d40_dmaengine_init(base, num_reserved_chans);
+	if (ret)
 		goto failure;
 
 	base->dev->dma_parms = &base->dma_parms;
-	err = dma_set_max_seg_size(base->dev, STEDMA40_MAX_SEG_SIZE);
-	if (err) {
+	ret = dma_set_max_seg_size(base->dev, STEDMA40_MAX_SEG_SIZE);
+	if (ret) {
 		d40_err(&pdev->dev, "Failed to set dma max seg size\n");
 		goto failure;
 	}
@@ -3678,8 +3678,8 @@ static int __init d40_probe(struct platform_device *pdev)
 	d40_hw_init(base);
 
 	if (np) {
-		err = of_dma_controller_register(np, d40_xlate, NULL);
-		if (err && err != -ENODEV)
+		ret = of_dma_controller_register(np, d40_xlate, NULL);
+		if (ret)
 			dev_err(&pdev->dev,
 				"could not register of_dma_controller\n");
 	}
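
The ste_dma40.c changes staticize the two memcpy channel configurations and tidy the d40_probe() error handling: the redundant err variable is dropped in favor of ret, the of_dma_controller_register() result is now reported on any non-zero return, and the lcpa_regulator failure path propagates the real error via PTR_ERR() instead of returning the stale -ENOENT. A condensed sketch of that convention (example_get_regulator is illustrative, not driver code):

#include <linux/device.h>
#include <linux/err.h>
#include <linux/regulator/consumer.h>

static int example_get_regulator(struct device *dev, struct regulator **out)
{
	struct regulator *reg;
	int ret = -ENOENT;	/* default, as in d40_probe() */

	reg = regulator_get(dev, "lcla_esram");
	if (IS_ERR(reg)) {
		ret = PTR_ERR(reg);	/* report the real reason */
		goto failure;
	}

	*out = reg;
	return 0;

failure:
	return ret;
}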