Diffstat (limited to 'drivers/crypto')
-rw-r--r--  drivers/crypto/ux500/cryp/cryp.c      |  4
-rw-r--r--  drivers/crypto/ux500/cryp/cryp.h      |  7
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_core.c | 57
-rw-r--r--  drivers/crypto/ux500/hash/hash_alg.h  |  5
-rw-r--r--  drivers/crypto/ux500/hash/hash_core.c | 57
5 files changed, 93 insertions, 37 deletions
diff --git a/drivers/crypto/ux500/cryp/cryp.c b/drivers/crypto/ux500/cryp/cryp.c
index 3eafa903ebcd..43a0c8a26ab0 100644
--- a/drivers/crypto/ux500/cryp/cryp.c
+++ b/drivers/crypto/ux500/cryp/cryp.c
@@ -291,7 +291,7 @@ void cryp_save_device_context(struct cryp_device_data *device_data,
                               int cryp_mode)
 {
         enum cryp_algo_mode algomode;
-        struct cryp_register *src_reg = device_data->base;
+        struct cryp_register __iomem *src_reg = device_data->base;
         struct cryp_config *config =
                 (struct cryp_config *)device_data->current_ctx;
 
@@ -349,7 +349,7 @@ void cryp_save_device_context(struct cryp_device_data *device_data,
 void cryp_restore_device_context(struct cryp_device_data *device_data,
                                  struct cryp_device_context *ctx)
 {
-        struct cryp_register *reg = device_data->base;
+        struct cryp_register __iomem *reg = device_data->base;
         struct cryp_config *config =
                 (struct cryp_config *)device_data->current_ctx;
 
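The two hunks above only add the __iomem annotation to the cryp register pointers; the hash_core.c hunks later in this diff pair that annotation with readl() when polling the STR and CR registers. For context, a minimal sketch of the accessor pattern being adopted, where the my_regs layout and MY_STR_BUSY bit are illustrative placeholders rather than this driver's register map:

#include <asm/processor.h>
#include <linux/bitops.h>
#include <linux/io.h>
#include <linux/types.h>

/* Illustrative register block; not the real cryp/hash layout. */
struct my_regs {
        u32 cr;
        u32 str;
};

#define MY_STR_BUSY BIT(0)

static void my_wait_not_busy(struct my_regs __iomem *base)
{
        /*
         * Read the register through readl() instead of dereferencing
         * base->str directly, so the access goes through the proper MMIO
         * accessors and sparse can check the __iomem annotation.
         */
        while (readl(&base->str) & MY_STR_BUSY)
                cpu_relax();
}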
diff --git a/drivers/crypto/ux500/cryp/cryp.h b/drivers/crypto/ux500/cryp/cryp.h
index 14cfd05b777a..d1d6606fe56c 100644
--- a/drivers/crypto/ux500/cryp/cryp.h
+++ b/drivers/crypto/ux500/cryp/cryp.h
@@ -114,6 +114,9 @@ enum cryp_status_id {
 };
 
 /* Cryp DMA interface */
+#define CRYP_DMA_TX_FIFO 0x08
+#define CRYP_DMA_RX_FIFO 0x10
+
 enum cryp_dma_req_type {
         CRYP_DMA_DISABLE_BOTH,
         CRYP_DMA_ENABLE_IN_DATA,
@@ -217,7 +220,8 @@ struct cryp_dma {
 
 /**
  * struct cryp_device_data - structure for a cryp device.
- * @base: Pointer to the hardware base address.
+ * @base: Pointer to virtual base address of the cryp device.
+ * @phybase: Pointer to physical memory location of the cryp device.
  * @dev: Pointer to the devices dev structure.
  * @clk: Pointer to the device's clock control.
  * @pwr_regulator: Pointer to the device's power control.
@@ -232,6 +236,7 @@ struct cryp_dma {
  */
 struct cryp_device_data {
         struct cryp_register __iomem *base;
+        phys_addr_t phybase;
         struct device *dev;
         struct clk *clk;
         struct regulator *pwr_regulator;
diff --git a/drivers/crypto/ux500/cryp/cryp_core.c b/drivers/crypto/ux500/cryp/cryp_core.c
index 8c2777cf02f6..83d79b964d12 100644
--- a/drivers/crypto/ux500/cryp/cryp_core.c
+++ b/drivers/crypto/ux500/cryp/cryp_core.c
@@ -475,6 +475,19 @@ static int cryp_get_device_data(struct cryp_ctx *ctx,
 static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
                                    struct device *dev)
 {
+        struct dma_slave_config mem2cryp = {
+                .direction = DMA_MEM_TO_DEV,
+                .dst_addr = device_data->phybase + CRYP_DMA_TX_FIFO,
+                .dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
+                .dst_maxburst = 4,
+        };
+        struct dma_slave_config cryp2mem = {
+                .direction = DMA_DEV_TO_MEM,
+                .src_addr = device_data->phybase + CRYP_DMA_RX_FIFO,
+                .src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
+                .src_maxburst = 4,
+        };
+
         dma_cap_zero(device_data->dma.mask);
         dma_cap_set(DMA_SLAVE, device_data->dma.mask);
 
@@ -490,6 +503,9 @@ static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
                                                 stedma40_filter,
                                                 device_data->dma.cfg_cryp2mem);
 
+        dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
+        dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);
+
         init_completion(&device_data->dma.cryp_dma_complete);
 }
 
@@ -537,10 +553,10 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
                 dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                         "(TO_DEVICE)", __func__);
 
-                desc = channel->device->device_prep_slave_sg(channel,
+                desc = dmaengine_prep_slave_sg(channel,
                                 ctx->device->dma.sg_src,
                                 ctx->device->dma.sg_src_len,
-                                direction, DMA_CTRL_ACK, NULL);
+                                direction, DMA_CTRL_ACK);
                 break;
 
         case DMA_FROM_DEVICE:
@@ -561,12 +577,12 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
                 dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                         "(FROM_DEVICE)", __func__);
 
-                desc = channel->device->device_prep_slave_sg(channel,
+                desc = dmaengine_prep_slave_sg(channel,
                                 ctx->device->dma.sg_dst,
                                 ctx->device->dma.sg_dst_len,
                                 direction,
                                 DMA_CTRL_ACK |
-                                DMA_PREP_INTERRUPT, NULL);
+                                DMA_PREP_INTERRUPT);
 
                 desc->callback = cryp_dma_out_callback;
                 desc->callback_param = ctx;
@@ -578,7 +594,7 @@ static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
                 return -EFAULT;
         }
 
-        cookie = desc->tx_submit(desc);
+        cookie = dmaengine_submit(desc);
         dma_async_issue_pending(channel);
 
         return 0;
@@ -591,12 +607,12 @@ static void cryp_dma_done(struct cryp_ctx *ctx)
         dev_dbg(ctx->device->dev, "[%s]: ", __func__);
 
         chan = ctx->device->dma.chan_mem2cryp;
-        chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+        dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
         dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
                      ctx->device->dma.sg_src_len, DMA_TO_DEVICE);
 
         chan = ctx->device->dma.chan_cryp2mem;
-        chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+        dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
         dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
                      ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
 }
@@ -1431,6 +1447,7 @@ static int ux500_cryp_probe(struct platform_device *pdev)
                 goto out_kfree;
         }
 
+        device_data->phybase = res->start;
         device_data->base = ioremap(res->start, resource_size(res));
         if (!device_data->base) {
                 dev_err(dev, "[%s]: ioremap failed!", __func__);
@@ -1458,11 +1475,17 @@ static int ux500_cryp_probe(struct platform_device *pdev)
                 goto out_regulator;
         }
 
+        ret = clk_prepare(device_data->clk);
+        if (ret) {
+                dev_err(dev, "[%s]: clk_prepare() failed!", __func__);
+                goto out_clk;
+        }
+
         /* Enable device power (and clock) */
         ret = cryp_enable_power(device_data->dev, device_data, false);
         if (ret) {
                 dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
-                goto out_clk;
+                goto out_clk_unprepare;
         }
 
         cryp_error = cryp_check(device_data);
@@ -1518,11 +1541,16 @@ static int ux500_cryp_probe(struct platform_device *pdev)
                 goto out_power;
         }
 
+        dev_info(dev, "successfully registered\n");
+
         return 0;
 
 out_power:
         cryp_disable_power(device_data->dev, device_data, false);
 
+out_clk_unprepare:
+        clk_unprepare(device_data->clk);
+
 out_clk:
         clk_put(device_data->clk);
 
@@ -1593,6 +1621,7 @@ static int ux500_cryp_remove(struct platform_device *pdev)
                 dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
                         __func__);
 
+        clk_unprepare(device_data->clk);
         clk_put(device_data->clk);
         regulator_put(device_data->pwr_regulator);
 
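The cryp_core.c hunks above replace the open-coded calls through chan->device (device_prep_slave_sg(), tx_submit(), device_control()) with the dmaengine inline helpers, and feed the newly exported FIFO address, bus width and burst size to dmaengine_slave_config(). A minimal sketch of that slave-DMA flow against the generic dmaengine API — sketch_dma_to_device and its parameters are placeholders for illustration, not functions from this driver:

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

static int sketch_dma_to_device(struct dma_chan *chan, struct scatterlist *sg,
                                unsigned int sg_len, dma_addr_t fifo_phys,
                                dma_async_tx_callback done, void *param)
{
        struct dma_slave_config cfg = {
                .direction = DMA_MEM_TO_DEV,
                .dst_addr = fifo_phys,  /* physical address of the device FIFO */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
                .dst_maxburst = 4,
        };
        struct dma_async_tx_descriptor *desc;

        /* Tell the DMA driver where the FIFO lives and how to burst into it. */
        if (dmaengine_slave_config(chan, &cfg))
                return -EINVAL;

        /* Inline helper instead of chan->device->device_prep_slave_sg(...). */
        desc = dmaengine_prep_slave_sg(chan, sg, sg_len, DMA_MEM_TO_DEV,
                                       DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
        if (!desc)
                return -EBUSY;

        desc->callback = done;
        desc->callback_param = param;

        dmaengine_submit(desc);         /* replaces desc->tx_submit(desc) */
        dma_async_issue_pending(chan);
        return 0;
}

Teardown then mirrors cryp_dma_done() above: stop the channel (the patch uses dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0); dmaengine_terminate_all() is the usual wrapper for the same operation) and dma_unmap_sg() the scatterlist.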
diff --git a/drivers/crypto/ux500/hash/hash_alg.h b/drivers/crypto/ux500/hash/hash_alg.h
index cd9351cb24df..be6eb54da40f 100644
--- a/drivers/crypto/ux500/hash/hash_alg.h
+++ b/drivers/crypto/ux500/hash/hash_alg.h
@@ -11,6 +11,7 @@
 #include <linux/bitops.h>
 
 #define HASH_BLOCK_SIZE 64
+#define HASH_DMA_FIFO 4
 #define HASH_DMA_ALIGN_SIZE 4
 #define HASH_DMA_PERFORMANCE_MIN_SIZE 1024
 #define HASH_BYTES_PER_WORD 4
@@ -347,7 +348,8 @@ struct hash_req_ctx {
 
 /**
  * struct hash_device_data - structure for a hash device.
- * @base: Pointer to the hardware base address.
+ * @base: Pointer to virtual base address of the hash device.
+ * @phybase: Pointer to physical memory location of the hash device.
  * @list_node: For inclusion in klist.
  * @dev: Pointer to the device dev structure.
  * @ctx_lock: Spinlock for current_ctx.
@@ -361,6 +363,7 @@ struct hash_req_ctx {
  */
 struct hash_device_data {
         struct hash_register __iomem *base;
+        phys_addr_t phybase;
         struct klist_node list_node;
         struct device *dev;
         struct spinlock ctx_lock;
diff --git a/drivers/crypto/ux500/hash/hash_core.c b/drivers/crypto/ux500/hash/hash_core.c
index 3b8f661d0edf..496ae6aae316 100644
--- a/drivers/crypto/ux500/hash/hash_core.c
+++ b/drivers/crypto/ux500/hash/hash_core.c
@@ -122,6 +122,13 @@ static void hash_dma_setup_channel(struct hash_device_data *device_data,
                                    struct device *dev)
 {
         struct hash_platform_data *platform_data = dev->platform_data;
+        struct dma_slave_config conf = {
+                .direction = DMA_MEM_TO_DEV,
+                .dst_addr = device_data->phybase + HASH_DMA_FIFO,
+                .dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
+                .dst_maxburst = 16,
+        };
+
         dma_cap_zero(device_data->dma.mask);
         dma_cap_set(DMA_SLAVE, device_data->dma.mask);
 
@@ -131,6 +138,8 @@ static void hash_dma_setup_channel(struct hash_device_data *device_data,
                                             platform_data->dma_filter,
                                             device_data->dma.cfg_mem2hash);
 
+        dmaengine_slave_config(device_data->dma.chan_mem2hash, &conf);
+
         init_completion(&device_data->dma.complete);
 }
 
@@ -171,9 +180,9 @@ static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg,
 
         dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                 "(TO_DEVICE)", __func__);
-        desc = channel->device->device_prep_slave_sg(channel,
+        desc = dmaengine_prep_slave_sg(channel,
                         ctx->device->dma.sg, ctx->device->dma.sg_len,
-                        direction, DMA_CTRL_ACK | DMA_PREP_INTERRUPT, NULL);
+                        direction, DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
         if (!desc) {
                 dev_err(ctx->device->dev,
                         "[%s]: device_prep_slave_sg() failed!", __func__);
@@ -183,7 +192,7 @@ static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg,
         desc->callback = hash_dma_callback;
         desc->callback_param = ctx;
 
-        cookie = desc->tx_submit(desc);
+        cookie = dmaengine_submit(desc);
         dma_async_issue_pending(channel);
 
         return 0;
@@ -194,7 +203,7 @@ static void hash_dma_done(struct hash_ctx *ctx)
         struct dma_chan *chan;
 
         chan = ctx->device->dma.chan_mem2hash;
-        chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+        dmaengine_device_control(chan, DMA_TERMINATE_ALL, 0);
         dma_unmap_sg(chan->device->dev, ctx->device->dma.sg,
                      ctx->device->dma.sg_len, DMA_TO_DEVICE);
 
@@ -464,12 +473,12 @@ static void hash_hw_write_key(struct hash_device_data *device_data,
                 HASH_SET_DIN(&word, nwords);
         }
 
-        while (device_data->base->str & HASH_STR_DCAL_MASK)
+        while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                 cpu_relax();
 
         HASH_SET_DCAL;
 
-        while (device_data->base->str & HASH_STR_DCAL_MASK)
+        while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                 cpu_relax();
 }
 
@@ -652,7 +661,7 @@ static void hash_messagepad(struct hash_device_data *device_data,
         if (index_bytes)
                 HASH_SET_DIN(message, nwords);
 
-        while (device_data->base->str & HASH_STR_DCAL_MASK)
+        while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                 cpu_relax();
 
         /* num_of_bytes == 0 => NBLW <- 0 (32 bits valid in DATAIN) */
@@ -667,7 +676,7 @@
                 (int)(readl_relaxed(&device_data->base->str) &
                       HASH_STR_NBLW_MASK));
 
-        while (device_data->base->str & HASH_STR_DCAL_MASK)
+        while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                 cpu_relax();
 }
 
@@ -767,7 +776,7 @@ void hash_begin(struct hash_device_data *device_data, struct hash_ctx *ctx)
         /* HW and SW initializations */
         /* Note: there is no need to initialize buffer and digest members */
 
-        while (device_data->base->str & HASH_STR_DCAL_MASK)
+        while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                 cpu_relax();
 
         /*
@@ -783,8 +792,7 @@ void hash_begin(struct hash_device_data *device_data, struct hash_ctx *ctx)
         HASH_CLEAR_BITS(&device_data->base->str, HASH_STR_NBLW_MASK);
 }
 
-int hash_process_data(
-        struct hash_device_data *device_data,
+static int hash_process_data(struct hash_device_data *device_data,
                 struct hash_ctx *ctx, struct hash_req_ctx *req_ctx,
                 int msg_length, u8 *data_buffer, u8 *buffer, u8 *index)
 {
@@ -953,7 +961,7 @@ static int hash_dma_final(struct ahash_request *req)
         wait_for_completion(&ctx->device->dma.complete);
         hash_dma_done(ctx);
 
-        while (device_data->base->str & HASH_STR_DCAL_MASK)
+        while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                 cpu_relax();
 
         if (ctx->config.oper_mode == HASH_OPER_MODE_HMAC && ctx->key) {
@@ -983,7 +991,7 @@ out:
  * hash_hw_final - The final hash calculation function
  * @req: The hash request for the job.
  */
-int hash_hw_final(struct ahash_request *req)
+static int hash_hw_final(struct ahash_request *req)
 {
         int ret = 0;
         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
@@ -1051,7 +1059,7 @@ int hash_hw_final(struct ahash_request *req)
                                   req_ctx->state.index);
         } else {
                 HASH_SET_DCAL;
-                while (device_data->base->str & HASH_STR_DCAL_MASK)
+                while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                         cpu_relax();
         }
 
@@ -1180,7 +1188,7 @@ int hash_resume_state(struct hash_device_data *device_data,
         temp_cr = device_state->temp_cr;
         writel_relaxed(temp_cr & HASH_CR_RESUME_MASK, &device_data->base->cr);
 
-        if (device_data->base->cr & HASH_CR_MODE_MASK)
+        if (readl(&device_data->base->cr) & HASH_CR_MODE_MASK)
                 hash_mode = HASH_OPER_MODE_HMAC;
         else
                 hash_mode = HASH_OPER_MODE_HASH;
@@ -1224,7 +1232,7 @@ int hash_save_state(struct hash_device_data *device_data,
          * actually makes sure that there isn't any ongoing calculation in the
          * hardware.
          */
-        while (device_data->base->str & HASH_STR_DCAL_MASK)
+        while (readl(&device_data->base->str) & HASH_STR_DCAL_MASK)
                 cpu_relax();
 
         temp_cr = readl_relaxed(&device_data->base->cr);
@@ -1233,7 +1241,7 @@ int hash_save_state(struct hash_device_data *device_data,
 
         device_state->din_reg = readl_relaxed(&device_data->base->din);
 
-        if (device_data->base->cr & HASH_CR_MODE_MASK)
+        if (readl(&device_data->base->cr) & HASH_CR_MODE_MASK)
                 hash_mode = HASH_OPER_MODE_HMAC;
         else
                 hash_mode = HASH_OPER_MODE_HASH;
@@ -1699,6 +1707,7 @@ static int ux500_hash_probe(struct platform_device *pdev)
                 goto out_kfree;
         }
 
+        device_data->phybase = res->start;
         device_data->base = ioremap(res->start, resource_size(res));
         if (!device_data->base) {
                 dev_err(dev, "[%s] ioremap() failed!",
@@ -1726,11 +1735,17 @@ static int ux500_hash_probe(struct platform_device *pdev)
                 goto out_regulator;
         }
 
+        ret = clk_prepare(device_data->clk);
+        if (ret) {
+                dev_err(dev, "[%s] clk_prepare() failed!", __func__);
+                goto out_clk;
+        }
+
         /* Enable device power (and clock) */
         ret = hash_enable_power(device_data, false);
         if (ret) {
                 dev_err(dev, "[%s]: hash_enable_power() failed!", __func__);
-                goto out_clk;
+                goto out_clk_unprepare;
         }
 
         ret = hash_check_hw(device_data);
@@ -1756,12 +1771,15 @@ static int ux500_hash_probe(struct platform_device *pdev)
                 goto out_power;
         }
 
-        dev_info(dev, "[%s] successfully probed\n", __func__);
+        dev_info(dev, "successfully registered\n");
         return 0;
 
 out_power:
         hash_disable_power(device_data, false);
 
+out_clk_unprepare:
+        clk_unprepare(device_data->clk);
+
 out_clk:
         clk_put(device_data->clk);
 
@@ -1826,6 +1844,7 @@ static int ux500_hash_remove(struct platform_device *pdev)
                 dev_err(dev, "[%s]: hash_disable_power() failed",
                         __func__);
 
+        clk_unprepare(device_data->clk);
         clk_put(device_data->clk);
         regulator_put(device_data->regulator);
 
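Both probe paths above now call clk_prepare() right after the clock is obtained and before it is first enabled (the enable itself happens inside cryp_enable_power()/hash_enable_power()), add a matching out_clk_unprepare error label, and call clk_unprepare() before clk_put() in the remove paths. A minimal sketch of that prepare/enable ordering and its reverse-order unwinding — my_probe and the direct clk_enable() call are simplified placeholders for the drivers' power helpers:

#include <linux/clk.h>
#include <linux/err.h>
#include <linux/platform_device.h>

static int my_probe(struct platform_device *pdev)
{
        struct clk *clk;
        int ret;

        clk = clk_get(&pdev->dev, NULL);
        if (IS_ERR(clk))
                return PTR_ERR(clk);

        /* Prepare in non-atomic context, once, before any clk_enable(). */
        ret = clk_prepare(clk);
        if (ret)
                goto out_put;

        ret = clk_enable(clk);
        if (ret)
                goto out_unprepare;

        /* ... register the device, set up DMA, etc. ... */
        return 0;

out_unprepare:
        clk_unprepare(clk);     /* unwound in reverse order of acquisition */
out_put:
        clk_put(clk);
        return ret;
}

The remove path is the mirror image: clk_disable(), then clk_unprepare(), then clk_put().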