Diffstat (limited to 'drivers/dma/iop-adma.c')
-rw-r--r--  drivers/dma/iop-adma.c | 124
1 file changed, 43 insertions(+), 81 deletions(-)
diff --git a/drivers/dma/iop-adma.c b/drivers/dma/iop-adma.c
index b011b5ae22a2..eda841c60690 100644
--- a/drivers/dma/iop-adma.c
+++ b/drivers/dma/iop-adma.c
@@ -443,17 +443,6 @@ iop_adma_tx_submit(struct dma_async_tx_descriptor *tx)
 	return cookie;
 }
 
-static void
-iop_adma_set_dest(dma_addr_t addr, struct dma_async_tx_descriptor *tx,
-	int index)
-{
-	struct iop_adma_desc_slot *sw_desc = tx_to_iop_adma_slot(tx);
-	struct iop_adma_chan *iop_chan = to_iop_adma_chan(tx->chan);
-
-	/* to do: support transfers lengths > IOP_ADMA_MAX_BYTE_COUNT */
-	iop_desc_set_dest_addr(sw_desc->group_head, iop_chan, addr);
-}
-
 static void iop_chan_start_null_memcpy(struct iop_adma_chan *iop_chan);
 static void iop_chan_start_null_xor(struct iop_adma_chan *iop_chan);
 
@@ -486,7 +475,6 @@ static int iop_adma_alloc_chan_resources(struct dma_chan *chan)
 
 		dma_async_tx_descriptor_init(&slot->async_tx, chan);
 		slot->async_tx.tx_submit = iop_adma_tx_submit;
-		slot->async_tx.tx_set_dest = iop_adma_set_dest;
 		INIT_LIST_HEAD(&slot->chain_node);
 		INIT_LIST_HEAD(&slot->slot_node);
 		INIT_LIST_HEAD(&slot->async_tx.tx_list);
@@ -547,18 +535,9 @@ iop_adma_prep_dma_interrupt(struct dma_chan *chan)
 	return sw_desc ? &sw_desc->async_tx : NULL;
 }
 
-static void
-iop_adma_memcpy_set_src(dma_addr_t addr, struct dma_async_tx_descriptor *tx,
-	int index)
-{
-	struct iop_adma_desc_slot *sw_desc = tx_to_iop_adma_slot(tx);
-	struct iop_adma_desc_slot *grp_start = sw_desc->group_head;
-
-	iop_desc_set_memcpy_src_addr(grp_start, addr);
-}
-
 static struct dma_async_tx_descriptor *
-iop_adma_prep_dma_memcpy(struct dma_chan *chan, size_t len, int int_en)
+iop_adma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dma_dest,
+	dma_addr_t dma_src, size_t len, int int_en)
 {
 	struct iop_adma_chan *iop_chan = to_iop_adma_chan(chan);
 	struct iop_adma_desc_slot *sw_desc, *grp_start;
@@ -578,9 +557,10 @@ iop_adma_prep_dma_memcpy(struct dma_chan *chan, size_t len, int int_en)
 		grp_start = sw_desc->group_head;
 		iop_desc_init_memcpy(grp_start, int_en);
 		iop_desc_set_byte_count(grp_start, iop_chan, len);
+		iop_desc_set_dest_addr(grp_start, iop_chan, dma_dest);
+		iop_desc_set_memcpy_src_addr(grp_start, dma_src);
 		sw_desc->unmap_src_cnt = 1;
 		sw_desc->unmap_len = len;
-		sw_desc->async_tx.tx_set_src = iop_adma_memcpy_set_src;
 	}
 	spin_unlock_bh(&iop_chan->lock);
 
@@ -588,8 +568,8 @@ iop_adma_prep_dma_memcpy(struct dma_chan *chan, size_t len, int int_en)
 }
 
 static struct dma_async_tx_descriptor *
-iop_adma_prep_dma_memset(struct dma_chan *chan, int value, size_t len,
-	int int_en)
+iop_adma_prep_dma_memset(struct dma_chan *chan, dma_addr_t dma_dest,
+	int value, size_t len, int int_en)
 {
 	struct iop_adma_chan *iop_chan = to_iop_adma_chan(chan);
 	struct iop_adma_desc_slot *sw_desc, *grp_start;
@@ -610,6 +590,7 @@ iop_adma_prep_dma_memset(struct dma_chan *chan, int value, size_t len,
 		iop_desc_init_memset(grp_start, int_en);
 		iop_desc_set_byte_count(grp_start, iop_chan, len);
 		iop_desc_set_block_fill_val(grp_start, value);
+		iop_desc_set_dest_addr(grp_start, iop_chan, dma_dest);
 		sw_desc->unmap_src_cnt = 1;
 		sw_desc->unmap_len = len;
 	}
@@ -618,19 +599,10 @@ iop_adma_prep_dma_memset(struct dma_chan *chan, int value, size_t len,
 	return sw_desc ? &sw_desc->async_tx : NULL;
 }
 
-static void
-iop_adma_xor_set_src(dma_addr_t addr, struct dma_async_tx_descriptor *tx,
-	int index)
-{
-	struct iop_adma_desc_slot *sw_desc = tx_to_iop_adma_slot(tx);
-	struct iop_adma_desc_slot *grp_start = sw_desc->group_head;
-
-	iop_desc_set_xor_src_addr(grp_start, index, addr);
-}
-
 static struct dma_async_tx_descriptor *
-iop_adma_prep_dma_xor(struct dma_chan *chan, unsigned int src_cnt, size_t len,
-	int int_en)
+iop_adma_prep_dma_xor(struct dma_chan *chan, dma_addr_t dma_dest,
+	dma_addr_t *dma_src, unsigned int src_cnt, size_t len,
+	int int_en)
 {
 	struct iop_adma_chan *iop_chan = to_iop_adma_chan(chan);
 	struct iop_adma_desc_slot *sw_desc, *grp_start;
@@ -651,29 +623,22 @@ iop_adma_prep_dma_xor(struct dma_chan *chan, unsigned int src_cnt, size_t len,
 		grp_start = sw_desc->group_head;
 		iop_desc_init_xor(grp_start, src_cnt, int_en);
 		iop_desc_set_byte_count(grp_start, iop_chan, len);
+		iop_desc_set_dest_addr(grp_start, iop_chan, dma_dest);
 		sw_desc->unmap_src_cnt = src_cnt;
 		sw_desc->unmap_len = len;
-		sw_desc->async_tx.tx_set_src = iop_adma_xor_set_src;
+		while (src_cnt--)
+			iop_desc_set_xor_src_addr(grp_start, src_cnt,
+						  dma_src[src_cnt]);
 	}
 	spin_unlock_bh(&iop_chan->lock);
 
 	return sw_desc ? &sw_desc->async_tx : NULL;
 }
 
-static void
-iop_adma_xor_zero_sum_set_src(dma_addr_t addr,
-				struct dma_async_tx_descriptor *tx,
-				int index)
-{
-	struct iop_adma_desc_slot *sw_desc = tx_to_iop_adma_slot(tx);
-	struct iop_adma_desc_slot *grp_start = sw_desc->group_head;
-
-	iop_desc_set_zero_sum_src_addr(grp_start, index, addr);
-}
-
 static struct dma_async_tx_descriptor *
-iop_adma_prep_dma_zero_sum(struct dma_chan *chan, unsigned int src_cnt,
-	size_t len, u32 *result, int int_en)
+iop_adma_prep_dma_zero_sum(struct dma_chan *chan, dma_addr_t *dma_src,
+	unsigned int src_cnt, size_t len, u32 *result,
+	int int_en)
 {
 	struct iop_adma_chan *iop_chan = to_iop_adma_chan(chan);
 	struct iop_adma_desc_slot *sw_desc, *grp_start;
@@ -697,7 +662,9 @@ iop_adma_prep_dma_zero_sum(struct dma_chan *chan, unsigned int src_cnt,
 			__FUNCTION__, grp_start->xor_check_result);
 		sw_desc->unmap_src_cnt = src_cnt;
 		sw_desc->unmap_len = len;
-		sw_desc->async_tx.tx_set_src = iop_adma_xor_zero_sum_set_src;
+		while (src_cnt--)
+			iop_desc_set_zero_sum_src_addr(grp_start, src_cnt,
+						       dma_src[src_cnt]);
 	}
 	spin_unlock_bh(&iop_chan->lock);
 
@@ -882,13 +849,12 @@ static int __devinit iop_adma_memcpy_self_test(struct iop_adma_device *device)
 		goto out;
 	}
 
-	tx = iop_adma_prep_dma_memcpy(dma_chan, IOP_ADMA_TEST_SIZE, 1);
 	dest_dma = dma_map_single(dma_chan->device->dev, dest,
 				IOP_ADMA_TEST_SIZE, DMA_FROM_DEVICE);
-	iop_adma_set_dest(dest_dma, tx, 0);
 	src_dma = dma_map_single(dma_chan->device->dev, src,
 				IOP_ADMA_TEST_SIZE, DMA_TO_DEVICE);
-	iop_adma_memcpy_set_src(src_dma, tx, 0);
+	tx = iop_adma_prep_dma_memcpy(dma_chan, dest_dma, src_dma,
+				      IOP_ADMA_TEST_SIZE, 1);
 
 	cookie = iop_adma_tx_submit(tx);
 	iop_adma_issue_pending(dma_chan);
@@ -929,6 +895,7 @@ iop_adma_xor_zero_sum_self_test(struct iop_adma_device *device)
 	struct page *dest;
 	struct page *xor_srcs[IOP_ADMA_NUM_SRC_TEST];
 	struct page *zero_sum_srcs[IOP_ADMA_NUM_SRC_TEST + 1];
+	dma_addr_t dma_srcs[IOP_ADMA_NUM_SRC_TEST + 1];
 	dma_addr_t dma_addr, dest_dma;
 	struct dma_async_tx_descriptor *tx;
 	struct dma_chan *dma_chan;
@@ -981,17 +948,13 @@ iop_adma_xor_zero_sum_self_test(struct iop_adma_device *device)
 	}
 
 	/* test xor */
-	tx = iop_adma_prep_dma_xor(dma_chan, IOP_ADMA_NUM_SRC_TEST,
-		PAGE_SIZE, 1);
 	dest_dma = dma_map_page(dma_chan->device->dev, dest, 0,
 				PAGE_SIZE, DMA_FROM_DEVICE);
-	iop_adma_set_dest(dest_dma, tx, 0);
-
-	for (i = 0; i < IOP_ADMA_NUM_SRC_TEST; i++) {
-		dma_addr = dma_map_page(dma_chan->device->dev, xor_srcs[i], 0,
-			PAGE_SIZE, DMA_TO_DEVICE);
-		iop_adma_xor_set_src(dma_addr, tx, i);
-	}
+	for (i = 0; i < IOP_ADMA_NUM_SRC_TEST; i++)
+		dma_srcs[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i],
+					   0, PAGE_SIZE, DMA_TO_DEVICE);
+	tx = iop_adma_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
+				   IOP_ADMA_NUM_SRC_TEST, PAGE_SIZE, 1);
 
 	cookie = iop_adma_tx_submit(tx);
 	iop_adma_issue_pending(dma_chan);
@@ -1032,13 +995,13 @@ iop_adma_xor_zero_sum_self_test(struct iop_adma_device *device)
 
 	zero_sum_result = 1;
 
-	tx = iop_adma_prep_dma_zero_sum(dma_chan, IOP_ADMA_NUM_SRC_TEST + 1,
-		PAGE_SIZE, &zero_sum_result, 1);
-	for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 1; i++) {
-		dma_addr = dma_map_page(dma_chan->device->dev, zero_sum_srcs[i],
-			0, PAGE_SIZE, DMA_TO_DEVICE);
-		iop_adma_xor_zero_sum_set_src(dma_addr, tx, i);
-	}
+	for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 1; i++)
+		dma_srcs[i] = dma_map_page(dma_chan->device->dev,
+					   zero_sum_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = iop_adma_prep_dma_zero_sum(dma_chan, dma_srcs,
+					IOP_ADMA_NUM_SRC_TEST + 1, PAGE_SIZE,
+					&zero_sum_result, 1);
 
 	cookie = iop_adma_tx_submit(tx);
 	iop_adma_issue_pending(dma_chan);
@@ -1060,10 +1023,9 @@ iop_adma_xor_zero_sum_self_test(struct iop_adma_device *device)
 	}
 
 	/* test memset */
-	tx = iop_adma_prep_dma_memset(dma_chan, 0, PAGE_SIZE, 1);
 	dma_addr = dma_map_page(dma_chan->device->dev, dest, 0,
 			PAGE_SIZE, DMA_FROM_DEVICE);
-	iop_adma_set_dest(dma_addr, tx, 0);
+	tx = iop_adma_prep_dma_memset(dma_chan, dma_addr, 0, PAGE_SIZE, 1);
 
 	cookie = iop_adma_tx_submit(tx);
 	iop_adma_issue_pending(dma_chan);
@@ -1089,13 +1051,13 @@ iop_adma_xor_zero_sum_self_test(struct iop_adma_device *device)
 
 	/* test for non-zero parity sum */
 	zero_sum_result = 0;
-	tx = iop_adma_prep_dma_zero_sum(dma_chan, IOP_ADMA_NUM_SRC_TEST + 1,
-		PAGE_SIZE, &zero_sum_result, 1);
-	for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 1; i++) {
-		dma_addr = dma_map_page(dma_chan->device->dev, zero_sum_srcs[i],
-			0, PAGE_SIZE, DMA_TO_DEVICE);
-		iop_adma_xor_zero_sum_set_src(dma_addr, tx, i);
-	}
+	for (i = 0; i < IOP_ADMA_NUM_SRC_TEST + 1; i++)
+		dma_srcs[i] = dma_map_page(dma_chan->device->dev,
+					   zero_sum_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = iop_adma_prep_dma_zero_sum(dma_chan, dma_srcs,
+					IOP_ADMA_NUM_SRC_TEST + 1, PAGE_SIZE,
+					&zero_sum_result, 1);
 
 	cookie = iop_adma_tx_submit(tx);
 	iop_adma_issue_pending(dma_chan);